diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000..82e4499f --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,3 @@ +# .git-blame-ignore-revs +# Black +09a35897e7fc8382b283d324c9602cd0763c056e diff --git a/README.md b/README.md index d140526e..22f59f20 100644 --- a/README.md +++ b/README.md @@ -2,142 +2,104 @@ ## Beroenden -* Python 2.7.x (Django) -* MongoDB 2.6.x - -Pakethanteraren [pip](http://pip-installer.org) används för att hantera beroenden i Python. -Denna finns inkluderad tillsammans med Python 2.7.9 eller senare. +* Python 3.6.x (Django) +* MongoDB 5.0.x ## Installation -Nedan är ett exempel på en minimal installation för Mac OS X. +Nedan är ett exempel på en minimal installation i Linux. - # Installera beroenden - https://docs.mongodb.com/v2.6/tutorial/install-mongodb-on-os-x/ - $ pip install virtualenvwrapper + # Installera MongoDB 5.0.x: + # https://www.mongodb.com/docs/v5.0/installation/ # Klona projektet - $ git clone git@github.com:libris/bibstat - $ cd bibstat + git clone git@github.com:libris/bibstat + cd bibstat # Starta MongoDB $ mkdir mongodb $ mongod --dbpath $(pwd)/mongodb # Skapa en användare för databasen - $ mongo - $ use bibstat - $ db.createUser({user:"bibstat", pwd:"bibstat", roles:["readWrite"]}) - $ exit + mongo + > use bibstat + > db.createUser({user:"bibstat", pwd:"bibstat", roles:["readWrite"]}) + > exit # Skapa en virtuell miljö för Python - # OSX: - $ source /usr/local/bin/virtualenvwrapper.sh - $ mkvirtualenv -p /usr/local/bin/python bibstat - $ workon bibstat - - # UBUNTU: - $ virtualenv -p /usr/bin/python2.7 venv && source venv/bin/activate - - # Installera paket och konfigurera - $ pip install -r requirements.txt - $ cp bibstat/settings_local.py.example bibstat/settings_local.py - $ python manage.py createsuperuser --username=super --email=a@b.com + python3 -m venv venv + # Aktivera virtuell miljö + source venv/bin/activate + # Installera Python-beroenden + pip install -r requirements.txt + + # Konfigurera + cp bibstat/settings_local.py.example bibstat/settings_local.py + vi bibstat/settings_local.py # eller nano, eller... + # Skapa admin-användare + python manage.py createmongodbsuperuser --username=super --email=a@b.com # Starta servern - $ python manage.py runserver - -### MongoDB i Docker - -Ett alternativ till att installera MongoDB är att köra det i Docker. 
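Den Docker-baserade alternativa installationen som tas bort nedan pekade på imagen `mongo:2.6`; mot beroendet MongoDB 5.0.x ovan skulle en ungefär motsvarande (otestad) variant kunna se ut så här — containernamnet `bibstat_mongodb` och volymsökvägen är hämtade från de borttagna instruktionerna, och taggen `mongo:5.0` är ett antagande utifrån den nya beroendelistan:

    # Otestad skiss: MongoDB 5.0 i Docker, motsvarande de borttagna 2.6-instruktionerna
    docker run --publish 27017:27017 --name bibstat_mongodb \
        -v /abs/path/to/repo/mongodb:/data/db -d mongo:5.0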
-https://hub.docker.com/_/mongo?tab=description&page=1&name=2.6 - - -``` -# LINUX: -docker run --name bibstat_mongodb -v /abs/path/to/repo/mongodb:/data/db -d mongo:2.6 -# OSX: -docker run --publish 27017:27017 --name bibstat_mongodb -v /abs/path/to/repo/mongodb:/data/db -d mongo:2.6 -``` - -Konfigurera `MONGODB_HOST` i `bibstat/settings_local.py` till: - -```sh -# LINUX, returvärdet av: -docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' bibstat_mongodb -# OSX: -localhost:27017 -``` - -För att lägga till användare som beskrivs ovan, anslut först en terminal till containern -```sh -sudo docker exec -it bibstat_mongodb bash -``` - - + python manage.py runserver + # Alternativt (obs, gunicorn kommer inte serva statiska filer): + gunicorn bibstat.wsgi ### Importera produktionsdata till lokal utvecklingsmiljö # Gör en dump av produktionsdatabasen - $ ssh @bibstat.kb.se - $ mongodump -d bibstat -u -p - $ exit + ssh @bibstat.kb.se + mongodump -d bibstat -u -p + exit # där är din AD-användare och # och finns i Team Gul Vault # Hämta hem dumpen med sftp eller scp, t.ex: - $ sftp @bibstat.kb.se - $ get -r dump - $ exit + sftp @bibstat.kb.se + get -r dump + exit # Läs in datadumpen - $ mongorestore dump - - # Alternativt, för docker, kopiera datadumpen till containern - $ docker cp dump bibstat_mongodb:/data - - # och läs in den med - $ docker exec -it bibstat_mongodb mongorestore /data/dump/ + mongorestore dump - -Du kan alternativt ange att bara importera en `collection`, exempelvis enbart termerna med hjälp av `mongorestore dump/bibstat/libstat_variables.bson`. Användarnamn och lösenord för produktionsdatabasen finns i `/data/appl/config/bibstat_local.py`. +Du kan alternativt ange att bara importera en `collection`, exempelvis enbart termerna med hjälp av +`mongorestore dump/bibstat/libstat_variables.bson`. Användarnamn och lösenord för produktionsdatabasen +finns i `/data/appl/config/bibstat_local.py`. ## Utveckling -Sidans alla resurser (.js & .css filer) minifieras med hjälp av requirejs och requirejs. (Detta kräver nodejs https://nodejs.org/ installerat på datorn.) +Sidans alla resurser (.js- och .css-filer) minifieras med hjälp av requirejs och requirejs. +(Detta kräver Node.js https://nodejs.org/ installerat på datorn. Förslagsvis Node.js 16 LTS.) Installera requirejs genom: - $ npm install -g requirejs + npm install -g requirejs I mappen build finns byggfiler som körs med r.js. När ändringar är gjorda i Javascript och CSS måste dessa köras för att ändringarna ska ha effekt. För att köra jobbet: - $ ./build.sh + ./build.sh Nu är alla filer minifierade och redo för produktion! -I base-filerna i /bibstat/libstat/templates/base/ (admin.html resp. admin-survey.html) kan ni välja att använda de minifierade filerna eller orginal-filerna. -Detta gör ni genom att ta bort / lägga till ".min" efter både css filen och javascript-filen (ex i produktion används "/static/css/bundle.min.css" och under utveckling kan "/static/css/bundle.css" användas för att slippa bygga om mellan uppdateringar). +I base-filerna i /bibstat/libstat/templates/base/ (admin.html resp. admin-survey.html) kan du välja att använda de +minifierade filerna eller original-filerna. +Detta gör du genom att ta bort eller lägga till ".min" efter både css-filen och javascript-filen +(t.ex.: i produktion används `/static/css/bundle.min.css` och under utveckling kan `/static/css/bundle.css` användas +för att slippa bygga om mellan uppdateringar). ## Testning -Testerna kan köras genom att använda följande kommando. 
+Testerna kan köras genom att använda följande kommando. Både enhetstesterna och integrationstesterna kommer köras. - $ python manage.py test + python manage.py test ## Deploy -Det gemensamma verktyget [DevOps](https://github.com/libris/devops) används för att sköta deploy. -För att kunna göra en deploy krävs anslutning till det lokala nätverket. - -Båda miljöerna går att komma åt med SSH på det lokala nätverket. -Inloggningsuppgifterna till maskinerna går att få genom att fråga IT. -Notera att `sudo` är trasigt på maskinerna, så `root` måste användas. +Se https://github.com/libris/devops (skyddat repo). **Sökvägar** Konfiguration för Django: `/data/appl/config/bibstat_local.py`. @@ -149,14 +111,7 @@ Varje deploy får en egen tids- och datumstämplad mapp i `/data/appl/`. Den senaste versionen som används länkas in till `/data/appl/bibstat`. Tidigare versioner tas inte bort automatiskt, utan måste tas bort manuellt. -**Backup** -Båda miljöerna körs i var sin egen virtuell maskin med Red Hat Linux. -Version: `Red Hat Enterprise Linux Server release 6.6 (Santiago)`. - -Backup sker genom att varje natt ta en kopia på den virtuella maskinen. -IT kan hjälpa till med driften och återställning om det skulle behövas. -Mikko Yletyinen har tidigare varit kontaktperson gällande driften. - +**Backup** En separat backup av databasen görs varje dag klockan 18.00 med Cron. Dessa sparas på en nätverksvolym, som har monterats under `/backup`. Använd `crontab -l` för att visa det cron-jobb som används för detta. @@ -164,64 +119,48 @@ Använd `crontab -l` för att visa det cron-jobb som används för detta. **Setup** Det finns en [sammanfattning](docs/servers.md) av hur miljöerna sattes upp. -### Stage - -Redhat Enterprise Linux Server 6.10 -Adress: [bibstat-stg.kb.se](http://bibstat-stg.kb.se) -Hårdvara: 8 GB Minne, 80 GB Hårddisk -Deploy: `fab conf.bibstat_stg app.bibstat.deploy -u ` -Inloggning: Fråga IT-drift om `sudo`-åtkomst eller inloggningsuppgifter. - -### Produktion - -Redhat Enterprise Linux Server 6.10 -Adress: [bibstat.kb.se](http://bibstat.kb.se) -Hårdvara: 23 GB Minne, 80 GB Hårddisk -Deploy: `fab conf.bibstat_prod app.bibstat.deploy -u ` -Inloggning: Fråga IT-drift om `sudo`-åtkomst eller inloggningsuppgifter. - ## Import **Termer** Tidigare års statistiktermer kan importeras på följande sett. Filerna finns att hitta i projektkatalogen [`data/variables`](data/variables). - $ python manage.py import_variables --file=data/variables/folk_termer.xlsx --target_group=folkbib - $ python manage.py import_variables --file=data/variables/forsk_termer.xlsx --target_group=specbib - $ python manage.py import_variables --file=data/variables/skol_termer.xlsx --target_group=skolbib - $ python manage.py import_variables --file=data/variables/sjukhus_termer.xlsx --target_group=sjukbib + python manage.py import_variables --file=data/variables/folk_termer.xlsx --target_group=folkbib + python manage.py import_variables --file=data/variables/forsk_termer.xlsx --target_group=specbib + python manage.py import_variables --file=data/variables/skol_termer.xlsx --target_group=skolbib + python manage.py import_variables --file=data/variables/sjukhus_termer.xlsx --target_group=sjukbib **Enkäter** Tidigare års enkäter med de inlämnade värdena kan importeras på följande sett. En exekvering av ett kommando importerar ett års värden för en viss bibliotekstyp. Filerna finns på både stage- och produktionsmiljön i `/data/appl/old_bibstat_data`. 
- $ python manage.py import_survey_responses --file=/data/appl/old_bibstat_data/Folkbibliotek.xlsx --target_group=folkbib --year=2013 - $ python manage.py import_survey_responses --file=/data/appl/old_bibstat_data/Folkbibliotek.xlsx --target_group=folkbib --year=2012 - $ python manage.py import_survey_responses --file=/data/appl/old_bibstat_data/Folkbibliotek.xlsx --target_group=folkbib --year=2011 - $ python manage.py import_survey_responses --file=/data/appl/old_bibstat_data/Folkbibliotek.xlsx --target_group=folkbib --year=2010 + python manage.py import_survey_responses --file=/data/appl/old_bibstat_data/Folkbibliotek.xlsx --target_group=folkbib --year=2013 + python manage.py import_survey_responses --file=/data/appl/old_bibstat_data/Folkbibliotek.xlsx --target_group=folkbib --year=2012 + python manage.py import_survey_responses --file=/data/appl/old_bibstat_data/Folkbibliotek.xlsx --target_group=folkbib --year=2011 + python manage.py import_survey_responses --file=/data/appl/old_bibstat_data/Folkbibliotek.xlsx --target_group=folkbib --year=2010 ### Export -Export av enkäter till excelfil kan göras via administrationssidan [/surveys](http://bibstat.kb.se/surveys). Det öppna datat kan också exporteras till excelfil under "Öppna data". Då kommer enbart observationer med för variabler som är publika. +Export av enkäter till excelfil kan göras via administrationssidan [/surveys](https://bibstat.kb.se/surveys). Det öppna datat kan också exporteras till excelfil under "Öppna data". Då kommer enbart observationer med för variabler som är publika. Script för export finns även under [libstat/management/commands]([libstat/management/commands). För att ta ut enkäter: - $ python manage.py export_surveys_to_excel --year=2014 + python manage.py export_surveys_to_excel --year=2014 För att ta ut data om biblioteken (ange all=y för att ta ut alla bibliotek, eller all=n för att endast få med bibliotek som saknar sigel): - $ python manage.py export_libraries_to_excel --year=2012 --all=n + python manage.py export_libraries_to_excel --year=2012 --all=n I servermiljöerna måste man aktivera virtuell env genom - $ cd /data/appl/bibstat - $ source env/bin/activate + cd /data/appl/bibstat + source env/bin/activate För att köra script som bakgrundsprocess: - $ nohup python manage.py export_surveys_to_excel --year=2014 & + nohup python manage.py export_surveys_to_excel --year=2014 & Filerna hamnar under [data/excel_exports] (data/excel_exports) (under /data/appl/excel_exports på servrarna) @@ -229,7 +168,7 @@ Filerna hamnar under [data/excel_exports] (data/excel_exports) (under /data/appl Om ett bibliotek bytt sigel kan kommandot `update_sigel` köras för att ändra i Bibstat: - $ python manage.py update_sigel --from="GAMMALT_SIGEL" --to="NYTT_SIGEL" + python manage.py update_sigel --from="GAMMALT_SIGEL" --to="NYTT_SIGEL" ## Analytics diff --git a/bibstat/settings.py b/bibstat/settings.py index 49b87d1f..5cd5c953 100644 --- a/bibstat/settings.py +++ b/bibstat/settings.py @@ -11,15 +11,13 @@ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os import sys -from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS as TCP +from pathlib import Path -reload(sys) -sys.setdefaultencoding('utf-8') +# Build paths inside the project like this: BASE_DIR / 'subdir'. 
+BASE_DIR = Path(__file__).resolve().parent.parent -BASE_DIR = os.path.dirname(os.path.dirname(__file__)) - -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/ +# Bibstat version number - update this when making a new release +RELEASE_VERSION = "1.20.0" """ ---------------------------------------------------------- @@ -29,104 +27,117 @@ # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False - TEMPLATE_DEBUG = False - BLOCK_SURVEY = False - BLOCK_REPORTS = False - ANALYTICS_ENABLED = False -TEMPLATE_CONTEXT_PROCESSORS = TCP + ( - 'django.core.context_processors.request', -) - -ALLOWED_HOSTS = [ - ".bibstat-stg.kb.se", - ".bibstat-stg.libris.kb.se" -] +ALLOWED_HOSTS = [".bibstat-stg.kb.se", ".bibstat-stg.libris.kb.se"] +USE_X_FORWARDED_HOST = True +USE_X_FORWARDED_PORT = True BIBSTAT_BLOG_BASE_URL = "https://www.kb.se/biblioteksstatistik" # DB connection details -MONGODB_HOST = 'localhost' -MONGODB_NAME = 'bibstat' -MONGODB_USER = 'bibstat' -MONGODB_PASSWD = 'bibstat' +MONGODB_DATABASES = { + "default": { + "name": "bibstat", + "host": "localhost", + "password": "bibstat", + "username": "bibstat", + "tz_aware": True, # if you using timezones in django (USE_TZ = True) + }, +} # Email details -EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' -EMAIL_SENDER = 'biblioteksstatistik@kb.se' +EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" +EMAIL_SENDER = "biblioteksstatistik@kb.se" -LOG_LEVEL = 'WARNING' +LOG_LEVEL = "WARNING" # Override above with local settings if present try: - from settings_local import * -except ImportError: - print "local settings could not be imported" + from .settings_local import * +except ImportError as e: + print(f"local settings could not be imported: {e}") """ ----------------------------------------------------------- """ # Application definition -INSTALLED_APPS = ( +INSTALLED_APPS = [ # Django standard apps - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django_js_reverse', - + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "django.contrib.admin", + "django_js_reverse", # Bibstat specific apps - 'mongoengine.django.mongo_auth', - 'libstat' -) + "django_mongoengine", + "django_mongoengine.mongo_auth", + 'django_mongoengine.mongo_admin', + "libstat", +] -MIDDLEWARE_CLASSES = ( - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', -) +MIDDLEWARE = [ + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", +] -ROOT_URLCONF = 'bibstat.urls' +ROOT_URLCONF = "bibstat.urls" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + 
"django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] -WSGI_APPLICATION = 'bibstat.wsgi.application' +SESSION_ENGINE = "django_mongoengine.sessions" +SESSION_SERIALIZER = "django_mongoengine.sessions.BSONSerializer" + +WSGI_APPLICATION = "bibstat.wsgi.application" # Database -# https://docs.djangoproject.com/en/1.6/ref/settings/#databases +# https://docs.djangoproject.com/en/3.2/ref/settings/#databases DATABASES = { # Configuring Django ORM with dummy DB since MongoEngine config does not use this setting - 'default': { - 'ENGINE': 'django.db.backends.dummy' - } + "default": {"ENGINE": ""} } -# Internationalization -# https://docs.djangoproject.com/en/1.6/topics/i18n/ - -LANGUAGE_CODE = 'sv-se' +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" -TIME_ZONE = 'Europe/Stockholm' +# Internationalization +# https://docs.djangoproject.com/en/3.2/topics/i18n/ +LANGUAGE_CODE = "sv-se" +TIME_ZONE = "Europe/Stockholm" USE_I18N = True - USE_L10N = True - USE_TZ = True - # Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/1.6/howto/static-files/ -STATIC_URL = '/static/' +# https://docs.djangoproject.com/en/3.2/howto/static-files/ +STATIC_URL = "/static/" # Common static resources not tied to any application. STATICFILES_DIRS = ( @@ -134,26 +145,25 @@ # TODO '/var/www/static/', ) -LOGIN_REDIRECT_URL = '/surveys' +LOGIN_REDIRECT_URL = "/surveys" +LOGOUT_REDIRECT_URL = "/admin" """ Logging """ LOGGING = { - 'version': 1, - 'formatters': { - 'verbose': { - 'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s' - }, - 'simple': { - 'format': '%(levelname)s %(asctime)s %(message)s' + "version": 1, + "formatters": { + "verbose": { + "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s" }, + "simple": {"format": "%(levelname)s %(asctime)s %(message)s"}, }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'verbose' + "handlers": { + "console": { + "level": "DEBUG", + "class": "logging.StreamHandler", + "formatter": "verbose", }, # 'file': { # 'level': 'DEBUG', @@ -162,46 +172,43 @@ # 'formatter': 'verbose' # }, }, - 'loggers': { - 'django.request': { - 'handlers': ['console'], - 'level': LOG_LEVEL, - 'propagate': True, + "loggers": { + "django.request": { + "handlers": ["console"], + "level": LOG_LEVEL, + "propagate": True, }, - 'libstat': { - 'handlers': ['console'], - 'level': LOG_LEVEL, - 'propagate': True, + "libstat": { + "handlers": ["console"], + "level": LOG_LEVEL, + "propagate": True, }, - } + }, } if DEBUG: # make all loggers use the console. - for logger in LOGGING['loggers']: - LOGGING['loggers'][logger]['handlers'] = ['console'] + for logger in LOGGING["loggers"]: + LOGGING["loggers"][logger]["handlers"] = ["console"] """ MongoEngine settings """ -import mongoengine # Enable some basic auth features such as get_user(). 
Define a custom user model if advanced auth features are required -AUTHENTICATION_BACKENDS = ( - 'mongoengine.django.auth.MongoEngineBackend', -) -AUTH_USER_MODEL = 'mongo_auth.MongoUser' -MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User' +AUTHENTICATION_BACKENDS = ("django_mongoengine.mongo_auth.backends.MongoEngineBackend",) +AUTH_USER_MODEL = "mongo_auth.MongoUser" +# MONGOENGINE_USER_DOCUMENT = 'mongoengine.django.auth.User' # Store Django sessions in MongoDB backend -SESSION_ENGINE = 'mongoengine.django.sessions' -SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer' -SESSION_COOKIE_AGE = 2592000 +SESSION_ENGINE = "django_mongoengine.sessions" +# SESSION_SERIALIZER = 'mongoengine.django.sessions.BSONSerializer' +# SESSION_COOKIE_AGE = 2592000 # Initialize MongoDB connection -MONGODB_DATABASE_HOST = 'mongodb://%s:%s@%s/%s' % (MONGODB_USER, MONGODB_PASSWD, MONGODB_HOST, MONGODB_NAME) +# MONGODB_DATABASE_HOST = 'mongodb://%s:%s@%s/%s' % (MONGODB_USER, MONGODB_PASSWD, MONGODB_HOST, MONGODB_NAME) -mongoengine.connect(MONGODB_NAME, host=MONGODB_DATABASE_HOST) +# mongoengine.connect(MONGODB_NAME, host=MONGODB_DATABASE_HOST) # mongoengine.connect(MONGODB_NAME) # Use custom test runner to skip setup/teardown of fixtures for test database -TEST_RUNNER = 'libstat.tests.MongoEngineTestRunner' +TEST_RUNNER = "libstat.tests.MongoEngineTestRunner" diff --git a/bibstat/settings_local.py.example b/bibstat/settings_local.py.example index 7d7c8f0e..b44348bd 100644 --- a/bibstat/settings_local.py.example +++ b/bibstat/settings_local.py.example @@ -1,28 +1,46 @@ -import os, urllib3, certifi +import os + +# This setting has to be adjusted for "stage" and "prod" environments +ENVIRONMENT = "local" # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = '3x%=t4cm@eszqbwuw@00f**ol@8^kqomtm8-%x&5_ydq9rm(nl' +SECRET_KEY = "CHAAAAANGEEE-MEEE-WOOOOOOOOOOOO" # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True - -LOG_LEVEL = 'DEBUG' +LOG_LEVEL = "DEBUG" ALLOWED_HOSTS = [] +# prod: +#ALLOWED_HOSTS = [ +# ".bibstat.kb.se", +# ".bibstat.libris.kb.se", +# ".bibstat.libris.kb.se.", +#] # Base url for api, i.e. http://stats.kb.se -API_BASE_URL = "http://localhost:8000" -BIBDB_BASE_URL = "http://localhost:8001" +API_BASE_URL = "http://localhost:8000" # prod: https://bibstat.kb.se +BIBDB_BASE_URL = "http://localhost:8001" # prod: http://bibdb.libris.kb.se +# SECURITY WARNING: keep the pass for bibdb library updates in production secret! 
+BIBDB_UPDATE_PASS = "password" # DB connection details -MONGODB_HOST = 'localhost' -MONGODB_NAME = 'bibstat' -MONGODB_USER = 'bibstat' -MONGODB_PASSWD = 'bibstat' +MONGODB_DATABASES = { + "default": { + "name": "bibstat", + "host": "localhost", + "password": "bibstat", + "username": "bibstat", + "tz_aware": True, # if you're using timezones in Django (USE_TZ = True) + }, +} # Email details -EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' +EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" +# EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend" # uncomment for prod +# EMAIL_SENDER = "biblioteksstatistik@kb.se" # uncomment for prod +# EMAIL_HOST = "smtp.kb.se" # uncomment for prod # Path to log files for cleaning data functions # Change to /data/appl/log for production @@ -30,22 +48,6 @@ CLEAN_DATA_LOG_PATH = os.path.dirname(os.path.dirname(__file__)) # Path to excel file with sigel mappings # Change to /data/appl/sigel_match/sigel_mapping.xlsx for production -SIGEL_MAPPING_FILE_PATH = '/Users/ina/projects/bibstat/data/sigel_match/sigel_mapping.xlsx' +SIGEL_MAPPING_FILE_PATH = "/data/appl/sigel_match/sigel_mapping.xlsx" -# This setting has to be adjusted for 'stage' and 'prod' environments -ENVIRONMENT = 'local' - -# Getting version number from latest release tag in github libris/bibstat repository -http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where()) -response = http.urlopen('GET', 'https://github.com/libris/bibstat/releases/latest', redirect=False) -location_header = response.headers.get('Location', None) -if location_header: - latest_version = location_header.split('/')[-1] -else: - latest_version = None -RELEASE_VERSION = latest_version - -# SECURITY WARNING: keep the pass for bibdb library updates in production secret! 
-BIBDB_UPDATE_PASS = 'password' -SURVEY_EDITING_LOCK_TIMEOUT_HOURS = 8 \ No newline at end of file diff --git a/bibstat/urls.py b/bibstat/urls.py index fd0852a7..3a53eb48 100644 --- a/bibstat/urls.py +++ b/bibstat/urls.py @@ -1,9 +1,9 @@ -from django.conf.urls import patterns, url -from django.core.urlresolvers import reverse_lazy -from django.views.generic import RedirectView, TemplateView -from django.contrib.auth.views import logout +from django.urls import re_path +from django.views.generic import TemplateView +from django.contrib.auth.views import LogoutView + +from django_js_reverse.views import urls_js -from django.contrib import admin import libstat from libstat.apis.open_data import data_api, observation_api, export_api from libstat.apis.terms import term_api, terms_api @@ -14,94 +14,132 @@ from libstat.views.dispatches import dispatches, dispatches_delete, dispatches_send from libstat.views.index import index from libstat.views.reports import reports, report -from libstat.views.surveys import (surveys, - surveys_statuses, - surveys_export, - surveys_export_with_previous, - surveys_activate, - surveys_inactivate, - surveys_overview, - import_and_create, - remove_empty, - match_libraries, - surveys_update_library) -from libstat.views.survey import (survey, - survey_status, - survey_notes, - example_survey, - sigel_survey, - release_survey_lock) -from libstat.views.variables import (variables, - edit_variable, - create_variable, replaceable_variables) - -admin.autodiscover() - -urlpatterns = patterns('', - - url(r'^robots\.txt$', TemplateView.as_view(template_name='robots.txt', content_type='text/plain'), name="robots"), - url(r'^humans\.txt$', TemplateView.as_view(template_name='humans.txt', content_type='text/plain; charset=utf-8'), name="humans"), +from libstat.views.surveys import ( + surveys, + surveys_statuses, + surveys_export, + surveys_export_with_previous, + surveys_activate, + surveys_inactivate, + surveys_overview, + import_and_create, + remove_empty, + match_libraries, + surveys_update_library, +) +from libstat.views.survey import ( + survey, + survey_status, + survey_notes, + example_survey, + sigel_survey, + release_survey_lock, +) +from libstat.views.variables import ( + variables, + edit_variable, + create_variable, + replaceable_variables, +) +urlpatterns = [ + re_path( + r"^robots\.txt$", + TemplateView.as_view(template_name="robots.txt", content_type="text/plain"), + name="robots", + ), + re_path( + r"^humans\.txt$", + TemplateView.as_view( + template_name="humans.txt", content_type="text/plain; charset=utf-8" + ), + name="humans", + ), # APIs - url(r'^data$', data_api, name="data_api"), - url(r'^data/(?P\w+)$', observation_api, name="observation_api"), - url(r'^def/terms$', terms_api, name="terms_api"), - url(r'^def/terms/(?P\w+)$', term_api, name="term_api"), - url(r'^export$', export_api, name='export_api'), - + re_path(r"^data$", data_api, name="data_api"), + re_path(r"^data/(?P\w+)$", observation_api, name="observation_api"), + re_path(r"^def/terms$", terms_api, name="terms_api"), + re_path(r"^def/terms/(?P\w+)$", term_api, name="term_api"), + re_path(r"^export$", export_api, name="export_api"), # Auth - url(r'^login$', login, name='login'), - url(r'^logout$', logout, {'next_page': 'admin'}, name='logout'), - + re_path(r"^login$", login, name="login"), + re_path(r"^logout$", LogoutView.as_view(), name='logout'), # Index - url(r'^$', index, name='index'), - url(r'^admin$', libstat.views.index.admin, name='admin'), - + re_path(r"^$", index, name="index"), + 
re_path(r"^admin$", libstat.views.index.admin, name="admin"), # Articles - url(r'^article/(?P\w+)$', article, name='article'), - url(r'^article$', article, name='article'), - url(r'^articles$', articles, name='articles'), - url(r'^articles/delete/(?P\w+)$', articles_delete, name='articles_delete'), - + re_path(r"^article/(?P\w+)$", article, name="article"), + re_path(r"^article$", article, name="article"), + re_path(r"^articles$", articles, name="articles"), + re_path( + r"^articles/delete/(?P\w+)$", + articles_delete, + name="articles_delete", + ), # Reports - url(r'^reports$', reports, name='reports'), - url(r'^report$', report, name='report'), - + re_path(r"^reports$", reports, name="reports"), + re_path(r"^report$", report, name="report"), # Administration - url(r'^administration/create_new_collection$', create_new_collection, name='create_new_collection'), - url(r'^administration$', administration, name='administration'), - + re_path( + r"^administration/create_new_collection$", + create_new_collection, + name="create_new_collection", + ), + re_path(r"^administration$", administration, name="administration"), # Survey - url(r'^surveys$', surveys, name='surveys'), - url(r'^surveys/example$', example_survey, name='example_survey'), - url(r'^surveys/activate$', surveys_activate, name='surveys_activate'), - url(r'^surveys/inactivate$', surveys_inactivate, name='surveys_inactivate'), - url(r'^surveys/export$', surveys_export, name='surveys_export'), - url(r'^surveys/export_with_previous$', surveys_export_with_previous, name='surveys_export_with_previous'), - url(r'^surveys/import_and_create$', import_and_create, name='surveys_import_and_create'), - url(r'^surveys/remove_empty$', remove_empty, name='surveys_remove_empty'), - url(r'^surveys/match_libraries$', match_libraries, name='surveys_match_libraries'), - url(r'^surveys/status$', surveys_statuses, name='surveys_statuses'), - url(r'^surveys/overview/(?P\w+)$', surveys_overview, name='surveys_overview'), - url(r'^surveys/status/(?P\w+)$', survey_status, name='survey_status'), - url(r'^surveys/notes/(?P\w+)$', survey_notes, name='survey_notes'), - url(r'^surveys/update_library$', surveys_update_library, name='surveys_update_library'), - url(r'^surveys/sigel/(?P\w+)$', sigel_survey, name='sigel_survey'), - url(r'^surveys/unlock/(?P\w+)$', release_survey_lock, name='release_survey_lock'), - url(r'^surveys/(?P\w+)$', survey, name='survey'), - + re_path(r"^surveys$", surveys, name="surveys"), + re_path(r"^surveys/example$", example_survey, name="example_survey"), + re_path(r"^surveys/activate$", surveys_activate, name="surveys_activate"), + re_path(r"^surveys/inactivate$", surveys_inactivate, name="surveys_inactivate"), + re_path(r"^surveys/export$", surveys_export, name="surveys_export"), + re_path( + r"^surveys/export_with_previous$", + surveys_export_with_previous, + name="surveys_export_with_previous", + ), + re_path( + r"^surveys/import_and_create$", + import_and_create, + name="surveys_import_and_create", + ), + re_path(r"^surveys/remove_empty$", remove_empty, name="surveys_remove_empty"), + re_path( + r"^surveys/match_libraries$", match_libraries, name="surveys_match_libraries" + ), + re_path(r"^surveys/status$", surveys_statuses, name="surveys_statuses"), + re_path( + r"^surveys/overview/(?P\w+)$", + surveys_overview, + name="surveys_overview", + ), + re_path( + r"^surveys/status/(?P\w+)$", survey_status, name="survey_status" + ), + re_path(r"^surveys/notes/(?P\w+)$", survey_notes, name="survey_notes"), + re_path( + 
r"^surveys/update_library$", + surveys_update_library, + name="surveys_update_library", + ), + re_path(r"^surveys/sigel/(?P\w+)$", sigel_survey, name="sigel_survey"), + re_path( + r"^surveys/unlock/(?P\w+)$", + release_survey_lock, + name="release_survey_lock", + ), + re_path(r"^surveys/(?P\w+)$", survey, name="survey"), # Dispatch - url(r'^dispatches$', dispatches, name='dispatches'), - url(r'^dispatches/delete$', dispatches_delete, name='dispatches_delete'), - url(r'^dispatches/send', dispatches_send, name='dispatches_send'), - + re_path(r"^dispatches$", dispatches, name="dispatches"), + re_path(r"^dispatches/delete$", dispatches_delete, name="dispatches_delete"), + re_path(r"^dispatches/send", dispatches_send, name="dispatches_send"), # Variables - url(r'^variables$', variables, name='variables'), - url(r'^variables/new$', create_variable, name='create_variable'), - url(r'^variables/replaceable$', replaceable_variables, name='replaceable_variables'), - url(r'^variables/(?P\w+)$', edit_variable, name='edit_variable'), - + re_path(r"^variables$", variables, name="variables"), + re_path(r"^variables/new$", create_variable, name="create_variable"), + re_path( + r"^variables/replaceable$", replaceable_variables, name="replaceable_variables" + ), + re_path(r"^variables/(?P\w+)$", edit_variable, name="edit_variable"), # Other - url(r'^.well-known/void$', RedirectView.as_view(url=reverse_lazy('open_data'), permanent=False)), - url(r'^jsreverse/$', 'django_js_reverse.views.urls_js', name='js_reverse') -) + # re_path(r'^.well-known/void$', RedirectView.as_view(url=reverse('open_data'), permanent=False)), + re_path(r"^jsreverse/$", urls_js, name="js_reverse"), +] diff --git a/bibstat/wsgi.py b/bibstat/wsgi.py index b72e27a9..8c2bcedf 100644 --- a/bibstat/wsgi.py +++ b/bibstat/wsgi.py @@ -4,13 +4,13 @@ It exposes the WSGI callable as a module-level variable named ``application``. 
For more information on this file, see -https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/ +https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/ """ import os -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bibstat.settings") - from django.core.wsgi import get_wsgi_application +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bibstat.settings") + application = get_wsgi_application() diff --git a/data/municipalities.py b/data/municipalities.py index 1787032c..20e226a5 100644 --- a/data/municipalities.py +++ b/data/municipalities.py @@ -1,332 +1,341 @@ -# -*- coding: UTF-8 -*- - MUNICIPALITIES = ( - (u"Riket", u"0000"), - (u"Stockholms län", u"0100"), - (u"Botkyrka", u"0127"), - (u"Danderyd", u"0162"), - (u"Ekerö", u"0125"), - (u"Haninge", u"0136"), - (u"Huddinge", u"0126"), - (u"Järfälla", u"0123"), - (u"Lidingö", u"0186"), - (u"Nacka", u"0182"), - (u"Norrtälje", u"0188"), - (u"Nykvarn", u"0140"), - (u"Nynäshamn", u"0192"), - (u"Salem", u"0128"), - (u"Sigtuna", u"0191"), - (u"Sollentuna", u"0163"), - (u"Solna", u"0184"), - (u"Stockholm", u"0180"), - (u"Sundbyberg", u"0183"), - (u"Södertälje", u"0181"), - (u"Tyresö", u"0138"), - (u"Täby", u"0160"), - (u"Upplands-Bro", u"0139"), - (u"Upplands Väsby", u"0114"), - (u"Vallentuna", u"0115"), - (u"Vaxholm", u"0187"), - (u"Värmdö", u"0120"), - (u"Österåker", u"0117"), - (u"Uppsala län", u"0300"), - (u"Enköping", u"0381"), - (u"Heby", u"0331"), - (u"Håbo", u"0305"), - (u"Knivsta", u"0330"), - (u"Tierp", u"0360"), - (u"Uppsala", u"0380"), - (u"Älvkarleby", u"0319"), - (u"Östhammar", u"0382"), - (u"Södermanlands län", u"0400"), - (u"Eskilstuna", u"0484"), - (u"Flen", u"0482"), - (u"Gnesta", u"0461"), - (u"Katrineholm", u"0483"), - (u"Nyköping", u"0480"), - (u"Oxelösund", u"0481"), - (u"Strängnäs", u"0486"), - (u"Trosa", u"0488"), - (u"Vingåker", u"0428"), - (u"Östergötlands län", u"0500"), - (u"Boxholm", u"0560"), - (u"Finspång", u"0562"), - (u"Kinda", u"0513"), - (u"Linköping", u"0580"), - (u"Mjölby", u"0586"), - (u"Motala", u"0583"), - (u"Norrköping", u"0581"), - (u"Söderköping", u"0582"), - (u"Vadstena", u"0584"), - (u"Valdemarsvik", u"0563"), - (u"Ydre", u"0512"), - (u"Åtvidaberg", u"0561"), - (u"Ödeshög", u"0509"), - (u"Jönköpings län", u"0600"), - (u"Aneby", u"0604"), - (u"Eksjö", u"0686"), - (u"Gislaved", u"0662"), - (u"Gnosjö", u"0617"), - (u"Habo", u"0643"), - (u"Jönköping", u"0680"), - (u"Mullsjö", u"0642"), - (u"Nässjö", u"0682"), - (u"Sävsjö", u"0684"), - (u"Tranås", u"0687"), - (u"Vaggeryd", u"0665"), - (u"Vetlanda", u"0685"), - (u"Värnamo", u"0683"), - (u"Kronobergs län", u"0700"), - (u"Alvesta", u"0764"), - (u"Lessebo", u"0761"), - (u"Ljungby", u"0781"), - (u"Markaryd", u"0767"), - (u"Tingsryd", u"0763"), - (u"Uppvidinge", u"0760"), - (u"Växjö", u"0780"), - (u"Älmhult", u"0765"), - (u"Kalmar län", u"0800"), - (u"Borgholm", u"0885"), - (u"Emmaboda", u"0862"), - (u"Hultsfred", u"0860"), - (u"Högsby", u"0821"), - (u"Kalmar", u"0880"), - (u"Mönsterås", u"0861"), - (u"Mörbylånga", u"0840"), - (u"Nybro", u"0881"), - (u"Oskarshamn", u"0882"), - (u"Torsås", u"0834"), - (u"Vimmerby", u"0884"), - (u"Västervik", u"0883"), - (u"Gotlands län", u"0900"), - (u"Gotland", u"0980"), - (u"Blekinge län", u"1000"), - (u"Karlshamn", u"1082"), - (u"Karlskrona", u"1080"), - (u"Olofström", u"1060"), - (u"Ronneby", u"1081"), - (u"Sölvesborg", u"1083"), - (u"Skåne län", u"1200"), - (u"Bjuv", u"1260"), - (u"Bromölla", u"1272"), - (u"Burlöv", u"1231"), - (u"Båstad", u"1278"), - (u"Eslöv", u"1285"), - (u"Helsingborg", u"1283"), - (u"Hässleholm", 
u"1293"), - (u"Höganäs", u"1284"), - (u"Hörby", u"1266"), - (u"Höör", u"1267"), - (u"Klippan", u"1276"), - (u"Kristianstad", u"1290"), - (u"Kävlinge", u"1261"), - (u"Landskrona", u"1282"), - (u"Lomma", u"1262"), - (u"Lund", u"1281"), - (u"Malmö", u"1280"), - (u"Osby", u"1273"), - (u"Perstorp", u"1275"), - (u"Simrishamn", u"1291"), - (u"Sjöbo", u"1265"), - (u"Skurup", u"1264"), - (u"Staffanstorp", u"1230"), - (u"Svalöv", u"1214"), - (u"Svedala", u"1263"), - (u"Tomelilla", u"1270"), - (u"Trelleborg", u"1287"), - (u"Vellinge", u"1233"), - (u"Ystad", u"1286"), - (u"Åstorp", u"1277"), - (u"Ängelholm", u"1292"), - (u"Örkelljunga", u"1257"), - (u"Östra Göinge", u"1256"), - (u"Hallands län", u"1300"), - (u"Falkenberg", u"1382"), - (u"Halmstad", u"1380"), - (u"Hylte", u"1315"), - (u"Kungsbacka", u"1384"), - (u"Laholm", u"1381"), - (u"Varberg", u"1383"), - (u"Västra Götalands län", u"1400"), - (u"Ale", u"1440"), - (u"Alingsås", u"1489"), - (u"Bengtsfors", u"1460"), - (u"Bollebygd", u"1443"), - (u"Borås", u"1490"), - (u"Dals-Ed", u"1438"), - (u"Essunga", u"1445"), - (u"Falköping", u"1499"), - (u"Färgelanda", u"1439"), - (u"Grästorp", u"1444"), - (u"Gullspång", u"1447"), - (u"Göteborg", u"1480"), - (u"Götene", u"1471"), - (u"Herrljunga", u"1466"), - (u"Hjo", u"1497"), - (u"Härryda", u"1401"), - (u"Karlsborg", u"1446"), - (u"Kungälv", u"1482"), - (u"Lerum", u"1441"), - (u"Lidköping", u"1494"), - (u"Lilla Edet", u"1462"), - (u"Lysekil", u"1484"), - (u"Mariestad", u"1493"), - (u"Mark", u"1463"), - (u"Mellerud", u"1461"), - (u"Munkedal", u"1430"), - (u"Mölndal", u"1481"), - (u"Orust", u"1421"), - (u"Partille", u"1402"), - (u"Skara", u"1495"), - (u"Skövde", u"1496"), - (u"Sotenäs", u"1427"), - (u"Stenungsund", u"1415"), - (u"Strömstad", u"1486"), - (u"Svenljunga", u"1465"), - (u"Tanum", u"1435"), - (u"Tibro", u"1472"), - (u"Tidaholm", u"1498"), - (u"Tjörn", u"1419"), - (u"Tranemo", u"1452"), - (u"Trollhättan", u"1488"), - (u"Töreboda", u"1473"), - (u"Uddevalla", u"1485"), - (u"Ulricehamn", u"1491"), - (u"Vara", u"1470"), - (u"Vårgårda", u"1442"), - (u"Vänersborg", u"1487"), - (u"Åmål", u"1492"), - (u"Öckerö", u"1407"), - (u"Värmlands län", u"1700"), - (u"Arvika", u"1784"), - (u"Eda", u"1730"), - (u"Filipstad", u"1782"), - (u"Forshaga", u"1763"), - (u"Grums", u"1764"), - (u"Hagfors", u"1783"), - (u"Hammarö", u"1761"), - (u"Karlstad", u"1780"), - (u"Kil", u"1715"), - (u"Kristinehamn", u"1781"), - (u"Munkfors", u"1762"), - (u"Storfors", u"1760"), - (u"Sunne", u"1766"), - (u"Säffle", u"1785"), - (u"Torsby", u"1737"), - (u"Årjäng", u"1765"), - (u"Örebro län", u"1800"), - (u"Askersund", u"1882"), - (u"Degerfors", u"1862"), - (u"Hallsberg", u"1861"), - (u"Hällefors", u"1863"), - (u"Karlskoga", u"1883"), - (u"Kumla", u"1881"), - (u"Laxå", u"1860"), - (u"Lekeberg", u"1814"), - (u"Lindesberg", u"1885"), - (u"Ljusnarsberg", u"1864"), - (u"Nora", u"1884"), - (u"Örebro", u"1880"), - (u"Västmanlands län", u"1900"), - (u"Arboga", u"1984"), - (u"Fagersta", u"1982"), - (u"Hallstahammar", u"1961"), - (u"Kungsör", u"1960"), - (u"Köping", u"1983"), - (u"Norberg", u"1962"), - (u"Sala", u"1981"), - (u"Skinnskatteberg", u"1904"), - (u"Surahammar", u"1907"), - (u"Västerås", u"1980"), - (u"Dalarnas län", u"2000"), - (u"Avesta", u"2084"), - (u"Borlänge", u"2081"), - (u"Falun", u"2080"), - (u"Gagnef", u"2026"), - (u"Hedemora", u"2083"), - (u"Leksand", u"2029"), - (u"Ludvika", u"2085"), - (u"Malung-Sälen", u"2023"), - (u"Mora", u"2062"), - (u"Orsa", u"2034"), - (u"Rättvik", u"2031"), - (u"Smedjebacken", u"2061"), - (u"Säter", 
u"2082"), - (u"Vansbro", u"2021"), - (u"Älvdalen", u"2039"), - (u"Gävleborgs län", u"2100"), - (u"Bollnäs", u"2183"), - (u"Gävle", u"2180"), - (u"Hofors", u"2104"), - (u"Hudiksvall", u"2184"), - (u"Ljusdal", u"2161"), - (u"Nordanstig", u"2132"), - (u"Ockelbo", u"2101"), - (u"Ovanåker", u"2121"), - (u"Sandviken", u"2181"), - (u"Söderhamn", u"2182"), - (u"Västernorrlands län", u"2200"), - (u"Härnösand", u"2280"), - (u"Kramfors", u"2282"), - (u"Sollefteå", u"2283"), - (u"Sundsvall", u"2281"), - (u"Timrå", u"2262"), - (u"Ånge", u"2260"), - (u"Örnsköldsvik", u"2284"), - (u"Jämtlands län", u"2300"), - (u"Berg", u"2326"), - (u"Bräcke", u"2305"), - (u"Härjedalen", u"2361"), - (u"Krokom", u"2309"), - (u"Ragunda", u"2303"), - (u"Strömsund", u"2313"), - (u"Åre", u"2321"), - (u"Östersund", u"2380"), - (u"Västerbottens län", u"2400"), - (u"Bjurholm", u"2403"), - (u"Dorotea", u"2425"), - (u"Lycksele", u"2481"), - (u"Malå", u"2418"), - (u"Nordmaling", u"2401"), - (u"Norsjö", u"2417"), - (u"Robertsfors", u"2409"), - (u"Skellefteå", u"2482"), - (u"Sorsele", u"2422"), - (u"Storuman", u"2421"), - (u"Umeå", u"2480"), - (u"Vilhelmina", u"2462"), - (u"Vindeln", u"2404"), - (u"Vännäs", u"2460"), - (u"Åsele", u"2463"), - (u"Norrbottens län", u"2500"), - (u"Arjeplog", u"2506"), - (u"Arvidsjaur", u"2505"), - (u"Boden", u"2582"), - (u"Gällivare", u"2523"), - (u"Haparanda", u"2583"), - (u"Jokkmokk", u"2510"), - (u"Kalix", u"2514"), - (u"Kiruna", u"2584"), - (u"Luleå", u"2580"), - (u"Pajala", u"2521"), - (u"Piteå", u"2581"), - (u"Älvsbyn", u"2560"), - (u"Överkalix", u"2513"), - (u"Övertorneå", u"2518")) + ("Riket", "0000"), + ("Stockholms län", "0100"), + ("Botkyrka", "0127"), + ("Danderyd", "0162"), + ("Ekerö", "0125"), + ("Haninge", "0136"), + ("Huddinge", "0126"), + ("Järfälla", "0123"), + ("Lidingö", "0186"), + ("Nacka", "0182"), + ("Norrtälje", "0188"), + ("Nykvarn", "0140"), + ("Nynäshamn", "0192"), + ("Salem", "0128"), + ("Sigtuna", "0191"), + ("Sollentuna", "0163"), + ("Solna", "0184"), + ("Stockholm", "0180"), + ("Sundbyberg", "0183"), + ("Södertälje", "0181"), + ("Tyresö", "0138"), + ("Täby", "0160"), + ("Upplands-Bro", "0139"), + ("Upplands Väsby", "0114"), + ("Vallentuna", "0115"), + ("Vaxholm", "0187"), + ("Värmdö", "0120"), + ("Österåker", "0117"), + ("Uppsala län", "0300"), + ("Enköping", "0381"), + ("Heby", "0331"), + ("Håbo", "0305"), + ("Knivsta", "0330"), + ("Tierp", "0360"), + ("Uppsala", "0380"), + ("Älvkarleby", "0319"), + ("Östhammar", "0382"), + ("Södermanlands län", "0400"), + ("Eskilstuna", "0484"), + ("Flen", "0482"), + ("Gnesta", "0461"), + ("Katrineholm", "0483"), + ("Nyköping", "0480"), + ("Oxelösund", "0481"), + ("Strängnäs", "0486"), + ("Trosa", "0488"), + ("Vingåker", "0428"), + ("Östergötlands län", "0500"), + ("Boxholm", "0560"), + ("Finspång", "0562"), + ("Kinda", "0513"), + ("Linköping", "0580"), + ("Mjölby", "0586"), + ("Motala", "0583"), + ("Norrköping", "0581"), + ("Söderköping", "0582"), + ("Vadstena", "0584"), + ("Valdemarsvik", "0563"), + ("Ydre", "0512"), + ("Åtvidaberg", "0561"), + ("Ödeshög", "0509"), + ("Jönköpings län", "0600"), + ("Aneby", "0604"), + ("Eksjö", "0686"), + ("Gislaved", "0662"), + ("Gnosjö", "0617"), + ("Habo", "0643"), + ("Jönköping", "0680"), + ("Mullsjö", "0642"), + ("Nässjö", "0682"), + ("Sävsjö", "0684"), + ("Tranås", "0687"), + ("Vaggeryd", "0665"), + ("Vetlanda", "0685"), + ("Värnamo", "0683"), + ("Kronobergs län", "0700"), + ("Alvesta", "0764"), + ("Lessebo", "0761"), + ("Ljungby", "0781"), + ("Markaryd", "0767"), + ("Tingsryd", "0763"), + 
("Uppvidinge", "0760"), + ("Växjö", "0780"), + ("Älmhult", "0765"), + ("Kalmar län", "0800"), + ("Borgholm", "0885"), + ("Emmaboda", "0862"), + ("Hultsfred", "0860"), + ("Högsby", "0821"), + ("Kalmar", "0880"), + ("Mönsterås", "0861"), + ("Mörbylånga", "0840"), + ("Nybro", "0881"), + ("Oskarshamn", "0882"), + ("Torsås", "0834"), + ("Vimmerby", "0884"), + ("Västervik", "0883"), + ("Gotlands län", "0900"), + ("Gotland", "0980"), + ("Blekinge län", "1000"), + ("Karlshamn", "1082"), + ("Karlskrona", "1080"), + ("Olofström", "1060"), + ("Ronneby", "1081"), + ("Sölvesborg", "1083"), + ("Skåne län", "1200"), + ("Bjuv", "1260"), + ("Bromölla", "1272"), + ("Burlöv", "1231"), + ("Båstad", "1278"), + ("Eslöv", "1285"), + ("Helsingborg", "1283"), + ("Hässleholm", "1293"), + ("Höganäs", "1284"), + ("Hörby", "1266"), + ("Höör", "1267"), + ("Klippan", "1276"), + ("Kristianstad", "1290"), + ("Kävlinge", "1261"), + ("Landskrona", "1282"), + ("Lomma", "1262"), + ("Lund", "1281"), + ("Malmö", "1280"), + ("Osby", "1273"), + ("Perstorp", "1275"), + ("Simrishamn", "1291"), + ("Sjöbo", "1265"), + ("Skurup", "1264"), + ("Staffanstorp", "1230"), + ("Svalöv", "1214"), + ("Svedala", "1263"), + ("Tomelilla", "1270"), + ("Trelleborg", "1287"), + ("Vellinge", "1233"), + ("Ystad", "1286"), + ("Åstorp", "1277"), + ("Ängelholm", "1292"), + ("Örkelljunga", "1257"), + ("Östra Göinge", "1256"), + ("Hallands län", "1300"), + ("Falkenberg", "1382"), + ("Halmstad", "1380"), + ("Hylte", "1315"), + ("Kungsbacka", "1384"), + ("Laholm", "1381"), + ("Varberg", "1383"), + ("Västra Götalands län", "1400"), + ("Ale", "1440"), + ("Alingsås", "1489"), + ("Bengtsfors", "1460"), + ("Bollebygd", "1443"), + ("Borås", "1490"), + ("Dals-Ed", "1438"), + ("Essunga", "1445"), + ("Falköping", "1499"), + ("Färgelanda", "1439"), + ("Grästorp", "1444"), + ("Gullspång", "1447"), + ("Göteborg", "1480"), + ("Götene", "1471"), + ("Herrljunga", "1466"), + ("Hjo", "1497"), + ("Härryda", "1401"), + ("Karlsborg", "1446"), + ("Kungälv", "1482"), + ("Lerum", "1441"), + ("Lidköping", "1494"), + ("Lilla Edet", "1462"), + ("Lysekil", "1484"), + ("Mariestad", "1493"), + ("Mark", "1463"), + ("Mellerud", "1461"), + ("Munkedal", "1430"), + ("Mölndal", "1481"), + ("Orust", "1421"), + ("Partille", "1402"), + ("Skara", "1495"), + ("Skövde", "1496"), + ("Sotenäs", "1427"), + ("Stenungsund", "1415"), + ("Strömstad", "1486"), + ("Svenljunga", "1465"), + ("Tanum", "1435"), + ("Tibro", "1472"), + ("Tidaholm", "1498"), + ("Tjörn", "1419"), + ("Tranemo", "1452"), + ("Trollhättan", "1488"), + ("Töreboda", "1473"), + ("Uddevalla", "1485"), + ("Ulricehamn", "1491"), + ("Vara", "1470"), + ("Vårgårda", "1442"), + ("Vänersborg", "1487"), + ("Åmål", "1492"), + ("Öckerö", "1407"), + ("Värmlands län", "1700"), + ("Arvika", "1784"), + ("Eda", "1730"), + ("Filipstad", "1782"), + ("Forshaga", "1763"), + ("Grums", "1764"), + ("Hagfors", "1783"), + ("Hammarö", "1761"), + ("Karlstad", "1780"), + ("Kil", "1715"), + ("Kristinehamn", "1781"), + ("Munkfors", "1762"), + ("Storfors", "1760"), + ("Sunne", "1766"), + ("Säffle", "1785"), + ("Torsby", "1737"), + ("Årjäng", "1765"), + ("Örebro län", "1800"), + ("Askersund", "1882"), + ("Degerfors", "1862"), + ("Hallsberg", "1861"), + ("Hällefors", "1863"), + ("Karlskoga", "1883"), + ("Kumla", "1881"), + ("Laxå", "1860"), + ("Lekeberg", "1814"), + ("Lindesberg", "1885"), + ("Ljusnarsberg", "1864"), + ("Nora", "1884"), + ("Örebro", "1880"), + ("Västmanlands län", "1900"), + ("Arboga", "1984"), + ("Fagersta", "1982"), + ("Hallstahammar", "1961"), + 
("Kungsör", "1960"), + ("Köping", "1983"), + ("Norberg", "1962"), + ("Sala", "1981"), + ("Skinnskatteberg", "1904"), + ("Surahammar", "1907"), + ("Västerås", "1980"), + ("Dalarnas län", "2000"), + ("Avesta", "2084"), + ("Borlänge", "2081"), + ("Falun", "2080"), + ("Gagnef", "2026"), + ("Hedemora", "2083"), + ("Leksand", "2029"), + ("Ludvika", "2085"), + ("Malung-Sälen", "2023"), + ("Mora", "2062"), + ("Orsa", "2034"), + ("Rättvik", "2031"), + ("Smedjebacken", "2061"), + ("Säter", "2082"), + ("Vansbro", "2021"), + ("Älvdalen", "2039"), + ("Gävleborgs län", "2100"), + ("Bollnäs", "2183"), + ("Gävle", "2180"), + ("Hofors", "2104"), + ("Hudiksvall", "2184"), + ("Ljusdal", "2161"), + ("Nordanstig", "2132"), + ("Ockelbo", "2101"), + ("Ovanåker", "2121"), + ("Sandviken", "2181"), + ("Söderhamn", "2182"), + ("Västernorrlands län", "2200"), + ("Härnösand", "2280"), + ("Kramfors", "2282"), + ("Sollefteå", "2283"), + ("Sundsvall", "2281"), + ("Timrå", "2262"), + ("Ånge", "2260"), + ("Örnsköldsvik", "2284"), + ("Jämtlands län", "2300"), + ("Berg", "2326"), + ("Bräcke", "2305"), + ("Härjedalen", "2361"), + ("Krokom", "2309"), + ("Ragunda", "2303"), + ("Strömsund", "2313"), + ("Åre", "2321"), + ("Östersund", "2380"), + ("Västerbottens län", "2400"), + ("Bjurholm", "2403"), + ("Dorotea", "2425"), + ("Lycksele", "2481"), + ("Malå", "2418"), + ("Nordmaling", "2401"), + ("Norsjö", "2417"), + ("Robertsfors", "2409"), + ("Skellefteå", "2482"), + ("Sorsele", "2422"), + ("Storuman", "2421"), + ("Umeå", "2480"), + ("Vilhelmina", "2462"), + ("Vindeln", "2404"), + ("Vännäs", "2460"), + ("Åsele", "2463"), + ("Norrbottens län", "2500"), + ("Arjeplog", "2506"), + ("Arvidsjaur", "2505"), + ("Boden", "2582"), + ("Gällivare", "2523"), + ("Haparanda", "2583"), + ("Jokkmokk", "2510"), + ("Kalix", "2514"), + ("Kiruna", "2584"), + ("Luleå", "2580"), + ("Pajala", "2521"), + ("Piteå", "2581"), + ("Älvsbyn", "2560"), + ("Överkalix", "2513"), + ("Övertorneå", "2518"), +) municipalities = dict([(tuple[1], tuple[0]) for tuple in MUNICIPALITIES]) -municipalities_without_counties = dict([(tuple[1], tuple[0]) for tuple in MUNICIPALITIES if not (len(tuple[0].split()) > 1 and tuple[0].split()[len(tuple[0].split())-1] == u"län")]) +municipalities_without_counties = dict( + [ + (tuple[1], tuple[0]) + for tuple in MUNICIPALITIES + if not ( + len(tuple[0].split()) > 1 + and tuple[0].split()[len(tuple[0].split()) - 1] == "län" + ) + ] +) + def get_counties(municipality_codes): county_list = [] for municipality_code in municipality_codes: - county_code = municipality_code[1][0:2] + u"00" + county_code = municipality_code[1][0:2] + "00" if county_code in municipalities: county_list.append((municipalities[county_code], county_code)) - for name, code in dict(county_list).iteritems(): - yield (name, code) + for name, code in list(dict(county_list).items()): + yield name, code def municipality_code_from(code): @@ -334,7 +343,9 @@ def municipality_code_from(code): return None code = int(code) if code > 9999: - raise ValueError("Municipality code can only contain four digits: {}".format(code)) + raise ValueError( + "Municipality code can only contain four digits: {}".format(code) + ) if code < 0: raise ValueError("Municipality code can not be negative: {}".format(code)) return "{0:04d}".format(code) @@ -344,4 +355,4 @@ def municipality_code_from_county_code(code): if not code and code != 0 and code != 0.0: return None code = int(code) - return municipality_code_from(code * 100) \ No newline at end of file + return municipality_code_from(code * 
100) diff --git a/data/principals.py b/data/principals.py index 55e77cb2..0a2c7ef4 100644 --- a/data/principals.py +++ b/data/principals.py @@ -1,46 +1,75 @@ -# -*- coding: UTF-8 -*- - PRINCIPALS = ( - (u"musstat", u"stat"), - (u"muslan", u"landsting"), - (u"muskom", u"kommun"), - (u"folkbib", u"kommun"), - (u"folkskolbib", u"kommun"), - (u"specbib", u"stat"), - (u"univbib", u"stat"), - (u"sjukbib", u"landsting"), - (u"myndbib", u"stat"), - (u"folkhogbib", u"landsting"), - (u"ovrbib", u"stat"), - (u"frisgym", u"privat"), - (u"friskol", u"privat"), - (u"skolbib", u"kommun"), - (u"gymbib", u"kommun"), - (u"statskol", u"stat"), - (u"vuxbib", u"kommun"), - (u"natbib", u"stat") + ("musstat", "stat"), + ("muslan", "landsting"), + ("muskom", "kommun"), + ("folkbib", "kommun"), + ("folkskolbib", "kommun"), + ("specbib", "stat"), + ("univbib", "stat"), + ("sjukbib", "landsting"), + ("myndbib", "stat"), + ("folkhogbib", "landsting"), + ("ovrbib", "stat"), + ("frisgym", "privat"), + ("friskol", "privat"), + ("skolbib", "kommun"), + ("gymbib", "kommun"), + ("statskol", "stat"), + ("vuxbib", "kommun"), + ("natbib", "stat"), ) name_for_principal = { - u"landsting": u"Landsting", - u"kommun": u"Kommun", - u"stat": u"Stat", - u"privat": u"Privat" + "landsting": "Landsting", + "kommun": "Kommun", + "stat": "Stat", + "privat": "Privat", } principal_for_library_type = dict(PRINCIPALS) -library_types_with_principals = [u'univbib', u'muslan', u'muskom', u'vuxbib', u'frisgym', u'skolbib', u'sjukbib', u'myndbib', u'statskol', u'folkbib', u'folkskolbib', u'musstat', u'specbib', u'folkhogbib', u'friskol', u'ovrbib', u'natbib', u'gymbib'] +library_types_with_principals = [ + "univbib", + "muslan", + "muskom", + "vuxbib", + "frisgym", + "skolbib", + "sjukbib", + "myndbib", + "statskol", + "folkbib", + "folkskolbib", + "musstat", + "specbib", + "folkhogbib", + "friskol", + "ovrbib", + "natbib", + "gymbib", +] library_types_for_principal = { - u'stat': [u'univbib', u'myndbib', u'statskol', u'musstat', u'specbib', u'ovrbib', u'natbib'], - u'landsting': [u'muslan', u'sjukbib', u'folkhogbib'], - u'privat': [u'frisgym', u'friskol'], - u'kommun': [u'muskom', u'vuxbib', u'skolbib', u'folkbib', u'folkskolbib', u'gymbib'] + "stat": [ + "univbib", + "myndbib", + "statskol", + "musstat", + "specbib", + "ovrbib", + "natbib", + ], + "landsting": ["muslan", "sjukbib", "folkhogbib"], + "privat": ["frisgym", "friskol"], + "kommun": ["muskom", "vuxbib", "skolbib", "folkbib", "folkskolbib", "gymbib"], } + def get_library_types_with_same_principal(library): - if library.library_type is None or library.library_type not in principal_for_library_type: + if ( + library.library_type is None + or library.library_type not in principal_for_library_type + ): return library_types_with_principals principal = principal_for_library_type[library.library_type] - return library_types_for_principal[principal] \ No newline at end of file + return library_types_for_principal[principal] diff --git a/docs/servers.md b/docs/servers.md deleted file mode 100644 index cab96e91..00000000 --- a/docs/servers.md +++ /dev/null @@ -1,144 +0,0 @@ -# Serverkonfiguration - -Detta dokument sammanfattar hur stage- och produktionsmiljön sattes upp. - - # 1. Add environment variables to ~/.bashrc - export LC_ALL=en_US.UTF-8 - export LD_LIBRARY_PATH=/usr/local/lib - export LD_RUN_PATH=/usr/local/lib - export CFLAGS="$CFLAGS -fPIC" - - # 2. Reload ~/.bashrc - source ~/.bashrc - - # 3. Install required libs prior to building python - yum install bzip2 bzip2-devel - - # 4. 
Install python2.7 - wget http://www.python.org/ftp/python/2.7.5/Python-2.7.5.tgz - tar xvf Python-2.7.5.tgz - cd Python-2.7.5 - - ./configure --enable-shared --with-threads - make - make install - - cd .. - rm -rf Python-2.7.5 - rm -f Python-2.7.5.tgz - - # 5. Install Apache and python tools - yum install httpd httpd-devel python-pip python-virtualenv - - # 6. Install mod_wsgi and make sure it uses python2.7 - wget http://modwsgi.googlecode.com/files/mod_wsgi-3.4.tar.gz - tar xvf mod_wsgi-3.4.tar.gz - cd mod_wsgi-3.4 - - ./configure --with-python=/usr/local/bin/python2.7 - make - make install - - cd .. - rm -rf mod_wsgi-3.4 - rm -f mod_wsgi-3.4.tar.gz - - # 7. Add this to file /etc/yum.repos.d/mongodb.repo - [mongodb] - name=MongoDB Repository - baseurl=http://downloads-distro.mongodb.org/repo/redhat/os/x86_64/ - gpgcheck=0 - enabled=1 - - # 8. Create this directory, mongo needs it - mkdir /data/db - - # 9. Install mongo - yum install mongodb-org - - # 10. Make sure that these lines in /etc/mongod.conf are commented out and inactive - bind_ip = 127.0.0.1 - auth = true - - # 11. Start mongo - service mongod start - - # 12. Create admin user - mongo - > use admin - > db.createUser({user:"admin", pwd:"admin", roles: ["root"]}) - > db.runCommand({usersInfo:"admin", showPrivileges:true }) - - # 13. Restart mongo - service mongod restart - - # 14. Double check that authentication is working - mongo admin - > db.auth("admin", "admin") - - # 15. Create bibstat user - mongo - > use bibstat - > db.createUser({user:"bibstat", pwd:"", roles:["readWrite"]}) - > db.runCommand({usersInfo:"bibstat", showPrivileges:true }) - - # 16. Export dump from mongo on old prod bibstat machine - mongodump -d bibstat -u bibstat -p - - # 17. Transfer the dump from the old to the new prod bibstat machine - scp -r dump root@:/tmp/bibstat-prod-dump - - # 18. Load the dump on the new prod machine - mongorestore -u admin -p admin /tmp/bibstat-prod-dump/ - - # 19. Add this to /etc/httpd/conf.d/bibstat.conf - LoadModule wsgi_module modules/mod_wsgi.so - - ServerName - ServerAdmin niklas.lindstrom@kb.se - - Alias /static /data/appl/bibstat/static - - WSGIDaemonProcess bibstat python-path=/data/appl/bibstat:/data/appl/bibstat/env/lib/python2.7/site-packages processes=16 threads=4 - WSGIScriptAlias / /data/appl/bibstat/bibstat/wsgi.py - WSGIProcessGroup bibstat - WSGIApplicationGroup %{GLOBAL} - - ErrorLog logs/bibstat-error_log - CustomLog logs/bibstat-access_log combined - - RewriteEngine on - RewriteRule ^/.well-known/void$ /open_data/ [R] - - - # 20. Create deployment target folder and grant everyone read-write-rights - mkdir /data/appl - chmod a+rw /data/appl - - # 21. Prepare settings that will be symlinked into deployed repo - # Create file /data/appl/config/bibstat_local.py and add to it (making sure to keep the secret key for production environments a secret!): - SECRET_KEY = '3x%=t4cm@eszqbwuw@00f**ol@8^kqomtm8-%x&5_ydq9rm(nl' - DEBUG = False - TEMPLATE_DEBUG = False - ALLOWED_HOSTS = [ - ".", - "..", - ] - API_BASE_URL = "http:///statistics" - BIBDB_BASE_URL = "http://bibdb.libris.kb.se" - MONGODB_HOST = 'localhost' - MONGODB_NAME = 'bibstat' - MONGODB_USER = 'bibstat' - MONGODB_PASSWD = '' - - # 22. Link virtualenv to /usr/local/bin/ to make deployment scripts happy - ln -s $(which virtualenv) /usr/local/bin/ - - # 23. "Push" code from local machine to bibstat - fab conf.prodbibstat app.bibstat.deploy_without_sudo - - # 24. Restart apache server - service httpd restart - - # 25. 
Test that the service is up and running - wget "http://localhost" \ No newline at end of file diff --git a/etc/add-user.py b/etc/add-user.py deleted file mode 100755 index 0034b52a..00000000 --- a/etc/add-user.py +++ /dev/null @@ -1,21 +0,0 @@ -import django -import json -import os, sys - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "bibstat.settings") - -from django.conf import settings -from mongoengine.django.auth import User - -user = User.create_user(email = '', - username = '', - password = '') - - -user.is_active = True -user.is_staff = True -user.is_superuser = True - -user.save() - -print(user) diff --git a/libstat/apis/open_data.py b/libstat/apis/open_data.py index 6e04478a..06767cfb 100644 --- a/libstat/apis/open_data.py +++ b/libstat/apis/open_data.py @@ -1,13 +1,16 @@ -# -*- coding: utf-8 -*- import datetime import json import logging from time import strftime -from django.http import HttpResponse, Http404, HttpResponseBadRequest, HttpResponseNotFound -from django.core.servers.basehttp import FileWrapper +from django.http import ( + HttpResponse, + Http404, + HttpResponseBadRequest, + HttpResponseNotFound, +) +from wsgiref.util import FileWrapper from mongoengine import Q -from openpyxl.writer.excel import save_virtual_workbook from bibstat import settings from libstat.models import Variable, OpenData @@ -18,30 +21,31 @@ data_context = { - u"@vocab": u"{}/def/terms/".format(settings.API_BASE_URL), - u"xsd": u"http://www.w3.org/2001/XMLSchema#", - u"qb": u"http://purl.org/linked-data/cube#", - u"xhv": u"http://www.w3.org/1999/xhtml/vocab#", - u"foaf": u"http://xmlns.com/foaf/0.1/", - u"@base": u"{}/data/".format(settings.API_BASE_URL), - u"@language": u"sv", - u"DataSet": u"qb:DataSet", - u"Observation": u"qb:Observation", - u"observations": {u"@id": u"qb:observation", u"@container": u"@set"}, - u"dataSet": {u"@id": u"qb:dataSet", u"@type": u"@id"}, - u"next": {u"@id": u"xhv:next", u"@type": u"@id"}, - u"published": {u"@type": "xsd:dateTime"}, - u"modified": {u"@type": "xsd:dateTime"}, - u"name": u"foaf:name" + "@vocab": "{}/def/terms/".format(settings.API_BASE_URL), + "xsd": "http://www.w3.org/2001/XMLSchema#", + "qb": "http://purl.org/linked-data/cube#", + "xhv": "http://www.w3.org/1999/xhtml/vocab#", + "foaf": "http://xmlns.com/foaf/0.1/", + "@base": "{}/data/".format(settings.API_BASE_URL), + "@language": "sv", + "DataSet": "qb:DataSet", + "Observation": "qb:Observation", + "observations": {"@id": "qb:observation", "@container": "@set"}, + "dataSet": {"@id": "qb:dataSet", "@type": "@id"}, + "next": {"@id": "xhv:next", "@type": "@id"}, + "published": {"@type": "xsd:dateTime"}, + "modified": {"@type": "xsd:dateTime"}, + "name": "foaf:name", } data_set = { "@context": data_context, "@id": "", "@type": "DataSet", - u"label": u"Sveriges biblioteksstatistik" + "label": "Sveriges biblioteksstatistik", } + def data_api(request): from_date = parse_datetime_from_isodate_str(request.GET.get("from_date", None)) to_date = parse_datetime_from_isodate_str(request.GET.get("to_date", None)) @@ -63,28 +67,36 @@ def data_api(request): try: variable = Variable.objects.get(key=term) logger.debug( - u"Fetching statistics data for term {} published between {} and {}, items {} to {}".format( - variable.key, - from_date, - to_date, - offset, - offset + limit)) - objects = OpenData.objects.filter( - Q(variable=variable) - & modified_from_query - & modified_to_query - & is_active_query).skip(offset).limit(limit) + "Fetching statistics data for term {} published between {} and {}, items {} to 
{}".format( + variable.key, from_date, to_date, offset, offset + limit + ) + ) + objects = ( + OpenData.objects.filter( + Q(variable=variable) + & modified_from_query + & modified_to_query + & is_active_query + ) + .skip(offset) + .limit(limit) + ) except Exception: - logger.warn(u"Unknown variable {}, skipping..".format(term)) + logger.warning("Unknown variable {}, skipping..".format(term)) else: logger.debug( - u"Fetching statistics data published between {} and {}, items {} to {}".format( - from_date, to_date, offset, offset + limit)) - objects = OpenData.objects.filter( - modified_from_query - & modified_to_query - & is_active_query).skip(offset).limit(limit) + "Fetching statistics data published between {} and {}, items {} to {}".format( + from_date, to_date, offset, offset + limit + ) + ) + objects = ( + OpenData.objects.filter( + modified_from_query & modified_to_query & is_active_query + ) + .skip(offset) + .limit(limit) + ) observations = [] for item in objects: @@ -92,7 +104,7 @@ def data_api(request): data = dict(data_set, observations=observations) if len(observations) >= limit: - data[u"next"] = u"?limit={}&offset={}".format(limit, offset + limit) + data["next"] = "?limit={}&offset={}".format(limit, offset + limit) return HttpResponse(json.dumps(data), content_type="application/ld+json") @@ -102,7 +114,7 @@ def observation_api(request, observation_id): open_data = OpenData.objects.get(pk=observation_id) except Exception: raise Http404 - observation = {u"@context": data_context} + observation = {"@context": data_context} observation.update(open_data.to_dict()) observation["dataSet"] = data_set["@id"] return HttpResponse(json.dumps(observation), content_type="application/ld+json") @@ -124,9 +136,13 @@ def export_api(request): if sample_year not in valid_sample_years: return HttpResponseNotFound() - filename = u"Biblioteksstatistik för {} ({}).xlsx".format(sample_year, strftime("%Y-%m-%d %H.%M.%S")) + filename = "Biblioteksstatistik för {} ({}).xlsx".format( + sample_year, strftime("%Y-%m-%d %H.%M.%S") + ) path = public_excel_workbook(sample_year) - response = HttpResponse(FileWrapper(file(path)), content_type='application/vnd.ms-excel') - response['Content-Disposition'] = u'attachment; filename="{}"'.format(filename) + response = HttpResponse( + FileWrapper(open(path, "rb")), content_type="application/vnd.ms-excel" + ) + response["Content-Disposition"] = 'attachment; filename="{}"'.format(filename) return response diff --git a/libstat/apis/terms.py b/libstat/apis/terms.py index a6d80bd2..3f2a985f 100644 --- a/libstat/apis/terms.py +++ b/libstat/apis/terms.py @@ -1,87 +1,85 @@ -# -*- coding: utf-8 -*- - import json -from django.core.urlresolvers import reverse +from django.urls import reverse from django.http import HttpResponse, Http404 from bibstat import settings from libstat.models import Variable term_context = { - u"xsd": u"http://www.w3.org/2001/XMLSchema#", - u"rdf": u"http://www.w3.org/1999/02/22-rdf-syntax-ns#", - u"rdfs": u"http://www.w3.org/2000/01/rdf-schema#", - u"owl": u"http://www.w3.org/2002/07/owl#", - u"dcterms": "http://purl.org/dc/terms/", - u"qb": u"http://purl.org/linked-data/cube#", - u"@base": u"{}/def/terms/".format(settings.API_BASE_URL), - u"@language": u"sv", - u"label": u"rdfs:label", - u"range": {u"@id": u"rdfs:range", u"@type": u"@id"}, - u"comment": u"rdfs:comment", - u"subClassOf": {u"@id": u"rdfs:subClassOf", u"@type": u"@id"}, - u"subPropertyOf": {u"@id": u"rdfs:subPropertyOf", u"@type": u"@id"}, - u"isDefinedBy": {u"@id": u"rdfs:isDefinedBy", 
u"@type": u"@id"}, - u"terms": {u"@reverse": u"rdfs:isDefinedBy"}, - u"replaces": {u"@id": u"dcterms:replaces", u"@type": u"@id"}, - u"replacedBy": {u"@id": u"dcterms:isReplacedBy", u"@type": u"@id"}, - u"valid": {u"@id": u"dcterms:valid", u"@type": u"dcterms:Period"}, + "xsd": "http://www.w3.org/2001/XMLSchema#", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "owl": "http://www.w3.org/2002/07/owl#", + "dcterms": "http://purl.org/dc/terms/", + "qb": "http://purl.org/linked-data/cube#", + "@base": "{}/def/terms/".format(settings.API_BASE_URL), + "@language": "sv", + "label": "rdfs:label", + "range": {"@id": "rdfs:range", "@type": "@id"}, + "comment": "rdfs:comment", + "subClassOf": {"@id": "rdfs:subClassOf", "@type": "@id"}, + "subPropertyOf": {"@id": "rdfs:subPropertyOf", "@type": "@id"}, + "isDefinedBy": {"@id": "rdfs:isDefinedBy", "@type": "@id"}, + "terms": {"@reverse": "rdfs:isDefinedBy"}, + "replaces": {"@id": "dcterms:replaces", "@type": "@id"}, + "replacedBy": {"@id": "dcterms:isReplacedBy", "@type": "@id"}, + "valid": {"@id": "dcterms:valid", "@type": "dcterms:Period"}, } terms_vocab = { "@context": term_context, "@id": "", "@type": "owl:Ontology", - u"label": u"Termer för Sveriges biblioteksstatistik" + "label": "Termer för Sveriges biblioteksstatistik", } core_terms = [ { - u"@id": u"library", - u"@type": [u"rdf:Property", u"qb:DimensionProperty"], - u"range": u"https://schema.org/Organization", - u"label": u"Bibliotek" + "@id": "library", + "@type": ["rdf:Property", "qb:DimensionProperty"], + "range": "https://schema.org/Organization", + "label": "Bibliotek", }, { - u"@id": u"sampleYear", - u"@type": [u"rdf:Property", u"qb:DimensionProperty"], - u"label": u"Mätår", - u"comment": u"Det mätår som statistikuppgiften avser", - u"range": u"xsd:gYear" + "@id": "sampleYear", + "@type": ["rdf:Property", "qb:DimensionProperty"], + "label": "Mätår", + "comment": "Det mätår som statistikuppgiften avser", + "range": "xsd:gYear", }, { - u"@id": u"targetGroup", - u"@type": [u"rdf:Property", u"qb:DimensionProperty"], - u"label": u"Målgrupp", - u"comment": u"Den målgrupp som svarande bibliotek ingår i.", - u"range": u"xsd:string" + "@id": "targetGroup", + "@type": ["rdf:Property", "qb:DimensionProperty"], + "label": "Målgrupp", + "comment": "Den målgrupp som svarande bibliotek ingår i.", + "range": "xsd:string", }, { - u"@id": u"modified", - u"@type": u"rdf:Property", - u"subPropertyOf": "dcterms:modified", - u"label": u"Uppdaterad", - u"comment": u"Datum då mätvärdet senast uppdaterades", - u"range": u"xsd:dateTime" + "@id": "modified", + "@type": "rdf:Property", + "subPropertyOf": "dcterms:modified", + "label": "Uppdaterad", + "comment": "Datum då mätvärdet senast uppdaterades", + "range": "xsd:dateTime", }, { - u"@id": u"published", - u"@type": u"rdf:Property", - u"subPropertyOf": "dcterms:issued", - u"label": u"Publicerad", - u"comment": u"Datum då mätvärdet först publicerades", - u"range": u"xsd:dateTime" + "@id": "published", + "@type": "rdf:Property", + "subPropertyOf": "dcterms:issued", + "label": "Publicerad", + "comment": "Datum då mätvärdet först publicerades", + "range": "xsd:dateTime", }, { - u"@id": u"Observation", - u"@type": u"rdfs:Class", - u"subClassOf": u"qb:Observation", - u"label": u"Observation", - u"comment": u"En observation för ett bibiliotek, mätår och variabel" - } + "@id": "Observation", + "@type": "rdfs:Class", + "subClassOf": "qb:Observation", + "label": "Observation", + "comment": "En observation för 
ett bibiliotek, mätår och variabel", + }, ] -core_term_ids = {term[u"@id"] for term in core_terms} +core_term_ids = {term["@id"] for term in core_terms} def term_api(request, term_key): @@ -94,7 +92,7 @@ def term_api(request, term_key): return http303 else: raise Http404 - data = {u"@context": term_context} + data = {"@context": term_context} data.update(term.to_dict()) data["isDefinedBy"] = terms_vocab["@id"] return HttpResponse(json.dumps(data), content_type="application/ld+json") @@ -107,4 +105,4 @@ def terms_api(request): for v in variables: terms.append(v.to_dict()) data = dict(terms_vocab, terms=terms) - return HttpResponse(json.dumps(data), content_type="application/ld+json") \ No newline at end of file + return HttpResponse(json.dumps(data), content_type="application/ld+json") diff --git a/libstat/forms/article.py b/libstat/forms/article.py index f34c1843..9764b0a6 100644 --- a/libstat/forms/article.py +++ b/libstat/forms/article.py @@ -1,15 +1,20 @@ -# -*- coding: utf-8 -* from django import forms from libstat.models import Article class ArticleForm(forms.Form): - title = forms.CharField(label=u"Rubrik", widget=forms.TextInput(attrs={"class": "form-control form-article-input"})) - content = forms.CharField(label=u"Innehåll", widget=forms.Textarea(attrs={"class": "form-control form-article-input"})) + title = forms.CharField( + label="Rubrik", + widget=forms.TextInput(attrs={"class": "form-control form-article-input"}), + ) + content = forms.CharField( + label="Innehåll", + widget=forms.Textarea(attrs={"class": "form-control form-article-input"}), + ) def __init__(self, *args, **kwargs): - self.article = kwargs.pop('article', None) + self.article = kwargs.pop("article", None) super(ArticleForm, self).__init__(*args, **kwargs) if self.article: self.fields["title"].initial = self.article.title diff --git a/libstat/forms/survey.py b/libstat/forms/survey.py index b9c38b9d..26aa6870 100644 --- a/libstat/forms/survey.py +++ b/libstat/forms/survey.py @@ -1,11 +1,7 @@ -# -*- coding: utf-8 -*- -from sets import Set import logging -import time -import json from django import forms -from django.core.urlresolvers import reverse +from django.urls import reverse from bibstat import settings from data.municipalities import municipalities @@ -14,15 +10,17 @@ logger = logging.getLogger(__name__) -class SurveyForm(forms.Form): +class SurveyForm(forms.Form): def _cell_to_input_field(self, cell, observation, authenticated, variable_type): - attrs = {"class": "form-control", - "id": cell.variable_key, - "name": cell.variable_key} + attrs = { + "class": "form-control", + "id": cell.variable_key, + "name": cell.variable_key, + } if cell.sum_of: - attrs["data-sum-of"] = " ".join(map(lambda s: s, cell.sum_of)) + attrs["data-sum-of"] = " ".join([s for s in cell.sum_of]) attrs["data-bv-notempty"] = "" attrs["placeholder"] = "Obligatorisk" @@ -31,10 +29,18 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): if isinstance(cell.part_of, list): part_of_as_string = " ".join(cell.part_of) attrs["data-part-of"] = part_of_as_string - attrs["data-bv-callback-message"] = u"Värdet får inte vara högre än värdet i följande fält: {}".format(part_of_as_string) + attrs[ + "data-bv-callback-message" + ] = "Värdet får inte vara högre än värdet i följande fält: {}".format( + part_of_as_string + ) else: attrs["data-part-of"] = cell.part_of - attrs["data-bv-callback-message"] = u"Värdet får inte vara högre än värdet i fältet {}".format(cell.part_of) + attrs[ + "data-bv-callback-message" + ] = 
"Värdet får inte vara högre än värdet i fältet {}".format( + cell.part_of + ) if cell.has_part: attrs["data-bv-callback"] = "" @@ -42,11 +48,19 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): has_part_as_string = " ".join(cell.has_part) attrs["data-has-part"] = has_part_as_string if not cell.part_of: - attrs["data-bv-callback-message"] = u"Värdet får inte vara mindre än värdet i följande fält: {}".format(has_part_as_string) + attrs[ + "data-bv-callback-message" + ] = "Värdet får inte vara mindre än värdet i följande fält: {}".format( + has_part_as_string + ) else: attrs["data-has-part"] = cell.has_part if not cell.part_of: - attrs["data-bv-callback-message"] = u"Värdet får inte vara mindre än värdet i fältet {}".format(cell.has_part) + attrs[ + "data-bv-callback-message" + ] = "Värdet får inte vara mindre än värdet i fältet {}".format( + cell.has_part + ) if cell.required == True: attrs["data-bv-notempty"] = "" @@ -58,10 +72,10 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): # thousands separators. The class "numerical" is how they can be # selected. attrs["class"] = "{} numerical".format(attrs["class"]) - if cell.previous_value is not None and cell.previous_value != '': + if cell.previous_value is not None and cell.previous_value != "": attrs["data-previous-value"] = cell.previous_value else: - attrs["data-previous-value"] = 'null' + attrs["data-previous-value"] = "null" if "Namn" in cell.variable_key: attrs["autocomplete"] = "name" @@ -75,10 +89,14 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): attrs["data-bv-regexp"] = "" if variable_type == "integer": attrs["data-bv-regexp-regexp"] = "^(-|(0|[1-9 ]([0-9 ]){0,10}))$" - attrs["data-bv-regexp-message"] = u"Vänligen mata in ett numeriskt värde mindre än eller lika med 999 999 999, alternativt '-' om värdet inte är relevant" + attrs[ + "data-bv-regexp-message" + ] = "Vänligen mata in ett numeriskt värde mindre än eller lika med 999 999 999, alternativt '-' om värdet inte är relevant" elif variable_type == "decimal": attrs["data-bv-regexp-regexp"] = "^(-|[\d ]{1,11}(\,[\d ]{1,3})?)$" - attrs["data-bv-regexp-message"] = u"Vänligen mata in ett numeriskt värde mindre än eller lika med 999 999 999,999 med max 3 decimaler (t ex 12,522), alternativt '-' om värdet inte är relevant" + attrs[ + "data-bv-regexp-message" + ] = "Vänligen mata in ett numeriskt värde mindre än eller lika med 999 999 999,999 med max 3 decimaler (t ex 12,522), alternativt '-' om värdet inte är relevant" else: @@ -86,21 +104,29 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): if variable_type == "integer": attrs["data-bv-regexp"] = "" attrs["data-bv-regexp-regexp"] = "^(-|(0|[1-9 ]([0-9 ]){0,9}))$" - attrs["data-bv-message"] = u"Vänligen mata in ett numeriskt värde mindre än eller lika med 99 999 999, alternativt '-' om värdet inte är relevant" + attrs[ + "data-bv-message" + ] = "Vänligen mata in ett numeriskt värde mindre än eller lika med 99 999 999, alternativt '-' om värdet inte är relevant" # Decimal max value is 99 999 999,999 if variable_type == "decimal": attrs["data-bv-regexp"] = "" attrs["data-bv-regexp-regexp"] = "^(-|[\d ]{1,10}(\,[\d ]{1,3})?)$" - attrs["data-bv-regexp-message"] = u"Vänligen mata in ett numeriskt värde mindre än eller lika med 99999999,999 med max 3 decimaler (t ex 12,522), alternativt '-' om värdet inte är relevant" + attrs[ + "data-bv-regexp-message" + ] = "Vänligen mata in ett numeriskt värde mindre 
än eller lika med 99999999,999 med max 3 decimaler (t ex 12,522), alternativt '-' om värdet inte är relevant" if variable_type == "email": - #attrs["data-bv-emailaddress"] = "" + # attrs["data-bv-emailaddress"] = "" attrs["data-bv-regexp"] = "" attrs["autocomplete"] = "email" attrs["type"] = "email" - attrs["data-bv-regexp-regexp"] = "^([\w!#$%&'*+/=?`{|}~^-]+(?:\.[\w!#$%&'*+/=?`{|}~^-]+)*@(?:[A-Za-z0-9-]+\.)+[A-Za-z]{2,6})$" - attrs["data-bv-regexp-message"] = u"Vänligen mata in en giltig emailadress" + attrs[ + "data-bv-regexp-regexp" + ] = "^([\w!#$%&'*+/=?`{|}~^-]+(?:\.[\w!#$%&'*+/=?`{|}~^-]+)*@(?:[A-Za-z0-9-]+\.)+[A-Za-z]{2,6})$" + attrs[ + "data-bv-regexp-message" + ] = "Vänligen mata in en giltig emailadress" if variable_type == "string": attrs["data-bv-stringlength"] = "" @@ -111,7 +137,9 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): attrs["autocomplete"] = "tel" attrs["type"] = "tel" attrs["data-bv-regexp-regexp"] = "^(-|\+?(\d\d?-?)+\d(\s?\d+)*\d+)$" - attrs["data-bv-regexp-message"] = u"Vänligen mata in ett giltigt telefonnummer utan bokstäver och parenteser, t ex 010-709 30 00" + attrs[ + "data-bv-regexp-message" + ] = "Vänligen mata in ett giltigt telefonnummer utan bokstäver och parenteser, t ex 010-709 30 00" # Number of hours per week if "Open103" in cell.variable_key: @@ -120,7 +148,7 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): attrs["data-bv-between-max"] = "168" # Number of days per year - if "Open101" in cell.variable_key: + if "Open101" in cell.variable_key: attrs["data-bv-between"] = "true" attrs["data-bv-between-min"] = "0" attrs["data-bv-between-max"] = "366" @@ -129,7 +157,11 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): attrs["disabled"] = "" attrs["class"] = "{} value-unknown".format(attrs["class"]) - attrs["data-original-value"] = observation.value if observation and observation.value is not None else "" + attrs["data-original-value"] = ( + observation.value + if observation and observation.value is not None + else "" + ) if authenticated: attrs["class"] = "{} survey-popover".format(attrs["class"]) @@ -144,40 +176,43 @@ def _cell_to_input_field(self, cell, observation, authenticated, variable_type): field = forms.CharField(required=False, widget=forms.TextInput(attrs=attrs)) if not observation or observation.value_unknown: - field.initial = u"Värdet är okänt" + field.initial = "Värdet är okänt" elif observation.value != None and variable_type == "decimal": - field.initial = str(observation.value).replace(".", ",") # decimals are displayed with comma in the form + field.initial = str(observation.value).replace( + ".", "," + ) # decimals are displayed with comma in the form else: field.initial = observation.value - if isinstance(field.initial, unicode): + if isinstance(field.initial, str): field.initial = field.initial.strip() if cell.variable_key == "Besok01": logger.debug("attrs:") - for attr, value in attrs.iteritems(): + for attr, value in list(attrs.items()): logger.debug(attr) return field - def _set_libraries(self, current_survey, this_surveys_selected_sigels, authenticated): - other_surveys_selected_sigels = current_survey.selected_sigels_in_other_surveys(self.sample_year) + def _set_libraries( + self, current_survey, this_surveys_selected_sigels, authenticated + ): + other_surveys_selected_sigels = current_survey.selected_sigels_in_other_surveys( + self.sample_year + ) def set_library(self, library, current_library=False): checkbox_id = 
str(library.sigel) - attrs = { - "value": checkbox_id, - "class": "select-library" - } + attrs = {"value": checkbox_id, "class": "select-library"} row = { "name": library.name, "city": library.city, "address": library.address, "sigel": library.sigel, - "checkbox_id": checkbox_id + "checkbox_id": checkbox_id, } if self.is_read_only: @@ -185,9 +220,13 @@ def set_library(self, library, current_library=False): if library.sigel in other_surveys_selected_sigels: attrs["disabled"] = "disabled" - row["comment"] = u"Detta bibliotek rapporteras redan för i en annan enkät." + row[ + "comment" + ] = "Detta bibliotek rapporteras redan för i en annan enkät." if current_library or library.sigel in this_surveys_selected_sigels: - row["comment"] = u"Rapporteringen för detta bibliotek kolliderar med en annan enkät." + row[ + "comment" + ] = "Rapporteringen för detta bibliotek kolliderar med en annan enkät." self.library_selection_conflict = True del attrs["disabled"] @@ -197,7 +236,9 @@ def set_library(self, library, current_library=False): attrs["checked"] = "checked" if not library.sigel in other_surveys_selected_sigels: - row["comment"] = u"Detta är det bibliotek som mottagit denna enkät. Om du samredovisar med andra bibliotek, glöm inte att även kryssa för dem här i listan." + row[ + "comment" + ] = "Detta är det bibliotek som mottagit denna enkät. Om du samredovisar med andra bibliotek, glöm inte att även kryssa för dem här i listan." elif library.sigel in this_surveys_selected_sigels: attrs["checked"] = "checked" @@ -207,7 +248,9 @@ def set_library(self, library, current_library=False): except KeyError: pass - self.fields[checkbox_id] = forms.BooleanField(required=False, widget=forms.CheckboxInput(attrs=attrs)) + self.fields[checkbox_id] = forms.BooleanField( + required=False, widget=forms.CheckboxInput(attrs=attrs) + ) self.libraries.append(row) self.libraries = [] @@ -220,7 +263,7 @@ def _status_label(self, key): return next((status[1] for status in Survey.STATUSES if status[0] == key)) def _conflicting_libraries(self, first_selection, second_selection): - intersection = Set(first_selection).intersection(Set(second_selection)) + intersection = set(first_selection).intersection(set(second_selection)) survey_list = [] sigel_list = [] for survey in Survey.objects.filter(library__sigel__in=intersection): @@ -231,26 +274,35 @@ def _conflicting_libraries(self, first_selection, second_selection): def _mailto_link(self): body = ( - u"%0D%0A" - u"----------" + "%0D%0A" - u"Var%20vänlig%20och%20låt%20följande%20information%20stå%20kvar%20i%20meddelandet." + "%0D%0A" - u"" + "%0D%0A" - u"Bibliotek:%20{}%20({})%20i%20{}".format(self.library_name, self.library_sigel, self.city) + "%0D%0A" - u"Kommun/län:%20{}%20({})".format(municipalities.get(self.municipality_code, ""), self.municipality_code) + "%0D%0A" - u"Statistikansvarig:%20{}".format(self.email) + "%0D%0A" - u"Insamlingsår:%20{}".format(self.sample_year) + "%0D%0A" - u"----------" + "%0D%0A" + "----------" + "%0D%0A" + "Var%20vänlig%20och%20låt%20följande%20information%20stå%20kvar%20i%20meddelandet." 
+ + "%0D%0A" + "" + "%0D%0A" + "Bibliotek:%20{}%20({})%20i%20{}".format( + self.library_name, self.library_sigel, self.city + ) + + "%0D%0A" + "Kommun/län:%20{}%20({})".format( + municipalities.get(self.municipality_code, ""), self.municipality_code + ) + + "%0D%0A" + "Statistikansvarig:%20{}".format(self.email) + "%0D%0A" + "Insamlingsår:%20{}".format(self.sample_year) + "%0D%0A" + "----------" ) return ( - u"mailto:biblioteksstatistik@kb.se" - u"?subject=Fråga%20för%20statistikenkät:%20{}%20({})".format(self.library_name, self.library_sigel) + - u"&body={}".format(body) + "mailto:biblioteksstatistik@kb.se" + "?subject=Fråga%20för%20statistikenkät:%20{}%20({})".format( + self.library_name, self.library_sigel + ) + + "&body={}".format(body) ) def __init__(self, *args, **kwargs): - survey = kwargs.pop('survey', None) - authenticated = kwargs.pop('authenticated', False) + survey = kwargs.pop("survey", None) + authenticated = kwargs.pop("authenticated", False) super(SurveyForm, self).__init__(*args, **kwargs) # Cache variables for performance @@ -261,54 +313,80 @@ def __init__(self, *args, **kwargs): template = survey_template(survey.sample_year, survey) self.fields["disabled_inputs"] = forms.CharField( - required=False, widget=forms.HiddenInput(attrs={"id": "disabled_inputs"})) #TODO: remove? + required=False, widget=forms.HiddenInput(attrs={"id": "disabled_inputs"}) + ) # TODO: remove? self.fields["unknown_inputs"] = forms.CharField( - required=False, widget=forms.HiddenInput(attrs={"id": "unknown_inputs"})) + required=False, widget=forms.HiddenInput(attrs={"id": "unknown_inputs"}) + ) self.fields["altered_fields"] = forms.CharField( - required=False, widget=forms.HiddenInput(attrs={"id": "altered_fields"})) + required=False, widget=forms.HiddenInput(attrs={"id": "altered_fields"}) + ) self.fields["selected_libraries"] = forms.CharField( - required=False, widget=forms.HiddenInput(attrs={"id": "selected_libraries"})) + required=False, widget=forms.HiddenInput(attrs={"id": "selected_libraries"}) + ) self.fields["scroll_position"] = forms.CharField( - required=False, widget=forms.HiddenInput(attrs={"id": "scroll_position"})) + required=False, widget=forms.HiddenInput(attrs={"id": "scroll_position"}) + ) self.fields["submit_action"] = forms.CharField( - required=False, widget=forms.HiddenInput(attrs={"id": "submit_action"})) - self.fields["read_only"] = forms.CharField(required=False, widget=forms.HiddenInput(attrs={"id": "read_only"})) - self.fields["key"] = forms.CharField(required=False, widget=forms.HiddenInput(), initial=survey.pk) + required=False, widget=forms.HiddenInput(attrs={"id": "submit_action"}) + ) + self.fields["read_only"] = forms.CharField( + required=False, widget=forms.HiddenInput(attrs={"id": "read_only"}) + ) + self.fields["key"] = forms.CharField( + required=False, widget=forms.HiddenInput(), initial=survey.pk + ) self.fields["selected_status"] = forms.CharField( - required=False, widget=forms.HiddenInput(), initial=survey.status) + required=False, widget=forms.HiddenInput(), initial=survey.status + ) - intro_text = variables[template.intro_text_variable_key].description if template.intro_text_variable_key in variables else "" + intro_text = ( + variables[template.intro_text_variable_key].description + if template.intro_text_variable_key in variables + else "" + ) self.intro_text = intro_text self.library_name = survey.library.name self.library_sigel = survey.library.sigel self.city = survey.library.city self.municipality_code = survey.library.municipality_code 
self.sample_year = survey.sample_year - self.is_user_read_only = not survey.status in (u"not_viewed", u"initiated") + self.is_user_read_only = not survey.status in ("not_viewed", "initiated") self.is_read_only = not authenticated and self.is_user_read_only - self.can_submit = not authenticated and survey.status in ("not_viewed", "initiated") + self.can_submit = not authenticated and survey.status in ( + "not_viewed", + "initiated", + ) self.password = survey.password self.status = self._status_label(survey.status) self.notes = survey.notes if survey.notes else "" - self.notes_rows = min(max(5, survey.notes.count('\n') if survey.notes else 0) + 1, 10) + self.notes_rows = min( + max(5, survey.notes.count("\n") if survey.notes else 0) + 1, 10 + ) self.statuses = Survey.STATUSES self.is_published = survey.status == "published" self.latest_version_published = survey.latest_version_published - self.url = settings.API_BASE_URL + reverse('survey', args=(survey.pk,)) + self.url = settings.API_BASE_URL + reverse("survey", args=(survey.pk,)) self.url_with_password = "{}?p={}".format(self.url, self.password) self.email = survey.library.email self.mailto = self._mailto_link() self._set_libraries(survey, survey.selected_libraries, authenticated) - if hasattr(self, 'library_selection_conflict') and self.library_selection_conflict: + if ( + hasattr(self, "library_selection_conflict") + and self.library_selection_conflict + ): self.conflicting_surveys = survey.get_conflicting_surveys() for conflicting_survey in self.conflicting_surveys: - conflicting_survey.url = settings.API_BASE_URL + reverse('survey', args=(conflicting_survey.pk,)) + conflicting_survey.url = settings.API_BASE_URL + reverse( + "survey", args=(conflicting_survey.pk,) + ) conflicting_survey.conflicting_libraries = self._conflicting_libraries( survey.selected_libraries + [survey.library.sigel], - conflicting_survey.selected_libraries) + conflicting_survey.selected_libraries, + ) self.can_submit = False @@ -317,24 +395,30 @@ def __init__(self, *args, **kwargs): for cell in template.cells: variable_key = cell.variable_key if not variable_key in variables: - raise Exception("Can't find variable with key '{}'".format(variable_key)) + raise Exception( + "Can't find variable with key '{}'".format(variable_key) + ) variable_type = variables[variable_key].type - cell.types.append(variable_type) #cell is given same type as variable + cell.types.append(variable_type) # cell is given same type as variable observation = survey.get_observation(variable_key) if observation: - cell.disabled = observation.disabled #TODO: remove? + cell.disabled = observation.disabled # TODO: remove? 
cell.value_unknown = observation.value_unknown if previous_survey: - cell.previous_value = survey.previous_years_value(observation.variable, previous_survey) + cell.previous_value = survey.previous_years_value( + observation.variable, previous_survey + ) if not observation: observation = SurveyObservation(variable=variables[variable_key]) survey.observations.append(observation) - self.fields[variable_key] = self._cell_to_input_field(cell, observation, authenticated, variable_type) + self.fields[variable_key] = self._cell_to_input_field( + cell, observation, authenticated, variable_type + ) self.sections = template.sections if self.is_read_only: self.fields["read_only"].initial = "true" - for key, input in self.fields.iteritems(): - input.widget.attrs["readonly"] = "" \ No newline at end of file + for key, input in list(self.fields.items()): + input.widget.attrs["readonly"] = "" diff --git a/libstat/forms/variable.py b/libstat/forms/variable.py index 43f1cb09..c417f1b4 100644 --- a/libstat/forms/variable.py +++ b/libstat/forms/variable.py @@ -1,103 +1,154 @@ -# coding=utf-8 from django import forms from libstat.models import Variable from libstat.utils import VARIABLE_TYPES, SURVEY_TARGET_GROUPS -__author__ = 'vlovgr' +__author__ = "vlovgr" class VariableForm(forms.Form): - active_from = forms.DateField(required=False, widget=forms.TextInput(attrs={'class': 'form-control'})) - active_to = forms.DateField(required=False, widget=forms.TextInput(attrs={'class': 'form-control'})) - - question = forms.CharField(required=False, widget=forms.TextInput(attrs={'class': 'form-control'})) - question_part = forms.CharField(required=False, widget=forms.TextInput(attrs={'class': 'form-control'})) - category = forms.CharField(required=False, widget=forms.TextInput(attrs={'class': 'form-control'})) - sub_category = forms.CharField(required=False, widget=forms.TextInput(attrs={'class': 'form-control'})) + active_from = forms.DateField( + required=False, widget=forms.TextInput(attrs={"class": "form-control"}) + ) + active_to = forms.DateField( + required=False, widget=forms.TextInput(attrs={"class": "form-control"}) + ) + + question = forms.CharField( + required=False, widget=forms.TextInput(attrs={"class": "form-control"}) + ) + question_part = forms.CharField( + required=False, widget=forms.TextInput(attrs={"class": "form-control"}) + ) + category = forms.CharField( + required=False, widget=forms.TextInput(attrs={"class": "form-control"}) + ) + sub_category = forms.CharField( + required=False, widget=forms.TextInput(attrs={"class": "form-control"}) + ) type = forms.ChoiceField(required=True, widget=forms.RadioSelect()) # Since this is a checkbox, a value will only be returned in form if the # checkbox is checked. Hence the required=False. 
- is_public = forms.BooleanField(required=False, widget=forms.CheckboxInput(attrs={'value': '1'})) - - target_groups = forms.MultipleChoiceField(required=True, widget=forms.CheckboxSelectMultiple()) - - description = forms.CharField(widget=forms.Textarea(attrs={'class': 'form-control', 'rows': '2'})) - comment = forms.CharField(required=False, widget=forms.Textarea(attrs={'class': 'form-control', 'rows': '2'})) - - replaces = forms.CharField(required=False, widget=forms.TextInput(attrs={'class': 'form-control'})) + is_public = forms.BooleanField( + required=False, widget=forms.CheckboxInput(attrs={"value": "1"}) + ) + + target_groups = forms.MultipleChoiceField( + required=True, widget=forms.CheckboxSelectMultiple() + ) + + description = forms.CharField( + widget=forms.Textarea(attrs={"class": "form-control", "rows": "2"}) + ) + comment = forms.CharField( + required=False, + widget=forms.Textarea(attrs={"class": "form-control", "rows": "2"}), + ) + + replaces = forms.CharField( + required=False, widget=forms.TextInput(attrs={"class": "form-control"}) + ) def __init__(self, *args, **kwargs): - self.instance = kwargs.pop('instance', None) + self.instance = kwargs.pop("instance", None) super(VariableForm, self).__init__(*args, **kwargs) if not self.instance: - self.fields['key'] = forms.CharField(required=True, - widget=forms.TextInput(attrs={'class': 'form-control'})) + self.fields["key"] = forms.CharField( + required=True, widget=forms.TextInput(attrs={"class": "form-control"}) + ) - self.fields['type'].choices = [type for type in VARIABLE_TYPES] - self.fields['target_groups'].choices = [target_group for target_group in SURVEY_TARGET_GROUPS] + self.fields["type"].choices = [type for type in VARIABLE_TYPES] + self.fields["target_groups"].choices = [ + target_group for target_group in SURVEY_TARGET_GROUPS + ] if self.instance: - self.fields['active_from'].initial = (self.instance.active_from.date() - if self.instance.active_from - else None) - self.fields['active_to'].initial = self.instance.active_to.date() if self.instance.active_to else None - self.fields['question'].initial = self.instance.question - self.fields['question_part'].initial = self.instance.question_part - self.fields['category'].initial = self.instance.category - self.fields['sub_category'].initial = self.instance.sub_category - self.fields['type'].initial = self.instance.type - self.fields['is_public'].initial = self.instance.is_public - self.fields['target_groups'].initial = self.instance.target_groups - self.fields['description'].initial = self.instance.description - self.fields['comment'].initial = self.instance.comment - self.fields['replaces'].initial = ", ".join( - [str(v.id) for v in self.instance.replaces]) if self.instance.replaces else "" + self.fields["active_from"].initial = ( + self.instance.active_from.date() if self.instance.active_from else None + ) + self.fields["active_to"].initial = ( + self.instance.active_to.date() if self.instance.active_to else None + ) + self.fields["question"].initial = self.instance.question + self.fields["question_part"].initial = self.instance.question_part + self.fields["category"].initial = self.instance.category + self.fields["sub_category"].initial = self.instance.sub_category + self.fields["type"].initial = self.instance.type + self.fields["is_public"].initial = self.instance.is_public + self.fields["target_groups"].initial = self.instance.target_groups + self.fields["description"].initial = self.instance.description + self.fields["comment"].initial = self.instance.comment + 
self.fields["replaces"].initial = ( + ", ".join([str(v.id) for v in self.instance.replaces]) + if self.instance.replaces + else "" + ) self.replaces_initial_value = ", ".join( - ["{}:{}".format(v.key, str(v.id)) for v in self.instance.replaces] if self.instance.replaces else []) + ["{}:{}".format(v.key, str(v.id)) for v in self.instance.replaces] + if self.instance.replaces + else [] + ) def clean(self): cleaned_data = super(VariableForm, self).clean() - replaces = cleaned_data['replaces'] if 'replaces' in cleaned_data else None - active_from = cleaned_data['active_from'] if 'active_from' in cleaned_data else None + replaces = cleaned_data["replaces"] if "replaces" in cleaned_data else None + active_from = ( + cleaned_data["active_from"] if "active_from" in cleaned_data else None + ) if replaces and not active_from: - self._errors['replaces'] = self.error_class( - ["Ange när ersättning börjar gälla genom att sätta 'Giltig fr o m'"]) - self._errors['active_from'] = self.error_class([u"Måste anges"]) - - del cleaned_data['replaces'] - del cleaned_data['active_from'] - - active_to = cleaned_data['active_to'] if 'active_to' in cleaned_data else None - if (self.instance and self.instance.replaced_by and active_to and self.instance.active_to - and active_to != self.instance.active_to.date()): - self._errors['active_to'] = self.error_class([u"Styrs av ersättande term"]) - del cleaned_data['active_to'] + self._errors["replaces"] = self.error_class( + ["Ange när ersättning börjar gälla genom att sätta 'Giltig fr o m'"] + ) + self._errors["active_from"] = self.error_class(["Måste anges"]) + + del cleaned_data["replaces"] + del cleaned_data["active_from"] + + active_to = cleaned_data["active_to"] if "active_to" in cleaned_data else None + if ( + self.instance + and self.instance.replaced_by + and active_to + and self.instance.active_to + and active_to != self.instance.active_to.date() + ): + self._errors["active_to"] = self.error_class(["Styrs av ersättande term"]) + del cleaned_data["active_to"] return cleaned_data def save(self, commit=True, user=None, activate=False): variable = self.instance if self.instance else Variable(is_draft=True) - variable.key = self.instance.key if self.instance else self.cleaned_data['key'] + variable.key = self.instance.key if self.instance else self.cleaned_data["key"] variable.active_from = self.cleaned_data[ - 'active_from'] # Need to convert to UTC? It's a date and not a datetime... - variable.active_to = self.instance.active_to if self.instance and self.instance.replaced_by else \ - self.cleaned_data['active_to'] - variable.question = self.cleaned_data['question'] - variable.question_part = self.cleaned_data['question_part'] - variable.category = self.cleaned_data['category'] - variable.sub_category = self.cleaned_data['sub_category'] - variable.type = self.cleaned_data['type'] - variable.is_public = self.cleaned_data['is_public'] - variable.target_groups = self.cleaned_data['target_groups'] - variable.description = self.cleaned_data['description'] - variable.comment = self.cleaned_data['comment'] + "active_from" + ] # Need to convert to UTC? It's a date and not a datetime... 
+ variable.active_to = ( + self.instance.active_to + if self.instance and self.instance.replaced_by + else self.cleaned_data["active_to"] + ) + variable.question = self.cleaned_data["question"] + variable.question_part = self.cleaned_data["question_part"] + variable.category = self.cleaned_data["category"] + variable.sub_category = self.cleaned_data["sub_category"] + variable.type = self.cleaned_data["type"] + variable.is_public = self.cleaned_data["is_public"] + variable.target_groups = self.cleaned_data["target_groups"] + variable.description = self.cleaned_data["description"] + variable.comment = self.cleaned_data["comment"] if activate: variable.is_draft = False - to_replace = self.cleaned_data['replaces'].split(", ") if self.cleaned_data['replaces'] else [] - modified_siblings = variable.replace_siblings(to_replace, switchover_date=variable.active_from) + to_replace = ( + self.cleaned_data["replaces"].split(", ") + if self.cleaned_data["replaces"] + else [] + ) + modified_siblings = variable.replace_siblings( + to_replace, switchover_date=variable.active_from + ) if commit: variable.save_updated_self_and_modified_replaced(modified_siblings) diff --git a/libstat/management/commands/export_libraries_to_excel.py b/libstat/management/commands/export_libraries_to_excel.py index 7e46c226..e8bf6801 100644 --- a/libstat/management/commands/export_libraries_to_excel.py +++ b/libstat/management/commands/export_libraries_to_excel.py @@ -1,7 +1,4 @@ -# -*- coding: UTF-8 -*- - from django.core.management.base import BaseCommand, CommandError -from optparse import make_option import logging, re, os from bibstat import settings from libstat.models import Survey @@ -11,46 +8,82 @@ logger = logging.getLogger(__name__) + class Command(BaseCommand): args = "--year= --all=" help = "Export libraries with published surveys to Excel file" - help_text = ("Usage: python manage.py export_libraries_to_excel --year= --all=\n\n") + help_text = "Usage: python manage.py export_libraries_to_excel --year= --all=\n\n" - option_list = BaseCommand.option_list + ( - make_option("--year", dest="year", type="int", help="Sample year, format YYYY"), - make_option("--all", dest="all", type="string", help="Y=Export all libraries with published surveys, N=Only export libraries from published surveys with missing sigel code") - ) + def add_arguments(self, parser): + parser.add_argument( + "--year", dest="year", type=int, help="Sample year, format YYYY" + ) + parser.add_argument( + "--all", + dest="all", + help="Y=Export all libraries with published surveys, N=Only export libraries from published surveys with missing sigel code", + ) def handle(self, *args, **options): year = options.get("year") all = options.get("all") def _valid_year(year): - return re.compile('^\d{4}$').match(str(year)) + return re.compile("^\d{4}$").match(str(year)) if not year: logger.info(self.help_text) return if not _valid_year(year): - raise CommandError(u"Invalid Year '{}', aborting".format(year)) + raise CommandError("Invalid Year '{}', aborting".format(year)) if all not in ["y", "Y", "n", "N"]: - raise CommandError(u"Invalid 'all' option '{}', aborting").format(all) + raise CommandError("Invalid 'all' option '{}', aborting").format(all) if all and (all == "Y" or all == "y"): - libraries = [s.library for s in Survey.objects.filter(sample_year=year, _status=u"published").only("library")] + libraries = [ + s.library + for s in Survey.objects.filter( + sample_year=year, _status="published" + ).only("library") + ] else: - #Find all surveys with a generated 
random code instead of a sigel - libraries = [s.library for s in Survey.objects.filter(sample_year=year, _status=u"published").only("library") if - len(s.library.sigel) == 10] + # Find all surveys with a generated random code instead of a sigel + libraries = [ + s.library + for s in Survey.objects.filter( + sample_year=year, _status="published" + ).only("library") + if len(s.library.sigel) == 10 + ] - workbook = Workbook(encoding="utf-8") + workbook = Workbook() worksheet = workbook.active - worksheet.append(["Bibliotek", "Adress", "Postnr", "Ort", "Kommunkod", "Bibliotekstyp", "Sigel"]) + worksheet.append( + [ + "Bibliotek", + "Adress", + "Postnr", + "Ort", + "Kommunkod", + "Bibliotekstyp", + "Sigel", + ] + ) for library in libraries: logger.debug(library.address) - worksheet.append([library.name, library.address, library.zip_code, library.city, library.municipality_code, library.library_type, library.sigel]) + worksheet.append( + [ + library.name, + library.address, + library.zip_code, + library.city, + library.municipality_code, + library.library_type, + library.sigel, + ] + ) file_name_str = "libraries_export_{}.xslx".format(year) if settings.ENVIRONMENT == "local": diff --git a/libstat/management/commands/export_surveys_to_excel.py b/libstat/management/commands/export_surveys_to_excel.py index 476c757b..cdd3459a 100644 --- a/libstat/management/commands/export_surveys_to_excel.py +++ b/libstat/management/commands/export_surveys_to_excel.py @@ -1,41 +1,47 @@ -# -*- coding: UTF-8 -*- - from django.core.management.base import BaseCommand, CommandError -from optparse import make_option import logging, re from libstat.models import Survey -from libstat.services.excel_export import surveys_to_excel_workbook, _cached_workbook_exists_and_is_valid, _cache_workbook, _cache_dir_path +from libstat.services.excel_export import ( + surveys_to_excel_workbook, + _cached_workbook_exists_and_is_valid, + _cache_workbook, + _cache_dir_path, +) logger = logging.getLogger(__name__) + class Command(BaseCommand): args = "--year=" help = "Export surveys to Excel file" - help_text = ("Usage: python manage.py export_surveys_to_excel --year=\n\n") + help_text = "Usage: python manage.py export_surveys_to_excel --year=\n\n" + + def add_arguments(self, parser): + parser.add_argument( + "--year", dest="year", type=int, help="Sample year, format YYYY" + ) - option_list = BaseCommand.option_list + ( - make_option("--year", dest="year", type="int", help="Sample year, format YYYY"), - ) - def handle(self, *args, **options): year = options.get("year") - + def _valid_year(year): - return re.compile('^\d{4}$').match(str(year)) - + return re.compile("^\d{4}$").match(str(year)) + if not year: logger.info(self.help_text) return - + if not _valid_year(year): - raise CommandError(u"Invalid Year '{}', aborting".format(year)) - + raise CommandError("Invalid Year '{}', aborting".format(year)) + surveys = Survey.objects.filter(sample_year=year, is_active=True) survey_ids = [s.id for s in surveys] workbook = surveys_to_excel_workbook(survey_ids) file_name_str = "survey_export_{} {}.xslx" - - if not _cached_workbook_exists_and_is_valid(year, file_name_str, workbook_is_public=False): + + if not _cached_workbook_exists_and_is_valid( + year, file_name_str, workbook_is_public=False + ): _cache_workbook(workbook, year, file_name_str, workbook_is_public=False) logger.info("Saved excel file %s" % _cache_dir_path) - return \ No newline at end of file + return diff --git a/libstat/management/commands/import_survey_responses.py 
b/libstat/management/commands/import_survey_responses.py index d66c39b4..fddba1dc 100644 --- a/libstat/management/commands/import_survey_responses.py +++ b/libstat/management/commands/import_survey_responses.py @@ -1,5 +1,3 @@ -# -*- coding: UTF-8 -*- -from optparse import make_option import re import logging import traceback @@ -7,7 +5,10 @@ from django.core.management.base import BaseCommand, CommandError from xlrd import open_workbook from xlrd.biffh import XLRDError -from data.municipalities import municipality_code_from, municipality_code_from_county_code +from data.municipalities import ( + municipality_code_from, + municipality_code_from_county_code, +) from libstat.utils import TYPE_BOOLEAN, TYPE_INTEGER, TYPE_LONG from libstat.models import Survey, SurveyObservation, Variable, Library @@ -17,34 +18,44 @@ class Command(BaseCommand): - args = "--file= --target_group= --year=" + args = ( + "--file= --target_group= --year=" + ) help = "Imports surveys from a spreadsheet" - help_text = ("Usage: python manage.py import_survey_responses --file=" - "--target_group= --year=\n\n") - - option_list = BaseCommand.option_list + ( - make_option(u'--target_group', dest=u"target_group", type=u'choice', - choices=["folkbib", "specbib", "sjukbib", "skolbib"], - help=u'Target group; public, research, hospital, school'), - make_option('--file', dest="file", type='string', - help='File; Absolute path to source spreadsheet. I.e. /home/MyUser/documents/sourcefile.xlsx'), - make_option('--year', dest="year", type='int', - help='Year; Measurment year, format YYYY'), + help_text = ( + "Usage: python manage.py import_survey_responses --file= " + "--target_group= --year=\n\n" ) + def add_arguments(self, parser): + parser.add_argument( + "--target_group", + dest="target_group", + choices=["folkbib", "specbib", "sjukbib", "skolbib"], + help="Target group; public, research, hospital, school", + ) + parser.add_argument( + "--file", + dest="file", + help="File; Absolute path to source spreadsheet. I.e. 
/home/MyUser/documents/sourcefile.xlsx", + ) + parser.add_argument( + "--year", dest="year", type=int, help="Measurement year, format YYYY" + ) + def _import_from_work_sheet(self, work_sheet, year, target_group): def _parse_value(value): - if isinstance(value, (int, float, long)): + if isinstance(value, (int, float)): if value == 0: value = None elif variable.type == TYPE_BOOLEAN[0]: - value = (True if value == 1 else False) + value = True if value == 1 else False elif variable.type == TYPE_INTEGER[0]: value = int(value) elif variable.type == TYPE_LONG[0]: - value = long(value) + value = int(value) - if (isinstance(value, str) and value.strip() == ""): + if isinstance(value, str) and value.strip() == "": value = None return value @@ -56,23 +67,23 @@ def _parse_value(value): variables = Variable.objects.filter(key=key) if len(variables) > 0: variable = variables[0] - if variable.sub_category in [u"Biblioteksnamn"]: + if variable.sub_category in ["Biblioteksnamn"]: library_name_column = i variable_keys.append((i, variable)) if library_name_column == -1: - raise CommandError(u"Library identifier variable not found, aborting!") + raise CommandError("Library identifier variable not found, aborting!") if not variable_keys: - raise CommandError(u"Failed to find any variables, aborting!") + raise CommandError("Failed to find any variables, aborting!") municipality_code_column = -1 county_code_column = -1 for i in range(0, work_sheet.ncols): - if work_sheet.cell_value(1, i) == u"Kommunkod": + if work_sheet.cell_value(1, i) == "Kommunkod": municipality_code_column = i break - elif work_sheet.cell_value(1, i) == u"Länskod": + elif work_sheet.cell_value(1, i) == "Länskod": county_code_column = i break @@ -85,8 +96,17 @@ def _parse_value(value): lib_col_value = row[library_name_column] # Research libraries file and hospital libraries file has summary rows mixed with library response rows - if (lib_col_value and isinstance(lib_col_value, basestring) - and not lib_col_value.startswith(("Summa", "summa", "Riket",))): + if ( + lib_col_value + and isinstance(lib_col_value, str) + and not lib_col_value.startswith( + ( + "Summa", + "summa", + "Riket", + ) + ) + ): library_name = lib_col_value.strip() else: continue @@ -95,15 +115,19 @@ def _parse_value(value): continue if municipality_code_column != -1: - municipality_code = municipality_code_from(row[municipality_code_column]) + municipality_code = municipality_code_from( + row[municipality_code_column] + ) elif county_code_column != -1: - municipality_code = municipality_code_from_county_code(row[county_code_column]) + municipality_code = municipality_code_from_county_code( + row[county_code_column] + ) if target_group == "specbib": library_type = { - u"Nationalbibliotek": u"natbib", - u"Högskolebibliotek": u"univbib", - u"Specialbibliotek": u"specbib" + "Nationalbibliotek": "natbib", + "Högskolebibliotek": "univbib", + "Specialbibliotek": "specbib", }[row[2]] else: library_type = target_group @@ -111,16 +135,23 @@ def _parse_value(value): library = Library(name=library_name, library_type=library_type) if municipality_code is not None: library.municipality_code = municipality_code - survey = Survey(sample_year=year, library=library, selected_libraries=[library.sigel]) + survey = Survey( + sample_year=year, library=library, selected_libraries=[library.sigel] + ) for col, variable in variable_keys: survey.observations.append( - SurveyObservation(variable=variable, value=_parse_value(row[col]), _is_public=variable.is_public)) + SurveyObservation( + 
variable=variable, + value=_parse_value(row[col]), + _is_public=variable.is_public, + ) + ) survey.save().publish() num_imported_surveys += 1 - logger.info(u"...{} surveys imported".format(num_imported_surveys)) + logger.info("...{} surveys imported".format(num_imported_surveys)) def handle(self, *args, **options): def _get_work_sheet(file_name, year): @@ -128,25 +159,27 @@ def _get_work_sheet(file_name, year): book = open_workbook(file_name, verbosity=0) return book.sheet_by_name(str(year)) except XLRDError as xld_e: - raise CommandError(u"No data for year {} in workbook: {}".format(year, xld_e)) + raise CommandError( + "No data for year {} in workbook: {}".format(year, xld_e) + ) def _valid_year(year): - return re.compile('^\d{4}$').match(str(year)) + return re.compile("^\d{4}$").match(str(year)) - file_name = options.get(u"file") - year = options.get(u"year") - target_group = options.get(u"target_group") + file_name = options.get("file") + year = options.get("year") + target_group = options.get("target_group") if not file_name or not target_group or not year: logger.info(self.help_text) return if not _valid_year(year): - raise CommandError(u"Invalid Year '{}', aborting".format(year)) + raise CommandError("Invalid Year '{}', aborting".format(year)) work_sheet = _get_work_sheet(file_name, year) try: self._import_from_work_sheet(work_sheet, year, target_group) except Exception as e: - print(traceback.format_exc()) \ No newline at end of file + print((traceback.format_exc())) diff --git a/libstat/management/commands/import_variables.py b/libstat/management/commands/import_variables.py index b90ea755..06bced0c 100644 --- a/libstat/management/commands/import_variables.py +++ b/libstat/management/commands/import_variables.py @@ -1,13 +1,17 @@ -# -*- coding: UTF-8 -*- import logging -from optparse import make_option - from django.core.management.base import BaseCommand, CommandError from xlrd import open_workbook from libstat.utils import DATA_IMPORT_nonMeasurementCategories -from libstat.utils import TYPE_STRING, TYPE_BOOLEAN, TYPE_INTEGER, TYPE_LONG, TYPE_DECIMAL, TYPE_PERCENT +from libstat.utils import ( + TYPE_STRING, + TYPE_BOOLEAN, + TYPE_INTEGER, + TYPE_LONG, + TYPE_DECIMAL, + TYPE_PERCENT, +) from libstat.models import Variable @@ -16,41 +20,47 @@ class Command(BaseCommand): variableTypes = { - u"Text": TYPE_STRING[0], - u"Numerisk": TYPE_STRING[0], - u"Boolesk": TYPE_BOOLEAN[0], - u"Integer": TYPE_INTEGER[0], - u"Long": TYPE_LONG[0], - u"Decimal två": TYPE_DECIMAL[0], - u"Decimal ett": TYPE_DECIMAL[0], - u"Procent": TYPE_PERCENT[0] + "Text": TYPE_STRING[0], + "Numerisk": TYPE_STRING[0], + "Boolesk": TYPE_BOOLEAN[0], + "Integer": TYPE_INTEGER[0], + "Long": TYPE_LONG[0], + "Decimal två": TYPE_DECIMAL[0], + "Decimal ett": TYPE_DECIMAL[0], + "Procent": TYPE_PERCENT[0], } - isPublic = { - u"Öppet": True, - u"Inte": False - } + isPublic = {"Öppet": True, "Inte": False} help = "Imports statistical variables from a spreadsheet" - option_list = BaseCommand.option_list + ( - make_option(u'--target_group', dest=u"target_group", type=u'choice', - choices=["folkbib", "specbib", "sjukbib", "skolbib"], - help=u'Target group; public, research, hospital, school'), - make_option('--file', dest="file", type='string', - help='File; Absolute path to source spreadsheet. I.e. 
/home/MyUser/documents/sourcefile.xlsx'), - ) + def add_arguments(self, parser): + parser.add_argument( + "--target-group", + dest="target_group", + choices=["folkbib", "specbib", "sjukbib", "skolbib"], + help="Target group; public, research, hospital, school", + ) + parser.add_argument( + "--file", + dest="file", + help="File; Absolute path to source spreadsheet. i.e. /home/MyUser/documents/sourcefile.xlsx", + ) def handle(self, *args, **options): if not options["target_group"] or not options["file"]: - logger.info(("Usage: python manage.py import_variables --file=" - "--target_group=\n\n")) + logger.info( + ( + "Usage: python manage.py import_variables --file=" + "--target_group=\n\n" + ) + ) return file = options["file"] target_group = options["target_group"] - logger.info(u"Importing {} variables from: {}...".format(target_group, file)) + logger.info("Importing {} variables from: {}...".format(target_group, file)) book = open_workbook(file) work_sheet = book.sheet_by_index(0) @@ -68,13 +78,19 @@ def handle(self, *args, **options): variable_type = row[4].strip() is_public = row[5].strip() - if variable_type not in self.variableTypes.keys(): - raise CommandError(u"Invalid variable type: {} for key: {}".format(variable_type, key)) + if variable_type not in list(self.variableTypes.keys()): + raise CommandError( + "Invalid variable type: {} for key: {}".format(variable_type, key) + ) else: variable_type = self.variableTypes[variable_type] - if is_public not in self.isPublic.keys(): - raise CommandError(u"Invalid public/private column value: {} for key: {}".format(is_public, key)) + if is_public not in list(self.isPublic.keys()): + raise CommandError( + "Invalid public/private column value: {} for key: {}".format( + is_public, key + ) + ) else: is_public = self.isPublic[is_public] @@ -83,8 +99,15 @@ def handle(self, *args, **options): existing_vars = Variable.objects.filter(key=key) if len(existing_vars) == 0: - object = Variable(key=key, description=description, category=category, sub_category=sub_category, - type=variable_type, is_public=is_public, target_groups=[target_group], ) + object = Variable( + key=key, + description=description, + category=category, + sub_category=sub_category, + type=variable_type, + is_public=is_public, + target_groups=[target_group], + ) object.save() imported_variables += 1 else: @@ -98,5 +121,8 @@ def handle(self, *args, **options): object.save() updated_variables += 1 - logger.info(u"...{} {} variables imported, {} updated.".format(imported_variables, - target_group, updated_variables)) + logger.info( + "...{} {} variables imported, {} updated.".format( + imported_variables, target_group, updated_variables + ) + ) diff --git a/libstat/management/commands/replace_ids_with_sigels.py b/libstat/management/commands/replace_ids_with_sigels.py index 8f19fabe..b90e5203 100644 --- a/libstat/management/commands/replace_ids_with_sigels.py +++ b/libstat/management/commands/replace_ids_with_sigels.py @@ -1,42 +1,45 @@ -# -*- coding: UTF-8 -*- - from django.core.management.base import BaseCommand, CommandError -from optparse import make_option import logging, re from libstat.services.clean_data import _load_sigel_mapping_from_workbook, _update_sigel from libstat.models import Survey logger = logging.getLogger(__name__) + class Command(BaseCommand): args = "--year=" help = "Replace code-string with sigel based on excel mapping file" - help_text = ("Usage: python manage.py replace_ids_with_sigels --year=\n\n") + help_text = "Usage: python manage.py replace_ids_with_sigels 
--year=\n\n" - option_list = BaseCommand.option_list + ( - make_option("--year", dest="year", type="str", help="Sample year, format YYYY"), - ) + def add_arguments(self, parser): + parser.add_argument( + "--year", dest="year", type=int, help="Sample year, format YYYY" + ) def handle(self, *args, **options): year = options.get("year") def _valid_year(year): - return re.compile('^\d{4}$').match(year) + return re.compile(r"^\d{4}$").match(year) if not year: logger.info(self.help_text) return if not _valid_year(year): - raise CommandError(u"Invalid Year '{}', aborting".format(year)) + raise CommandError("Invalid Year '{}', aborting".format(year)) logger.info("Changing sigels for surveys... year %s" % year) - sigel_mapping = _load_sigel_mapping_from_workbook(sheet=year, column_old_value=6, column_new_value=7) + sigel_mapping = _load_sigel_mapping_from_workbook( + sheet=year, column_old_value=6, column_new_value=7 + ) - for code in sigel_mapping.keys(): + for code in list(sigel_mapping.keys()): sigel = sigel_mapping[code] logger.debug("Updating %s to %s" % (code, sigel)) - survey = Survey.objects.filter(library__sigel=code, sample_year=year).first() + survey = Survey.objects.filter( + library__sigel=code, sample_year=year + ).first() if survey: _update_sigel(survey, sigel) diff --git a/libstat/management/commands/update_school_identifiers.py b/libstat/management/commands/update_school_identifiers.py index 97b4fd4d..78d80456 100644 --- a/libstat/management/commands/update_school_identifiers.py +++ b/libstat/management/commands/update_school_identifiers.py @@ -1,5 +1,3 @@ -# -*- coding: UTF-8 -*- - from django.core.management.base import BaseCommand, CommandError from xlrd import open_workbook from libstat.models import Survey, ExternalIdentifier @@ -7,20 +5,25 @@ logger = logging.getLogger(__name__) + class Command(BaseCommand): - help = 'Usage: python manage.py update_school_identifiers ' - args = '' + help = "Usage: python manage.py update_school_identifiers " + args = "" def handle(self, *args, **options): - def identifier_exists(external_identifier, external_identifier_list): for ex_id in external_identifier_list: - if ex_id.type == external_identifier.type and ex_id.identifier == external_identifier.identifier: + if ( + ex_id.type == external_identifier.type + and ex_id.identifier == external_identifier.identifier + ): return True return False if len(args) < 1: - raise CommandError('Usage: python manage.py update_school_identifiers ') + raise CommandError( + "Usage: python manage.py update_school_identifiers " + ) file = args[0] try: @@ -35,23 +38,29 @@ def identifier_exists(external_identifier, external_identifier_list): school_code = row[school_code_column] school_code_string = repr(school_code).split(".")[0] - logger.info('Updating school_code for sigel %s' % sigel) + logger.info("Updating school_code for sigel %s" % sigel) surveys = Survey.objects.filter(library__sigel=sigel) for survey in surveys: if survey.library: - external_identifier = ExternalIdentifier(type="school_code", identifier=school_code_string) + external_identifier = ExternalIdentifier( + type="school_code", identifier=school_code_string + ) if not survey.library.external_identifiers: - logger.info('Adding school_code') + logger.info("Adding school_code") survey.library.external_identifiers = [external_identifier] survey.save() - elif survey.library.external_identifiers and not identifier_exists(external_identifier, survey.library.external_identifiers): - survey.library.external_identifiers.append(external_identifier) + 
elif ( + survey.library.external_identifiers + and not identifier_exists( + external_identifier, survey.library.external_identifiers + ) + ): + survey.library.external_identifiers.append( + external_identifier + ) survey.save() except Exception as e: - print unicode(e) - - - + print((str(e))) diff --git a/libstat/management/commands/update_sigel.py b/libstat/management/commands/update_sigel.py index 18ed67fb..587aa5d3 100644 --- a/libstat/management/commands/update_sigel.py +++ b/libstat/management/commands/update_sigel.py @@ -1,7 +1,4 @@ -# -*- coding: UTF-8 -*- - from django.core.management.base import BaseCommand -from optparse import make_option import logging import sys @@ -13,27 +10,31 @@ class Command(BaseCommand): args = "--from= --to=" help = "Updates a sigel." - help_text = "Usage: python manage.py update_sigel --from= --to=\n\n" - - option_list = BaseCommand.option_list + ( - make_option('--from', dest='from', type='string', help=u'Old sigel'), - make_option('--to', dest='to', type='string', help=u'New sigel'), + help_text = ( + "Usage: python manage.py update_sigel --from= --to=\n\n" ) + def add_arguments(self, parser): + parser.add_argument("--from", dest="from", help="Old sigel") + parser.add_argument("--to", dest="to", help="New sigel") + def handle(self, *args, **options): - old_sigel = options.get('from') - new_sigel = options.get('to') + old_sigel = options.get("from") + new_sigel = options.get("to") changed = False if not old_sigel or not new_sigel: - print(self.help_text) + print((self.help_text)) sys.exit(1) logger.info("Changing sigel {} to {}".format(old_sigel, new_sigel)) for s in Survey.objects.filter(library__sigel__iexact=old_sigel): - logger.info("Survey {} {} {}: sigel {} to {}".format( - s.id, s.library.name, s.sample_year, s.library.sigel, new_sigel)) + logger.info( + "Survey {} {} {}: sigel {} to {}".format( + s.id, s.library.name, s.sample_year, s.library.sigel, new_sigel + ) + ) if s.library.sigel in s.selected_libraries: s.selected_libraries.remove(s.library.sigel) s.selected_libraries.append(new_sigel) @@ -41,17 +42,26 @@ def handle(self, *args, **options): s.library.sigel = new_sigel s.save() if s.is_published: - logger.info("Survey {} {} {}: republishing survey".format( - s.id, s.library.name, s.sample_year - )) + logger.info( + "Survey {} {} {}: republishing survey".format( + s.id, s.library.name, s.sample_year + ) + ) s.publish() changed = True for o in OpenData.objects.filter(sigel__iexact=old_sigel): if o.sigel.lower() == old_sigel.lower(): - logger.info("OpenData {} {} {} {}: changing sigel {} to {}".format( - o.id, o.library_name, o.sample_year, o.variable_key, o.sigel, - new_sigel)) + logger.info( + "OpenData {} {} {} {}: changing sigel {} to {}".format( + o.id, + o.library_name, + o.sample_year, + o.variable_key, + o.sigel, + new_sigel, + ) + ) o.sigel = new_sigel o.save() changed = True diff --git a/libstat/models.py b/libstat/models.py index 6fe2aa32..4dbec54e 100644 --- a/libstat/models.py +++ b/libstat/models.py @@ -1,53 +1,65 @@ -# -*- coding: UTF-8 -*- import logging -from sets import Set import string import random -from mongoengine import * -from mongoengine import signals +import mongoengine +from django_mongoengine import Document, EmbeddedDocument, fields, QuerySet + +# from mongoengine import * +from mongoengine import signals, Q, NULLIFY, PULL from django.conf import settings from datetime import datetime -from mongoengine.context_managers import no_dereference from data.municipalities import MUNICIPALITIES -from data.principals 
import get_library_types_with_same_principal, principal_for_library_type +from data.principals import ( + get_library_types_with_same_principal, + principal_for_library_type, +) from libstat.query_sets.variable import VariableQuerySet from libstat.utils import ISO8601_utc_format -from libstat.utils import SURVEY_TARGET_GROUPS, targetGroups, VARIABLE_TYPES, rdfVariableTypes +from libstat.utils import ( + SURVEY_TARGET_GROUPS, + targetGroups, + VARIABLE_TYPES, + rdfVariableTypes, +) logger = logging.getLogger(__name__) class VariableBase(Document): - description = StringField(required=True) + description = fields.StringField(blank=False) # Comment is a private field and should never be returned as open data - comment = StringField() - is_public = BooleanField(required=True, default=True) - type = StringField(required=True, choices=VARIABLE_TYPES) - target_groups = ListField(StringField(choices=SURVEY_TARGET_GROUPS), required=True) - category = StringField() - sub_category = StringField() + comment = fields.StringField(blank=True) + is_public = fields.BooleanField(blank=False, default=True) + type = fields.StringField(blank=False, choices=VARIABLE_TYPES) + target_groups = fields.ListField( + fields.StringField(choices=SURVEY_TARGET_GROUPS), blank=False + ) + category = fields.StringField(blank=True) + sub_category = fields.StringField(blank=True) # TODO: Inför frågor/delfrågor i termdokument och kör om importen - question = StringField() - question_part = StringField() - summary_of = ListField() + question = fields.StringField(blank=True) + question_part = fields.StringField(blank=True) + summary_of = fields.ListField(blank=True) - date_modified = DateTimeField() - is_draft = BooleanField() + date_modified = fields.DateTimeField(blank=True) + is_draft = fields.BooleanField(blank=True) # Only date-part of these fields is relevant, - active_from = DateTimeField() - active_to = DateTimeField() + active_from = fields.DateTimeField(blank=True) + active_to = fields.DateTimeField(blank=True) + + replaces = fields.ListField(fields.ReferenceField("Variable"), blank=True) + replaced_by = fields.ReferenceField("Variable", blank=True) - replaces = ListField(ReferenceField("Variable")) - replaced_by = ReferenceField("Variable") + modified_by = fields.ReferenceField("Variable", blank=True) meta = { - 'abstract': True, + "abstract": True, } @property @@ -64,16 +76,16 @@ def is_active(self): @property def state(self): if self.is_draft: - return {u"state": u"draft", u"label": u"utkast"} + return {"state": "draft", "label": "utkast"} elif self.replaced_by: - return {u"state": u"replaced", u"label": u"ersatt"} + return {"state": "replaced", "label": "ersatt"} elif self._is_no_longer_active(): - return {u"state": u"discontinued", u"label": u"avslutad"} + return {"state": "discontinued", "label": "avslutad"} elif self._is_not_yet_active(): - return {u"state": u"pending", u"label": u"vilande"} + return {"state": "pending", "label": "vilande"} else: # Cannot use 'active' as state/css class, it's already a class in Bootstrap... 
- return {u"state": u"current", u"label": u"aktiv"} + return {"state": "current", "label": "aktiv"} def _is_no_longer_active(self): return self.active_to and datetime.utcnow().date() > self.active_to.date() @@ -83,26 +95,27 @@ def _is_not_yet_active(self): class Variable(VariableBase): - key = StringField(required=True, unique=True) + key = fields.StringField(blank=False, unique=True) meta = { - 'collection': 'libstat_variables', - 'ordering': ['key'], - 'queryset_class': VariableQuerySet, - 'indexes': [ - 'key' - ] + "collection": "libstat_variables", + "ordering": ["key"], + "queryset_class": VariableQuerySet, + "indexes": ["key"], } @classmethod def store_version_and_update_date_modified(cls, sender, document, **kwargs): if document.id and not document.is_draft: - changed_fields = document.__dict__["_changed_fields"] if "_changed_fields" in document.__dict__ else [] - logger.debug(u"PRE_SAVE: Fields {} have changed, creating variable version from current version".format( - changed_fields)) + changed_fields = document._changed_fields + logger.debug( + "PRE_SAVE: Fields {} have changed, creating variable version from current version".format( + changed_fields + ) + ) query_set = Variable.objects.filter(pk=document.id) assert len(query_set) > 0 # Trigger lazy loading - versions = query_set.clone_into(VariableVersion.objects) + versions = query_set._clone_into(VariableVersion.objects) for v in versions: v.id = None v.variable_id = document.id @@ -111,15 +124,17 @@ def store_version_and_update_date_modified(cls, sender, document, **kwargs): document.date_modified = datetime.utcnow() @classmethod - def post_delete_actions(cls, sender, document, **kwargs): + def pre_delete_actions(cls, sender, document, **kwargs): if document.replaces: for replaced in document.replaces: if replaced.replaced_by and replaced.replaced_by.id == document.id: replaced.active_to = None replaced.save() logger.debug( - u"POST_DELETE: Setting 'active_to' to None on replaced {} when deleting replacement".format( - replaced.id)) + "POST_DELETE: Setting 'active_to' to None on replaced {} when deleting replacement".format( + replaced.id + ) + ) @property def is_summary_auto_field(self): @@ -136,10 +151,10 @@ def label(self): def replace_siblings(self, to_be_replaced=[], switchover_date=None, commit=False): """ - Important: If commit=False, make sure to use instance method - 'save_updated_self_and_modified_replaced(modified_siblings)' - to ensure that siblings are not saved for draft variables and - that all modifications are actually saved (no dirty transactions). + Important: If commit=False, make sure to use instance method + 'save_updated_self_and_modified_replaced(modified_siblings)' + to ensure that siblings are not saved for draft variables and + that all modifications are actually saved (no dirty transactions). 
""" current_replacements = set(self.replaces) modified_siblings = set() @@ -150,15 +165,22 @@ def replace_siblings(self, to_be_replaced=[], switchover_date=None, commit=False for object_id in to_be_replaced: try: variable = Variable.objects.get(pk=object_id) - if variable.replaced_by is not None and variable.replaced_by.id != self.id: + if ( + variable.replaced_by is not None + and variable.replaced_by.id != self.id + ): raise AttributeError( - u"Variable {} is already replaced by {}".format(object_id, variable.replaced_by.id)) + "Variable {} is already replaced by {}".format( + object_id, variable.replaced_by.id + ) + ) siblings_to_replace.add(variable) except Exception as e: logger.error( - u"Error while fetching Variable with id {} to be replaced by Variable {}: {}".format(object_id, - self.id, - e)) + "Error while fetching Variable with id {} to be replaced by Variable {}: {}".format( + object_id, self.id, e + ) + ) raise e siblings_to_release = current_replacements - siblings_to_replace @@ -173,12 +195,15 @@ def replace_siblings(self, to_be_replaced=[], switchover_date=None, commit=False # Replace sibling variables for to_replace in siblings_to_replace: """ - Nota bene: This modifies siblings for drafts as well as active variables. - It is important to use the instance method 'save_updated_self_and_modified_replaced(modified_siblings)' - to avoid saving siblings for draft variables. + Nota bene: This modifies siblings for drafts as well as active variables. + It is important to use the instance method 'save_updated_self_and_modified_replaced(modified_siblings)' + to avoid saving siblings for draft variables. """ - if (not to_replace.replaced_by or to_replace.replaced_by.id != self.id - or to_replace.active_to != switchover_date): + if ( + not to_replace.replaced_by + or to_replace.replaced_by.id != self.id + or to_replace.active_to != switchover_date + ): to_replace.replaced_by = self to_replace.active_to = switchover_date if switchover_date else None modified_siblings.add(to_replace) @@ -187,22 +212,27 @@ def replace_siblings(self, to_be_replaced=[], switchover_date=None, commit=False self.replaces = list(siblings_to_replace) if commit: - modified_siblings = self.save_updated_self_and_modified_replaced(modified_siblings) + modified_siblings = self.save_updated_self_and_modified_replaced( + modified_siblings + ) return modified_siblings def save_updated_self_and_modified_replaced(self, modified_siblings): """ - When updating both self and siblings with reference to self, we need to save self first - and then update the reference in modified siblings before saving then. Otherwise transactions - for siblings will be flagged as dirty (and never committed). - If self is a draft, siblings will not be saved. + When updating both self and siblings with reference to self, we need to save self first + and then update the reference in modified siblings before saving then. Otherwise transactions + for siblings will be flagged as dirty (and never committed). + If self is a draft, siblings will not be saved. 
""" updated_siblings = [] updated_instance = self.save() if not updated_instance.is_draft: for sibling in modified_siblings: - if sibling.replaced_by and sibling.replaced_by.id == updated_instance.id: + if ( + sibling.replaced_by + and sibling.replaced_by.id == updated_instance.id + ): sibling.replaced_by = updated_instance updated_siblings.append(sibling.save()) return updated_siblings @@ -214,25 +244,27 @@ def target_groups__descriptions(self): return display_names def to_dict(self, id_prefix=""): - json_ld_dict = {u"@id": u"{}{}".format(id_prefix, self.key), - u"@type": [u"rdf:Property", u"qb:MeasureProperty"], - u"comment": self.description, - u"range": self.type_to_rdf_type(self.type)} + json_ld_dict = { + "@id": "{}{}".format(id_prefix, self.key), + "@type": ["rdf:Property", "qb:MeasureProperty"], + "comment": self.description, + "range": self.type_to_rdf_type(self.type), + } if self.replaces: - json_ld_dict[u"replaces"] = [replaced.key for replaced in self.replaces] + json_ld_dict["replaces"] = [replaced.key for replaced in self.replaces] if self.replaced_by: - json_ld_dict[u"replacedBy"] = self.replaced_by.key + json_ld_dict["replacedBy"] = self.replaced_by.key if self.active_to or self.active_from: - range_str = u"name=Giltighetstid;" + range_str = "name=Giltighetstid;" if self.active_from: - range_str += u" start={};".format(self.active_from.date()) + range_str += " start={};".format(self.active_from.date()) if self.active_to: - range_str += u" end={};".format(self.active_to.date()) + range_str += " end={};".format(self.active_to.date()) - json_ld_dict[u"valid"] = range_str + json_ld_dict["valid"] = range_str return json_ld_dict @@ -240,15 +272,19 @@ def type_to_rdf_type(self, type): return rdfVariableTypes[type] def as_simple_dict(self): - return {u'key': self.key, u'id': str(self.id), u'description': self.description} + return {"key": self.key, "id": str(self.id), "description": self.description} def is_deletable(self): if self.is_draft: return True # TODO: Check if Survey is referencing variable when Survey model has been updated. 
- referenced_in_survey_response = Survey.objects.filter(observations__variable=str(self.id)).count() > 0 - referenced_in_open_data = OpenData.objects.filter(variable=str(self.id)).count() > 0 + referenced_in_survey_response = ( + Survey.objects.filter(observations__variable=str(self.id)).count() > 0 + ) + referenced_in_open_data = ( + OpenData.objects.filter(variable=str(self.id)).count() > 0 + ) return not referenced_in_survey_response and not referenced_in_open_data @@ -257,42 +293,40 @@ def __unicode__(self): class VariableVersion(VariableBase): - key = StringField(required=True) - variable_id = ObjectIdField(required=True) + key = fields.StringField(blank=False) + variable_id = fields.ObjectIdField(blank=False) meta = { - 'collection': 'libstat_variable_versions', + "collection": "libstat_variable_versions", } class ExternalIdentifier(EmbeddedDocument): - ID_TYPES = ( - (u"school_code", u"Skolenhetskod") - ) - identifier = StringField(required=True) - type = StringField(required=True, choices=ID_TYPES) + ID_TYPES = ("school_code", "Skolenhetskod") + identifier = fields.StringField(blank=False) + type = fields.StringField(blank=False, choices=ID_TYPES) class Library(EmbeddedDocument): - name = StringField() - bibdb_id = StringField() - sigel = StringField() - email = StringField() - city = StringField() - municipality_code = StringField() - address = StringField() - zip_code = StringField() - library_type = StringField(choices=SURVEY_TARGET_GROUPS) - external_identifiers = ListField(EmbeddedDocumentField(ExternalIdentifier), required=False, default=None) + name = fields.StringField(blank=True) + bibdb_id = fields.StringField(blank=True) + sigel = fields.StringField(blank=True) + email = fields.StringField(blank=True) + city = fields.StringField(blank=True) + municipality_code = fields.StringField(blank=True) + address = fields.StringField(blank=True) + zip_code = fields.StringField(blank=True) + library_type = fields.StringField(choices=SURVEY_TARGET_GROUPS, blank=True) + external_identifiers = fields.ListField( + fields.EmbeddedDocumentField(ExternalIdentifier), blank=True, default=None + ) - meta = { - 'collection': 'libstat_libraries' - } + meta = {"collection": "libstat_libraries"} # From: http://en.wikipedia.org/wiki/Random_password_generator#Python @classmethod def _random_sigel(cls): - alphabet = string.letters[0:52] + string.digits + alphabet = string.ascii_letters[0:52] + string.digits return str().join(random.SystemRandom().choice(alphabet) for _ in range(10)) def __init__(self, *args, **kwargs): @@ -302,21 +336,17 @@ def __init__(self, *args, **kwargs): class SurveyObservation(EmbeddedDocument): - variable = ReferenceField(Variable, required=True) - value = DynamicField() - disabled = BooleanField()#TODO: remove? - value_unknown = BooleanField() + variable = fields.ReferenceField(Variable, blank=False) + value = fields.DynamicField(blank=True) + disabled = fields.BooleanField(blank=True) # TODO: remove? + value_unknown = fields.BooleanField(blank=True) # Public API Optimization and traceability (was this field public at the time of the survey?) 
- _is_public = BooleanField(required=True, default=True) + _is_public = fields.BooleanField(blank=False, default=True) - meta = { - 'indexes': [ - 'variable' - ] - } + meta = {"indexes": ["variable"]} def __unicode__(self): - return u"{0}: {1}".format(self.variable, self.value) + return "{0}: {1}".format(self.variable, self.value) @property def instance_id(self): @@ -324,14 +354,31 @@ def instance_id(self): class SurveyQuerySet(QuerySet): - def by(self, sample_year=None, target_group=None, status=None, municipality_code=None, free_text=None, - is_active=None, with_email=False, without_email=False, invalid_email=False, exclude_co_reported_by_other=False, sigel=None): - target_group_query = Q(library__library_type=target_group) if target_group else Q() + def by( + self, + sample_year=None, + target_group=None, + status=None, + municipality_code=None, + free_text=None, + is_active=None, + with_email=False, + without_email=False, + invalid_email=False, + exclude_co_reported_by_other=False, + sigel=None, + ): + target_group_query = ( + Q(library__library_type=target_group) if target_group else Q() + ) sample_year_query = Q(sample_year=sample_year) if sample_year else Q() status_query = Q(_status=status) if status else Q() is_active_query = Q(is_active=is_active) if is_active is not None else Q() - municipality_code_query = (Q(library__municipality_code=municipality_code) - if municipality_code else Q()) + municipality_code_query = ( + Q(library__municipality_code=municipality_code) + if municipality_code + else Q() + ) order_by_field = "library__name" @@ -348,67 +395,97 @@ def by(self, sample_year=None, target_group=None, status=None, municipality_code free_text_query = Q() if free_text: free_text = free_text.strip().lower() - municipality_codes = [m[1] for m in MUNICIPALITIES if free_text in m[0].lower()] - - free_text_municipality_code_query = Q(library__municipality_code__icontains=free_text) - free_text_municipality_name_query = Q(library__municipality_code__in=municipality_codes) + municipality_codes = [ + m[1] for m in MUNICIPALITIES if free_text in m[0].lower() + ] + + free_text_municipality_code_query = Q( + library__municipality_code__icontains=free_text + ) + free_text_municipality_name_query = Q( + library__municipality_code__in=municipality_codes + ) free_text_email_query = Q(library__email__icontains=free_text) free_text_library_name_query = Q(library__name__icontains=free_text) - free_text_query = (free_text_municipality_code_query | free_text_email_query | free_text_library_name_query - | free_text_municipality_name_query) + free_text_query = ( + free_text_municipality_code_query + | free_text_email_query + | free_text_library_name_query + | free_text_municipality_name_query + ) sigel_query = Q() if sigel: sigel_query = Q(library__sigel__iexact=sigel) order_by_field = "library__sigel" - filtered_result = self.filter(target_group_query & sample_year_query & status_query & - municipality_code_query & email_query & free_text_query & - is_active_query & sigel_query).exclude("observations").order_by(order_by_field) + filtered_result = ( + self.filter( + target_group_query + & sample_year_query + & status_query + & municipality_code_query + & email_query + & free_text_query + & is_active_query + & sigel_query + ) + .exclude("observations") + .order_by(order_by_field) + ) if exclude_co_reported_by_other: - co_reported_by_others = filtered_result.filter(selected_libraries__size=0, 
library__sigel__in=Survey.objects.filter(sample_year=sample_year).distinct("selected_libraries")).exclude("observations") - filtered_result = set(filtered_result).difference(set(co_reported_by_others)) + co_reported_by_others = filtered_result.filter( + selected_libraries__size=0, + library__sigel__in=Survey.objects.filter( + sample_year=sample_year + ).distinct("selected_libraries"), + ).exclude("observations") + filtered_result = set(filtered_result).difference( + set(co_reported_by_others) + ) return filtered_result class SurveyBase(Document): PRINCIPALS = ( - (u"stat", "Stat"), - (u"kommun", "Kommun"), - (u"landsting", "Landsting"), - (u"foretag", "Företag"), - (u"stiftelse", "Stiftelse") + ("stat", "Stat"), + ("kommun", "Kommun"), + ("landsting", "Landsting"), + ("foretag", "Företag"), + ("stiftelse", "Stiftelse"), ) STATUSES = ( - (u"not_viewed", u"Ej öppnad"), - (u"initiated", u"Påbörjad"), - (u"submitted", u"Inskickad"), - (u"controlled", u"Kontrollerad"), - (u"published", u"Publicerad") + ("not_viewed", "Ej öppnad"), + ("initiated", "Påbörjad"), + ("submitted", "Inskickad"), + ("controlled", "Kontrollerad"), + ("published", "Publicerad"), ) _status_labels = dict(STATUSES) - published_at = DateTimeField() - date_created = DateTimeField(required=True, default=datetime.utcnow) - date_modified = DateTimeField(required=True, default=datetime.utcnow) - observations = ListField(EmbeddedDocumentField(SurveyObservation)) - _status = StringField(choices=STATUSES, default="not_viewed") - notes = StringField() - library = EmbeddedDocumentField(Library) - selected_libraries = ListField(StringField()) - sample_year = IntField() - password = StringField() - principal = StringField(choices=PRINCIPALS) - is_active = BooleanField(required=True, default=True) - - _municipality_code = StringField() - _library_type = StringField() + published_at = fields.DateTimeField(blank=True) + date_created = fields.DateTimeField(blank=False, default=datetime.utcnow) + date_modified = fields.DateTimeField(blank=False, default=datetime.utcnow) + observations = fields.ListField( + fields.EmbeddedDocumentField(SurveyObservation), blank=True + ) + _status = fields.StringField(choices=STATUSES, default="not_viewed", blank=True) + notes = fields.StringField(blank=True) + library = fields.EmbeddedDocumentField(Library, blank=True) + selected_libraries = fields.ListField(fields.StringField(), blank=True) + sample_year = fields.IntField(blank=True) + password = fields.StringField(blank=True) + principal = fields.StringField(choices=PRINCIPALS, blank=True) + is_active = fields.BooleanField(blank=False, default=True) + + _municipality_code = fields.StringField(blank=True) + _library_type = fields.StringField(blank=True) meta = { - 'abstract': True, + "abstract": True, } @classmethod @@ -418,7 +495,7 @@ def status_label(cls, status): # From: http://en.wikipedia.org/wiki/Random_password_generator#Python @classmethod def _generate_password(cls): - alphabet = string.letters[0:52] + string.digits + alphabet = string.ascii_letters[0:52] + string.digits return str().join(random.SystemRandom().choice(alphabet) for _ in range(10)) @property @@ -428,7 +505,7 @@ def status(self): @status.setter def status(self, status): if status not in [s[0] for s in Survey.STATUSES]: - raise KeyError(u"Invalid status '{}'".format(status)) + raise KeyError("Invalid status '{}'".format(status)) elif status == "published": self.publish() elif status != "published": @@ -436,9 +513,15 @@ def status(self, status): self.unpublish() self._status = status - 
def get_observation(self, key, variable=None, variable_id=None, backtrack_replaced_variables=False): + def get_observation( + self, key, variable=None, variable_id=None, backtrack_replaced_variables=False + ): if variable is None: - variables = Variable.objects.filter(id=variable_id) if variable_id else Variable.objects.filter(key=key) + variables = ( + Variable.objects.filter(id=variable_id) + if variable_id + else Variable.objects.filter(key=key) + ) if len(variables) == 0: return None variable = variables[0] @@ -449,8 +532,11 @@ def get_observation(self, key, variable=None, variable_id=None, backtrack_replac if backtrack_replaced_variables and len(variable.replaces) == 1: replaced_variable = variable.replaces[0] - return self.get_observation(key=replaced_variable.key, variable_id=replaced_variable.id, - backtrack_replaced_variables=True) + return self.get_observation( + key=replaced_variable.key, + variable_id=replaced_variable.id, + backtrack_replaced_variables=True, + ) return None @@ -466,7 +552,7 @@ def target_group__desc(self): return targetGroups[self.target_group] def __unicode__(self): - return u"{} {}".format(self.library.name, self.sample_year) + return "{} {}".format(self.library.name, self.sample_year) def __init__(self, *args, **kwargs): status = kwargs.pop("status", None) @@ -484,28 +570,32 @@ def selectable_libraries(self): surveys = Survey.objects.filter( sample_year=self.sample_year, library__municipality_code=self.library.municipality_code, - library__library_type__in=get_library_types_with_same_principal(self.library), - library__sigel__ne=self.library.sigel + library__library_type__in=get_library_types_with_same_principal( + self.library + ), + library__sigel__ne=self.library.sigel, ) - selectable_libs = surveys.values_list('library') + selectable_libs = surveys.values_list("library") return selectable_libs - #TODO: optimize by caching surveys by samle_year and municipality_code? + # TODO: optimize by caching surveys by samle_year and municipality_code? def selected_sigels_in_other_surveys(self, sample_year): if not self.library.municipality_code: - return Set() + return set() surveys = Survey.objects.filter( sample_year=sample_year, library__municipality_code=self.library.municipality_code, - library__library_type__in=get_library_types_with_same_principal(self.library), - library__sigel__ne=self.library.sigel + library__library_type__in=get_library_types_with_same_principal( + self.library + ), + library__sigel__ne=self.library.sigel, ).only("selected_libraries") - selected_sigels = Set() + selected_sigels = set() for survey in surveys: for sigel in survey.selected_libraries: selected_sigels.add(sigel) @@ -514,12 +604,15 @@ def selected_sigels_in_other_surveys(self, sample_year): def has_conflicts(self): for selected_sigel in self.selected_sigels_in_other_surveys(self.sample_year): - if selected_sigel in self.selected_libraries or selected_sigel == self.library.sigel: + if ( + selected_sigel in self.selected_libraries + or selected_sigel == self.library.sigel + ): return True return False - #TODO: optimize by caching surveys by samle_year and municipality_code? + # TODO: optimize by caching surveys by samle_year and municipality_code? 
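+    # Returns the surveys from the same municipality and principal group whose
+    # selected libraries overlap with this survey's selection, or that have
+    # selected this survey's own sigel.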
def get_conflicting_surveys(self): if not self.library.municipality_code: @@ -528,13 +621,19 @@ def get_conflicting_surveys(self): other_surveys = Survey.objects.filter( sample_year=self.sample_year, library__municipality_code=self.library.municipality_code, - library__library_type__in=get_library_types_with_same_principal(self.library), - library__sigel__ne=self.library.sigel + library__library_type__in=get_library_types_with_same_principal( + self.library + ), + library__sigel__ne=self.library.sigel, ) return [ - other_survey for other_survey in other_surveys - if any(sigel in other_survey.selected_libraries for sigel in self.selected_libraries) + other_survey + for other_survey in other_surveys + if any( + sigel in other_survey.selected_libraries + for sigel in self.selected_libraries + ) or self.library.sigel in other_survey.selected_libraries ] @@ -545,26 +644,43 @@ def get_conflicting_surveys_return_only_libs_and_selected_libs(self): other_surveys = Survey.objects.filter( sample_year=self.sample_year, library__municipality_code=self.library.municipality_code, - library__library_type__in=get_library_types_with_same_principal(self.library), - library__sigel__ne=self.library.sigel + library__library_type__in=get_library_types_with_same_principal( + self.library + ), + library__sigel__ne=self.library.sigel, ).only("library", "selected_libraries") return [ - other_survey for other_survey in other_surveys - if any(sigel in other_survey.selected_libraries for sigel in self.selected_libraries) + other_survey + for other_survey in other_surveys + if any( + sigel in other_survey.selected_libraries + for sigel in self.selected_libraries + ) or self.library.sigel in other_survey.selected_libraries ] - #TODO: optimize by saving reported_by, is_reported_by_other and is_reporting_for_others to db? + # TODO: optimize by saving reported_by, is_reported_by_other and is_reporting_for_others to db? 
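+    # Lists the sigels of the surveys (for the same sample year) that have
+    # included this survey's library among their selected libraries.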
def reported_by(self): - surveys = Survey.objects.filter(sample_year=self.sample_year, selected_libraries__contains=self.library.sigel).only("library", "selected_libraries") + surveys = Survey.objects.filter( + sample_year=self.sample_year, + selected_libraries__contains=self.library.sigel, + ).only("library", "selected_libraries") - result = [survey.library.sigel for survey in surveys if self.library.sigel in survey.selected_libraries] + result = [ + survey.library.sigel + for survey in surveys + if self.library.sigel in survey.selected_libraries + ] return result def is_reported_by_other(self): - other_surveys_selected_sigels = Survey.objects.filter(sample_year=self.sample_year, pk__ne=self.pk, selected_libraries__contains=self.library.sigel).count() + other_surveys_selected_sigels = Survey.objects.filter( + sample_year=self.sample_year, + pk__ne=self.pk, + selected_libraries__contains=self.library.sigel, + ).count() if other_surveys_selected_sigels > 0: return True else: @@ -576,26 +692,32 @@ def is_reporting_for_others(self): class Survey(SurveyBase): meta = { - 'collection': 'libstat_surveys', - 'queryset_class': SurveyQuerySet, - 'indexes': [ + "collection": "libstat_surveys", + "queryset_class": SurveyQuerySet, + "indexes": [ "library.sigel", "library.municipality_code", "library.library_type", "library.name", "sample_year", "_status", - "is_active" - ] + "is_active", + ], } def previous_years_survey(self): previous_year = self.sample_year - 1 - previous_survey = Survey.objects.filter(_status=u"published", sample_year=previous_year, - library__sigel=self.library.sigel).first() + previous_survey = Survey.objects.filter( + _status="published", + sample_year=previous_year, + library__sigel=self.library.sigel, + ).first() if not previous_survey: - previous_survey = Survey.objects.filter(_status=u"published", sample_year=previous_year, - library__name__iexact=self.library.name).first() + previous_survey = Survey.objects.filter( + _status="published", + sample_year=previous_year, + library__name__iexact=self.library.name, + ).first() return previous_survey def previous_years_value(self, variable, previous_years_survey=None): @@ -611,9 +733,12 @@ def previous_years_value(self, variable, previous_years_survey=None): replaces = variable.replaces if len(replaces) > 1: - replaces = [v for v in replaces - if self.library.library_type in v.target_groups - or previous_years_survey.library.library_type in v.target_groups] + replaces = [ + v + for v in replaces + if self.library.library_type in v.target_groups + or previous_years_survey.library.library_type in v.target_groups + ] if len(replaces) != 1: return None @@ -639,24 +764,28 @@ def reports_for_same_libraries(self, other_survey): def pre_save(cls, sender, document, **kwargs): def store_version_of(document): survey = Survey.objects.filter(pk=document.id) + list(survey) # force evaluation, otherwise _clone_into won't work (ugh!) 
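+            # The currently stored state is copied into a SurveyVersion with a
+            # new id and survey_response_id pointing back at this survey.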
if survey: - survey_version = survey.clone_into(SurveyVersion.objects)[0] + survey_version = survey._clone_into(SurveyVersion.objects)[0] survey_version.id = None survey_version.survey_response_id = document.id survey_version.save() def remove_older_versions_of(document, max_versions): - for version in SurveyVersion.objects[max_versions:].filter(survey_response_id=document.id).only( - "date_modified"): + for version in ( + SurveyVersion.objects[max_versions:] + .filter(survey_response_id=document.id) + .only("date_modified") + ): version.delete() if document.id: if hasattr(document, "_action_publish"): document._status = "published" else: - changed_fields = document.__dict__["_changed_fields"] if "_changed_fields" in document.__dict__ else [] + changed_fields = document._changed_fields - if changed_fields == ['notes']: + if changed_fields == ["notes"]: return store_version_of(document) @@ -694,23 +823,40 @@ def reasons_for_not_able_to_publish(self): reasons.append("Inga bibliotek har valts") if self.has_conflicts(): - conflicting_surveys = ", ".join([survey.library.sigel for survey in self.get_conflicting_surveys_return_only_libs_and_selected_libs()]) - reasons.append("Konflikt i rapporteringen (med {})".format(conflicting_surveys)) + conflicting_surveys = ", ".join( + [ + survey.library.sigel + for survey in self.get_conflicting_surveys_return_only_libs_and_selected_libs() + ] + ) + reasons.append( + "Konflikt i rapporteringen (med {})".format(conflicting_surveys) + ) return ", ".join(reasons) def publish(self): def update_existing_open_data(self, publishing_date): for observation in self.observations: - open_datas = OpenData.objects.filter(source_survey=self.pk, variable=observation.variable) + open_datas = OpenData.objects.filter( + source_survey=self.pk, variable=observation.variable + ) if open_datas: for open_data in open_datas: - if not observation._is_public or self.library.library_type not in observation.variable.target_groups: + if ( + not observation._is_public + or self.library.library_type + not in observation.variable.target_groups + ): if open_data.is_active: open_data.date_modified = publishing_date open_data.is_active = False elif observation.value != open_data.value: - if observation.value is None or observation.value == "" or observation.value == "-": + if ( + observation.value is None + or observation.value == "" + or observation.value == "-" + ): open_data.delete() continue else: @@ -722,30 +868,37 @@ def update_existing_open_data(self, publishing_date): open_data.save() def create_new_open_data(self, publishing_date): - existing_open_data_variables = [open_data.variable for open_data in - OpenData.objects.filter(source_survey=self.pk)] - - observations = [observation for observation in self.observations if - observation._is_public and - self.library.library_type in observation.variable.target_groups and - observation.value is not None and - observation.value != "" and - observation.value != "-" and - not observation.variable in existing_open_data_variables] + existing_open_data_variables = [ + open_data.variable + for open_data in OpenData.objects.filter(source_survey=self.pk) + ] + + observations = [ + observation + for observation in self.observations + if observation._is_public + and self.library.library_type in observation.variable.target_groups + and observation.value is not None + and observation.value != "" + and observation.value != "-" + and not observation.variable in existing_open_data_variables + ] if observations: open_datas = [] for observation in 
observations: open_datas.append( - OpenData(source_survey=self, - sample_year=self.sample_year, - library_name=self.library.name, - sigel=self.library.sigel, - value=observation.value, - variable=observation.variable, - target_group=self.library.library_type, - date_created=publishing_date, - date_modified=publishing_date, - )) + OpenData( + source_survey=self, + sample_year=self.sample_year, + library_name=self.library.name, + sigel=self.library.sigel, + value=observation.value, + variable=observation.variable, + target_group=self.library.library_type, + date_created=publishing_date, + date_modified=publishing_date, + ) + ) OpenData.objects.insert(open_datas, load_bulk=False) publishing_date = datetime.utcnow() @@ -777,62 +930,56 @@ def __init__(self, *args, **kwargs): class SurveyVersion(SurveyBase): - survey_response_id = ObjectIdField(required=True) + survey_response_id = fields.ObjectIdField(blank=False) - meta = { - 'collection': 'libstat_survey_versions', - 'ordering': ['-date_modified'] - } + meta = {"collection": "libstat_survey_versions", "ordering": ["-date_modified"]} class Article(Document): - title = StringField() - content = StringField() - date_published = DateTimeField(default=datetime.utcnow) + title = fields.StringField(blank=True) + content = fields.StringField(blank=True) + date_published = fields.DateTimeField(default=datetime.utcnow) - meta = { - 'collection': 'libstat_articles', - 'ordering': ['date_published'] - } + meta = {"collection": "libstat_articles", "ordering": ["date_published"]} class Dispatch(Document): - description = StringField() - title = StringField() - message = StringField() - library_email = StringField() - library_city = StringField() - library_name = StringField() + description = fields.StringField(blank=True) + title = fields.StringField(blank=True) + message = fields.StringField(blank=True) + library_email = fields.StringField(blank=True) + library_city = fields.StringField(blank=True) + library_name = fields.StringField(blank=True) - meta = { - 'collection': 'libstat_dispatches' - } + meta = {"collection": "libstat_dispatches"} class OpenData(Document): - is_active = BooleanField(required=True, default=True) # Usage: False if source_survey has been unpublished, if source_survey.library is no longer in variable.target_group or if observation it's based on is no longer public - source_survey = ReferenceField(Survey) - library_name = StringField(required=True) - sigel = StringField() - sample_year = IntField(required=True) - target_group = StringField(required=True, choices=SURVEY_TARGET_GROUPS) - variable = ReferenceField(Variable, required=True) - variable_key = StringField() - value = DynamicField() - date_created = DateTimeField(required=True, default=datetime.utcnow) - date_modified = DateTimeField(required=True, default=datetime.utcnow) + is_active = fields.BooleanField( + blank=False, default=True + ) # Usage: False if source_survey has been unpublished, if source_survey.library is no longer in variable.target_group or if observation it's based on is no longer public + source_survey = fields.ReferenceField(Survey, blank=True) + library_name = fields.StringField(blank=False) + sigel = fields.StringField(blank=True) + sample_year = fields.IntField(blank=False) + target_group = fields.StringField(blank=False, choices=SURVEY_TARGET_GROUPS) + variable = fields.ReferenceField(Variable, blank=False) + variable_key = fields.StringField(blank=True) + value = fields.DynamicField(blank=True) + date_created = fields.DateTimeField(blank=False, 
default=datetime.utcnow) + date_modified = fields.DateTimeField(blank=False, default=datetime.utcnow) meta = { - 'collection': 'libstat_open_data', - 'ordering': ['-date_modified'], - 'indexes': [ + "collection": "libstat_open_data", + "ordering": ["-date_modified"], + "indexes": [ "is_active", "source_survey", "variable", "variable_key", "sample_year", - "date_modified" - ] + "date_modified", + ], } def date_created_str(self): @@ -843,22 +990,29 @@ def date_modified_str(self): def to_dict(self): return { - u"@id": str(self.id), - u"@type": u"Observation", - u"library": { - u"@id": u"{}/library/{}".format(settings.BIBDB_BASE_URL, self.sigel) if self.sigel else "", - u"name": self.library_name + "@id": str(self.id), + "@type": "Observation", + "library": { + "@id": "{}/library/{}".format(settings.BIBDB_BASE_URL, self.sigel) + if self.sigel + else "", + "name": self.library_name, }, - u"sampleYear": self.sample_year, - u"targetGroup": targetGroups[self.target_group], + "sampleYear": self.sample_year, + "targetGroup": targetGroups[self.target_group], self.variable.key: self.value, - u"published": self.date_created_str(), - u"modified": self.date_modified_str() + "published": self.date_created_str(), + "modified": self.date_modified_str(), } def __unicode__(self): - return u"{} {} {} {} {}".format(self.library_name, self.sample_year, self.target_group, self.variable.key, - self.value) + return "{} {} {} {} {}".format( + self.library_name, + self.sample_year, + self.target_group, + self.variable.key, + self.value, + ) def __init__(self, *args, **kwargs): variable = kwargs.pop("variable", None) @@ -869,15 +1023,15 @@ def __init__(self, *args, **kwargs): class Cell(EmbeddedDocument): - variable_key = StringField() - required = BooleanField() - previous_value = StringField() - sum_of = ListField(StringField()) - part_of = ListField(StringField()) - has_part = ListField(StringField()) - types = ListField(StringField()) - disabled = BooleanField() #TODO: remove? - _variable = ReferenceField(Variable) + variable_key = fields.StringField(blank=True) + required = fields.BooleanField(blank=True) + previous_value = fields.StringField(blank=True) + sum_of = fields.ListField(fields.StringField(), blank=True) + part_of = fields.ListField(fields.StringField(), blank=True) + has_part = fields.ListField(fields.StringField(), blank=True) + types = fields.ListField(fields.StringField(), blank=True) + disabled = fields.BooleanField(blank=True) # TODO: remove? 
+ _variable = fields.ReferenceField(Variable, blank=True) @property def variable(self): @@ -897,7 +1051,7 @@ def previous_value_thousands_separators(self): class Row(EmbeddedDocument): - cells = ListField(EmbeddedDocumentField(Cell)) + cells = fields.ListField(fields.EmbeddedDocumentField(Cell), blank=True) @property def description(self): @@ -907,7 +1061,7 @@ def description(self): class Group(EmbeddedDocument): - rows = ListField(EmbeddedDocumentField(Row)) + rows = fields.ListField(fields.EmbeddedDocumentField(Row), blank=True) @property def description(self): @@ -932,13 +1086,13 @@ def columns(self): class Section(EmbeddedDocument): - title = StringField() - groups = ListField(EmbeddedDocumentField(Group)) + title = fields.StringField(blank=True) + groups = fields.ListField(fields.EmbeddedDocumentField(Group), blank=True) class SurveyTemplate(Document): - intro_text_variable_key = StringField() - sections = ListField(EmbeddedDocumentField(Section)) + intro_text_variable_key = fields.StringField(blank=True) + sections = fields.ListField(fields.EmbeddedDocumentField(Section), blank=True) @property def cells(self): @@ -958,28 +1112,26 @@ def get_cell(self, variable_key): class CachedReport(Document): - surveys = ListField(ReferenceField(Survey)) - report = DictField() - year = IntField() - date_created = DateTimeField(default=datetime.utcnow) + surveys = fields.ListField(fields.ReferenceField(Survey), blank=True) + report = fields.DictField(blank=True) + year = fields.IntField(blank=True) + date_created = fields.DateTimeField(default=datetime.utcnow, blank=True) meta = { - 'collection': 'libstat_reports', - 'ordering': ['-date_created'], - 'indexes': [ - '-date_created', - 'year', + "collection": "libstat_reports", + "ordering": ["-date_created"], + "indexes": [ + "-date_created", + "year", ], } + class SurveyEditingLock(Document): - survey_id = ObjectIdField(required=True) - date_locked = DateTimeField(required=True, default=datetime.utcnow) + survey_id = fields.ObjectIdField(blank=False) + date_locked = fields.DateTimeField(blank=False, default=datetime.utcnow) - meta = { - 'collection': 'libstat_survey_locks', - 'indexes': ['survey_id'] - } + meta = {"collection": "libstat_survey_locks", "indexes": ["survey_id"]} def renew_lock(self): self.date_locked = datetime.utcnow() @@ -987,8 +1139,9 @@ def renew_lock(self): @classmethod def lock_survey(cls, survey_id): - surveyEditingLock = SurveyEditingLock(survey_id = survey_id, - date_locked = datetime.utcnow()) + surveyEditingLock = SurveyEditingLock( + survey_id=survey_id, date_locked=datetime.utcnow() + ) surveyEditingLock.save() @classmethod @@ -1001,7 +1154,9 @@ def release_lock_on_survey(cls, survey_id): signals.pre_save.connect(Survey.pre_save, sender=Survey) -signals.pre_save.connect(Variable.store_version_and_update_date_modified, sender=Variable) +signals.pre_save.connect( + Variable.store_version_and_update_date_modified, sender=Variable +) Variable.register_delete_rule(Variable, "replaced_by", NULLIFY) Variable.register_delete_rule(Variable, "replaces", PULL) -signals.post_delete.connect(Variable.post_delete_actions, sender=Variable) +signals.pre_delete.connect(Variable.pre_delete_actions, sender=Variable) diff --git a/libstat/query_sets/variable.py b/libstat/query_sets/variable.py index 7facb09c..36d3cb2e 100644 --- a/libstat/query_sets/variable.py +++ b/libstat/query_sets/variable.py @@ -10,19 +10,30 @@ class VariableQuerySet(QuerySet): is_not_replaced_query = Q(replaced_by=None) def public_terms(self): - return 
self.filter(self.public_query & (self.is_draft_not_set_query | self.is_not_draft_query)) + return self.filter( + self.public_query & (self.is_draft_not_set_query | self.is_not_draft_query) + ) def public_term_by_key(self, key): if not key: raise DoesNotExist("No key value given") key_query = Q(key=key) - return self.get(key_query & self.public_query & (self.is_draft_not_set_query | self.is_not_draft_query)) + return self.get( + key_query + & self.public_query + & (self.is_draft_not_set_query | self.is_not_draft_query) + ) def replaceable(self): - return self.filter(self.is_not_replaced_query & (self.is_draft_not_set_query | self.is_not_draft_query)) + return self.filter( + self.is_not_replaced_query + & (self.is_draft_not_set_query | self.is_not_draft_query) + ) def surveyable(self): today = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0) active_to_not_set = Q(active_to=None) is_not_discontinued = Q(active_to__gt=today) - return self.filter((active_to_not_set | is_not_discontinued) & self.is_not_replaced_query) + return self.filter( + (active_to_not_set | is_not_discontinued) & self.is_not_replaced_query + ) diff --git a/libstat/report_templates.py b/libstat/report_templates.py index 20ead138..f7373b77 100644 --- a/libstat/report_templates.py +++ b/libstat/report_templates.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- from libstat.models import Variable -class ReportTemplate(): +class ReportTemplate: @property def all_variable_keys(self): variable_keys = [] @@ -21,7 +20,7 @@ def __init__(self, *args, **kwargs): self.groups = kwargs.pop("groups", None) -class Group(): +class Group: def __init__(self, *args, **kwargs): self.title = kwargs.pop("title", None) self.extra = kwargs.pop("extra", None) @@ -29,14 +28,15 @@ def __init__(self, *args, **kwargs): self.show_chart = kwargs.pop("show_chart", True) -class Row(): +class Row: def compute(self, values): if values is None: return None if None in values: return None try: - return apply(self.computation, values) + return self.computation(*values) + # return apply(self.computation, values) except ZeroDivisionError: return None @@ -52,7 +52,9 @@ def __init__(self, *args, **kwargs): if self.description is None and self.variable_key is not None: variables = Variable.objects.filter(key=self.variable_key) - self.description = variables[0].question_part if len(variables) == 1 else None + self.description = ( + variables[0].question_part if len(variables) == 1 else None + ) @property def explanation(self): @@ -62,2081 +64,2681 @@ def explanation(self): return variables[0].description return None + def report_template_base(): - return ReportTemplate(groups=[ - Group(title=u"Organisation", - rows=[ - Row(variable_key=u"BemanService01"), - Row(variable_key=u"Integrerad01"), - Row(variable_key=u"Obeman01"), - Row(variable_key=u"ObemanLan01", - show_in_chart=False), - Row(variable_key=u"Bokbuss01", - show_in_chart=False), - Row(variable_key=u"BokbussHP01", - show_in_chart=False), - Row(variable_key=u"Bokbil01", - show_in_chart=False), - Row(variable_key=u"Population01", - show_in_chart=False), - Row(variable_key=u"Population02", - show_in_chart=False), - Row(variable_key=u"Population03", - show_in_chart=False), - Row(description=u"Andel integrerade serviceställen", - computation=(lambda a, b: a / b), - variable_keys=[u"Integrerad01", u"BemanService01"], - percentage=True), - Row( - description=u"Medelantal utlån till servicesställen där vidare låneregistrering inte sker", - computation=(lambda a, b: a / b), - variable_keys=[u"ObemanLan01", 
u"Obeman01"]) - ]), - Group(title=u"Årsverken", - rows=[ - Row(variable_key=u"Arsverke01"), - Row(variable_key=u"Arsverke02"), - Row(variable_key=u"Arsverke03"), - Row(variable_key=u"Arsverke04"), - Row(variable_key=u"Arsverke99", is_sum=True), - Row(variable_key=u"Arsverke05"), - Row(description=u"Andel årsverken för barn och unga", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke05", u"Arsverke99"], - percentage=True - ), - Row(description=u"Andel årsverken med bibliotekariekompetens", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke01", u"Arsverke99"], - percentage=True), - Row(description=u"Antal fysiska besök per årsverke", - computation=(lambda a, b: a / b), - variable_keys=[u"Besok01", u"Arsverke99"]), - Row(description=u"Antal aktiva låntagare per årsverke", - computation=(lambda a, b: a / b), - variable_keys=[u"Aktiv99", u"Arsverke99"]), - ]), - Group(title=u"Personal", - rows=[ - Row(variable_key=u"Personer01"), - Row(variable_key=u"Personer02"), - Row(variable_key=u"Personer99", is_sum=True), - Row(description=u"Andel anställda kvinnor", - computation=(lambda a, b: a / b), - variable_keys=[u"Personer01", u"Personer99"], - percentage=True), - Row(description=u"Antal årsverken per anställd", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke99", u"Personer99"]), - ]), - Group(title=u"Ekonomi", - rows=[ - Row(variable_key=u"Utgift01"), - Row(variable_key=u"Utgift02"), - Row(variable_key=u"Utgift03"), - Row(variable_key=u"Utgift04"), - Row(variable_key=u"Utgift05"), - Row(variable_key=u"Utgift06"), - Row(variable_key=u"Utgift99", is_sum=True), - Row(variable_key=u"Utgift07"), - Row(description=u"Andel kostnad för medier av total driftkostnad", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Utgift01", u"Utgift02", u"Utgift99"], - percentage=True), - Row(description=u"Andel kostnad för personal av total driftkostnad", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Utgift03", u"Utgift04", u"Utgift99"], - percentage=True), - Row(description=u"Andel kostnad för e-medier av total driftskostnad", - computation=(lambda a, b: a / b), - variable_keys=[u"Utgift02", u"Utgift99"], - percentage=True), - ]), - Group(title=u"Egengenererade intäkter", - rows=[ - Row(variable_key=u"Intakt01"), - Row(variable_key=u"Intakt02"), - Row(variable_key=u"Intakt03"), - Row(variable_key=u"Intakt99", is_sum=True), - Row( - description=u"Andel egengenererade intäkter i förhållande till de totala driftskostnaderna", - computation=(lambda a, b: a / b), - variable_keys=[u"Intakt99", u"Utgift99"], - percentage=True) - ]), - Group(title=u"Fysiskt bestånd", - extra=u"Andel av totalt bestånd", - rows=[ - Row(variable_key=u"Bestand101", - computation=(lambda a, b: a / b), - variable_keys=["Bestand101", "Bestand199"]), - Row(variable_key=u"Bestand102", - computation=(lambda a, b: a / b), - variable_keys=["Bestand102", "Bestand199"]), - Row(variable_key=u"Bestand103", - computation=(lambda a, b: a / b), - variable_keys=["Bestand103", "Bestand199"]), - Row(variable_key=u"Bestand104", - computation=(lambda a, b: a / b), - variable_keys=["Bestand104", "Bestand199"]), - Row(variable_key=u"Bestand105", - computation=(lambda a, b: a / b), - variable_keys=["Bestand105", "Bestand199"]), - Row(variable_key=u"Bestand106", - computation=(lambda a, b: a / b), - variable_keys=["Bestand106", "Bestand199"]), - Row(variable_key=u"Bestand107", - computation=(lambda a, b: a / b), - variable_keys=["Bestand107", "Bestand199"]), - Row(variable_key=u"Bestand108", - 
computation=(lambda a, b: a / b), - variable_keys=["Bestand108", "Bestand199"]), - Row(variable_key=u"Bestand109", - computation=(lambda a, b: a / b), - variable_keys=["Bestand109", "Bestand199"]), - Row(variable_key=u"Bestand110", - computation=(lambda a, b: a / b), - variable_keys=["Bestand110", "Bestand199"]), - Row(variable_key=u"Bestand111", - computation=(lambda a, b: a / b), - variable_keys=["Bestand111", "Bestand199"]), - Row(variable_key=u"Bestand112", - computation=(lambda a, b: a / b), - variable_keys=["Bestand112", "Bestand199"]), - Row(variable_key=u"Bestand113", - computation=(lambda a, b: a / b), - variable_keys=["Bestand113", "Bestand199"]), - Row(variable_key=u"Bestand199", is_sum=True, - computation=(lambda a, b: a / b), - variable_keys=["Bestand199", "Bestand199"]), - ]), - Group(title=u"Fysiskt nyförvärv", - extra=u"Andel nyförvärv av motsvarande bestånd", - rows=[ - Row(variable_key=u"Bestand201", - computation=(lambda a, b: a / b), - variable_keys=["Bestand201", "Bestand101"]), - Row(variable_key=u"Bestand202", - computation=(lambda a, b: a / b), - variable_keys=["Bestand202", "Bestand102"]), - Row(variable_key=u"Bestand203", - computation=(lambda a, b: a / b), - variable_keys=["Bestand203", "Bestand103"]), - Row(variable_key=u"Bestand204", - computation=(lambda a, b: a / b), - variable_keys=["Bestand204", "Bestand104"]), - Row(variable_key=u"Bestand205", - computation=(lambda a, b: a / b), - variable_keys=["Bestand205", "Bestand105"]), - Row(variable_key=u"Bestand206", - computation=(lambda a, b: a / b), - variable_keys=["Bestand206", "Bestand106"]), - Row(variable_key=u"Bestand207", - computation=(lambda a, b: a / b), - variable_keys=["Bestand207", "Bestand107"]), - Row(variable_key=u"Bestand208", - computation=(lambda a, b: a / b), - variable_keys=["Bestand208", "Bestand108"]), - Row(variable_key=u"Bestand209", - computation=(lambda a, b: a / b), - variable_keys=["Bestand209", "Bestand109"]), - Row(variable_key=u"Bestand210", - computation=(lambda a, b: a / b), - variable_keys=["Bestand210", "Bestand110"]), - Row(variable_key=u"Bestand211", - computation=(lambda a, b: a / b), - variable_keys=["Bestand211", "Bestand111"]), - Row(variable_key=u"Bestand212", - computation=(lambda a, b: a / b), - variable_keys=["Bestand212", "Bestand112"]), - Row(variable_key=u"Bestand213", - computation=(lambda a, b: a / b), - variable_keys=["Bestand213", "Bestand113"]), - Row(variable_key=u"Bestand299", is_sum=True, - computation=(lambda a, b: a / b), - variable_keys=["Bestand299", "Bestand199"]), - ]), - Group(title=u"Elektroniskt titelbestånd", - rows=[ - Row(variable_key=u"Bestand301"), - #Row(variable_key=u"Bestand302"), - Row(variable_key=u"Bestand303"), - Row(variable_key=u"Bestand304"), - Row(variable_key=u"Bestand305"), - Row(variable_key=u"Bestand306"), - Row(variable_key=u"Bestand307"), - Row(variable_key=u"Bestand308"), - Row(variable_key=u"Bestand310"), - Row(variable_key=u"Bestand311"), - Row(variable_key=u"Bestand312"), - Row(variable_key=u"Bestand313"), - Row(variable_key=u"Bestand399", is_sum=True), - Row(description=u"Andel e-bokstitlar av det totala elektroniska titelbeståndet med skriven text", - computation=(lambda a, b: a / b), - variable_keys=[u"Bestand301", u"Bestand399"], - percentage=True) - ]), - Group(title=u"Barnmedier", - rows=[ - Row(variable_key=u"Barn01"), - Row(variable_key=u"Barn02"), - Row(variable_key=u"Barn03"), - #Row(description=u"Andel tryckta barnmedier av motsvarande totalbestånd", - #computation=(lambda a, b, c: a / (b + c)), - 
#variable_keys=[u"Barn01", u"Bestand101", u"Bestand105"], - #percentage=True), - Row(description=u"Andel nyförvärv tryckta barnmedier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"Barn02", u"Barn01"], - percentage=True), - Row(description=u"Andel utlån tryckta barnmedier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"Barn03", u"Barn01"], - percentage=True), - ]), - Group(title=u"", - rows=[ - Row(variable_key=u"HCG04"), - Row(variable_key=u"Ref05"), - ]), - Group(title=u"Personer med läsnedsättning", - rows=[ - Row(variable_key=u"LasnedBest01"), - Row(variable_key=u"LasnedUtlan01"), - Row(description=u"Andel utlån av anpassade medier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"LasnedUtlan01", u"LasnedBest01"], - percentage=True), - Row(description=u"Andel anpassade medier av totala fysiska beståndet", - computation=(lambda a, b: a / b), - variable_keys=[u"LasnedBest01", u"Bestand199"], - percentage=True), - ]), - Group(title=u"Medier på olika språk", - rows=[ - Row(description=u"Titlar på svenska språket", label_only=True), - Row(variable_key=u"Titlar101"), - Row(variable_key=u"Titlar102"), - Row(variable_key=u"Titlar199", is_sum=True), - Row(description=u"Titlar på nationella minoritetsspråk", label_only=True), - Row(variable_key=u"Titlar201"), - Row(variable_key=u"Titlar202"), - Row(variable_key=u"Titlar299", is_sum=True), - Row(description=u"Titlar på utländska språk", label_only=True), - Row(variable_key=u"Titlar301"), - Row(variable_key=u"Titlar302"), - Row(variable_key=u"Titlar399", is_sum=True), - Row(description=u"Totalt antal titlar på olika medietyper", label_only=True), - Row(variable_key=u"Titlar497"), - Row(variable_key=u"Titlar498"), - Row(variable_key=u"Titlar499", is_sum=True), - ]), - Group(title=u"Elektroniskt bestånd", - rows=[ - Row(variable_key=u"Databas01"), - Row(variable_key=u"Databas02"), - Row(variable_key=u"Databas03"), - Row(variable_key=u"Databas04"), - Row(variable_key=u"Databas05"), - Row(variable_key=u"Databas06"), - Row(variable_key=u"Databas07"), - Row(variable_key=u"Databas08"), - Row(variable_key=u"Databas09"), - Row(variable_key=u"Databas99", is_sum=True), - ]), - Group(title=u"Antal initiala lån och omlån fysiskt bestånd", - rows=[ - Row(variable_key=u"Inilan101", - show_in_chart=False), - Row(variable_key=u"Inilan102", - show_in_chart=False), - Row(variable_key=u"Inilan103", - show_in_chart=False), - Row(variable_key=u"Inilan104", - show_in_chart=False), - Row(variable_key=u"Inilan105", - show_in_chart=False), - Row(variable_key=u"Inilan106", - show_in_chart=False), - Row(variable_key=u"Inilan107", - show_in_chart=False), - Row(variable_key=u"Inilan108", - show_in_chart=False), - Row(variable_key=u"Inilan109", - show_in_chart=False), - Row(variable_key=u"Inilan110", - show_in_chart=False), - Row(variable_key=u"Inilan111", - show_in_chart=False), - Row(variable_key=u"Inilan112", - show_in_chart=False), - Row(variable_key=u"Inilan113", - show_in_chart=False), - Row(variable_key=u"Inilan199", is_sum=True), - Row(description=u"Andel inititala lån av det totala antalet lån", - computation=(lambda a, b: a / b), - variable_keys=[u"Inilan199", u"Utlan399"], - percentage=True), - Row(description=u"", label_only=True), - Row(variable_key=u"Omlan201", - show_in_chart=False), - Row(variable_key=u"Omlan202", - show_in_chart=False), - Row(variable_key=u"Omlan203", - show_in_chart=False), - Row(variable_key=u"Omlan204", - show_in_chart=False), - 
Row(variable_key=u"Omlan205", - show_in_chart=False), - Row(variable_key=u"Omlan206", - show_in_chart=False), - Row(variable_key=u"Omlan207", - show_in_chart=False), - Row(variable_key=u"Omlan208", - show_in_chart=False), - Row(variable_key=u"Omlan209", - show_in_chart=False), - Row(variable_key=u"Omlan210", - show_in_chart=False), - Row(variable_key=u"Omlan211", - show_in_chart=False), - Row(variable_key=u"Omlan212", - show_in_chart=False), - Row(variable_key=u"Omlan213", - show_in_chart=False), - Row(variable_key=u"Omlan299", is_sum=True), - Row(description=u"Andel omlån av det totala antalet lån", - computation=(lambda a, b: a / b), - variable_keys=[u"Omlan299", u"Utlan399"], - percentage=True), - ]), - Group(title=u"Utlån fysiskt bestånd", - extra=u"Andel av total fysisk utlåning", - rows=[ - Row(variable_key=u"Utlan301", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan301", u"Utlan399"]), - Row(variable_key=u"Utlan302", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan302", u"Utlan399"]), - Row(variable_key=u"Utlan303", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan303", u"Utlan399"]), - Row(variable_key=u"Utlan304", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan304", u"Utlan399"]), - Row(variable_key=u"Utlan305", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan305", u"Utlan399"]), - Row(variable_key=u"Utlan306", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan306", u"Utlan399"]), - Row(variable_key=u"Utlan307", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan307", u"Utlan399"]), - Row(variable_key=u"Utlan308", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan308", u"Utlan399"]), - Row(variable_key=u"Utlan309", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan309", u"Utlan399"]), - Row(variable_key=u"Utlan310", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan310", u"Utlan399"]), - Row(variable_key=u"Utlan311", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan311", u"Utlan399"]), - Row(variable_key=u"Utlan312", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan312", u"Utlan399"]), - Row(variable_key=u"Utlan313", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan313", u"Utlan399"]), - Row(variable_key=u"Utlan399", is_sum=True), - ]), - Group(title=u"Läsning på plats i biblioteket", - show_chart=False, - rows=[ - Row(variable_key=u"Laslan01"), - Row(variable_key=u"Laslan02"), - Row(variable_key=u"Laslan99"), - Row(description=u"Beräkning lån på plats", - computation=(lambda a, b, c: ((a / b) / 2) / c), - variable_keys=[u"Laslan01", u"Laslan02", u"Open101"]), - ]), - Group(title=u"Fjärrlån", - rows=[ - Row(description=u"Inom Sverige", - label_only=True), - Row(variable_key=u"Fjarr101"), - Row(variable_key=u"Fjarr102"), - Row(description=u"Utanför Sverige", - label_only=True), - Row(variable_key=u"Fjarr201"), - Row(variable_key=u"Fjarr202"), - ]), - Group(title=u"Summering fjärrlån", - show_chart=False, - rows=[ - Row(variable_key=u"Fjarr397"), - Row(variable_key=u"Fjarr398"), - Row(variable_key=u"Fjarr399", - is_sum=True), - Row(description=u"Andel utländska fjärrlån totalt", - computation=(lambda a, b: a / b), - variable_keys=[u"Fjarr299", u"Fjarr399"], - percentage=True), - Row(description=u"Nettofjärrinlåning in-ut", - computation=(lambda a, b: a - b), - variable_keys=[u"Fjarr397", u"Fjarr398"]), - ]), - Group(title=u"Användning av elektroniska samlingar", - rows=[ - Row(description=u"Antal sökningar", - label_only=True), - 
Row(variable_key=u"Elan101"), - Row(variable_key=u"Elan102"), - Row(variable_key=u"Elan103"), - Row(variable_key=u"Elan104"), - Row(variable_key=u"Elan105"), - Row(variable_key=u"Elan106"), - Row(variable_key=u"Elan107"), - Row(variable_key=u"Elan108"), - Row(variable_key=u"Elan109"), - Row(variable_key=u"Elan199", - is_sum=True), - Row(description=u"Antal nedladdningar", - label_only=True), - Row(variable_key=u"Elan201"), - Row(variable_key=u"Elan202"), - Row(variable_key=u"Elan203"), - Row(variable_key=u"Elan204"), - Row(variable_key=u"Elan205"), - Row(variable_key=u"Elan206"), - Row(variable_key=u"Elan207"), - Row(variable_key=u"Elan208"), - Row(variable_key=u"Elan209"), - Row(variable_key=u"Elan299", - is_sum=True), - Row(description=u"Antal nedladdade sektioner", - label_only=True), - Row(variable_key=u"Elan301"), - Row(variable_key=u"Elan399", - is_sum=True), - Row(description=u"Total användning av de elektroniska samlingarna", - computation=(lambda a, b, c: a + b + c), - variable_keys=[u"Elan199", u"Elan299", u"Elan399"]) - ]), - Group(title=u"Besök", - rows=[ - Row(variable_key=u"Besok01"), - Row(variable_key=u"Besok02"), - Row(variable_key=u"Besok03"), - Row(variable_key=u"Besok04"), - Row(variable_key=u"Besok05"), - ]), - Group(title=u"Aktiva användare", - rows=[ - Row(variable_key=u"Aktiv01"), - Row(variable_key=u"Aktiv02"), - Row(variable_key=u"Aktiv04"), - Row(variable_key=u"Aktiv99", - is_sum=True), - Row(variable_key=u"Aktiv03"), - Row(description=u"Andel kvinnor som är aktiva låntagare", - computation=(lambda a, b: a / (a + b)), - variable_keys=[u"Aktiv01", u"Aktiv02"], - percentage=True), - Row(description=u"Andel barn och unga som är aktiva låntagare", - computation=(lambda a, b, c: a / (b + c)), - variable_keys=[u"Aktiv03", u"Aktiv01", u"Aktiv02"], - percentage=True), - Row(description=u"Antal fysiska besök per antal aktiva användare", - computation=(lambda a, b: a / b ), - variable_keys=[u"Besok01", u"Aktiv99"]), - ]), - Group(title=u"Resurser", - rows=[ - Row(variable_key=u"Resurs01"), - Row(variable_key=u"Resurs02"), - Row(variable_key=u"Resurs03"), - Row(variable_key=u"Resurs04"), - Row(variable_key=u"Resurs05"), - Row(variable_key=u"Resurs06"), - Row(variable_key=u"Resurs07"), - Row(variable_key=u"Resurs08"), - Row(variable_key=u"Resurs09"), - Row(variable_key=u"Resurs10"), - Row(description=u"Andel publika ytor", - computation=(lambda a, b: a / (a + b)), - variable_keys=[u"Resurs09", u"Resurs10"], - percentage=True) - ]), - Group(title=u"Öppettider", - rows=[ - Row(description=u"Servicestället med de generösaste öppettiderna", - label_only=True), - Row(variable_key=u"Open101"), - Row(variable_key=u"Open102"), - Row(variable_key=u"Open103"), - Row(variable_key=u"Open104"), - Row(variable_key=u"Open105"), - Row(variable_key=u"Open106"), - Row(description=u"Övriga serviceställen sammantaget", - label_only=True), - Row(variable_key=u"Open201"), - Row(variable_key=u"Open202"), - Row(variable_key=u"Open203"), - Row(variable_key=u"Open204"), - Row(variable_key=u"Open205"), - Row(variable_key=u"Open206"), - Row(description=u"Medelantal öppetdagar per år", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Open201", u"Open101", u"BemanService01"]), - Row(description=u"Medelantal öppettimmar alla serviceställen", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Open203", u"Open103", u"BemanService01"]), - Row(description=u"Andel öppettimmar med reducerad service", - computation=(lambda a, b, c, d: (a + b) / (c + d)), - variable_keys=[u"Open104", 
u"Open204", u"Open103", u"Open203"], - percentage=True), - Row(description=u"Andel öppettimmar utanför kontorstid", - computation=(lambda a, b, c, d: (a + b) / (c + d)), - variable_keys=[u"Open106", u"Open206", u"Open103", u"Open203"], - percentage=True), - ]), - Group(title=u"Service", - rows=[ - Row(variable_key=u"Serv01"), - Row(variable_key=u"Serv02"), - Row(variable_key=u"Serv03"), - Row(variable_key=u"Serv04"), - Row(variable_key=u"Serv05"), - Row(variable_key=u"Serv06"), - Row(variable_key=u"Serv07"), - ]), - Group(title=u"Publika aktivitetstillfällen", - extra=u"Varav andel tillfällen för barn och unga", - rows=[ - Row(variable_key=u"Publ101", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ201", u"Publ101"]), - Row(variable_key=u"Publ102", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ202", u"Publ102"]), - Row(variable_key=u"Publ103", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ203", u"Publ103"]), - Row(variable_key=u"Publ104", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ204", u"Publ104"]), - Row(variable_key=u"Publ105", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ205", u"Publ105"]), - Row(variable_key=u"Publ106", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ206", u"Publ106"]), - Row(variable_key=u"Publ107", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ207", u"Publ107"]), - Row(variable_key=u"Publ108", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ208", u"Publ108"]), - Row(variable_key=u"Publ109", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ209", u"Publ109"]), - Row(variable_key=u"Publ110", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ210", u"Publ110"]), - Row(variable_key=u"Publ111", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ211", u"Publ111"]), - Row(variable_key=u"Publ112", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ212", u"Publ112"]), - Row(variable_key=u"Publ113", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ213", u"Publ113"]), - Row(variable_key=u"Publ114", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ214", u"Publ114"]), - Row(variable_key=u"Publ115", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ215", u"Publ115"]), - Row(variable_key=u"Publ116", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ216", u"Publ116"]), - Row(variable_key=u"Publ117", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ217", u"Publ117"]), - Row(variable_key=u"Publ118", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ218", u"Publ118"]), - Row(variable_key=u"Publ119", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ219", u"Publ119"]), - Row(variable_key=u"Publ120", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ220", u"Publ120"]), - Row(variable_key=u"Publ199", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ299", u"Publ199"], - is_sum=True), - Row(description=u"Andel publika aktiviteter primärt för barn/unga", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ299", u"Publ199"], - percentage=True), - Row(description=u"", - label_only=True), - Row(variable_key=u"Publ201", - show_in_chart=False), - Row(variable_key=u"Publ202", - show_in_chart=False), - Row(variable_key=u"Publ203", - show_in_chart=False), - Row(variable_key=u"Publ204", - show_in_chart=False), - Row(variable_key=u"Publ205", - show_in_chart=False), - Row(variable_key=u"Publ206", - show_in_chart=False), - Row(variable_key=u"Publ207", - show_in_chart=False), - 
Row(variable_key=u"Publ208", - show_in_chart=False), - Row(variable_key=u"Publ209", - show_in_chart=False), - Row(variable_key=u"Publ210", - show_in_chart=False), - Row(variable_key=u"Publ211", - show_in_chart=False), - Row(variable_key=u"Publ212", - show_in_chart=False), - Row(variable_key=u"Publ213", - show_in_chart=False), - Row(variable_key=u"Publ214", - show_in_chart=False), - Row(variable_key=u"Publ215", - show_in_chart=False), - Row(variable_key=u"Publ216", - show_in_chart=False), - Row(variable_key=u"Publ217", - show_in_chart=False), - Row(variable_key=u"Publ218", - show_in_chart=False), - Row(variable_key=u"Publ219", - show_in_chart=False), - Row(variable_key=u"Publ220", - show_in_chart=False), - Row(variable_key=u"Publ299", - is_sum=True, - show_in_chart=False), - ]), - ]) + return ReportTemplate( + groups=[ + Group( + title="Organisation", + rows=[ + Row(variable_key="BemanService01"), + Row(variable_key="Integrerad01"), + Row(variable_key="Obeman01"), + Row(variable_key="ObemanLan01", show_in_chart=False), + Row(variable_key="Bokbuss01", show_in_chart=False), + Row(variable_key="BokbussHP01", show_in_chart=False), + Row(variable_key="Bokbil01", show_in_chart=False), + Row(variable_key="Population01", show_in_chart=False), + Row(variable_key="Population02", show_in_chart=False), + Row(variable_key="Population03", show_in_chart=False), + Row( + description="Andel integrerade serviceställen", + computation=(lambda a, b: a / b), + variable_keys=["Integrerad01", "BemanService01"], + percentage=True, + ), + Row( + description="Medelantal utlån till servicesställen där vidare låneregistrering inte sker", + computation=(lambda a, b: a / b), + variable_keys=["ObemanLan01", "Obeman01"], + ), + ], + ), + Group( + title="Årsverken", + rows=[ + Row(variable_key="Arsverke01"), + Row(variable_key="Arsverke02"), + Row(variable_key="Arsverke03"), + Row(variable_key="Arsverke04"), + Row(variable_key="Arsverke99", is_sum=True), + Row(variable_key="Arsverke05"), + Row( + description="Andel årsverken för barn och unga", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke05", "Arsverke99"], + percentage=True, + ), + Row( + description="Andel årsverken med bibliotekariekompetens", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke01", "Arsverke99"], + percentage=True, + ), + Row( + description="Antal fysiska besök per årsverke", + computation=(lambda a, b: a / b), + variable_keys=["Besok01", "Arsverke99"], + ), + Row( + description="Antal aktiva låntagare per årsverke", + computation=(lambda a, b: a / b), + variable_keys=["Aktiv99", "Arsverke99"], + ), + ], + ), + Group( + title="Personal", + rows=[ + Row(variable_key="Personer01"), + Row(variable_key="Personer02"), + Row(variable_key="Personer99", is_sum=True), + Row( + description="Andel anställda kvinnor", + computation=(lambda a, b: a / b), + variable_keys=["Personer01", "Personer99"], + percentage=True, + ), + Row( + description="Antal årsverken per anställd", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke99", "Personer99"], + ), + ], + ), + Group( + title="Ekonomi", + rows=[ + Row(variable_key="Utgift01"), + Row(variable_key="Utgift02"), + Row(variable_key="Utgift03"), + Row(variable_key="Utgift04"), + Row(variable_key="Utgift05"), + Row(variable_key="Utgift06"), + Row(variable_key="Utgift99", is_sum=True), + Row(variable_key="Utgift07"), + Row( + description="Andel kostnad för medier av total driftkostnad", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Utgift01", "Utgift02", "Utgift99"], 
+ percentage=True, + ), + Row( + description="Andel kostnad för personal av total driftkostnad", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Utgift03", "Utgift04", "Utgift99"], + percentage=True, + ), + Row( + description="Andel kostnad för e-medier av total driftskostnad", + computation=(lambda a, b: a / b), + variable_keys=["Utgift02", "Utgift99"], + percentage=True, + ), + ], + ), + Group( + title="Egengenererade intäkter", + rows=[ + Row(variable_key="Intakt01"), + Row(variable_key="Intakt02"), + Row(variable_key="Intakt03"), + Row(variable_key="Intakt99", is_sum=True), + Row( + description="Andel egengenererade intäkter i förhållande till de totala driftskostnaderna", + computation=(lambda a, b: a / b), + variable_keys=["Intakt99", "Utgift99"], + percentage=True, + ), + ], + ), + Group( + title="Fysiskt bestånd", + extra="Andel av totalt bestånd", + rows=[ + Row( + variable_key="Bestand101", + computation=(lambda a, b: a / b), + variable_keys=["Bestand101", "Bestand199"], + ), + Row( + variable_key="Bestand102", + computation=(lambda a, b: a / b), + variable_keys=["Bestand102", "Bestand199"], + ), + Row( + variable_key="Bestand103", + computation=(lambda a, b: a / b), + variable_keys=["Bestand103", "Bestand199"], + ), + Row( + variable_key="Bestand104", + computation=(lambda a, b: a / b), + variable_keys=["Bestand104", "Bestand199"], + ), + Row( + variable_key="Bestand105", + computation=(lambda a, b: a / b), + variable_keys=["Bestand105", "Bestand199"], + ), + Row( + variable_key="Bestand106", + computation=(lambda a, b: a / b), + variable_keys=["Bestand106", "Bestand199"], + ), + Row( + variable_key="Bestand107", + computation=(lambda a, b: a / b), + variable_keys=["Bestand107", "Bestand199"], + ), + Row( + variable_key="Bestand108", + computation=(lambda a, b: a / b), + variable_keys=["Bestand108", "Bestand199"], + ), + Row( + variable_key="Bestand109", + computation=(lambda a, b: a / b), + variable_keys=["Bestand109", "Bestand199"], + ), + Row( + variable_key="Bestand110", + computation=(lambda a, b: a / b), + variable_keys=["Bestand110", "Bestand199"], + ), + Row( + variable_key="Bestand111", + computation=(lambda a, b: a / b), + variable_keys=["Bestand111", "Bestand199"], + ), + Row( + variable_key="Bestand112", + computation=(lambda a, b: a / b), + variable_keys=["Bestand112", "Bestand199"], + ), + Row( + variable_key="Bestand113", + computation=(lambda a, b: a / b), + variable_keys=["Bestand113", "Bestand199"], + ), + Row( + variable_key="Bestand199", + is_sum=True, + computation=(lambda a, b: a / b), + variable_keys=["Bestand199", "Bestand199"], + ), + ], + ), + Group( + title="Fysiskt nyförvärv", + extra="Andel nyförvärv av motsvarande bestånd", + rows=[ + Row( + variable_key="Bestand201", + computation=(lambda a, b: a / b), + variable_keys=["Bestand201", "Bestand101"], + ), + Row( + variable_key="Bestand202", + computation=(lambda a, b: a / b), + variable_keys=["Bestand202", "Bestand102"], + ), + Row( + variable_key="Bestand203", + computation=(lambda a, b: a / b), + variable_keys=["Bestand203", "Bestand103"], + ), + Row( + variable_key="Bestand204", + computation=(lambda a, b: a / b), + variable_keys=["Bestand204", "Bestand104"], + ), + Row( + variable_key="Bestand205", + computation=(lambda a, b: a / b), + variable_keys=["Bestand205", "Bestand105"], + ), + Row( + variable_key="Bestand206", + computation=(lambda a, b: a / b), + variable_keys=["Bestand206", "Bestand106"], + ), + Row( + variable_key="Bestand207", + computation=(lambda a, b: a / b), + 
variable_keys=["Bestand207", "Bestand107"], + ), + Row( + variable_key="Bestand208", + computation=(lambda a, b: a / b), + variable_keys=["Bestand208", "Bestand108"], + ), + Row( + variable_key="Bestand209", + computation=(lambda a, b: a / b), + variable_keys=["Bestand209", "Bestand109"], + ), + Row( + variable_key="Bestand210", + computation=(lambda a, b: a / b), + variable_keys=["Bestand210", "Bestand110"], + ), + Row( + variable_key="Bestand211", + computation=(lambda a, b: a / b), + variable_keys=["Bestand211", "Bestand111"], + ), + Row( + variable_key="Bestand212", + computation=(lambda a, b: a / b), + variable_keys=["Bestand212", "Bestand112"], + ), + Row( + variable_key="Bestand213", + computation=(lambda a, b: a / b), + variable_keys=["Bestand213", "Bestand113"], + ), + Row( + variable_key="Bestand299", + is_sum=True, + computation=(lambda a, b: a / b), + variable_keys=["Bestand299", "Bestand199"], + ), + ], + ), + Group( + title="Elektroniskt titelbestånd", + rows=[ + Row(variable_key="Bestand301"), + # Row(variable_key=u"Bestand302"), + Row(variable_key="Bestand303"), + Row(variable_key="Bestand304"), + Row(variable_key="Bestand305"), + Row(variable_key="Bestand306"), + Row(variable_key="Bestand307"), + Row(variable_key="Bestand308"), + Row(variable_key="Bestand310"), + Row(variable_key="Bestand311"), + Row(variable_key="Bestand312"), + Row(variable_key="Bestand313"), + Row(variable_key="Bestand399", is_sum=True), + Row( + description="Andel e-bokstitlar av det totala elektroniska titelbeståndet med skriven text", + computation=(lambda a, b: a / b), + variable_keys=["Bestand301", "Bestand399"], + percentage=True, + ), + ], + ), + Group( + title="Barnmedier", + rows=[ + Row(variable_key="Barn01"), + Row(variable_key="Barn02"), + Row(variable_key="Barn03"), + # Row(description=u"Andel tryckta barnmedier av motsvarande totalbestånd", + # computation=(lambda a, b, c: a / (b + c)), + # variable_keys=[u"Barn01", u"Bestand101", u"Bestand105"], + # percentage=True), + Row( + description="Andel nyförvärv tryckta barnmedier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["Barn02", "Barn01"], + percentage=True, + ), + Row( + description="Andel utlån tryckta barnmedier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["Barn03", "Barn01"], + percentage=True, + ), + ], + ), + Group( + title="", + rows=[ + Row(variable_key="HCG04"), + Row(variable_key="Ref05"), + ], + ), + Group( + title="Personer med läsnedsättning", + rows=[ + Row(variable_key="LasnedBest01"), + Row(variable_key="LasnedUtlan01"), + Row( + description="Andel utlån av anpassade medier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["LasnedUtlan01", "LasnedBest01"], + percentage=True, + ), + Row( + description="Andel anpassade medier av totala fysiska beståndet", + computation=(lambda a, b: a / b), + variable_keys=["LasnedBest01", "Bestand199"], + percentage=True, + ), + ], + ), + Group( + title="Medier på olika språk", + rows=[ + Row(description="Titlar på svenska språket", label_only=True), + Row(variable_key="Titlar101"), + Row(variable_key="Titlar102"), + Row(variable_key="Titlar199", is_sum=True), + Row( + description="Titlar på nationella minoritetsspråk", + label_only=True, + ), + Row(variable_key="Titlar201"), + Row(variable_key="Titlar202"), + Row(variable_key="Titlar299", is_sum=True), + Row(description="Titlar på utländska språk", label_only=True), + Row(variable_key="Titlar301"), + Row(variable_key="Titlar302"), + 
Row(variable_key="Titlar399", is_sum=True), + Row( + description="Totalt antal titlar på olika medietyper", + label_only=True, + ), + Row(variable_key="Titlar497"), + Row(variable_key="Titlar498"), + Row(variable_key="Titlar499", is_sum=True), + ], + ), + Group( + title="Elektroniskt bestånd", + rows=[ + Row(variable_key="Databas01"), + Row(variable_key="Databas02"), + Row(variable_key="Databas03"), + Row(variable_key="Databas04"), + Row(variable_key="Databas05"), + Row(variable_key="Databas06"), + Row(variable_key="Databas07"), + Row(variable_key="Databas08"), + Row(variable_key="Databas09"), + Row(variable_key="Databas99", is_sum=True), + ], + ), + Group( + title="Antal initiala lån och omlån fysiskt bestånd", + rows=[ + Row(variable_key="Inilan101", show_in_chart=False), + Row(variable_key="Inilan102", show_in_chart=False), + Row(variable_key="Inilan103", show_in_chart=False), + Row(variable_key="Inilan104", show_in_chart=False), + Row(variable_key="Inilan105", show_in_chart=False), + Row(variable_key="Inilan106", show_in_chart=False), + Row(variable_key="Inilan107", show_in_chart=False), + Row(variable_key="Inilan108", show_in_chart=False), + Row(variable_key="Inilan109", show_in_chart=False), + Row(variable_key="Inilan110", show_in_chart=False), + Row(variable_key="Inilan111", show_in_chart=False), + Row(variable_key="Inilan112", show_in_chart=False), + Row(variable_key="Inilan113", show_in_chart=False), + Row(variable_key="Inilan199", is_sum=True), + Row( + description="Andel inititala lån av det totala antalet lån", + computation=(lambda a, b: a / b), + variable_keys=["Inilan199", "Utlan399"], + percentage=True, + ), + Row(description="", label_only=True), + Row(variable_key="Omlan201", show_in_chart=False), + Row(variable_key="Omlan202", show_in_chart=False), + Row(variable_key="Omlan203", show_in_chart=False), + Row(variable_key="Omlan204", show_in_chart=False), + Row(variable_key="Omlan205", show_in_chart=False), + Row(variable_key="Omlan206", show_in_chart=False), + Row(variable_key="Omlan207", show_in_chart=False), + Row(variable_key="Omlan208", show_in_chart=False), + Row(variable_key="Omlan209", show_in_chart=False), + Row(variable_key="Omlan210", show_in_chart=False), + Row(variable_key="Omlan211", show_in_chart=False), + Row(variable_key="Omlan212", show_in_chart=False), + Row(variable_key="Omlan213", show_in_chart=False), + Row(variable_key="Omlan299", is_sum=True), + Row( + description="Andel omlån av det totala antalet lån", + computation=(lambda a, b: a / b), + variable_keys=["Omlan299", "Utlan399"], + percentage=True, + ), + ], + ), + Group( + title="Utlån fysiskt bestånd", + extra="Andel av total fysisk utlåning", + rows=[ + Row( + variable_key="Utlan301", + computation=(lambda a, b: a / b), + variable_keys=["Utlan301", "Utlan399"], + ), + Row( + variable_key="Utlan302", + computation=(lambda a, b: a / b), + variable_keys=["Utlan302", "Utlan399"], + ), + Row( + variable_key="Utlan303", + computation=(lambda a, b: a / b), + variable_keys=["Utlan303", "Utlan399"], + ), + Row( + variable_key="Utlan304", + computation=(lambda a, b: a / b), + variable_keys=["Utlan304", "Utlan399"], + ), + Row( + variable_key="Utlan305", + computation=(lambda a, b: a / b), + variable_keys=["Utlan305", "Utlan399"], + ), + Row( + variable_key="Utlan306", + computation=(lambda a, b: a / b), + variable_keys=["Utlan306", "Utlan399"], + ), + Row( + variable_key="Utlan307", + computation=(lambda a, b: a / b), + variable_keys=["Utlan307", "Utlan399"], + ), + Row( + variable_key="Utlan308", + 
computation=(lambda a, b: a / b), + variable_keys=["Utlan308", "Utlan399"], + ), + Row( + variable_key="Utlan309", + computation=(lambda a, b: a / b), + variable_keys=["Utlan309", "Utlan399"], + ), + Row( + variable_key="Utlan310", + computation=(lambda a, b: a / b), + variable_keys=["Utlan310", "Utlan399"], + ), + Row( + variable_key="Utlan311", + computation=(lambda a, b: a / b), + variable_keys=["Utlan311", "Utlan399"], + ), + Row( + variable_key="Utlan312", + computation=(lambda a, b: a / b), + variable_keys=["Utlan312", "Utlan399"], + ), + Row( + variable_key="Utlan313", + computation=(lambda a, b: a / b), + variable_keys=["Utlan313", "Utlan399"], + ), + Row(variable_key="Utlan399", is_sum=True), + ], + ), + Group( + title="Läsning på plats i biblioteket", + show_chart=False, + rows=[ + Row(variable_key="Laslan01"), + Row(variable_key="Laslan02"), + Row(variable_key="Laslan99"), + Row( + description="Beräkning lån på plats", + computation=(lambda a, b, c: ((a / b) / 2) / c), + variable_keys=["Laslan01", "Laslan02", "Open101"], + ), + ], + ), + Group( + title="Fjärrlån", + rows=[ + Row(description="Inom Sverige", label_only=True), + Row(variable_key="Fjarr101"), + Row(variable_key="Fjarr102"), + Row(description="Utanför Sverige", label_only=True), + Row(variable_key="Fjarr201"), + Row(variable_key="Fjarr202"), + ], + ), + Group( + title="Summering fjärrlån", + show_chart=False, + rows=[ + Row(variable_key="Fjarr397"), + Row(variable_key="Fjarr398"), + Row(variable_key="Fjarr399", is_sum=True), + Row( + description="Andel utländska fjärrlån totalt", + computation=(lambda a, b: a / b), + variable_keys=["Fjarr299", "Fjarr399"], + percentage=True, + ), + Row( + description="Nettofjärrinlåning in-ut", + computation=(lambda a, b: a - b), + variable_keys=["Fjarr397", "Fjarr398"], + ), + ], + ), + Group( + title="Användning av elektroniska samlingar", + rows=[ + Row(description="Antal sökningar", label_only=True), + Row(variable_key="Elan101"), + Row(variable_key="Elan102"), + Row(variable_key="Elan103"), + Row(variable_key="Elan104"), + Row(variable_key="Elan105"), + Row(variable_key="Elan106"), + Row(variable_key="Elan107"), + Row(variable_key="Elan108"), + Row(variable_key="Elan109"), + Row(variable_key="Elan199", is_sum=True), + Row(description="Antal nedladdningar", label_only=True), + Row(variable_key="Elan201"), + Row(variable_key="Elan202"), + Row(variable_key="Elan203"), + Row(variable_key="Elan204"), + Row(variable_key="Elan205"), + Row(variable_key="Elan206"), + Row(variable_key="Elan207"), + Row(variable_key="Elan208"), + Row(variable_key="Elan209"), + Row(variable_key="Elan299", is_sum=True), + Row(description="Antal nedladdade sektioner", label_only=True), + Row(variable_key="Elan301"), + Row(variable_key="Elan399", is_sum=True), + Row( + description="Total användning av de elektroniska samlingarna", + computation=(lambda a, b, c: a + b + c), + variable_keys=["Elan199", "Elan299", "Elan399"], + ), + ], + ), + Group( + title="Besök", + rows=[ + Row(variable_key="Besok01"), + Row(variable_key="Besok02"), + Row(variable_key="Besok03"), + Row(variable_key="Besok04"), + Row(variable_key="Besok05"), + ], + ), + Group( + title="Aktiva användare", + rows=[ + Row(variable_key="Aktiv01"), + Row(variable_key="Aktiv02"), + Row(variable_key="Aktiv04"), + Row(variable_key="Aktiv99", is_sum=True), + Row(variable_key="Aktiv03"), + Row( + description="Andel kvinnor som är aktiva låntagare", + computation=(lambda a, b: a / (a + b)), + variable_keys=["Aktiv01", "Aktiv02"], + percentage=True, + ), + 
Row( + description="Andel barn och unga som är aktiva låntagare", + computation=(lambda a, b, c: a / (b + c)), + variable_keys=["Aktiv03", "Aktiv01", "Aktiv02"], + percentage=True, + ), + Row( + description="Antal fysiska besök per antal aktiva användare", + computation=(lambda a, b: a / b), + variable_keys=["Besok01", "Aktiv99"], + ), + ], + ), + Group( + title="Resurser", + rows=[ + Row(variable_key="Resurs01"), + Row(variable_key="Resurs02"), + Row(variable_key="Resurs03"), + Row(variable_key="Resurs04"), + Row(variable_key="Resurs05"), + Row(variable_key="Resurs06"), + Row(variable_key="Resurs07"), + Row(variable_key="Resurs08"), + Row(variable_key="Resurs09"), + Row(variable_key="Resurs10"), + Row( + description="Andel publika ytor", + computation=(lambda a, b: a / (a + b)), + variable_keys=["Resurs09", "Resurs10"], + percentage=True, + ), + ], + ), + Group( + title="Öppettider", + rows=[ + Row( + description="Servicestället med de generösaste öppettiderna", + label_only=True, + ), + Row(variable_key="Open101"), + Row(variable_key="Open102"), + Row(variable_key="Open103"), + Row(variable_key="Open104"), + Row(variable_key="Open105"), + Row(variable_key="Open106"), + Row( + description="Övriga serviceställen sammantaget", label_only=True + ), + Row(variable_key="Open201"), + Row(variable_key="Open202"), + Row(variable_key="Open203"), + Row(variable_key="Open204"), + Row(variable_key="Open205"), + Row(variable_key="Open206"), + Row( + description="Medelantal öppetdagar per år", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Open201", "Open101", "BemanService01"], + ), + Row( + description="Medelantal öppettimmar alla serviceställen", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Open203", "Open103", "BemanService01"], + ), + Row( + description="Andel öppettimmar med reducerad service", + computation=(lambda a, b, c, d: (a + b) / (c + d)), + variable_keys=["Open104", "Open204", "Open103", "Open203"], + percentage=True, + ), + Row( + description="Andel öppettimmar utanför kontorstid", + computation=(lambda a, b, c, d: (a + b) / (c + d)), + variable_keys=["Open106", "Open206", "Open103", "Open203"], + percentage=True, + ), + ], + ), + Group( + title="Service", + rows=[ + Row(variable_key="Serv01"), + Row(variable_key="Serv02"), + Row(variable_key="Serv03"), + Row(variable_key="Serv04"), + Row(variable_key="Serv05"), + Row(variable_key="Serv06"), + Row(variable_key="Serv07"), + ], + ), + Group( + title="Publika aktivitetstillfällen", + extra="Varav andel tillfällen för barn och unga", + rows=[ + Row( + variable_key="Publ101", + computation=(lambda a, b: a / b), + variable_keys=["Publ201", "Publ101"], + ), + Row( + variable_key="Publ102", + computation=(lambda a, b: a / b), + variable_keys=["Publ202", "Publ102"], + ), + Row( + variable_key="Publ103", + computation=(lambda a, b: a / b), + variable_keys=["Publ203", "Publ103"], + ), + Row( + variable_key="Publ104", + computation=(lambda a, b: a / b), + variable_keys=["Publ204", "Publ104"], + ), + Row( + variable_key="Publ105", + computation=(lambda a, b: a / b), + variable_keys=["Publ205", "Publ105"], + ), + Row( + variable_key="Publ106", + computation=(lambda a, b: a / b), + variable_keys=["Publ206", "Publ106"], + ), + Row( + variable_key="Publ107", + computation=(lambda a, b: a / b), + variable_keys=["Publ207", "Publ107"], + ), + Row( + variable_key="Publ108", + computation=(lambda a, b: a / b), + variable_keys=["Publ208", "Publ108"], + ), + Row( + variable_key="Publ109", + computation=(lambda a, b: a / b), + 
variable_keys=["Publ209", "Publ109"], + ), + Row( + variable_key="Publ110", + computation=(lambda a, b: a / b), + variable_keys=["Publ210", "Publ110"], + ), + Row( + variable_key="Publ111", + computation=(lambda a, b: a / b), + variable_keys=["Publ211", "Publ111"], + ), + Row( + variable_key="Publ112", + computation=(lambda a, b: a / b), + variable_keys=["Publ212", "Publ112"], + ), + Row( + variable_key="Publ113", + computation=(lambda a, b: a / b), + variable_keys=["Publ213", "Publ113"], + ), + Row( + variable_key="Publ114", + computation=(lambda a, b: a / b), + variable_keys=["Publ214", "Publ114"], + ), + Row( + variable_key="Publ115", + computation=(lambda a, b: a / b), + variable_keys=["Publ215", "Publ115"], + ), + Row( + variable_key="Publ116", + computation=(lambda a, b: a / b), + variable_keys=["Publ216", "Publ116"], + ), + Row( + variable_key="Publ117", + computation=(lambda a, b: a / b), + variable_keys=["Publ217", "Publ117"], + ), + Row( + variable_key="Publ118", + computation=(lambda a, b: a / b), + variable_keys=["Publ218", "Publ118"], + ), + Row( + variable_key="Publ119", + computation=(lambda a, b: a / b), + variable_keys=["Publ219", "Publ119"], + ), + Row( + variable_key="Publ120", + computation=(lambda a, b: a / b), + variable_keys=["Publ220", "Publ120"], + ), + Row( + variable_key="Publ199", + computation=(lambda a, b: a / b), + variable_keys=["Publ299", "Publ199"], + is_sum=True, + ), + Row( + description="Andel publika aktiviteter primärt för barn/unga", + computation=(lambda a, b: a / b), + variable_keys=["Publ299", "Publ199"], + percentage=True, + ), + Row(description="", label_only=True), + Row(variable_key="Publ201", show_in_chart=False), + Row(variable_key="Publ202", show_in_chart=False), + Row(variable_key="Publ203", show_in_chart=False), + Row(variable_key="Publ204", show_in_chart=False), + Row(variable_key="Publ205", show_in_chart=False), + Row(variable_key="Publ206", show_in_chart=False), + Row(variable_key="Publ207", show_in_chart=False), + Row(variable_key="Publ208", show_in_chart=False), + Row(variable_key="Publ209", show_in_chart=False), + Row(variable_key="Publ210", show_in_chart=False), + Row(variable_key="Publ211", show_in_chart=False), + Row(variable_key="Publ212", show_in_chart=False), + Row(variable_key="Publ213", show_in_chart=False), + Row(variable_key="Publ214", show_in_chart=False), + Row(variable_key="Publ215", show_in_chart=False), + Row(variable_key="Publ216", show_in_chart=False), + Row(variable_key="Publ217", show_in_chart=False), + Row(variable_key="Publ218", show_in_chart=False), + Row(variable_key="Publ219", show_in_chart=False), + Row(variable_key="Publ220", show_in_chart=False), + Row(variable_key="Publ299", is_sum=True, show_in_chart=False), + ], + ), + ] + ) + def report_template_base_with_target_group_calculations(): - return ReportTemplate(groups=[ - Group(title=u"Organisation", - rows=[ - Row(variable_key=u"BemanService01"), - Row(variable_key=u"Integrerad01"), - Row(variable_key=u"Obeman01"), - Row(variable_key=u"ObemanLan01", - show_in_chart=False), - Row(variable_key=u"Bokbuss01", - show_in_chart=False), - Row(variable_key=u"BokbussHP01", - show_in_chart=False), - Row(variable_key=u"Bokbil01", - show_in_chart=False), - Row(variable_key=u"Population01", - show_in_chart=False), - Row(variable_key=u"Population02", - show_in_chart=False), - Row(variable_key=u"Population03", - show_in_chart=False), - Row(description=u"Andel integrerade serviceställen", - computation=(lambda a, b: a / b), - variable_keys=[u"Integrerad01", 
u"BemanService01"], - percentage=True), - Row( - description=u"Medelantal utlån till servicesställen där vidare låneregistrering inte sker", - computation=(lambda a, b: a / b), - variable_keys=[u"ObemanLan01", u"Obeman01"]) - ]), - Group(title=u"Årsverken", - rows=[ - Row(variable_key=u"Arsverke01"), - Row(variable_key=u"Arsverke02"), - Row(variable_key=u"Arsverke03"), - Row(variable_key=u"Arsverke04"), - Row(variable_key=u"Arsverke99", is_sum=True), - Row(variable_key=u"Arsverke05"), - Row(description=u"Andel årsverken för barn och unga", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke05", u"Arsverke99"], - percentage=True - ), - Row(description=u"Andel årsverken med bibliotekariekompetens", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke01", u"Arsverke99"], - percentage=True), - Row(description=u"Antal årsverken per 100 personer i målgruppen", - computation=(lambda a, b: a / (b / 100)), - variable_keys=[u"Arsverke99", u"Population02"]), - Row(description=u"Antal fysiska besök per årsverke", - computation=(lambda a, b: a / b), - variable_keys=[u"Besok01", u"Arsverke99"]), - Row(description=u"Antal aktiva låntagare per årsverke", - computation=(lambda a, b: a / b), - variable_keys=[u"Aktiv99", u"Arsverke99"]), - ]), - Group(title=u"Personal", - rows=[ - Row(variable_key=u"Personer01"), - Row(variable_key=u"Personer02"), - Row(variable_key=u"Personer99", is_sum=True), - Row(description=u"Andel anställda kvinnor", - computation=(lambda a, b: a / b), - variable_keys=[u"Personer01", u"Personer99"], - percentage=True), - Row(description=u"Antal årsverken per anställd", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke99", u"Personer99"]), - ]), - Group(title=u"Ekonomi", - rows=[ - Row(variable_key=u"Utgift01"), - Row(variable_key=u"Utgift02"), - Row(variable_key=u"Utgift03"), - Row(variable_key=u"Utgift04"), - Row(variable_key=u"Utgift05"), - Row(variable_key=u"Utgift06"), - Row(variable_key=u"Utgift99", is_sum=True), - Row(variable_key=u"Utgift07"), - Row(description=u"Mediekostnad per person i målgruppen", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Utgift01", u"Utgift02", u"Population02"]), - Row(description=u"Total driftkostnad per person i målgruppen", - computation=(lambda a, b: a / b), - variable_keys=[u"Utgift99", u"Population02"]), - Row(description=u"Andel kostnad för medier av total driftkostnad", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Utgift01", u"Utgift02", u"Utgift99"], - percentage=True), - Row(description=u"Andel kostnad för personal av total driftkostnad", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Utgift03", u"Utgift04", u"Utgift99"], - percentage=True), - Row(description=u"Andel kostnad för e-medier av total driftskostnad", - computation=(lambda a, b: a / b), - variable_keys=[u"Utgift02", u"Utgift99"], - percentage=True), - ]), - Group(title=u"Egengenererade intäkter", - rows=[ - Row(variable_key=u"Intakt01"), - Row(variable_key=u"Intakt02"), - Row(variable_key=u"Intakt03"), - Row(variable_key=u"Intakt99", is_sum=True), - Row( - description=u"Andel egengenererade intäkter i förhållande till de totala driftskostnaderna", - computation=(lambda a, b: a / b), - variable_keys=[u"Intakt99", u"Utgift99"], - percentage=True) - ]), - Group(title=u"Fysiskt bestånd", - extra=u"Andel av totalt bestånd", - rows=[ - Row(variable_key=u"Bestand101", - computation=(lambda a, b: a / b), - variable_keys=["Bestand101", "Bestand199"]), - Row(variable_key=u"Bestand102", - 
computation=(lambda a, b: a / b), - variable_keys=["Bestand102", "Bestand199"]), - Row(variable_key=u"Bestand103", - computation=(lambda a, b: a / b), - variable_keys=["Bestand103", "Bestand199"]), - Row(variable_key=u"Bestand104", - computation=(lambda a, b: a / b), - variable_keys=["Bestand104", "Bestand199"]), - Row(variable_key=u"Bestand105", - computation=(lambda a, b: a / b), - variable_keys=["Bestand105", "Bestand199"]), - Row(variable_key=u"Bestand106", - computation=(lambda a, b: a / b), - variable_keys=["Bestand106", "Bestand199"]), - Row(variable_key=u"Bestand107", - computation=(lambda a, b: a / b), - variable_keys=["Bestand107", "Bestand199"]), - Row(variable_key=u"Bestand108", - computation=(lambda a, b: a / b), - variable_keys=["Bestand108", "Bestand199"]), - Row(variable_key=u"Bestand109", - computation=(lambda a, b: a / b), - variable_keys=["Bestand109", "Bestand199"]), - Row(variable_key=u"Bestand110", - computation=(lambda a, b: a / b), - variable_keys=["Bestand110", "Bestand199"]), - Row(variable_key=u"Bestand111", - computation=(lambda a, b: a / b), - variable_keys=["Bestand111", "Bestand199"]), - Row(variable_key=u"Bestand112", - computation=(lambda a, b: a / b), - variable_keys=["Bestand112", "Bestand199"]), - Row(variable_key=u"Bestand113", - computation=(lambda a, b: a / b), - variable_keys=["Bestand113", "Bestand199"]), - Row(variable_key=u"Bestand199", is_sum=True, - computation=(lambda a, b: a / b), - variable_keys=["Bestand199", "Bestand199"]), - ]), - Group(title=u"Fysiskt nyförvärv", - extra=u"Andel nyförvärv av motsvarande bestånd", - rows=[ - Row(variable_key=u"Bestand201", - computation=(lambda a, b: a / b), - variable_keys=["Bestand201", "Bestand101"]), - Row(variable_key=u"Bestand202", - computation=(lambda a, b: a / b), - variable_keys=["Bestand202", "Bestand102"]), - Row(variable_key=u"Bestand203", - computation=(lambda a, b: a / b), - variable_keys=["Bestand203", "Bestand103"]), - Row(variable_key=u"Bestand204", - computation=(lambda a, b: a / b), - variable_keys=["Bestand204", "Bestand104"]), - Row(variable_key=u"Bestand205", - computation=(lambda a, b: a / b), - variable_keys=["Bestand205", "Bestand105"]), - Row(variable_key=u"Bestand206", - computation=(lambda a, b: a / b), - variable_keys=["Bestand206", "Bestand106"]), - Row(variable_key=u"Bestand207", - computation=(lambda a, b: a / b), - variable_keys=["Bestand207", "Bestand107"]), - Row(variable_key=u"Bestand208", - computation=(lambda a, b: a / b), - variable_keys=["Bestand208", "Bestand108"]), - Row(variable_key=u"Bestand209", - computation=(lambda a, b: a / b), - variable_keys=["Bestand209", "Bestand109"]), - Row(variable_key=u"Bestand210", - computation=(lambda a, b: a / b), - variable_keys=["Bestand210", "Bestand110"]), - Row(variable_key=u"Bestand211", - computation=(lambda a, b: a / b), - variable_keys=["Bestand211", "Bestand111"]), - Row(variable_key=u"Bestand212", - computation=(lambda a, b: a / b), - variable_keys=["Bestand212", "Bestand112"]), - Row(variable_key=u"Bestand213", - computation=(lambda a, b: a / b), - variable_keys=["Bestand213", "Bestand113"]), - Row(variable_key=u"Bestand299", is_sum=True, - computation=(lambda a, b: a / b), - variable_keys=["Bestand299", "Bestand199"]) - ]), - Group(title=u"Elektroniskt titelbestånd", - rows=[ - Row(variable_key=u"Bestand301"), - #Row(variable_key=u"Bestand302"), - Row(variable_key=u"Bestand303"), - Row(variable_key=u"Bestand304"), - Row(variable_key=u"Bestand305"), - Row(variable_key=u"Bestand306"), - Row(variable_key=u"Bestand307"), 
- Row(variable_key=u"Bestand308"), - Row(variable_key=u"Bestand310"), - Row(variable_key=u"Bestand311"), - Row(variable_key=u"Bestand312"), - Row(variable_key=u"Bestand313"), - Row(variable_key=u"Bestand399", is_sum=True), - Row(description=u"Andel e-bokstitlar av det totala elektroniska titelbeståndet med skriven text", - computation=(lambda a, b: a / b), - variable_keys=[u"Bestand301", u"Bestand399"], - percentage=True) - ]), - Group(title=u"Barnmedier", - rows=[ - Row(variable_key=u"Barn01"), - Row(variable_key=u"Barn02"), - Row(variable_key=u"Barn03"), - #Row(description=u"Andel tryckta barnmedier av motsvarande totalbestånd", - #computation=(lambda a, b, c: a / (b + c)), - #variable_keys=[u"Barn01", u"Bestand101", u"Bestand105"], - #percentage=True), - Row(description=u"Andel nyförvärv tryckta barnmedier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"Barn02", u"Barn01"], - percentage=True), - Row(description=u"Andel utlån tryckta barnmedier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"Barn03", u"Barn01"], - percentage=True), - ]), - Group(title=u"", - rows=[ - Row(variable_key=u"HCG04"), - Row(variable_key=u"Ref05"), - ]), - Group(title=u"Personer med läsnedsättning", - rows=[ - Row(variable_key=u"LasnedBest01"), - Row(variable_key=u"LasnedUtlan01"), - Row(description=u"Andel utlån av anpassade medier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"LasnedUtlan01", u"LasnedBest01"], - percentage=True), - Row(description=u"Andel anpassade medier av totala fysiska beståndet", - computation=(lambda a, b: a / b), - variable_keys=[u"LasnedBest01", u"Bestand199"], - percentage=True), - ]), - Group(title=u"Medier på olika språk", - rows=[ - Row(description=u"Titlar på svenska språket", label_only=True), - Row(variable_key=u"Titlar101"), - Row(variable_key=u"Titlar102"), - Row(variable_key=u"Titlar199", is_sum=True), - Row(description=u"Titlar på nationella minoritetsspråk", label_only=True), - Row(variable_key=u"Titlar201"), - Row(variable_key=u"Titlar202"), - Row(variable_key=u"Titlar299", is_sum=True), - Row(description=u"Titlar på utländska språk", label_only=True), - Row(variable_key=u"Titlar301"), - Row(variable_key=u"Titlar302"), - Row(variable_key=u"Titlar399", is_sum=True), - Row(description=u"Totalt antal titlar på olika medietyper", label_only=True), - Row(variable_key=u"Titlar497"), - Row(variable_key=u"Titlar498"), - Row(variable_key=u"Titlar499", is_sum=True), - ]), - Group(title=u"Elektroniskt bestånd", - rows=[ - Row(variable_key=u"Databas01"), - Row(variable_key=u"Databas02"), - Row(variable_key=u"Databas03"), - Row(variable_key=u"Databas04"), - Row(variable_key=u"Databas05"), - Row(variable_key=u"Databas06"), - Row(variable_key=u"Databas07"), - Row(variable_key=u"Databas08"), - Row(variable_key=u"Databas09"), - Row(variable_key=u"Databas99", is_sum=True), - ]), - Group(title=u"Antal initiala lån och omlån fysiskt bestånd", - rows=[ - Row(variable_key=u"Inilan101", - show_in_chart=False), - Row(variable_key=u"Inilan102", - show_in_chart=False), - Row(variable_key=u"Inilan103", - show_in_chart=False), - Row(variable_key=u"Inilan104", - show_in_chart=False), - Row(variable_key=u"Inilan105", - show_in_chart=False), - Row(variable_key=u"Inilan106", - show_in_chart=False), - Row(variable_key=u"Inilan107", - show_in_chart=False), - Row(variable_key=u"Inilan108", - show_in_chart=False), - Row(variable_key=u"Inilan109", - show_in_chart=False), - Row(variable_key=u"Inilan110", - 
show_in_chart=False), - Row(variable_key=u"Inilan111", - show_in_chart=False), - Row(variable_key=u"Inilan112", - show_in_chart=False), - Row(variable_key=u"Inilan113", - show_in_chart=False), - Row(variable_key=u"Inilan199", is_sum=True), - Row(description=u"Andel inititala lån av det totala antalet lån", - computation=(lambda a, b: a / b), - variable_keys=[u"Inilan199", u"Utlan399"], - percentage=True), - Row(description=u"", label_only=True), - Row(variable_key=u"Omlan201", - show_in_chart=False), - Row(variable_key=u"Omlan202", - show_in_chart=False), - Row(variable_key=u"Omlan203", - show_in_chart=False), - Row(variable_key=u"Omlan204", - show_in_chart=False), - Row(variable_key=u"Omlan205", - show_in_chart=False), - Row(variable_key=u"Omlan206", - show_in_chart=False), - Row(variable_key=u"Omlan207", - show_in_chart=False), - Row(variable_key=u"Omlan208", - show_in_chart=False), - Row(variable_key=u"Omlan209", - show_in_chart=False), - Row(variable_key=u"Omlan210", - show_in_chart=False), - Row(variable_key=u"Omlan211", - show_in_chart=False), - Row(variable_key=u"Omlan212", - show_in_chart=False), - Row(variable_key=u"Omlan213", - show_in_chart=False), - Row(variable_key=u"Omlan299", is_sum=True), - Row(description=u"Andel omlån av det totala antalet lån", - computation=(lambda a, b: a / b), - variable_keys=[u"Omlan299", u"Utlan399"], - percentage=True), - ]), - Group(title=u"Utlån fysiskt bestånd", - extra=u"Andel av total fysisk utlåning", - rows=[ - Row(variable_key=u"Utlan301", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan301", u"Utlan399"]), - Row(variable_key=u"Utlan302", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan302", u"Utlan399"]), - Row(variable_key=u"Utlan303", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan303", u"Utlan399"]), - Row(variable_key=u"Utlan304", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan304", u"Utlan399"]), - Row(variable_key=u"Utlan305", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan305", u"Utlan399"]), - Row(variable_key=u"Utlan306", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan306", u"Utlan399"]), - Row(variable_key=u"Utlan307", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan307", u"Utlan399"]), - Row(variable_key=u"Utlan308", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan308", u"Utlan399"]), - Row(variable_key=u"Utlan309", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan309", u"Utlan399"]), - Row(variable_key=u"Utlan310", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan310", u"Utlan399"]), - Row(variable_key=u"Utlan311", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan311", u"Utlan399"]), - Row(variable_key=u"Utlan312", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan312", u"Utlan399"]), - Row(variable_key=u"Utlan313", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan313", u"Utlan399"]), - Row(variable_key=u"Utlan399", is_sum=True), - Row(description=u"Antal utlån per person i målgruppen", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan399", u"Population02"]), - Row(description=u"Fysiska böcker med skriven text per person i målgruppen", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan301", u"Population02"]), - ]), - Group(title=u"Läsning på plats i biblioteket", - show_chart=False, - rows=[ - Row(variable_key=u"Laslan01"), - Row(variable_key=u"Laslan02"), - Row(variable_key=u"Laslan99"), - Row(description=u"Beräkning lån på plats", - computation=(lambda 
a, b, c: ((a / b) / 2) / c), - variable_keys=[u"Laslan01", u"Laslan02", u"Open101"]), - ]), - Group(title=u"Fjärrlån", - rows=[ - Row(description=u"Inom Sverige", - label_only=True), - Row(variable_key=u"Fjarr101"), - Row(variable_key=u"Fjarr102"), - Row(description=u"Utanför Sverige", - label_only=True), - Row(variable_key=u"Fjarr201"), - Row(variable_key=u"Fjarr202"), - ]), - Group(title=u"Summering fjärrlån", - show_chart=False, - rows=[ - Row(variable_key=u"Fjarr397"), - Row(variable_key=u"Fjarr398"), - Row(variable_key=u"Fjarr399", - is_sum=True), - Row(description=u"Andel utländska fjärrlån totalt", - computation=(lambda a, b: a / b), - variable_keys=[u"Fjarr299", u"Fjarr399"], - percentage=True), - Row(description=u"Nettofjärrinlåning in-ut", - computation=(lambda a, b: a - b), - variable_keys=[u"Fjarr397", u"Fjarr398"]), - ]), - Group(title=u"Användning av elektroniska samlingar", - rows=[ - Row(description=u"Antal sökningar", - label_only=True), - Row(variable_key=u"Elan101"), - Row(variable_key=u"Elan102"), - Row(variable_key=u"Elan103"), - Row(variable_key=u"Elan104"), - Row(variable_key=u"Elan105"), - Row(variable_key=u"Elan106"), - Row(variable_key=u"Elan107"), - Row(variable_key=u"Elan108"), - Row(variable_key=u"Elan109"), - Row(variable_key=u"Elan199", - is_sum=True), - Row(description=u"Antal nedladdningar", - label_only=True), - Row(variable_key=u"Elan201"), - Row(variable_key=u"Elan202"), - Row(variable_key=u"Elan203"), - Row(variable_key=u"Elan204"), - Row(variable_key=u"Elan205"), - Row(variable_key=u"Elan206"), - Row(variable_key=u"Elan207"), - Row(variable_key=u"Elan208"), - Row(variable_key=u"Elan209"), - Row(variable_key=u"Elan299", - is_sum=True), - Row(description=u"Antal nedladdade sektioner", - label_only=True), - Row(variable_key=u"Elan301"), - Row(variable_key=u"Elan399", - is_sum=True), - Row(description=u"Total användning av de elektroniska samlingarna", - computation=(lambda a, b, c: a + b + c), - variable_keys=[u"Elan199", u"Elan299", u"Elan399"]) - ]), - Group(title=u"Besök", - rows=[ - Row(variable_key=u"Besok01"), - Row(variable_key=u"Besok02"), - Row(variable_key=u"Besok03"), - Row(variable_key=u"Besok04"), - Row(variable_key=u"Besok05"), - ]), - Group(title=u"Aktiva användare", - rows=[ - Row(variable_key=u"Aktiv01"), - Row(variable_key=u"Aktiv02"), - Row(variable_key=u"Aktiv04"), - Row(variable_key=u"Aktiv99", - is_sum=True), - Row(variable_key=u"Aktiv03"), - Row(description=u"Andel kvinnor som är aktiva låntagare", - computation=(lambda a, b: a / (a + b)), - variable_keys=[u"Aktiv01", u"Aktiv02"], - percentage=True), - Row(description=u"Andel barn och unga som är aktiva låntagare", - computation=(lambda a, b, c: a / (b + c)), - variable_keys=[u"Aktiv03", u"Aktiv01", u"Aktiv02"], - percentage=True), - Row(description=u"Andel aktiva användare per person i målgruppen", - computation=(lambda a, b: a / b), - variable_keys=[u"Aktiv99", u"Population02"], - percentage=True), - Row(description=u"Antal fysiska besök per antal aktiva användare", - computation=(lambda a, b: a / b ), - variable_keys=[u"Besok01", u"Aktiv99"]), - ]), - Group(title=u"Resurser", - rows=[ - Row(variable_key=u"Resurs01"), - Row(variable_key=u"Resurs02"), - Row(variable_key=u"Resurs03"), - Row(variable_key=u"Resurs04"), - Row(variable_key=u"Resurs05"), - Row(variable_key=u"Resurs06"), - Row(variable_key=u"Resurs07"), - Row(variable_key=u"Resurs08"), - Row(variable_key=u"Resurs09"), - Row(variable_key=u"Resurs10"), - Row(description=u"Andel publika ytor", - computation=(lambda a, b: a / 
(a + b)), - variable_keys=[u"Resurs09", u"Resurs10"], - percentage=True) - ]), - Group(title=u"Öppettider", - rows=[ - Row(description=u"Servicestället med de generösaste öppettiderna", - label_only=True), - Row(variable_key=u"Open101"), - Row(variable_key=u"Open102"), - Row(variable_key=u"Open103"), - Row(variable_key=u"Open104"), - Row(variable_key=u"Open105"), - Row(variable_key=u"Open106"), - Row(description=u"Övriga serviceställen sammantaget", - label_only=True), - Row(variable_key=u"Open201"), - Row(variable_key=u"Open202"), - Row(variable_key=u"Open203"), - Row(variable_key=u"Open204"), - Row(variable_key=u"Open205"), - Row(variable_key=u"Open206"), - Row(description=u"Medelantal öppetdagar per år", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Open201", u"Open101", u"BemanService01"]), - Row(description=u"Medelantal öppettimmar alla serviceställen", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Open203", u"Open103", u"BemanService01"]), - Row(description=u"Andel öppettimmar med reducerad service", - computation=(lambda a, b, c, d: (a + b) / (c + d)), - variable_keys=[u"Open104", u"Open204", u"Open103", u"Open203"], - percentage=True), - Row(description=u"Andel öppettimmar utanför kontorstid", - computation=(lambda a, b, c, d: (a + b) / (c + d)), - variable_keys=[u"Open106", u"Open206", u"Open103", u"Open203"], - percentage=True), - ]), - Group(title=u"Service", - rows=[ - Row(variable_key=u"Serv01"), - Row(variable_key=u"Serv02"), - Row(variable_key=u"Serv03"), - Row(variable_key=u"Serv04"), - Row(variable_key=u"Serv05"), - Row(variable_key=u"Serv06"), - Row(variable_key=u"Serv07"), - ]), - Group(title=u"Publika aktivitetstillfällen", - extra=u"Varav andel tillfällen för barn och unga", - rows=[ - Row(variable_key=u"Publ101", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ201", u"Publ101"]), - Row(variable_key=u"Publ102", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ202", u"Publ102"]), - Row(variable_key=u"Publ103", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ203", u"Publ103"]), - Row(variable_key=u"Publ104", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ204", u"Publ104"]), - Row(variable_key=u"Publ105", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ205", u"Publ105"]), - Row(variable_key=u"Publ106", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ206", u"Publ106"]), - Row(variable_key=u"Publ107", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ207", u"Publ107"]), - Row(variable_key=u"Publ108", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ208", u"Publ108"]), - Row(variable_key=u"Publ109", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ209", u"Publ109"]), - Row(variable_key=u"Publ110", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ210", u"Publ110"]), - Row(variable_key=u"Publ111", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ211", u"Publ111"]), - Row(variable_key=u"Publ112", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ212", u"Publ112"]), - Row(variable_key=u"Publ113", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ213", u"Publ113"]), - Row(variable_key=u"Publ114", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ214", u"Publ114"]), - Row(variable_key=u"Publ115", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ215", u"Publ115"]), - Row(variable_key=u"Publ116", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ216", u"Publ116"]), - 
Row(variable_key=u"Publ117", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ217", u"Publ117"]), - Row(variable_key=u"Publ118", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ218", u"Publ118"]), - Row(variable_key=u"Publ119", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ219", u"Publ119"]), - Row(variable_key=u"Publ120", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ220", u"Publ120"]), - Row(variable_key=u"Publ199", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ299", u"Publ199"], - is_sum=True), - Row(description=u"Andel publika aktiviteter primärt för barn/unga", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ299", u"Publ199"], - percentage=True), - Row(description=u"", - label_only=True), - Row(variable_key=u"Publ201", - show_in_chart=False), - Row(variable_key=u"Publ202", - show_in_chart=False), - Row(variable_key=u"Publ203", - show_in_chart=False), - Row(variable_key=u"Publ204", - show_in_chart=False), - Row(variable_key=u"Publ205", - show_in_chart=False), - Row(variable_key=u"Publ206", - show_in_chart=False), - Row(variable_key=u"Publ207", - show_in_chart=False), - Row(variable_key=u"Publ208", - show_in_chart=False), - Row(variable_key=u"Publ209", - show_in_chart=False), - Row(variable_key=u"Publ210", - show_in_chart=False), - Row(variable_key=u"Publ211", - show_in_chart=False), - Row(variable_key=u"Publ212", - show_in_chart=False), - Row(variable_key=u"Publ213", - show_in_chart=False), - Row(variable_key=u"Publ214", - show_in_chart=False), - Row(variable_key=u"Publ215", - show_in_chart=False), - Row(variable_key=u"Publ216", - show_in_chart=False), - Row(variable_key=u"Publ217", - show_in_chart=False), - Row(variable_key=u"Publ218", - show_in_chart=False), - Row(variable_key=u"Publ219", - show_in_chart=False), - Row(variable_key=u"Publ220", - show_in_chart=False), - Row(variable_key=u"Publ299", - is_sum=True, - show_in_chart=False), - ]), - ]) + return ReportTemplate( + groups=[ + Group( + title="Organisation", + rows=[ + Row(variable_key="BemanService01"), + Row(variable_key="Integrerad01"), + Row(variable_key="Obeman01"), + Row(variable_key="ObemanLan01", show_in_chart=False), + Row(variable_key="Bokbuss01", show_in_chart=False), + Row(variable_key="BokbussHP01", show_in_chart=False), + Row(variable_key="Bokbil01", show_in_chart=False), + Row(variable_key="Population01", show_in_chart=False), + Row(variable_key="Population02", show_in_chart=False), + Row(variable_key="Population03", show_in_chart=False), + Row( + description="Andel integrerade serviceställen", + computation=(lambda a, b: a / b), + variable_keys=["Integrerad01", "BemanService01"], + percentage=True, + ), + Row( + description="Medelantal utlån till servicesställen där vidare låneregistrering inte sker", + computation=(lambda a, b: a / b), + variable_keys=["ObemanLan01", "Obeman01"], + ), + ], + ), + Group( + title="Årsverken", + rows=[ + Row(variable_key="Arsverke01"), + Row(variable_key="Arsverke02"), + Row(variable_key="Arsverke03"), + Row(variable_key="Arsverke04"), + Row(variable_key="Arsverke99", is_sum=True), + Row(variable_key="Arsverke05"), + Row( + description="Andel årsverken för barn och unga", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke05", "Arsverke99"], + percentage=True, + ), + Row( + description="Andel årsverken med bibliotekariekompetens", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke01", "Arsverke99"], + percentage=True, + ), + Row( + description="Antal årsverken per 100 personer i 
målgruppen", + computation=(lambda a, b: a / (b / 100)), + variable_keys=["Arsverke99", "Population02"], + ), + Row( + description="Antal fysiska besök per årsverke", + computation=(lambda a, b: a / b), + variable_keys=["Besok01", "Arsverke99"], + ), + Row( + description="Antal aktiva låntagare per årsverke", + computation=(lambda a, b: a / b), + variable_keys=["Aktiv99", "Arsverke99"], + ), + ], + ), + Group( + title="Personal", + rows=[ + Row(variable_key="Personer01"), + Row(variable_key="Personer02"), + Row(variable_key="Personer99", is_sum=True), + Row( + description="Andel anställda kvinnor", + computation=(lambda a, b: a / b), + variable_keys=["Personer01", "Personer99"], + percentage=True, + ), + Row( + description="Antal årsverken per anställd", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke99", "Personer99"], + ), + ], + ), + Group( + title="Ekonomi", + rows=[ + Row(variable_key="Utgift01"), + Row(variable_key="Utgift02"), + Row(variable_key="Utgift03"), + Row(variable_key="Utgift04"), + Row(variable_key="Utgift05"), + Row(variable_key="Utgift06"), + Row(variable_key="Utgift99", is_sum=True), + Row(variable_key="Utgift07"), + Row( + description="Mediekostnad per person i målgruppen", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Utgift01", "Utgift02", "Population02"], + ), + Row( + description="Total driftkostnad per person i målgruppen", + computation=(lambda a, b: a / b), + variable_keys=["Utgift99", "Population02"], + ), + Row( + description="Andel kostnad för medier av total driftkostnad", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Utgift01", "Utgift02", "Utgift99"], + percentage=True, + ), + Row( + description="Andel kostnad för personal av total driftkostnad", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Utgift03", "Utgift04", "Utgift99"], + percentage=True, + ), + Row( + description="Andel kostnad för e-medier av total driftskostnad", + computation=(lambda a, b: a / b), + variable_keys=["Utgift02", "Utgift99"], + percentage=True, + ), + ], + ), + Group( + title="Egengenererade intäkter", + rows=[ + Row(variable_key="Intakt01"), + Row(variable_key="Intakt02"), + Row(variable_key="Intakt03"), + Row(variable_key="Intakt99", is_sum=True), + Row( + description="Andel egengenererade intäkter i förhållande till de totala driftskostnaderna", + computation=(lambda a, b: a / b), + variable_keys=["Intakt99", "Utgift99"], + percentage=True, + ), + ], + ), + Group( + title="Fysiskt bestånd", + extra="Andel av totalt bestånd", + rows=[ + Row( + variable_key="Bestand101", + computation=(lambda a, b: a / b), + variable_keys=["Bestand101", "Bestand199"], + ), + Row( + variable_key="Bestand102", + computation=(lambda a, b: a / b), + variable_keys=["Bestand102", "Bestand199"], + ), + Row( + variable_key="Bestand103", + computation=(lambda a, b: a / b), + variable_keys=["Bestand103", "Bestand199"], + ), + Row( + variable_key="Bestand104", + computation=(lambda a, b: a / b), + variable_keys=["Bestand104", "Bestand199"], + ), + Row( + variable_key="Bestand105", + computation=(lambda a, b: a / b), + variable_keys=["Bestand105", "Bestand199"], + ), + Row( + variable_key="Bestand106", + computation=(lambda a, b: a / b), + variable_keys=["Bestand106", "Bestand199"], + ), + Row( + variable_key="Bestand107", + computation=(lambda a, b: a / b), + variable_keys=["Bestand107", "Bestand199"], + ), + Row( + variable_key="Bestand108", + computation=(lambda a, b: a / b), + variable_keys=["Bestand108", "Bestand199"], + ), + Row( + 
variable_key="Bestand109", + computation=(lambda a, b: a / b), + variable_keys=["Bestand109", "Bestand199"], + ), + Row( + variable_key="Bestand110", + computation=(lambda a, b: a / b), + variable_keys=["Bestand110", "Bestand199"], + ), + Row( + variable_key="Bestand111", + computation=(lambda a, b: a / b), + variable_keys=["Bestand111", "Bestand199"], + ), + Row( + variable_key="Bestand112", + computation=(lambda a, b: a / b), + variable_keys=["Bestand112", "Bestand199"], + ), + Row( + variable_key="Bestand113", + computation=(lambda a, b: a / b), + variable_keys=["Bestand113", "Bestand199"], + ), + Row( + variable_key="Bestand199", + is_sum=True, + computation=(lambda a, b: a / b), + variable_keys=["Bestand199", "Bestand199"], + ), + ], + ), + Group( + title="Fysiskt nyförvärv", + extra="Andel nyförvärv av motsvarande bestånd", + rows=[ + Row( + variable_key="Bestand201", + computation=(lambda a, b: a / b), + variable_keys=["Bestand201", "Bestand101"], + ), + Row( + variable_key="Bestand202", + computation=(lambda a, b: a / b), + variable_keys=["Bestand202", "Bestand102"], + ), + Row( + variable_key="Bestand203", + computation=(lambda a, b: a / b), + variable_keys=["Bestand203", "Bestand103"], + ), + Row( + variable_key="Bestand204", + computation=(lambda a, b: a / b), + variable_keys=["Bestand204", "Bestand104"], + ), + Row( + variable_key="Bestand205", + computation=(lambda a, b: a / b), + variable_keys=["Bestand205", "Bestand105"], + ), + Row( + variable_key="Bestand206", + computation=(lambda a, b: a / b), + variable_keys=["Bestand206", "Bestand106"], + ), + Row( + variable_key="Bestand207", + computation=(lambda a, b: a / b), + variable_keys=["Bestand207", "Bestand107"], + ), + Row( + variable_key="Bestand208", + computation=(lambda a, b: a / b), + variable_keys=["Bestand208", "Bestand108"], + ), + Row( + variable_key="Bestand209", + computation=(lambda a, b: a / b), + variable_keys=["Bestand209", "Bestand109"], + ), + Row( + variable_key="Bestand210", + computation=(lambda a, b: a / b), + variable_keys=["Bestand210", "Bestand110"], + ), + Row( + variable_key="Bestand211", + computation=(lambda a, b: a / b), + variable_keys=["Bestand211", "Bestand111"], + ), + Row( + variable_key="Bestand212", + computation=(lambda a, b: a / b), + variable_keys=["Bestand212", "Bestand112"], + ), + Row( + variable_key="Bestand213", + computation=(lambda a, b: a / b), + variable_keys=["Bestand213", "Bestand113"], + ), + Row( + variable_key="Bestand299", + is_sum=True, + computation=(lambda a, b: a / b), + variable_keys=["Bestand299", "Bestand199"], + ), + ], + ), + Group( + title="Elektroniskt titelbestånd", + rows=[ + Row(variable_key="Bestand301"), + # Row(variable_key=u"Bestand302"), + Row(variable_key="Bestand303"), + Row(variable_key="Bestand304"), + Row(variable_key="Bestand305"), + Row(variable_key="Bestand306"), + Row(variable_key="Bestand307"), + Row(variable_key="Bestand308"), + Row(variable_key="Bestand310"), + Row(variable_key="Bestand311"), + Row(variable_key="Bestand312"), + Row(variable_key="Bestand313"), + Row(variable_key="Bestand399", is_sum=True), + Row( + description="Andel e-bokstitlar av det totala elektroniska titelbeståndet med skriven text", + computation=(lambda a, b: a / b), + variable_keys=["Bestand301", "Bestand399"], + percentage=True, + ), + ], + ), + Group( + title="Barnmedier", + rows=[ + Row(variable_key="Barn01"), + Row(variable_key="Barn02"), + Row(variable_key="Barn03"), + # Row(description=u"Andel tryckta barnmedier av motsvarande totalbestånd", + # 
computation=(lambda a, b, c: a / (b + c)), + # variable_keys=[u"Barn01", u"Bestand101", u"Bestand105"], + # percentage=True), + Row( + description="Andel nyförvärv tryckta barnmedier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["Barn02", "Barn01"], + percentage=True, + ), + Row( + description="Andel utlån tryckta barnmedier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["Barn03", "Barn01"], + percentage=True, + ), + ], + ), + Group( + title="", + rows=[ + Row(variable_key="HCG04"), + Row(variable_key="Ref05"), + ], + ), + Group( + title="Personer med läsnedsättning", + rows=[ + Row(variable_key="LasnedBest01"), + Row(variable_key="LasnedUtlan01"), + Row( + description="Andel utlån av anpassade medier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["LasnedUtlan01", "LasnedBest01"], + percentage=True, + ), + Row( + description="Andel anpassade medier av totala fysiska beståndet", + computation=(lambda a, b: a / b), + variable_keys=["LasnedBest01", "Bestand199"], + percentage=True, + ), + ], + ), + Group( + title="Medier på olika språk", + rows=[ + Row(description="Titlar på svenska språket", label_only=True), + Row(variable_key="Titlar101"), + Row(variable_key="Titlar102"), + Row(variable_key="Titlar199", is_sum=True), + Row( + description="Titlar på nationella minoritetsspråk", + label_only=True, + ), + Row(variable_key="Titlar201"), + Row(variable_key="Titlar202"), + Row(variable_key="Titlar299", is_sum=True), + Row(description="Titlar på utländska språk", label_only=True), + Row(variable_key="Titlar301"), + Row(variable_key="Titlar302"), + Row(variable_key="Titlar399", is_sum=True), + Row( + description="Totalt antal titlar på olika medietyper", + label_only=True, + ), + Row(variable_key="Titlar497"), + Row(variable_key="Titlar498"), + Row(variable_key="Titlar499", is_sum=True), + ], + ), + Group( + title="Elektroniskt bestånd", + rows=[ + Row(variable_key="Databas01"), + Row(variable_key="Databas02"), + Row(variable_key="Databas03"), + Row(variable_key="Databas04"), + Row(variable_key="Databas05"), + Row(variable_key="Databas06"), + Row(variable_key="Databas07"), + Row(variable_key="Databas08"), + Row(variable_key="Databas09"), + Row(variable_key="Databas99", is_sum=True), + ], + ), + Group( + title="Antal initiala lån och omlån fysiskt bestånd", + rows=[ + Row(variable_key="Inilan101", show_in_chart=False), + Row(variable_key="Inilan102", show_in_chart=False), + Row(variable_key="Inilan103", show_in_chart=False), + Row(variable_key="Inilan104", show_in_chart=False), + Row(variable_key="Inilan105", show_in_chart=False), + Row(variable_key="Inilan106", show_in_chart=False), + Row(variable_key="Inilan107", show_in_chart=False), + Row(variable_key="Inilan108", show_in_chart=False), + Row(variable_key="Inilan109", show_in_chart=False), + Row(variable_key="Inilan110", show_in_chart=False), + Row(variable_key="Inilan111", show_in_chart=False), + Row(variable_key="Inilan112", show_in_chart=False), + Row(variable_key="Inilan113", show_in_chart=False), + Row(variable_key="Inilan199", is_sum=True), + Row( + description="Andel inititala lån av det totala antalet lån", + computation=(lambda a, b: a / b), + variable_keys=["Inilan199", "Utlan399"], + percentage=True, + ), + Row(description="", label_only=True), + Row(variable_key="Omlan201", show_in_chart=False), + Row(variable_key="Omlan202", show_in_chart=False), + Row(variable_key="Omlan203", show_in_chart=False), + Row(variable_key="Omlan204", 
show_in_chart=False), + Row(variable_key="Omlan205", show_in_chart=False), + Row(variable_key="Omlan206", show_in_chart=False), + Row(variable_key="Omlan207", show_in_chart=False), + Row(variable_key="Omlan208", show_in_chart=False), + Row(variable_key="Omlan209", show_in_chart=False), + Row(variable_key="Omlan210", show_in_chart=False), + Row(variable_key="Omlan211", show_in_chart=False), + Row(variable_key="Omlan212", show_in_chart=False), + Row(variable_key="Omlan213", show_in_chart=False), + Row(variable_key="Omlan299", is_sum=True), + Row( + description="Andel omlån av det totala antalet lån", + computation=(lambda a, b: a / b), + variable_keys=["Omlan299", "Utlan399"], + percentage=True, + ), + ], + ), + Group( + title="Utlån fysiskt bestånd", + extra="Andel av total fysisk utlåning", + rows=[ + Row( + variable_key="Utlan301", + computation=(lambda a, b: a / b), + variable_keys=["Utlan301", "Utlan399"], + ), + Row( + variable_key="Utlan302", + computation=(lambda a, b: a / b), + variable_keys=["Utlan302", "Utlan399"], + ), + Row( + variable_key="Utlan303", + computation=(lambda a, b: a / b), + variable_keys=["Utlan303", "Utlan399"], + ), + Row( + variable_key="Utlan304", + computation=(lambda a, b: a / b), + variable_keys=["Utlan304", "Utlan399"], + ), + Row( + variable_key="Utlan305", + computation=(lambda a, b: a / b), + variable_keys=["Utlan305", "Utlan399"], + ), + Row( + variable_key="Utlan306", + computation=(lambda a, b: a / b), + variable_keys=["Utlan306", "Utlan399"], + ), + Row( + variable_key="Utlan307", + computation=(lambda a, b: a / b), + variable_keys=["Utlan307", "Utlan399"], + ), + Row( + variable_key="Utlan308", + computation=(lambda a, b: a / b), + variable_keys=["Utlan308", "Utlan399"], + ), + Row( + variable_key="Utlan309", + computation=(lambda a, b: a / b), + variable_keys=["Utlan309", "Utlan399"], + ), + Row( + variable_key="Utlan310", + computation=(lambda a, b: a / b), + variable_keys=["Utlan310", "Utlan399"], + ), + Row( + variable_key="Utlan311", + computation=(lambda a, b: a / b), + variable_keys=["Utlan311", "Utlan399"], + ), + Row( + variable_key="Utlan312", + computation=(lambda a, b: a / b), + variable_keys=["Utlan312", "Utlan399"], + ), + Row( + variable_key="Utlan313", + computation=(lambda a, b: a / b), + variable_keys=["Utlan313", "Utlan399"], + ), + Row(variable_key="Utlan399", is_sum=True), + Row( + description="Antal utlån per person i målgruppen", + computation=(lambda a, b: a / b), + variable_keys=["Utlan399", "Population02"], + ), + Row( + description="Fysiska böcker med skriven text per person i målgruppen", + computation=(lambda a, b: a / b), + variable_keys=["Utlan301", "Population02"], + ), + ], + ), + Group( + title="Läsning på plats i biblioteket", + show_chart=False, + rows=[ + Row(variable_key="Laslan01"), + Row(variable_key="Laslan02"), + Row(variable_key="Laslan99"), + Row( + description="Beräkning lån på plats", + computation=(lambda a, b, c: ((a / b) / 2) / c), + variable_keys=["Laslan01", "Laslan02", "Open101"], + ), + ], + ), + Group( + title="Fjärrlån", + rows=[ + Row(description="Inom Sverige", label_only=True), + Row(variable_key="Fjarr101"), + Row(variable_key="Fjarr102"), + Row(description="Utanför Sverige", label_only=True), + Row(variable_key="Fjarr201"), + Row(variable_key="Fjarr202"), + ], + ), + Group( + title="Summering fjärrlån", + show_chart=False, + rows=[ + Row(variable_key="Fjarr397"), + Row(variable_key="Fjarr398"), + Row(variable_key="Fjarr399", is_sum=True), + Row( + description="Andel utländska fjärrlån 
totalt", + computation=(lambda a, b: a / b), + variable_keys=["Fjarr299", "Fjarr399"], + percentage=True, + ), + Row( + description="Nettofjärrinlåning in-ut", + computation=(lambda a, b: a - b), + variable_keys=["Fjarr397", "Fjarr398"], + ), + ], + ), + Group( + title="Användning av elektroniska samlingar", + rows=[ + Row(description="Antal sökningar", label_only=True), + Row(variable_key="Elan101"), + Row(variable_key="Elan102"), + Row(variable_key="Elan103"), + Row(variable_key="Elan104"), + Row(variable_key="Elan105"), + Row(variable_key="Elan106"), + Row(variable_key="Elan107"), + Row(variable_key="Elan108"), + Row(variable_key="Elan109"), + Row(variable_key="Elan199", is_sum=True), + Row(description="Antal nedladdningar", label_only=True), + Row(variable_key="Elan201"), + Row(variable_key="Elan202"), + Row(variable_key="Elan203"), + Row(variable_key="Elan204"), + Row(variable_key="Elan205"), + Row(variable_key="Elan206"), + Row(variable_key="Elan207"), + Row(variable_key="Elan208"), + Row(variable_key="Elan209"), + Row(variable_key="Elan299", is_sum=True), + Row(description="Antal nedladdade sektioner", label_only=True), + Row(variable_key="Elan301"), + Row(variable_key="Elan399", is_sum=True), + Row( + description="Total användning av de elektroniska samlingarna", + computation=(lambda a, b, c: a + b + c), + variable_keys=["Elan199", "Elan299", "Elan399"], + ), + ], + ), + Group( + title="Besök", + rows=[ + Row(variable_key="Besok01"), + Row(variable_key="Besok02"), + Row(variable_key="Besok03"), + Row(variable_key="Besok04"), + Row(variable_key="Besok05"), + ], + ), + Group( + title="Aktiva användare", + rows=[ + Row(variable_key="Aktiv01"), + Row(variable_key="Aktiv02"), + Row(variable_key="Aktiv04"), + Row(variable_key="Aktiv99", is_sum=True), + Row(variable_key="Aktiv03"), + Row( + description="Andel kvinnor som är aktiva låntagare", + computation=(lambda a, b: a / (a + b)), + variable_keys=["Aktiv01", "Aktiv02"], + percentage=True, + ), + Row( + description="Andel barn och unga som är aktiva låntagare", + computation=(lambda a, b, c: a / (b + c)), + variable_keys=["Aktiv03", "Aktiv01", "Aktiv02"], + percentage=True, + ), + Row( + description="Andel aktiva användare per person i målgruppen", + computation=(lambda a, b: a / b), + variable_keys=["Aktiv99", "Population02"], + percentage=True, + ), + Row( + description="Antal fysiska besök per antal aktiva användare", + computation=(lambda a, b: a / b), + variable_keys=["Besok01", "Aktiv99"], + ), + ], + ), + Group( + title="Resurser", + rows=[ + Row(variable_key="Resurs01"), + Row(variable_key="Resurs02"), + Row(variable_key="Resurs03"), + Row(variable_key="Resurs04"), + Row(variable_key="Resurs05"), + Row(variable_key="Resurs06"), + Row(variable_key="Resurs07"), + Row(variable_key="Resurs08"), + Row(variable_key="Resurs09"), + Row(variable_key="Resurs10"), + Row( + description="Andel publika ytor", + computation=(lambda a, b: a / (a + b)), + variable_keys=["Resurs09", "Resurs10"], + percentage=True, + ), + ], + ), + Group( + title="Öppettider", + rows=[ + Row( + description="Servicestället med de generösaste öppettiderna", + label_only=True, + ), + Row(variable_key="Open101"), + Row(variable_key="Open102"), + Row(variable_key="Open103"), + Row(variable_key="Open104"), + Row(variable_key="Open105"), + Row(variable_key="Open106"), + Row( + description="Övriga serviceställen sammantaget", label_only=True + ), + Row(variable_key="Open201"), + Row(variable_key="Open202"), + Row(variable_key="Open203"), + Row(variable_key="Open204"), + 
Row(variable_key="Open205"), + Row(variable_key="Open206"), + Row( + description="Medelantal öppetdagar per år", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Open201", "Open101", "BemanService01"], + ), + Row( + description="Medelantal öppettimmar alla serviceställen", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Open203", "Open103", "BemanService01"], + ), + Row( + description="Andel öppettimmar med reducerad service", + computation=(lambda a, b, c, d: (a + b) / (c + d)), + variable_keys=["Open104", "Open204", "Open103", "Open203"], + percentage=True, + ), + Row( + description="Andel öppettimmar utanför kontorstid", + computation=(lambda a, b, c, d: (a + b) / (c + d)), + variable_keys=["Open106", "Open206", "Open103", "Open203"], + percentage=True, + ), + ], + ), + Group( + title="Service", + rows=[ + Row(variable_key="Serv01"), + Row(variable_key="Serv02"), + Row(variable_key="Serv03"), + Row(variable_key="Serv04"), + Row(variable_key="Serv05"), + Row(variable_key="Serv06"), + Row(variable_key="Serv07"), + ], + ), + Group( + title="Publika aktivitetstillfällen", + extra="Varav andel tillfällen för barn och unga", + rows=[ + Row( + variable_key="Publ101", + computation=(lambda a, b: a / b), + variable_keys=["Publ201", "Publ101"], + ), + Row( + variable_key="Publ102", + computation=(lambda a, b: a / b), + variable_keys=["Publ202", "Publ102"], + ), + Row( + variable_key="Publ103", + computation=(lambda a, b: a / b), + variable_keys=["Publ203", "Publ103"], + ), + Row( + variable_key="Publ104", + computation=(lambda a, b: a / b), + variable_keys=["Publ204", "Publ104"], + ), + Row( + variable_key="Publ105", + computation=(lambda a, b: a / b), + variable_keys=["Publ205", "Publ105"], + ), + Row( + variable_key="Publ106", + computation=(lambda a, b: a / b), + variable_keys=["Publ206", "Publ106"], + ), + Row( + variable_key="Publ107", + computation=(lambda a, b: a / b), + variable_keys=["Publ207", "Publ107"], + ), + Row( + variable_key="Publ108", + computation=(lambda a, b: a / b), + variable_keys=["Publ208", "Publ108"], + ), + Row( + variable_key="Publ109", + computation=(lambda a, b: a / b), + variable_keys=["Publ209", "Publ109"], + ), + Row( + variable_key="Publ110", + computation=(lambda a, b: a / b), + variable_keys=["Publ210", "Publ110"], + ), + Row( + variable_key="Publ111", + computation=(lambda a, b: a / b), + variable_keys=["Publ211", "Publ111"], + ), + Row( + variable_key="Publ112", + computation=(lambda a, b: a / b), + variable_keys=["Publ212", "Publ112"], + ), + Row( + variable_key="Publ113", + computation=(lambda a, b: a / b), + variable_keys=["Publ213", "Publ113"], + ), + Row( + variable_key="Publ114", + computation=(lambda a, b: a / b), + variable_keys=["Publ214", "Publ114"], + ), + Row( + variable_key="Publ115", + computation=(lambda a, b: a / b), + variable_keys=["Publ215", "Publ115"], + ), + Row( + variable_key="Publ116", + computation=(lambda a, b: a / b), + variable_keys=["Publ216", "Publ116"], + ), + Row( + variable_key="Publ117", + computation=(lambda a, b: a / b), + variable_keys=["Publ217", "Publ117"], + ), + Row( + variable_key="Publ118", + computation=(lambda a, b: a / b), + variable_keys=["Publ218", "Publ118"], + ), + Row( + variable_key="Publ119", + computation=(lambda a, b: a / b), + variable_keys=["Publ219", "Publ119"], + ), + Row( + variable_key="Publ120", + computation=(lambda a, b: a / b), + variable_keys=["Publ220", "Publ120"], + ), + Row( + variable_key="Publ199", + computation=(lambda a, b: a / b), + variable_keys=["Publ299", 
"Publ199"], + is_sum=True, + ), + Row( + description="Andel publika aktiviteter primärt för barn/unga", + computation=(lambda a, b: a / b), + variable_keys=["Publ299", "Publ199"], + percentage=True, + ), + Row(description="", label_only=True), + Row(variable_key="Publ201", show_in_chart=False), + Row(variable_key="Publ202", show_in_chart=False), + Row(variable_key="Publ203", show_in_chart=False), + Row(variable_key="Publ204", show_in_chart=False), + Row(variable_key="Publ205", show_in_chart=False), + Row(variable_key="Publ206", show_in_chart=False), + Row(variable_key="Publ207", show_in_chart=False), + Row(variable_key="Publ208", show_in_chart=False), + Row(variable_key="Publ209", show_in_chart=False), + Row(variable_key="Publ210", show_in_chart=False), + Row(variable_key="Publ211", show_in_chart=False), + Row(variable_key="Publ212", show_in_chart=False), + Row(variable_key="Publ213", show_in_chart=False), + Row(variable_key="Publ214", show_in_chart=False), + Row(variable_key="Publ215", show_in_chart=False), + Row(variable_key="Publ216", show_in_chart=False), + Row(variable_key="Publ217", show_in_chart=False), + Row(variable_key="Publ218", show_in_chart=False), + Row(variable_key="Publ219", show_in_chart=False), + Row(variable_key="Publ220", show_in_chart=False), + Row(variable_key="Publ299", is_sum=True, show_in_chart=False), + ], + ), + ] + ) def report_template_base_with_municipality_calculations(): - return ReportTemplate(groups=[ - Group(title=u"Organisation", - rows=[ - Row(variable_key=u"BemanService01"), - Row(variable_key=u"Integrerad01"), - Row(variable_key=u"Obeman01"), - Row(variable_key=u"ObemanLan01", - show_in_chart=False), - Row(variable_key=u"Bokbuss01", - show_in_chart=False), - Row(variable_key=u"BokbussHP01", - show_in_chart=False), - Row(variable_key=u"Bokbil01", - show_in_chart=False), - Row(variable_key=u"Population01", - show_in_chart=False), - Row(variable_key=u"Population02", - show_in_chart=False), - Row(variable_key=u"Population03", - show_in_chart=False), - Row(description=u"Antal bemannade serviceställen per 1000 invånare", - computation=(lambda a, b: a / (b / 1000)), - variable_keys=[u"BemanService01", u"Population01"]), - Row(description=u"Andel integrerade serviceställen", - computation=(lambda a, b: a / b), - variable_keys=[u"Integrerad01", u"BemanService01"], - percentage=True), - Row( - description=u"Medelantal utlån till servicesställen där vidare låneregistrering inte sker", - computation=(lambda a, b: a / b), - variable_keys=[u"ObemanLan01", u"Obeman01"]) - ]), - Group(title=u"Årsverken", - rows=[ - Row(variable_key=u"Arsverke01"), - Row(variable_key=u"Arsverke02"), - Row(variable_key=u"Arsverke03"), - Row(variable_key=u"Arsverke04"), - Row(variable_key=u"Arsverke99", is_sum=True), - Row(variable_key=u"Arsverke05"), - Row(description=u"Andel årsverken för barn och unga", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke05", u"Arsverke99"], - percentage=True - ), - Row(description=u"Andel årsverken med bibliotekariekompetens", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke01", u"Arsverke99"], - percentage=True), - Row(description=u"Antal årsverken per 1000 invånare", - computation=(lambda a, b: a / (b / 1000)), - variable_keys=[u"Arsverke99", u"Population01"]), - Row(description=u"Antal fysiska besök per årsverke", - computation=(lambda a, b: a / b), - variable_keys=[u"Besok01", u"Arsverke99"]), - Row(description=u"Antal aktiva låntagare per årsverke", - computation=(lambda a, b: a / b), - variable_keys=[u"Aktiv99", 
u"Arsverke99"]), - ]), - Group(title=u"Personal", - rows=[ - Row(variable_key=u"Personer01"), - Row(variable_key=u"Personer02"), - Row(variable_key=u"Personer99", is_sum=True), - Row(description=u"Andel anställda kvinnor", - computation=(lambda a, b: a / b), - variable_keys=[u"Personer01", u"Personer99"], - percentage=True), - Row(description=u"Antal årsverken per anställd", - computation=(lambda a, b: a / b), - variable_keys=[u"Arsverke99", u"Personer99"]), - ]), - Group(title=u"Ekonomi", - rows=[ - Row(variable_key=u"Utgift01"), - Row(variable_key=u"Utgift02"), - Row(variable_key=u"Utgift03"), - Row(variable_key=u"Utgift04"), - Row(variable_key=u"Utgift05"), - Row(variable_key=u"Utgift06"), - Row(variable_key=u"Utgift99", is_sum=True), - Row(variable_key=u"Utgift07"), - Row(description=u"Mediekostnad per invånare i kommunen", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Utgift01", u"Utgift02", u"Population01"]), - Row(description=u"Total driftkostnad per invånare i kommunen", - computation=(lambda a, b: a / b), - variable_keys=[u"Utgift99", u"Population01"]), - Row(description=u"Andel kostnad för medier av total driftkostnad", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Utgift01", u"Utgift02", u"Utgift99"], - percentage=True), - Row(description=u"Andel kostnad för personal av total driftkostnad", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Utgift03", u"Utgift04", u"Utgift99"], - percentage=True), - Row(description=u"Andel kostnad för e-medier av total driftskostnad", - computation=(lambda a, b: a / b), - variable_keys=[u"Utgift02", u"Utgift99"], - percentage=True), - ]), - Group(title=u"Egengenererade intäkter", - rows=[ - Row(variable_key=u"Intakt01"), - Row(variable_key=u"Intakt02"), - Row(variable_key=u"Intakt03"), - Row(variable_key=u"Intakt99", is_sum=True), - Row( - description=u"Andel egengenererade intäkter i förhållande till de totala driftskostnaderna", - computation=(lambda a, b: a / b), - variable_keys=[u"Intakt99", u"Utgift99"], - percentage=True) - ]), - Group(title=u"Fysiskt bestånd", - extra=u"Andel av totalt bestånd", - rows=[ - Row(variable_key=u"Bestand101", - computation=(lambda a, b: a / b), - variable_keys=["Bestand101", "Bestand199"]), - Row(variable_key=u"Bestand102", - computation=(lambda a, b: a / b), - variable_keys=["Bestand102", "Bestand199"]), - Row(variable_key=u"Bestand103", - computation=(lambda a, b: a / b), - variable_keys=["Bestand103", "Bestand199"]), - Row(variable_key=u"Bestand104", - computation=(lambda a, b: a / b), - variable_keys=["Bestand104", "Bestand199"]), - Row(variable_key=u"Bestand105", - computation=(lambda a, b: a / b), - variable_keys=["Bestand105", "Bestand199"]), - Row(variable_key=u"Bestand106", - computation=(lambda a, b: a / b), - variable_keys=["Bestand106", "Bestand199"]), - Row(variable_key=u"Bestand107", - computation=(lambda a, b: a / b), - variable_keys=["Bestand107", "Bestand199"]), - Row(variable_key=u"Bestand108", - computation=(lambda a, b: a / b), - variable_keys=["Bestand108", "Bestand199"]), - Row(variable_key=u"Bestand109", - computation=(lambda a, b: a / b), - variable_keys=["Bestand109", "Bestand199"]), - Row(variable_key=u"Bestand110", - computation=(lambda a, b: a / b), - variable_keys=["Bestand110", "Bestand199"]), - Row(variable_key=u"Bestand111", - computation=(lambda a, b: a / b), - variable_keys=["Bestand111", "Bestand199"]), - Row(variable_key=u"Bestand112", - computation=(lambda a, b: a / b), - variable_keys=["Bestand112", "Bestand199"]), - 
Row(variable_key=u"Bestand113", - computation=(lambda a, b: a / b), - variable_keys=["Bestand113", "Bestand199"]), - Row(variable_key=u"Bestand199", is_sum=True, - computation=(lambda a, b: a / b), - variable_keys=["Bestand199", "Bestand199"]), - Row(description=u"Totalt fysiskt mediebestånd per invånare", - computation=(lambda a, b: a / b), - variable_keys=[u"Bestand199", u"Population01"]), - Row(description=u"Antal fysiska böcker med skriven text per invånare i beståndet", - computation=(lambda a, b: a / b), - variable_keys=[u"Bestand101", u"Population01"]) - ]), - Group(title=u"Fysiskt nyförvärv", - extra=u"Andel nyförvärv av motsvarande bestånd", - rows=[ - Row(variable_key=u"Bestand201", - computation=(lambda a, b: a / b), - variable_keys=["Bestand201", "Bestand101"]), - Row(variable_key=u"Bestand202", - computation=(lambda a, b: a / b), - variable_keys=["Bestand202", "Bestand102"]), - Row(variable_key=u"Bestand203", - computation=(lambda a, b: a / b), - variable_keys=["Bestand203", "Bestand103"]), - Row(variable_key=u"Bestand204", - computation=(lambda a, b: a / b), - variable_keys=["Bestand204", "Bestand104"]), - Row(variable_key=u"Bestand205", - computation=(lambda a, b: a / b), - variable_keys=["Bestand205", "Bestand105"]), - Row(variable_key=u"Bestand206", - computation=(lambda a, b: a / b), - variable_keys=["Bestand206", "Bestand106"]), - Row(variable_key=u"Bestand207", - computation=(lambda a, b: a / b), - variable_keys=["Bestand207", "Bestand107"]), - Row(variable_key=u"Bestand208", - computation=(lambda a, b: a / b), - variable_keys=["Bestand208", "Bestand108"]), - Row(variable_key=u"Bestand209", - computation=(lambda a, b: a / b), - variable_keys=["Bestand209", "Bestand109"]), - Row(variable_key=u"Bestand210", - computation=(lambda a, b: a / b), - variable_keys=["Bestand210", "Bestand110"]), - Row(variable_key=u"Bestand211", - computation=(lambda a, b: a / b), - variable_keys=["Bestand211", "Bestand111"]), - Row(variable_key=u"Bestand212", - computation=(lambda a, b: a / b), - variable_keys=["Bestand212", "Bestand112"]), - Row(variable_key=u"Bestand213", - computation=(lambda a, b: a / b), - variable_keys=["Bestand213", "Bestand113"]), - Row(variable_key=u"Bestand299", is_sum=True, - computation=(lambda a, b: a / b), - variable_keys=["Bestand299", "Bestand199"]), - Row(description=u"Antal fysiska nyförvärv per 1000 invånare (ej tidn.tidskr.)", - computation=(lambda a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11: - (a1 + a2 + a3 + a4 + a5 + a6 + a7 + a8 + a9 + a10) / (a11 / 1000)), - variable_keys=[u"Bestand101", u"Bestand103", u"Bestand104", u"Bestand107", u"Bestand108", - u"Bestand109", u"Bestand110", u"Bestand111", u"Bestand112", u"Bestand113", - u"Population01"]) - ]), - Group(title=u"Elektroniskt titelbestånd", - rows=[ - Row(variable_key=u"Bestand301"), - #Row(variable_key=u"Bestand302"), - Row(variable_key=u"Bestand303"), - Row(variable_key=u"Bestand304"), - Row(variable_key=u"Bestand305"), - Row(variable_key=u"Bestand306"), - Row(variable_key=u"Bestand307"), - Row(variable_key=u"Bestand308"), - Row(variable_key=u"Bestand310"), - Row(variable_key=u"Bestand311"), - Row(variable_key=u"Bestand312"), - Row(variable_key=u"Bestand313"), - Row(variable_key=u"Bestand399", is_sum=True), - Row(description=u"Andel e-bokstitlar av det totala elektroniska titelbeståndet med skriven text", - computation=(lambda a, b: a / b), - variable_keys=[u"Bestand301", u"Bestand399"], - percentage=True) - ]), - Group(title=u"Barnmedier", - rows=[ - Row(variable_key=u"Barn01"), - 
Row(variable_key=u"Barn02"), - Row(variable_key=u"Barn03"), - Row(description=u"Antal bestånd barnböcker per barn", - computation=(lambda a, b: a / b), - variable_keys=[u"Barn01", u"Population03"]), - #Row(description=u"Andel tryckta barnmedier av motsvarande totalbestånd", - #computation=(lambda a, b, c: a / (b + c)), - #variable_keys=[u"Barn01", u"Bestand101", u"Bestand105"], - #percentage=True), - Row(description=u"Andel nyförvärv tryckta barnmedier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"Barn02", u"Barn01"], - percentage=True), - Row(description=u"Andel utlån tryckta barnmedier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"Barn03", u"Barn01"], - percentage=True), - Row(description=u"Antal barnutlån per barninvånare", - computation=(lambda a, b: a / b), - variable_keys=[u"Barn03", u"Population03"]), - ]), - Group(title=u"", - rows=[ - Row(variable_key=u"HCG04"), - Row(variable_key=u"Ref05"), - ]), - Group(title=u"Personer med läsnedsättning", - rows=[ - Row(variable_key=u"LasnedBest01"), - Row(variable_key=u"LasnedUtlan01"), - Row(description=u"Andel utlån av anpassade medier av motsvarande bestånd", - computation=(lambda a, b: a / b), - variable_keys=[u"LasnedUtlan01", u"LasnedBest01"], - percentage=True), - Row(description=u"Andel anpassade medier av totala fysiska beståndet", - computation=(lambda a, b: a / b), - variable_keys=[u"LasnedBest01", u"Bestand199"], - percentage=True), - ]), - Group(title=u"Medier på olika språk", - rows=[ - Row(description=u"Titlar på svenska språket", label_only=True), - Row(variable_key=u"Titlar101"), - Row(variable_key=u"Titlar102"), - Row(variable_key=u"Titlar199", is_sum=True), - Row(description=u"Titlar på nationella minoritetsspråk", label_only=True), - Row(variable_key=u"Titlar201"), - Row(variable_key=u"Titlar202"), - Row(variable_key=u"Titlar299", is_sum=True), - Row(description=u"Titlar på utländska språk", label_only=True), - Row(variable_key=u"Titlar301"), - Row(variable_key=u"Titlar302"), - Row(variable_key=u"Titlar399", is_sum=True), - Row(description=u"Totalt antal titlar på olika medietyper", label_only=True), - Row(variable_key=u"Titlar497"), - Row(variable_key=u"Titlar498"), - Row(variable_key=u"Titlar499", is_sum=True), - ]), - Group(title=u"Elektroniskt bestånd", - rows=[ - Row(variable_key=u"Databas01"), - Row(variable_key=u"Databas02"), - Row(variable_key=u"Databas03"), - Row(variable_key=u"Databas04"), - Row(variable_key=u"Databas05"), - Row(variable_key=u"Databas06"), - Row(variable_key=u"Databas07"), - Row(variable_key=u"Databas08"), - Row(variable_key=u"Databas09"), - Row(variable_key=u"Databas99", is_sum=True), - ]), - Group(title=u"Antal initiala lån och omlån fysiskt bestånd", - rows=[ - Row(variable_key=u"Inilan101", - show_in_chart=False), - Row(variable_key=u"Inilan102", - show_in_chart=False), - Row(variable_key=u"Inilan103", - show_in_chart=False), - Row(variable_key=u"Inilan104", - show_in_chart=False), - Row(variable_key=u"Inilan105", - show_in_chart=False), - Row(variable_key=u"Inilan106", - show_in_chart=False), - Row(variable_key=u"Inilan107", - show_in_chart=False), - Row(variable_key=u"Inilan108", - show_in_chart=False), - Row(variable_key=u"Inilan109", - show_in_chart=False), - Row(variable_key=u"Inilan110", - show_in_chart=False), - Row(variable_key=u"Inilan111", - show_in_chart=False), - Row(variable_key=u"Inilan112", - show_in_chart=False), - Row(variable_key=u"Inilan113", - show_in_chart=False), - Row(variable_key=u"Inilan199", 
is_sum=True), - Row(description=u"Andel inititala lån av det totala antalet lån", - computation=(lambda a, b: a / b), - variable_keys=[u"Inilan199", u"Utlan399"], - percentage=True), - Row(description=u"", label_only=True), - Row(variable_key=u"Omlan201", - show_in_chart=False), - Row(variable_key=u"Omlan202", - show_in_chart=False), - Row(variable_key=u"Omlan203", - show_in_chart=False), - Row(variable_key=u"Omlan204", - show_in_chart=False), - Row(variable_key=u"Omlan205", - show_in_chart=False), - Row(variable_key=u"Omlan206", - show_in_chart=False), - Row(variable_key=u"Omlan207", - show_in_chart=False), - Row(variable_key=u"Omlan208", - show_in_chart=False), - Row(variable_key=u"Omlan209", - show_in_chart=False), - Row(variable_key=u"Omlan210", - show_in_chart=False), - Row(variable_key=u"Omlan211", - show_in_chart=False), - Row(variable_key=u"Omlan212", - show_in_chart=False), - Row(variable_key=u"Omlan213", - show_in_chart=False), - Row(variable_key=u"Omlan299", is_sum=True), - Row(description=u"Andel omlån av det totala antalet lån", - computation=(lambda a, b: a / b), - variable_keys=[u"Omlan299", u"Utlan399"], - percentage=True), - ]), - Group(title=u"Utlån fysiskt bestånd", - extra=u"Andel av total fysisk utlåning", - rows=[ - Row(variable_key=u"Utlan301", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan301", u"Utlan399"]), - Row(variable_key=u"Utlan302", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan302", u"Utlan399"]), - Row(variable_key=u"Utlan303", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan303", u"Utlan399"]), - Row(variable_key=u"Utlan304", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan304", u"Utlan399"]), - Row(variable_key=u"Utlan305", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan305", u"Utlan399"]), - Row(variable_key=u"Utlan306", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan306", u"Utlan399"]), - Row(variable_key=u"Utlan307", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan307", u"Utlan399"]), - Row(variable_key=u"Utlan308", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan308", u"Utlan399"]), - Row(variable_key=u"Utlan309", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan309", u"Utlan399"]), - Row(variable_key=u"Utlan310", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan310", u"Utlan399"]), - Row(variable_key=u"Utlan311", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan311", u"Utlan399"]), - Row(variable_key=u"Utlan312", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan312", u"Utlan399"]), - Row(variable_key=u"Utlan313", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan313", u"Utlan399"]), - Row(variable_key=u"Utlan399", is_sum=True), - Row(description=u"Antal fysiska utlån per kommuninvånare", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan399", u"Population01"]), - Row(description=u"Fysiska böcker med skriven text per invånare", - computation=(lambda a, b: a / b), - variable_keys=[u"Utlan301", u"Population01"]), - ]), - Group(title=u"Läsning på plats i biblioteket", - show_chart=False, - rows=[ - Row(variable_key=u"Laslan01"), - Row(variable_key=u"Laslan02"), - Row(variable_key=u"Laslan99"), - Row(description=u"Beräkning lån på plats", - computation=(lambda a, b, c: ((a / b) / 2) / c), - variable_keys=[u"Laslan01", u"Laslan02", u"Open101"]), - ]), - Group(title=u"Fjärrlån", - rows=[ - Row(description=u"Inom Sverige", - label_only=True), - Row(variable_key=u"Fjarr101"), - 
Row(variable_key=u"Fjarr102"), - Row(description=u"Utanför Sverige", - label_only=True), - Row(variable_key=u"Fjarr201"), - Row(variable_key=u"Fjarr202"), - ]), - Group(title=u"Summering fjärrlån", - show_chart=False, - rows=[ - Row(variable_key=u"Fjarr397"), - Row(variable_key=u"Fjarr398"), - Row(variable_key=u"Fjarr399", - is_sum=True), - Row(description=u"Andel utländska fjärrlån totalt", - computation=(lambda a, b: a / b), - variable_keys=[u"Fjarr299", u"Fjarr399"], - percentage=True), - Row(description=u"Nettofjärrinlåning in-ut", - computation=(lambda a, b: a - b), - variable_keys=[u"Fjarr397", u"Fjarr398"]), - ]), - Group(title=u"Användning av elektroniska samlingar", - rows=[ - Row(description=u"Antal sökningar", - label_only=True), - Row(variable_key=u"Elan101"), - Row(variable_key=u"Elan102"), - Row(variable_key=u"Elan103"), - Row(variable_key=u"Elan104"), - Row(variable_key=u"Elan105"), - Row(variable_key=u"Elan106"), - Row(variable_key=u"Elan107"), - Row(variable_key=u"Elan108"), - Row(variable_key=u"Elan109"), - Row(variable_key=u"Elan199", - is_sum=True), - Row(description=u"Antal nedladdningar", - label_only=True), - Row(variable_key=u"Elan201"), - Row(variable_key=u"Elan202"), - Row(variable_key=u"Elan203"), - Row(variable_key=u"Elan204"), - Row(variable_key=u"Elan205"), - Row(variable_key=u"Elan206"), - Row(variable_key=u"Elan207"), - Row(variable_key=u"Elan208"), - Row(variable_key=u"Elan209"), - Row(variable_key=u"Elan299", - is_sum=True), - Row(description=u"Antal nedladdade sektioner", - label_only=True), - Row(variable_key=u"Elan301"), - Row(variable_key=u"Elan399", - is_sum=True), - Row(description=u"Total användning av de elektroniska samlingarna", - computation=(lambda a, b, c: a + b + c), - variable_keys=[u"Elan199", u"Elan299", u"Elan399"]) - ]), - Group(title=u"Besök", - rows=[ - Row(variable_key=u"Besok01"), - Row(variable_key=u"Besok02"), - Row(variable_key=u"Besok03"), - Row(variable_key=u"Besok04"), - Row(variable_key=u"Besok05"), - Row(description=u"Antal fysiska besök per invånare", - computation=(lambda a, b: a / b), - variable_keys=[u"Besok01", u"Population01"]) - ]), - Group(title=u"Aktiva användare", - rows=[ - Row(variable_key=u"Aktiv01"), - Row(variable_key=u"Aktiv02"), - Row(variable_key=u"Aktiv04"), - Row(variable_key=u"Aktiv99", - is_sum=True), - Row(variable_key=u"Aktiv03"), - Row(description=u"Andel kvinnor som är aktiva låntagare", - computation=(lambda a, b: a / b), - variable_keys=[u"Aktiv01", u"Aktiv99"], - percentage=True), - Row(description=u"Andel barn och unga som är aktiva låntagare", - computation=(lambda a, b: a / b), - variable_keys=[u"Aktiv03", u"Population03"], - percentage=True), - Row(description=u"Andel aktiva användare per invånare", - computation=(lambda a, b: a / b ), - variable_keys=[u"Aktiv99", u"Population01"], - percentage=True), - Row(description=u"Antal fysiska besök per antal aktiva användare", - computation=(lambda a, b: a / b ), - variable_keys=[u"Besok01", u"Aktiv99"]), - ]), - Group(title=u"Resurser", - rows=[ - Row(variable_key=u"Resurs01"), - Row(variable_key=u"Resurs02"), - Row(variable_key=u"Resurs03"), - Row(variable_key=u"Resurs04"), - Row(variable_key=u"Resurs05"), - Row(variable_key=u"Resurs06"), - Row(variable_key=u"Resurs07"), - Row(variable_key=u"Resurs08"), - Row(variable_key=u"Resurs09"), - Row(variable_key=u"Resurs10"), - Row(description=u"Andel publika ytor", - computation=(lambda a, b: a / (a + b)), - variable_keys=[u"Resurs09", u"Resurs10"], - percentage=True) - ]), - Group(title=u"Öppettider", - 
rows=[ - Row(description=u"Servicestället med de generösaste öppettiderna", - label_only=True), - Row(variable_key=u"Open101"), - Row(variable_key=u"Open102"), - Row(variable_key=u"Open103"), - Row(variable_key=u"Open104"), - Row(variable_key=u"Open105"), - Row(variable_key=u"Open106"), - Row(description=u"Övriga serviceställen sammantaget", - label_only=True), - Row(variable_key=u"Open201"), - Row(variable_key=u"Open202"), - Row(variable_key=u"Open203"), - Row(variable_key=u"Open204"), - Row(variable_key=u"Open205"), - Row(variable_key=u"Open206"), - Row(description=u"Medelantal öppetdagar per år", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Open201", u"Open101", u"BemanService01"]), - Row(description=u"Medelantal öppettimmar alla serviceställen", - computation=(lambda a, b, c: (a + b) / c), - variable_keys=[u"Open203", u"Open103", u"BemanService01"]), - Row(description=u"Andel öppettimmar med reducerad service", - computation=(lambda a, b, c, d: (a + b) / (c + d)), - variable_keys=[u"Open104", u"Open204", u"Open103", u"Open203"], - percentage=True), - Row(description=u"Andel öppettimmar utanför kontorstid", - computation=(lambda a, b, c, d: (a + b) / (c + d)), - variable_keys=[u"Open106", u"Open206", u"Open103", u"Open203"], - percentage=True), - ]), - Group(title=u"Service", - rows=[ - Row(variable_key=u"Serv01"), - Row(variable_key=u"Serv02"), - Row(variable_key=u"Serv03"), - Row(variable_key=u"Serv04"), - Row(variable_key=u"Serv05"), - Row(variable_key=u"Serv06"), - Row(variable_key=u"Serv07"), - ]), - Group(title=u"Publika aktivitetstillfällen", - extra=u"Varav andel tillfällen för barn och unga", - rows=[ - Row(variable_key=u"Publ101", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ201", u"Publ101"]), - Row(variable_key=u"Publ102", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ202", u"Publ102"]), - Row(variable_key=u"Publ103", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ203", u"Publ103"]), - Row(variable_key=u"Publ104", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ204", u"Publ104"]), - Row(variable_key=u"Publ105", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ205", u"Publ105"]), - Row(variable_key=u"Publ106", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ206", u"Publ106"]), - Row(variable_key=u"Publ107", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ207", u"Publ107"]), - Row(variable_key=u"Publ108", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ208", u"Publ108"]), - Row(variable_key=u"Publ109", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ209", u"Publ109"]), - Row(variable_key=u"Publ110", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ210", u"Publ110"]), - Row(variable_key=u"Publ111", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ211", u"Publ111"]), - Row(variable_key=u"Publ112", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ212", u"Publ112"]), - Row(variable_key=u"Publ113", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ213", u"Publ113"]), - Row(variable_key=u"Publ114", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ214", u"Publ114"]), - Row(variable_key=u"Publ115", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ215", u"Publ115"]), - Row(variable_key=u"Publ116", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ216", u"Publ116"]), - Row(variable_key=u"Publ117", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ217", u"Publ117"]), - 
Row(variable_key=u"Publ118", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ218", u"Publ118"]), - Row(variable_key=u"Publ119", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ219", u"Publ119"]), - Row(variable_key=u"Publ120", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ220", u"Publ120"]), - Row(variable_key=u"Publ199", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ299", u"Publ199"], - is_sum=True), - Row(description=u"Andel publika aktiviteter primärt för barn/unga", - computation=(lambda a, b: a / b), - variable_keys=[u"Publ299", u"Publ199"], - percentage=True), - Row(description=u"", - label_only=True), - Row(variable_key=u"Publ201", - show_in_chart=False), - Row(variable_key=u"Publ202", - show_in_chart=False), - Row(variable_key=u"Publ203", - show_in_chart=False), - Row(variable_key=u"Publ204", - show_in_chart=False), - Row(variable_key=u"Publ205", - show_in_chart=False), - Row(variable_key=u"Publ206", - show_in_chart=False), - Row(variable_key=u"Publ207", - show_in_chart=False), - Row(variable_key=u"Publ208", - show_in_chart=False), - Row(variable_key=u"Publ209", - show_in_chart=False), - Row(variable_key=u"Publ210", - show_in_chart=False), - Row(variable_key=u"Publ211", - show_in_chart=False), - Row(variable_key=u"Publ212", - show_in_chart=False), - Row(variable_key=u"Publ213", - show_in_chart=False), - Row(variable_key=u"Publ214", - show_in_chart=False), - Row(variable_key=u"Publ215", - show_in_chart=False), - Row(variable_key=u"Publ216", - show_in_chart=False), - Row(variable_key=u"Publ217", - show_in_chart=False), - Row(variable_key=u"Publ218", - show_in_chart=False), - Row(variable_key=u"Publ219", - show_in_chart=False), - Row(variable_key=u"Publ220", - show_in_chart=False), - Row(variable_key=u"Publ299", - is_sum=True, - show_in_chart=False), - ]), - ]) + return ReportTemplate( + groups=[ + Group( + title="Organisation", + rows=[ + Row(variable_key="BemanService01"), + Row(variable_key="Integrerad01"), + Row(variable_key="Obeman01"), + Row(variable_key="ObemanLan01", show_in_chart=False), + Row(variable_key="Bokbuss01", show_in_chart=False), + Row(variable_key="BokbussHP01", show_in_chart=False), + Row(variable_key="Bokbil01", show_in_chart=False), + Row(variable_key="Population01", show_in_chart=False), + Row(variable_key="Population02", show_in_chart=False), + Row(variable_key="Population03", show_in_chart=False), + Row( + description="Antal bemannade serviceställen per 1000 invånare", + computation=(lambda a, b: a / (b / 1000)), + variable_keys=["BemanService01", "Population01"], + ), + Row( + description="Andel integrerade serviceställen", + computation=(lambda a, b: a / b), + variable_keys=["Integrerad01", "BemanService01"], + percentage=True, + ), + Row( + description="Medelantal utlån till servicesställen där vidare låneregistrering inte sker", + computation=(lambda a, b: a / b), + variable_keys=["ObemanLan01", "Obeman01"], + ), + ], + ), + Group( + title="Årsverken", + rows=[ + Row(variable_key="Arsverke01"), + Row(variable_key="Arsverke02"), + Row(variable_key="Arsverke03"), + Row(variable_key="Arsverke04"), + Row(variable_key="Arsverke99", is_sum=True), + Row(variable_key="Arsverke05"), + Row( + description="Andel årsverken för barn och unga", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke05", "Arsverke99"], + percentage=True, + ), + Row( + description="Andel årsverken med bibliotekariekompetens", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke01", "Arsverke99"], + percentage=True, + ), + 
Row( + description="Antal årsverken per 1000 invånare", + computation=(lambda a, b: a / (b / 1000)), + variable_keys=["Arsverke99", "Population01"], + ), + Row( + description="Antal fysiska besök per årsverke", + computation=(lambda a, b: a / b), + variable_keys=["Besok01", "Arsverke99"], + ), + Row( + description="Antal aktiva låntagare per årsverke", + computation=(lambda a, b: a / b), + variable_keys=["Aktiv99", "Arsverke99"], + ), + ], + ), + Group( + title="Personal", + rows=[ + Row(variable_key="Personer01"), + Row(variable_key="Personer02"), + Row(variable_key="Personer99", is_sum=True), + Row( + description="Andel anställda kvinnor", + computation=(lambda a, b: a / b), + variable_keys=["Personer01", "Personer99"], + percentage=True, + ), + Row( + description="Antal årsverken per anställd", + computation=(lambda a, b: a / b), + variable_keys=["Arsverke99", "Personer99"], + ), + ], + ), + Group( + title="Ekonomi", + rows=[ + Row(variable_key="Utgift01"), + Row(variable_key="Utgift02"), + Row(variable_key="Utgift03"), + Row(variable_key="Utgift04"), + Row(variable_key="Utgift05"), + Row(variable_key="Utgift06"), + Row(variable_key="Utgift99", is_sum=True), + Row(variable_key="Utgift07"), + Row( + description="Mediekostnad per invånare i kommunen", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Utgift01", "Utgift02", "Population01"], + ), + Row( + description="Total driftkostnad per invånare i kommunen", + computation=(lambda a, b: a / b), + variable_keys=["Utgift99", "Population01"], + ), + Row( + description="Andel kostnad för medier av total driftkostnad", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Utgift01", "Utgift02", "Utgift99"], + percentage=True, + ), + Row( + description="Andel kostnad för personal av total driftkostnad", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Utgift03", "Utgift04", "Utgift99"], + percentage=True, + ), + Row( + description="Andel kostnad för e-medier av total driftskostnad", + computation=(lambda a, b: a / b), + variable_keys=["Utgift02", "Utgift99"], + percentage=True, + ), + ], + ), + Group( + title="Egengenererade intäkter", + rows=[ + Row(variable_key="Intakt01"), + Row(variable_key="Intakt02"), + Row(variable_key="Intakt03"), + Row(variable_key="Intakt99", is_sum=True), + Row( + description="Andel egengenererade intäkter i förhållande till de totala driftskostnaderna", + computation=(lambda a, b: a / b), + variable_keys=["Intakt99", "Utgift99"], + percentage=True, + ), + ], + ), + Group( + title="Fysiskt bestånd", + extra="Andel av totalt bestånd", + rows=[ + Row( + variable_key="Bestand101", + computation=(lambda a, b: a / b), + variable_keys=["Bestand101", "Bestand199"], + ), + Row( + variable_key="Bestand102", + computation=(lambda a, b: a / b), + variable_keys=["Bestand102", "Bestand199"], + ), + Row( + variable_key="Bestand103", + computation=(lambda a, b: a / b), + variable_keys=["Bestand103", "Bestand199"], + ), + Row( + variable_key="Bestand104", + computation=(lambda a, b: a / b), + variable_keys=["Bestand104", "Bestand199"], + ), + Row( + variable_key="Bestand105", + computation=(lambda a, b: a / b), + variable_keys=["Bestand105", "Bestand199"], + ), + Row( + variable_key="Bestand106", + computation=(lambda a, b: a / b), + variable_keys=["Bestand106", "Bestand199"], + ), + Row( + variable_key="Bestand107", + computation=(lambda a, b: a / b), + variable_keys=["Bestand107", "Bestand199"], + ), + Row( + variable_key="Bestand108", + computation=(lambda a, b: a / b), + 
variable_keys=["Bestand108", "Bestand199"], + ), + Row( + variable_key="Bestand109", + computation=(lambda a, b: a / b), + variable_keys=["Bestand109", "Bestand199"], + ), + Row( + variable_key="Bestand110", + computation=(lambda a, b: a / b), + variable_keys=["Bestand110", "Bestand199"], + ), + Row( + variable_key="Bestand111", + computation=(lambda a, b: a / b), + variable_keys=["Bestand111", "Bestand199"], + ), + Row( + variable_key="Bestand112", + computation=(lambda a, b: a / b), + variable_keys=["Bestand112", "Bestand199"], + ), + Row( + variable_key="Bestand113", + computation=(lambda a, b: a / b), + variable_keys=["Bestand113", "Bestand199"], + ), + Row( + variable_key="Bestand199", + is_sum=True, + computation=(lambda a, b: a / b), + variable_keys=["Bestand199", "Bestand199"], + ), + Row( + description="Totalt fysiskt mediebestånd per invånare", + computation=(lambda a, b: a / b), + variable_keys=["Bestand199", "Population01"], + ), + Row( + description="Antal fysiska böcker med skriven text per invånare i beståndet", + computation=(lambda a, b: a / b), + variable_keys=["Bestand101", "Population01"], + ), + ], + ), + Group( + title="Fysiskt nyförvärv", + extra="Andel nyförvärv av motsvarande bestånd", + rows=[ + Row( + variable_key="Bestand201", + computation=(lambda a, b: a / b), + variable_keys=["Bestand201", "Bestand101"], + ), + Row( + variable_key="Bestand202", + computation=(lambda a, b: a / b), + variable_keys=["Bestand202", "Bestand102"], + ), + Row( + variable_key="Bestand203", + computation=(lambda a, b: a / b), + variable_keys=["Bestand203", "Bestand103"], + ), + Row( + variable_key="Bestand204", + computation=(lambda a, b: a / b), + variable_keys=["Bestand204", "Bestand104"], + ), + Row( + variable_key="Bestand205", + computation=(lambda a, b: a / b), + variable_keys=["Bestand205", "Bestand105"], + ), + Row( + variable_key="Bestand206", + computation=(lambda a, b: a / b), + variable_keys=["Bestand206", "Bestand106"], + ), + Row( + variable_key="Bestand207", + computation=(lambda a, b: a / b), + variable_keys=["Bestand207", "Bestand107"], + ), + Row( + variable_key="Bestand208", + computation=(lambda a, b: a / b), + variable_keys=["Bestand208", "Bestand108"], + ), + Row( + variable_key="Bestand209", + computation=(lambda a, b: a / b), + variable_keys=["Bestand209", "Bestand109"], + ), + Row( + variable_key="Bestand210", + computation=(lambda a, b: a / b), + variable_keys=["Bestand210", "Bestand110"], + ), + Row( + variable_key="Bestand211", + computation=(lambda a, b: a / b), + variable_keys=["Bestand211", "Bestand111"], + ), + Row( + variable_key="Bestand212", + computation=(lambda a, b: a / b), + variable_keys=["Bestand212", "Bestand112"], + ), + Row( + variable_key="Bestand213", + computation=(lambda a, b: a / b), + variable_keys=["Bestand213", "Bestand113"], + ), + Row( + variable_key="Bestand299", + is_sum=True, + computation=(lambda a, b: a / b), + variable_keys=["Bestand299", "Bestand199"], + ), + Row( + description="Antal fysiska nyförvärv per 1000 invånare (ej tidn.tidskr.)", + computation=( + lambda a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11: ( + a1 + a2 + a3 + a4 + a5 + a6 + a7 + a8 + a9 + a10 + ) + / (a11 / 1000) + ), + variable_keys=[ + "Bestand101", + "Bestand103", + "Bestand104", + "Bestand107", + "Bestand108", + "Bestand109", + "Bestand110", + "Bestand111", + "Bestand112", + "Bestand113", + "Population01", + ], + ), + ], + ), + Group( + title="Elektroniskt titelbestånd", + rows=[ + Row(variable_key="Bestand301"), + # Row(variable_key=u"Bestand302"), + 
Row(variable_key="Bestand303"), + Row(variable_key="Bestand304"), + Row(variable_key="Bestand305"), + Row(variable_key="Bestand306"), + Row(variable_key="Bestand307"), + Row(variable_key="Bestand308"), + Row(variable_key="Bestand310"), + Row(variable_key="Bestand311"), + Row(variable_key="Bestand312"), + Row(variable_key="Bestand313"), + Row(variable_key="Bestand399", is_sum=True), + Row( + description="Andel e-bokstitlar av det totala elektroniska titelbeståndet med skriven text", + computation=(lambda a, b: a / b), + variable_keys=["Bestand301", "Bestand399"], + percentage=True, + ), + ], + ), + Group( + title="Barnmedier", + rows=[ + Row(variable_key="Barn01"), + Row(variable_key="Barn02"), + Row(variable_key="Barn03"), + Row( + description="Antal bestånd barnböcker per barn", + computation=(lambda a, b: a / b), + variable_keys=["Barn01", "Population03"], + ), + # Row(description=u"Andel tryckta barnmedier av motsvarande totalbestånd", + # computation=(lambda a, b, c: a / (b + c)), + # variable_keys=[u"Barn01", u"Bestand101", u"Bestand105"], + # percentage=True), + Row( + description="Andel nyförvärv tryckta barnmedier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["Barn02", "Barn01"], + percentage=True, + ), + Row( + description="Andel utlån tryckta barnmedier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["Barn03", "Barn01"], + percentage=True, + ), + Row( + description="Antal barnutlån per barninvånare", + computation=(lambda a, b: a / b), + variable_keys=["Barn03", "Population03"], + ), + ], + ), + Group( + title="", + rows=[ + Row(variable_key="HCG04"), + Row(variable_key="Ref05"), + ], + ), + Group( + title="Personer med läsnedsättning", + rows=[ + Row(variable_key="LasnedBest01"), + Row(variable_key="LasnedUtlan01"), + Row( + description="Andel utlån av anpassade medier av motsvarande bestånd", + computation=(lambda a, b: a / b), + variable_keys=["LasnedUtlan01", "LasnedBest01"], + percentage=True, + ), + Row( + description="Andel anpassade medier av totala fysiska beståndet", + computation=(lambda a, b: a / b), + variable_keys=["LasnedBest01", "Bestand199"], + percentage=True, + ), + ], + ), + Group( + title="Medier på olika språk", + rows=[ + Row(description="Titlar på svenska språket", label_only=True), + Row(variable_key="Titlar101"), + Row(variable_key="Titlar102"), + Row(variable_key="Titlar199", is_sum=True), + Row( + description="Titlar på nationella minoritetsspråk", + label_only=True, + ), + Row(variable_key="Titlar201"), + Row(variable_key="Titlar202"), + Row(variable_key="Titlar299", is_sum=True), + Row(description="Titlar på utländska språk", label_only=True), + Row(variable_key="Titlar301"), + Row(variable_key="Titlar302"), + Row(variable_key="Titlar399", is_sum=True), + Row( + description="Totalt antal titlar på olika medietyper", + label_only=True, + ), + Row(variable_key="Titlar497"), + Row(variable_key="Titlar498"), + Row(variable_key="Titlar499", is_sum=True), + ], + ), + Group( + title="Elektroniskt bestånd", + rows=[ + Row(variable_key="Databas01"), + Row(variable_key="Databas02"), + Row(variable_key="Databas03"), + Row(variable_key="Databas04"), + Row(variable_key="Databas05"), + Row(variable_key="Databas06"), + Row(variable_key="Databas07"), + Row(variable_key="Databas08"), + Row(variable_key="Databas09"), + Row(variable_key="Databas99", is_sum=True), + ], + ), + Group( + title="Antal initiala lån och omlån fysiskt bestånd", + rows=[ + Row(variable_key="Inilan101", show_in_chart=False), + 
Row(variable_key="Inilan102", show_in_chart=False), + Row(variable_key="Inilan103", show_in_chart=False), + Row(variable_key="Inilan104", show_in_chart=False), + Row(variable_key="Inilan105", show_in_chart=False), + Row(variable_key="Inilan106", show_in_chart=False), + Row(variable_key="Inilan107", show_in_chart=False), + Row(variable_key="Inilan108", show_in_chart=False), + Row(variable_key="Inilan109", show_in_chart=False), + Row(variable_key="Inilan110", show_in_chart=False), + Row(variable_key="Inilan111", show_in_chart=False), + Row(variable_key="Inilan112", show_in_chart=False), + Row(variable_key="Inilan113", show_in_chart=False), + Row(variable_key="Inilan199", is_sum=True), + Row( + description="Andel inititala lån av det totala antalet lån", + computation=(lambda a, b: a / b), + variable_keys=["Inilan199", "Utlan399"], + percentage=True, + ), + Row(description="", label_only=True), + Row(variable_key="Omlan201", show_in_chart=False), + Row(variable_key="Omlan202", show_in_chart=False), + Row(variable_key="Omlan203", show_in_chart=False), + Row(variable_key="Omlan204", show_in_chart=False), + Row(variable_key="Omlan205", show_in_chart=False), + Row(variable_key="Omlan206", show_in_chart=False), + Row(variable_key="Omlan207", show_in_chart=False), + Row(variable_key="Omlan208", show_in_chart=False), + Row(variable_key="Omlan209", show_in_chart=False), + Row(variable_key="Omlan210", show_in_chart=False), + Row(variable_key="Omlan211", show_in_chart=False), + Row(variable_key="Omlan212", show_in_chart=False), + Row(variable_key="Omlan213", show_in_chart=False), + Row(variable_key="Omlan299", is_sum=True), + Row( + description="Andel omlån av det totala antalet lån", + computation=(lambda a, b: a / b), + variable_keys=["Omlan299", "Utlan399"], + percentage=True, + ), + ], + ), + Group( + title="Utlån fysiskt bestånd", + extra="Andel av total fysisk utlåning", + rows=[ + Row( + variable_key="Utlan301", + computation=(lambda a, b: a / b), + variable_keys=["Utlan301", "Utlan399"], + ), + Row( + variable_key="Utlan302", + computation=(lambda a, b: a / b), + variable_keys=["Utlan302", "Utlan399"], + ), + Row( + variable_key="Utlan303", + computation=(lambda a, b: a / b), + variable_keys=["Utlan303", "Utlan399"], + ), + Row( + variable_key="Utlan304", + computation=(lambda a, b: a / b), + variable_keys=["Utlan304", "Utlan399"], + ), + Row( + variable_key="Utlan305", + computation=(lambda a, b: a / b), + variable_keys=["Utlan305", "Utlan399"], + ), + Row( + variable_key="Utlan306", + computation=(lambda a, b: a / b), + variable_keys=["Utlan306", "Utlan399"], + ), + Row( + variable_key="Utlan307", + computation=(lambda a, b: a / b), + variable_keys=["Utlan307", "Utlan399"], + ), + Row( + variable_key="Utlan308", + computation=(lambda a, b: a / b), + variable_keys=["Utlan308", "Utlan399"], + ), + Row( + variable_key="Utlan309", + computation=(lambda a, b: a / b), + variable_keys=["Utlan309", "Utlan399"], + ), + Row( + variable_key="Utlan310", + computation=(lambda a, b: a / b), + variable_keys=["Utlan310", "Utlan399"], + ), + Row( + variable_key="Utlan311", + computation=(lambda a, b: a / b), + variable_keys=["Utlan311", "Utlan399"], + ), + Row( + variable_key="Utlan312", + computation=(lambda a, b: a / b), + variable_keys=["Utlan312", "Utlan399"], + ), + Row( + variable_key="Utlan313", + computation=(lambda a, b: a / b), + variable_keys=["Utlan313", "Utlan399"], + ), + Row(variable_key="Utlan399", is_sum=True), + Row( + description="Antal fysiska utlån per kommuninvånare", + 
computation=(lambda a, b: a / b), + variable_keys=["Utlan399", "Population01"], + ), + Row( + description="Fysiska böcker med skriven text per invånare", + computation=(lambda a, b: a / b), + variable_keys=["Utlan301", "Population01"], + ), + ], + ), + Group( + title="Läsning på plats i biblioteket", + show_chart=False, + rows=[ + Row(variable_key="Laslan01"), + Row(variable_key="Laslan02"), + Row(variable_key="Laslan99"), + Row( + description="Beräkning lån på plats", + computation=(lambda a, b, c: ((a / b) / 2) / c), + variable_keys=["Laslan01", "Laslan02", "Open101"], + ), + ], + ), + Group( + title="Fjärrlån", + rows=[ + Row(description="Inom Sverige", label_only=True), + Row(variable_key="Fjarr101"), + Row(variable_key="Fjarr102"), + Row(description="Utanför Sverige", label_only=True), + Row(variable_key="Fjarr201"), + Row(variable_key="Fjarr202"), + ], + ), + Group( + title="Summering fjärrlån", + show_chart=False, + rows=[ + Row(variable_key="Fjarr397"), + Row(variable_key="Fjarr398"), + Row(variable_key="Fjarr399", is_sum=True), + Row( + description="Andel utländska fjärrlån totalt", + computation=(lambda a, b: a / b), + variable_keys=["Fjarr299", "Fjarr399"], + percentage=True, + ), + Row( + description="Nettofjärrinlåning in-ut", + computation=(lambda a, b: a - b), + variable_keys=["Fjarr397", "Fjarr398"], + ), + ], + ), + Group( + title="Användning av elektroniska samlingar", + rows=[ + Row(description="Antal sökningar", label_only=True), + Row(variable_key="Elan101"), + Row(variable_key="Elan102"), + Row(variable_key="Elan103"), + Row(variable_key="Elan104"), + Row(variable_key="Elan105"), + Row(variable_key="Elan106"), + Row(variable_key="Elan107"), + Row(variable_key="Elan108"), + Row(variable_key="Elan109"), + Row(variable_key="Elan199", is_sum=True), + Row(description="Antal nedladdningar", label_only=True), + Row(variable_key="Elan201"), + Row(variable_key="Elan202"), + Row(variable_key="Elan203"), + Row(variable_key="Elan204"), + Row(variable_key="Elan205"), + Row(variable_key="Elan206"), + Row(variable_key="Elan207"), + Row(variable_key="Elan208"), + Row(variable_key="Elan209"), + Row(variable_key="Elan299", is_sum=True), + Row(description="Antal nedladdade sektioner", label_only=True), + Row(variable_key="Elan301"), + Row(variable_key="Elan399", is_sum=True), + Row( + description="Total användning av de elektroniska samlingarna", + computation=(lambda a, b, c: a + b + c), + variable_keys=["Elan199", "Elan299", "Elan399"], + ), + ], + ), + Group( + title="Besök", + rows=[ + Row(variable_key="Besok01"), + Row(variable_key="Besok02"), + Row(variable_key="Besok03"), + Row(variable_key="Besok04"), + Row(variable_key="Besok05"), + Row( + description="Antal fysiska besök per invånare", + computation=(lambda a, b: a / b), + variable_keys=["Besok01", "Population01"], + ), + ], + ), + Group( + title="Aktiva användare", + rows=[ + Row(variable_key="Aktiv01"), + Row(variable_key="Aktiv02"), + Row(variable_key="Aktiv04"), + Row(variable_key="Aktiv99", is_sum=True), + Row(variable_key="Aktiv03"), + Row( + description="Andel kvinnor som är aktiva låntagare", + computation=(lambda a, b: a / b), + variable_keys=["Aktiv01", "Aktiv99"], + percentage=True, + ), + Row( + description="Andel barn och unga som är aktiva låntagare", + computation=(lambda a, b: a / b), + variable_keys=["Aktiv03", "Population03"], + percentage=True, + ), + Row( + description="Andel aktiva användare per invånare", + computation=(lambda a, b: a / b), + variable_keys=["Aktiv99", "Population01"], + percentage=True, + 
), + Row( + description="Antal fysiska besök per antal aktiva användare", + computation=(lambda a, b: a / b), + variable_keys=["Besok01", "Aktiv99"], + ), + ], + ), + Group( + title="Resurser", + rows=[ + Row(variable_key="Resurs01"), + Row(variable_key="Resurs02"), + Row(variable_key="Resurs03"), + Row(variable_key="Resurs04"), + Row(variable_key="Resurs05"), + Row(variable_key="Resurs06"), + Row(variable_key="Resurs07"), + Row(variable_key="Resurs08"), + Row(variable_key="Resurs09"), + Row(variable_key="Resurs10"), + Row( + description="Andel publika ytor", + computation=(lambda a, b: a / (a + b)), + variable_keys=["Resurs09", "Resurs10"], + percentage=True, + ), + ], + ), + Group( + title="Öppettider", + rows=[ + Row( + description="Servicestället med de generösaste öppettiderna", + label_only=True, + ), + Row(variable_key="Open101"), + Row(variable_key="Open102"), + Row(variable_key="Open103"), + Row(variable_key="Open104"), + Row(variable_key="Open105"), + Row(variable_key="Open106"), + Row( + description="Övriga serviceställen sammantaget", label_only=True + ), + Row(variable_key="Open201"), + Row(variable_key="Open202"), + Row(variable_key="Open203"), + Row(variable_key="Open204"), + Row(variable_key="Open205"), + Row(variable_key="Open206"), + Row( + description="Medelantal öppetdagar per år", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Open201", "Open101", "BemanService01"], + ), + Row( + description="Medelantal öppettimmar alla serviceställen", + computation=(lambda a, b, c: (a + b) / c), + variable_keys=["Open203", "Open103", "BemanService01"], + ), + Row( + description="Andel öppettimmar med reducerad service", + computation=(lambda a, b, c, d: (a + b) / (c + d)), + variable_keys=["Open104", "Open204", "Open103", "Open203"], + percentage=True, + ), + Row( + description="Andel öppettimmar utanför kontorstid", + computation=(lambda a, b, c, d: (a + b) / (c + d)), + variable_keys=["Open106", "Open206", "Open103", "Open203"], + percentage=True, + ), + ], + ), + Group( + title="Service", + rows=[ + Row(variable_key="Serv01"), + Row(variable_key="Serv02"), + Row(variable_key="Serv03"), + Row(variable_key="Serv04"), + Row(variable_key="Serv05"), + Row(variable_key="Serv06"), + Row(variable_key="Serv07"), + ], + ), + Group( + title="Publika aktivitetstillfällen", + extra="Varav andel tillfällen för barn och unga", + rows=[ + Row( + variable_key="Publ101", + computation=(lambda a, b: a / b), + variable_keys=["Publ201", "Publ101"], + ), + Row( + variable_key="Publ102", + computation=(lambda a, b: a / b), + variable_keys=["Publ202", "Publ102"], + ), + Row( + variable_key="Publ103", + computation=(lambda a, b: a / b), + variable_keys=["Publ203", "Publ103"], + ), + Row( + variable_key="Publ104", + computation=(lambda a, b: a / b), + variable_keys=["Publ204", "Publ104"], + ), + Row( + variable_key="Publ105", + computation=(lambda a, b: a / b), + variable_keys=["Publ205", "Publ105"], + ), + Row( + variable_key="Publ106", + computation=(lambda a, b: a / b), + variable_keys=["Publ206", "Publ106"], + ), + Row( + variable_key="Publ107", + computation=(lambda a, b: a / b), + variable_keys=["Publ207", "Publ107"], + ), + Row( + variable_key="Publ108", + computation=(lambda a, b: a / b), + variable_keys=["Publ208", "Publ108"], + ), + Row( + variable_key="Publ109", + computation=(lambda a, b: a / b), + variable_keys=["Publ209", "Publ109"], + ), + Row( + variable_key="Publ110", + computation=(lambda a, b: a / b), + variable_keys=["Publ210", "Publ110"], + ), + Row( + 
variable_key="Publ111", + computation=(lambda a, b: a / b), + variable_keys=["Publ211", "Publ111"], + ), + Row( + variable_key="Publ112", + computation=(lambda a, b: a / b), + variable_keys=["Publ212", "Publ112"], + ), + Row( + variable_key="Publ113", + computation=(lambda a, b: a / b), + variable_keys=["Publ213", "Publ113"], + ), + Row( + variable_key="Publ114", + computation=(lambda a, b: a / b), + variable_keys=["Publ214", "Publ114"], + ), + Row( + variable_key="Publ115", + computation=(lambda a, b: a / b), + variable_keys=["Publ215", "Publ115"], + ), + Row( + variable_key="Publ116", + computation=(lambda a, b: a / b), + variable_keys=["Publ216", "Publ116"], + ), + Row( + variable_key="Publ117", + computation=(lambda a, b: a / b), + variable_keys=["Publ217", "Publ117"], + ), + Row( + variable_key="Publ118", + computation=(lambda a, b: a / b), + variable_keys=["Publ218", "Publ118"], + ), + Row( + variable_key="Publ119", + computation=(lambda a, b: a / b), + variable_keys=["Publ219", "Publ119"], + ), + Row( + variable_key="Publ120", + computation=(lambda a, b: a / b), + variable_keys=["Publ220", "Publ120"], + ), + Row( + variable_key="Publ199", + computation=(lambda a, b: a / b), + variable_keys=["Publ299", "Publ199"], + is_sum=True, + ), + Row( + description="Andel publika aktiviteter primärt för barn/unga", + computation=(lambda a, b: a / b), + variable_keys=["Publ299", "Publ199"], + percentage=True, + ), + Row(description="", label_only=True), + Row(variable_key="Publ201", show_in_chart=False), + Row(variable_key="Publ202", show_in_chart=False), + Row(variable_key="Publ203", show_in_chart=False), + Row(variable_key="Publ204", show_in_chart=False), + Row(variable_key="Publ205", show_in_chart=False), + Row(variable_key="Publ206", show_in_chart=False), + Row(variable_key="Publ207", show_in_chart=False), + Row(variable_key="Publ208", show_in_chart=False), + Row(variable_key="Publ209", show_in_chart=False), + Row(variable_key="Publ210", show_in_chart=False), + Row(variable_key="Publ211", show_in_chart=False), + Row(variable_key="Publ212", show_in_chart=False), + Row(variable_key="Publ213", show_in_chart=False), + Row(variable_key="Publ214", show_in_chart=False), + Row(variable_key="Publ215", show_in_chart=False), + Row(variable_key="Publ216", show_in_chart=False), + Row(variable_key="Publ217", show_in_chart=False), + Row(variable_key="Publ218", show_in_chart=False), + Row(variable_key="Publ219", show_in_chart=False), + Row(variable_key="Publ220", show_in_chart=False), + Row(variable_key="Publ299", is_sum=True, show_in_chart=False), + ], + ), + ] + ) diff --git a/libstat/services/bibdb_integration.py b/libstat/services/bibdb_integration.py index f9bd20c9..c79397d4 100644 --- a/libstat/services/bibdb_integration.py +++ b/libstat/services/bibdb_integration.py @@ -1,9 +1,9 @@ -# -*- coding: utf-8 -*- import requests from bibstat import settings from libstat.models import Library, ExternalIdentifier from libstat.utils import SURVEY_TARGET_GROUPS + def check_library_criteria(json_data): if not json_data.get("country_code", None) == "se": return False @@ -31,17 +31,29 @@ def library_from_json(json_data): library.name = library.name.strip() library.municipality_code = json_data.get("municipality_code", None) library.library_type = json_data.get("library_type", None) - location = next((a for a in json_data["address"] if a["address_type"] == "stat"), None) + location = next( + (a for a in json_data["address"] if a["address_type"] == "stat"), None + ) library.address = location["street"] if location 
and location["street"] else None library.city = location["city"] if location and location["city"] else None - library.zip_code = location["zip_code"] if location and location["zip_code"] else None + library.zip_code = ( + location["zip_code"] if location and location["zip_code"] else None + ) contacts = json_data.get("contact", None) if contacts: - library.email = next((c["email"] for c in contacts - if "email" in c and c["contact_type"] == "statans"), None) + library.email = next( + ( + c["email"] + for c in contacts + if "email" in c and c["contact_type"] == "statans" + ), + None, + ) school_code = json_data.get("school_code", None) if school_code: - external_identifier = ExternalIdentifier(type="school_code", identifier=school_code) + external_identifier = ExternalIdentifier( + type="school_code", identifier=school_code + ) library.external_identifiers = [external_identifier] return library @@ -52,8 +64,10 @@ def fetch_libraries(): # bibdb api pages by 200, let 30 000 be upper limit in case api is broken for start_index in range(0, 30000, 200): response = requests.get( - url="%s/api/lib?dump=true&start=%d" % (settings.BIBDB_BASE_URL, start_index), - headers={"APIKEY-AUTH-HEADER": "bibstataccess"}) # KP 180110 + url="%s/api/lib?dump=true&start=%d" + % (settings.BIBDB_BASE_URL, start_index), + headers={"APIKEY-AUTH-HEADER": "bibstataccess"}, + ) # KP 180110 if not response.json().get("libraries", None): break diff --git a/libstat/services/clean_data.py b/libstat/services/clean_data.py index 074b2b51..326b8a74 100644 --- a/libstat/services/clean_data.py +++ b/libstat/services/clean_data.py @@ -18,11 +18,15 @@ def _get_surveys_with_no_observations(sample_year): def _get_surveys_with_status_not_viewed(sample_year): - surveys = [s for s in Survey.objects.filter(sample_year=sample_year, _status=u"not_viewed")] + surveys = [ + s for s in Survey.objects.filter(sample_year=sample_year, _status="not_viewed") + ] return surveys -def _load_sigel_mapping_from_workbook(sheet="Blad1", column_old_value=1, column_new_value=0): +def _load_sigel_mapping_from_workbook( + sheet="Blad1", column_old_value=1, column_new_value=0 +): worksheet = None sigel_mapping = {} @@ -30,11 +34,13 @@ def _load_sigel_mapping_from_workbook(sheet="Blad1", column_old_value=1, column_ book = open_workbook(settings.SIGEL_MAPPING_FILE_PATH, verbosity=0) worksheet = book.sheet_by_name(str(sheet)) except XLRDError as xld_e: - logger.error(u"{}".format(xld_e)) + logger.error("{}".format(xld_e)) if worksheet: for i in range(1, worksheet.nrows): - sigel_mapping[worksheet.cell_value(i, column_old_value)] = worksheet.cell_value(i, column_new_value) + sigel_mapping[ + worksheet.cell_value(i, column_old_value) + ] = worksheet.cell_value(i, column_new_value) return sigel_mapping @@ -49,70 +55,109 @@ def _update_sigel(survey, matched_survey_sigel): def match_libraries_and_replace_sigel(sample_year): - all_published_surveys = Survey.objects.filter(sample_year=sample_year, _status=u"published") + all_published_surveys = Survey.objects.filter( + sample_year=sample_year, _status="published" + ) count = 0 matched = 0 - f = codecs.open(os.path.join(settings.CLEAN_DATA_LOG_PATH, 'match_libraries_log_%d' % sample_year), 'wt', 'utf-8') + f = codecs.open( + os.path.join( + settings.CLEAN_DATA_LOG_PATH, "match_libraries_log_%d" % sample_year + ), + "wt", + "utf-8", + ) logfile = File(f) - # Match against existing surveys on library name for survey in all_published_surveys: - if len(survey.library.sigel) == 10 or survey.library.sigel.startswith("8", 0, 
1): # Random-sigel or 8*-sigel + if len(survey.library.sigel) == 10 or survey.library.sigel.startswith( + "8", 0, 1 + ): # Random-sigel or 8*-sigel count = count + 1 # Find other surveys with same library name - matching_surveys = Survey.objects.filter(library__name__iexact=survey.library.name, pk__ne=survey.pk) + matching_surveys = Survey.objects.filter( + library__name__iexact=survey.library.name, pk__ne=survey.pk + ) if matching_surveys.count() != 0: for index, matched_survey in enumerate(matching_surveys): - if matched_survey.library.sigel and matched_survey.library.sigel != survey.library.sigel and len(matched_survey.library.sigel) != 10: + if ( + matched_survey.library.sigel + and matched_survey.library.sigel != survey.library.sigel + and len(matched_survey.library.sigel) != 10 + ): # Only use 8*-sigel if no other is found - if matched_survey.library.sigel.startswith("8", 0, 1) == False or index == len(matching_surveys) - 1: + if ( + matched_survey.library.sigel.startswith("8", 0, 1) == False + or index == len(matching_surveys) - 1 + ): logfile.write( - "Matched %s to %s. Changing sigel %s to %s\n" % (survey.library.name, matched_survey.library.name, survey.library.sigel, matched_survey.library.sigel)) + "Matched %s to %s. Changing sigel %s to %s\n" + % ( + survey.library.name, + matched_survey.library.name, + survey.library.sigel, + matched_survey.library.sigel, + ) + ) _update_sigel(survey, matched_survey.library.sigel) matched = matched + 1 break - # Try to match remaining 8*-sigels against sigel mapping file - surveys_sigel_start_w_8 = [s for s in Survey.objects.filter(sample_year=sample_year, _status=u"published") if - s.library.sigel.startswith("8", 0, 1) and len(s.library.sigel) < 10] + surveys_sigel_start_w_8 = [ + s + for s in Survey.objects.filter(sample_year=sample_year, _status="published") + if s.library.sigel.startswith("8", 0, 1) and len(s.library.sigel) < 10 + ] sigel_dict = _load_sigel_mapping_from_workbook() for survey_8 in surveys_sigel_start_w_8: sigel_from_mapping = sigel_dict.get(survey_8.library.sigel, None) - if sigel_from_mapping: - logfile.write("Found mapping for 8*-sigel %s, (library: %s), changing to %s\n" % (survey_8.library.sigel, survey_8.library.name, sigel_from_mapping)) + if sigel_from_mapping: + logfile.write( + "Found mapping for 8*-sigel %s, (library: %s), changing to %s\n" + % (survey_8.library.sigel, survey_8.library.name, sigel_from_mapping) + ) _update_sigel(survey_8, sigel_from_mapping) surveys_sigel_start_w_8.remove(survey_8) matched = matched + 1 - logfile.write("\nMatched libraries for year %d\n" % sample_year) - logfile.write("Found %d number of published surveys with random- (or 8*-)sigels\n" % count) + logfile.write( + "Found %d number of published surveys with random- (or 8*-)sigels\n" % count + ) logfile.write("Changed sigel for %d number of surveys\n" % matched) - # Log remaining unmatched sigels no_surveys_sigel_start_w_8 = len(surveys_sigel_start_w_8) - unmatched_surveys = [s for s in Survey.objects.filter(sample_year=sample_year, _status=u"published") if - len(s.library.sigel) == 10] + unmatched_surveys = [ + s + for s in Survey.objects.filter(sample_year=sample_year, _status="published") + if len(s.library.sigel) == 10 + ] no_of_unmatched_surveys = len(unmatched_surveys) logfile.write( - "\nRemaining published surveys year %d with random-sigels: %d. Sigels: \n" % (sample_year, no_of_unmatched_surveys)) + "\nRemaining published surveys year %d with random-sigels: %d. 
Sigels: \n" + % (sample_year, no_of_unmatched_surveys) + ) for unmatched_survey in unmatched_surveys: - logfile.write("%s %s\n" % (unmatched_survey.library.name, unmatched_survey.library.sigel)) + logfile.write( + "%s %s\n" % (unmatched_survey.library.name, unmatched_survey.library.sigel) + ) logfile.write( - "\nRemaining 8*-sigels found in published surveys: %d, year %d:\n" % (no_surveys_sigel_start_w_8, sample_year)) + "\nRemaining 8*-sigels found in published surveys: %d, year %d:\n" + % (no_surveys_sigel_start_w_8, sample_year) + ) for surv in surveys_sigel_start_w_8: try: @@ -124,12 +169,18 @@ def match_libraries_and_replace_sigel(sample_year): def remove_empty_surveys(sample_year, mode): - if mode == 'empty': + if mode == "empty": remove_surveys = _get_surveys_with_no_observations(sample_year) - elif mode == 'not_viewed': + elif mode == "not_viewed": remove_surveys = _get_surveys_with_status_not_viewed(sample_year) - f = codecs.open(os.path.join(settings.CLEAN_DATA_LOG_PATH, 'remove_surveys_log_%d' % sample_year), 'wt', 'utf-8') + f = codecs.open( + os.path.join( + settings.CLEAN_DATA_LOG_PATH, "remove_surveys_log_%d" % sample_year + ), + "wt", + "utf-8", + ) logfile = File(f) logfile.write("Mode: %s\n" % mode) @@ -142,4 +193,3 @@ def remove_empty_surveys(sample_year, mode): logfile.write("\nRemoved %d number of surveys year %s" % (count, sample_year)) logfile.close() - diff --git a/libstat/services/excel_export.py b/libstat/services/excel_export.py index c99b31b3..21c26d3a 100644 --- a/libstat/services/excel_export.py +++ b/libstat/services/excel_export.py @@ -1,17 +1,15 @@ -# -*- coding: utf-8 -*- -from __future__ import division import glob import os import datetime import math from django.core.files import File -from openpyxl import Workbook, load_workbook +from openpyxl import Workbook from openpyxl.writer.excel import save_virtual_workbook from bibstat import settings from data.principals import principal_for_library_type -from libstat.models import Survey, OpenData, Variable, Library +from libstat.models import Survey, OpenData, Variable import logging @@ -20,6 +18,7 @@ logger = logging.getLogger(__name__) + def _cache_dir_path(): if settings.ENVIRONMENT == "local": return "{}/data/excel_exports/".format(os.getcwd()) @@ -34,27 +33,42 @@ def _cache_path(year, file_name_str="public_export_{} {}.xslx", date_str=None): return "/data/appl/excel_exports/{}".format(file_name) -def _cached_workbook_exists_and_is_valid(year, file_name="public_export_{} {}.xslx", workbook_is_public=True): +def _cached_workbook_exists_and_is_valid( + year, file_name="public_export_{} {}.xslx", workbook_is_public=True +): paths = sorted(glob.glob(_cache_path(year, file_name_str=file_name))) if not paths: return False - cache_date = datetime.datetime.strptime(paths[-1].split(" ")[-1].split(".")[0], DATE_FORMAT) - + cache_date = datetime.datetime.strptime( + paths[-1].split(" ")[-1].split(".")[0], DATE_FORMAT + ).replace(tzinfo=datetime.timezone.utc) + if workbook_is_public: latest_modification = OpenData.objects.first().date_modified else: - latest_modification = Survey.objects.all().order_by("-date_modified").first().date_modified - + latest_modification = ( + Survey.objects.all().order_by("-date_modified").first().date_modified + ) + return cache_date > latest_modification -def _cache_workbook(workbook, year, file_name_str="public_export_{} {}.xslx", workbook_is_public=True): +def _cache_workbook( + workbook, year, file_name_str="public_export_{} {}.xslx", workbook_is_public=True +): for filename in 
os.listdir(_cache_dir_path()): if ".xslx" in filename: - if (workbook_is_public == True and filename.startswith("public")) or (workbook_is_public == False and filename.startswith("survey")): + if (workbook_is_public == True and filename.startswith("public")) or ( + workbook_is_public == False and filename.startswith("survey") + ): os.remove("%s%s" % (_cache_dir_path(), filename)) - with open(_cache_path(year, file_name_str, datetime.datetime.utcnow().strftime(DATE_FORMAT)), "w") as f: + with open( + _cache_path( + year, file_name_str, datetime.datetime.utcnow().strftime(DATE_FORMAT) + ), + "wb", + ) as f: File(f).write(save_virtual_workbook(workbook)) @@ -65,20 +79,30 @@ def public_excel_workbook(year): def _published_open_data_as_workbook(year): - workbook = Workbook(encoding="utf-8") + workbook = Workbook() worksheet = workbook.active - worksheet.title = u"Värden" + worksheet.title = "Värden" public_variables = list(Variable.objects.filter(is_public=True).distinct("key")) - variable_keys = list(OpenData.objects.filter(is_active=True, sample_year=year, variable_key__in=public_variables).distinct("variable_key")) - sigels = list(OpenData.objects.filter(is_active=True, sample_year=year, variable_key__in=public_variables).distinct("sigel")) + variable_keys = list( + OpenData.objects.filter( + is_active=True, sample_year=year, variable_key__in=public_variables + ).distinct("variable_key") + ) + sigels = list( + OpenData.objects.filter( + is_active=True, sample_year=year, variable_key__in=public_variables + ).distinct("sigel") + ) libraries = {} for sigel in sigels: libraries[sigel] = dict.fromkeys(variable_keys) - for open_data in OpenData.objects.filter(is_active=True, sample_year=year, variable_key__in=public_variables).only("library_name", "variable_key", "sigel", "value"): + for open_data in OpenData.objects.filter( + is_active=True, sample_year=year, variable_key__in=public_variables + ).only("library_name", "variable_key", "sigel", "value"): libraries[open_data.sigel][open_data.variable_key] = open_data.value header = ["Bibliotek", "Sigel", "Bibliotekstyp", "Kommunkod", "Stad", "Externt id"] @@ -89,26 +113,34 @@ def _published_open_data_as_workbook(year): worksheet.append(header) for sigel in libraries: - #library = Survey.objects.filter(library__sigel=sigel, sample_year=year).first().library + # library = Survey.objects.filter(library__sigel=sigel, sample_year=year).first().library library = Survey.objects.filter(library__sigel=sigel, sample_year=year).first() if library != None: library = library.library else: - continue # KP 180419 + continue # KP 180419 row = [""] * len(header) row[0] = library.name - row[1] = sigel if year >= 2014 else "" # Do not show auto-generated sigels (used before 2014) + row[1] = ( + sigel if year >= 2014 else "" + ) # Do not show auto-generated sigels (used before 2014) row[2] = library.library_type row[3] = library.municipality_code row[4] = library.city - row[5] = library.external_identifiers[0].identifier if library.external_identifiers and len(library.external_identifiers) > 0 and library.external_identifiers[0].identifier else "" + row[5] = ( + library.external_identifiers[0].identifier + if library.external_identifiers + and len(library.external_identifiers) > 0 + and library.external_identifiers[0].identifier + else "" + ) for key in variable_keys: row[variable_index[key]] = libraries[sigel][key] worksheet.append(row) variable_sheet = workbook.create_sheet() - variable_sheet.title = u"Definitioner" + variable_sheet.title = "Definitioner" for variable 
in Variable.objects.filter(key__in=variable_keys): variable_sheet.append([variable.key, variable.description]) @@ -118,45 +150,88 @@ def _published_open_data_as_workbook(year): def _populate_survey_cells(survey, worksheet, headers_columns_dict, row_no): if not survey: return - worksheet.cell(row=row_no, column=headers_columns_dict["År"]).value = survey.sample_year - worksheet.cell(row=row_no, column=headers_columns_dict["Bibliotek"]).value = survey.library.name - worksheet.cell(row=row_no, column=headers_columns_dict["Sigel"]).value = survey.library.sigel - worksheet.cell(row=row_no, column=headers_columns_dict["Bibliotekstyp"]).value = survey.library.library_type - worksheet.cell(row=row_no, column=headers_columns_dict["Status"]).value = Survey.status_label(survey.status) - worksheet.cell(row=row_no, column=headers_columns_dict["Email"]).value = survey.library.email - worksheet.cell(row=row_no, column=headers_columns_dict["Kommunkod"]).value = survey.library.municipality_code - worksheet.cell(row=row_no, column=headers_columns_dict["Stad"]).value = survey.library.city - worksheet.cell(row=row_no, column=headers_columns_dict["Adress"]).value = survey.library.address - worksheet.cell(row=row_no, column=headers_columns_dict["Postkod"]).value = survey.library.zip_code - worksheet.cell(row=row_no, column=headers_columns_dict["Huvudman"]).value = principal_for_library_type[survey.library.library_type] if survey.library.library_type in principal_for_library_type else None - worksheet.cell(row=row_no, column=headers_columns_dict["Kan publiceras?"]).value = "Ja" if survey.can_publish() else "Nej: " + survey.reasons_for_not_able_to_publish() - worksheet.cell(row=row_no, column=headers_columns_dict["Samredovisar andra bibliotek"]).value = "Ja" if survey.is_reporting_for_others() else "Nej" - worksheet.cell(row=row_no, column=headers_columns_dict["Samredovisas"]).value = "Ja" if survey.is_reported_by_other() else "Nej" - worksheet.cell(row=row_no, column=headers_columns_dict["Redovisas av"]).value = ",".join(survey.reported_by()) + worksheet.cell( + row=row_no, column=headers_columns_dict["År"] + ).value = survey.sample_year + worksheet.cell( + row=row_no, column=headers_columns_dict["Bibliotek"] + ).value = survey.library.name + worksheet.cell( + row=row_no, column=headers_columns_dict["Sigel"] + ).value = survey.library.sigel + worksheet.cell( + row=row_no, column=headers_columns_dict["Bibliotekstyp"] + ).value = survey.library.library_type + worksheet.cell( + row=row_no, column=headers_columns_dict["Status"] + ).value = Survey.status_label(survey.status) + worksheet.cell( + row=row_no, column=headers_columns_dict["Email"] + ).value = survey.library.email + worksheet.cell( + row=row_no, column=headers_columns_dict["Kommunkod"] + ).value = survey.library.municipality_code + worksheet.cell( + row=row_no, column=headers_columns_dict["Stad"] + ).value = survey.library.city + worksheet.cell( + row=row_no, column=headers_columns_dict["Adress"] + ).value = survey.library.address + worksheet.cell( + row=row_no, column=headers_columns_dict["Postkod"] + ).value = survey.library.zip_code + worksheet.cell(row=row_no, column=headers_columns_dict["Huvudman"]).value = ( + principal_for_library_type[survey.library.library_type] + if survey.library.library_type in principal_for_library_type + else None + ) + worksheet.cell(row=row_no, column=headers_columns_dict["Kan publiceras?"]).value = ( + "Ja" + if survey.can_publish() + else "Nej: " + survey.reasons_for_not_able_to_publish() + ) + worksheet.cell( + 
row=row_no, column=headers_columns_dict["Samredovisar andra bibliotek"] + ).value = ("Ja" if survey.is_reporting_for_others() else "Nej") + worksheet.cell(row=row_no, column=headers_columns_dict["Samredovisas"]).value = ( + "Ja" if survey.is_reported_by_other() else "Nej" + ) + worksheet.cell( + row=row_no, column=headers_columns_dict["Redovisas av"] + ).value = ",".join(survey.reported_by()) for observation in survey.observations: variable_key = observation.variable.key if headers_columns_dict.get(variable_key, None): value = observation.value if observation.value_unknown: - value = u"okänt värde" - worksheet.cell(row=row_no, column=headers_columns_dict[variable_key]).value = value + value = "okänt värde" + worksheet.cell( + row=row_no, column=headers_columns_dict[variable_key] + ).value = value other_sigels = [s for s in survey.selected_libraries if s != survey.library.sigel] if len(other_sigels) > 0: - other_surveys = Survey.objects.filter(library__sigel__in=other_sigels, sample_year=survey.sample_year).only("library") + other_surveys = Survey.objects.filter( + library__sigel__in=other_sigels, sample_year=survey.sample_year + ).only("library") for other_survey in other_surveys: - worksheet.cell(row=row_no, column=headers_columns_dict["Samredovisat bibliotek"]).value = "%s (%s)" % (other_survey.library.name, other_survey.library.sigel) - worksheet.cell(row=row_no, column=headers_columns_dict["Gatuadress"]).value = other_survey.library.address - worksheet.cell(row=row_no, column=headers_columns_dict["Postnummer"]).value = other_survey.library.zip_code + worksheet.cell( + row=row_no, column=headers_columns_dict["Samredovisat bibliotek"] + ).value = "%s (%s)" % ( + other_survey.library.name, + other_survey.library.sigel, + ) + worksheet.cell( + row=row_no, column=headers_columns_dict["Gatuadress"] + ).value = other_survey.library.address + worksheet.cell( + row=row_no, column=headers_columns_dict["Postnummer"] + ).value = other_survey.library.zip_code def _load_surveys_and_append_worksheet_rows( - surveys, - worksheet, - headers_columns_dict, - offset=0, - include_previous_year=False + surveys, worksheet, headers_columns_dict, offset=0, include_previous_year=False ): row_no = 2 + offset for survey in surveys: @@ -196,9 +271,13 @@ def surveys_to_excel_workbook(survey_ids, include_previous_year=False): "Kan publiceras?", "Samredovisar andra bibliotek", "Samredovisas", - "Redovisas av" + "Redovisas av", + ] + headers += [ + str(observation.variable.key) + for observation in Survey.objects.get(pk=survey_ids[0]).observations + if observation.variable.key ] - headers += [unicode(observation.variable.key) for observation in Survey.objects.get(pk=survey_ids[0]).observations if observation.variable.key] headers += ["Samredovisat bibliotek", "Gatuadress", "Postnummer"] # Create a headers dictionary for lookup of column number @@ -207,7 +286,7 @@ def surveys_to_excel_workbook(survey_ids, include_previous_year=False): headers_dict[header] = column_no column_no = column_no + 1 - workbook = Workbook(encoding="utf-8") + workbook = Workbook() worksheet = workbook.active worksheet.append(headers) @@ -220,14 +299,14 @@ def surveys_to_excel_workbook(survey_ids, include_previous_year=False): ids = survey_ids[start_index:stop_index] - surveys = Survey.objects.filter(id__in=ids).order_by('library__name') + surveys = Survey.objects.filter(id__in=ids).order_by("library__name") _load_surveys_and_append_worksheet_rows( surveys, worksheet, headers_dict, offset=offset, - include_previous_year=include_previous_year + 
include_previous_year=include_previous_year, ) offset += bulk_size diff --git a/libstat/services/report_generation.py b/libstat/services/report_generation.py index c34f1f3c..cca09b86 100644 --- a/libstat/services/report_generation.py +++ b/libstat/services/report_generation.py @@ -1,23 +1,37 @@ -# -*- coding: utf-8 -*- -from pprint import pprint -import uuid, logging +import logging +import uuid from libstat.models import Survey, Variable, OpenData, CachedReport -from libstat.report_templates import report_template_base, report_template_base_with_municipality_calculations, report_template_base_with_target_group_calculations +from libstat.report_templates import ( + report_template_base, + report_template_base_with_municipality_calculations, + report_template_base_with_target_group_calculations, +) logger = logging.getLogger(__name__) REPORT_CACHE_LIMIT = 500 + def get_cached_report(surveys, year): - if (CachedReport.objects.count() != 0 and - ((OpenData.objects.count() != 0 - and OpenData.objects.first().date_modified > CachedReport.objects.first().date_created) or - (Variable.objects.count() != 0 and Variable.objects.all().order_by("-date_modified").first().date_modified > CachedReport.objects.first().date_created))): + if CachedReport.objects.count() != 0 and ( + ( + OpenData.objects.count() != 0 + and OpenData.objects.first().date_modified + > CachedReport.objects.first().date_created + ) + or ( + Variable.objects.count() != 0 + and Variable.objects.all().order_by("-date_modified").first().date_modified + > CachedReport.objects.first().date_created + ) + ): CachedReport.drop_collection() - reports = CachedReport.objects.filter(surveys__all=surveys, surveys__size=len(surveys), year=str(year)) + reports = CachedReport.objects.filter( + surveys__all=surveys, surveys__size=len(surveys), year=str(year) + ) report = reports[0].report if reports.count() == 1 else None return report @@ -37,14 +51,18 @@ def get_report(surveys, year): return cached_report else: library_types = [survey.library.library_type for survey in surveys] - only_folkbib_or_folkskolbib = all(libtype in [u"folkbib", u"folkskolbib"] for libtype in library_types) + only_folkbib_or_folkskolbib = all( + libtype in ["folkbib", "folkskolbib"] for libtype in library_types + ) # This should of course be updated when (and if) more report templates are added # Different report templates are used depending on types of libraries included if only_folkbib_or_folkskolbib: report_template = report_template_base_with_municipality_calculations() - elif len(surveys) > 1 and any(libtype in [u"folkbib", u"folkskolbib"] for libtype in library_types): + elif len(surveys) > 1 and any( + libtype in ["folkbib", "folkskolbib"] for libtype in library_types + ): report_template = report_template_base() else: report_template = report_template_base_with_target_group_calculations() @@ -52,7 +70,12 @@ def get_report(surveys, year): observations = pre_cache_observations(report_template, surveys, year) sigels = [sigel for survey in surveys for sigel in survey.selected_libraries] - libraries = [survey.library for survey in Survey.objects.filter(sample_year=year, library__sigel__in=sigels)] + libraries = [ + survey.library + for survey in Survey.objects.filter( + sample_year=year, library__sigel__in=sigels + ) + ] def sort_key(library): return library.name.lower() @@ -67,9 +90,13 @@ def sort_key(library): "sigel": library.sigel, "name": library.name, "address": library.address, - "city": library.city - } for library in libraries], - "measurements": 
generate_report(report_template, year, observations, library_types) + "city": library.city, + } + for library in libraries + ], + "measurements": generate_report( + report_template, year, observations, library_types + ), } store_cached_report(report, surveys, year) @@ -79,8 +106,17 @@ def sort_key(library): def is_variable_to_be_included(variable_key, library_types): variable = Variable.objects.filter(key=variable_key).first() - if variable and variable.target_groups and len(variable.target_groups) > 0 and \ - any([library_type not in variable.target_groups for library_type in library_types]): + if ( + variable + and variable.target_groups + and len(variable.target_groups) > 0 + and any( + [ + library_type not in variable.target_groups + for library_type in library_types + ] + ) + ): return False return True @@ -103,7 +139,7 @@ def group_skeleton(template_group): "years": [year2, year1, year0], "rows": [], "extra": template_group.extra, - "show_chart": template_group.show_chart + "show_chart": template_group.show_chart, } def row_skeleton(template_row): @@ -115,17 +151,18 @@ def row_skeleton(template_row): "extra": None, "incomplete_data": [], "description": template_row.explanation, - "show_in_chart": template_row.show_in_chart if template_row.variable_key else False, + "show_in_chart": template_row.show_in_chart + if template_row.variable_key + else False, "is_key_figure": None, "is_sum": template_row.is_sum if template_row.is_sum else None, "label": template_row.description, "label_only": template_row.label_only if template_row.label_only else None, - "percentage": template_row.percentage if template_row.percentage else None + "percentage": template_row.percentage if template_row.percentage else None, } def clear_nones(a_dict): - return dict([(k, v) for k, v in a_dict.iteritems() if v is not None]) - + return dict([(k, v) for k, v in list(a_dict.items()) if v is not None]) report = [] for template_group in report_template.groups: @@ -135,26 +172,42 @@ def clear_nones(a_dict): row = None - if template_row.variable_key and is_variable_to_be_included(template_row.variable_key, library_types): + if template_row.variable_key and is_variable_to_be_included( + template_row.variable_key, library_types + ): row = row_skeleton(template_row) observation = observations.get(template_row.variable_key, {}) row[year0] = observation.get(year, None) row[year1] = observation.get(year - 1, None) row[year2] = observation.get(year - 2, None) row["total"] = observation.get("total", None) - row["incomplete_data"] = observations.get(template_row.variable_key, {}).get("incomplete_data", None) + row["incomplete_data"] = observations.get( + template_row.variable_key, {} + ).get("incomplete_data", None) if template_row.computation: - row["extra"] = template_row.compute(values_for(template_row.variable_keys, year)) - row["extra"] = row["extra"] * 100 if row["extra"] is not None else None - - elif template_row.variable_keys and all(is_variable_to_be_included(variable_key, library_types) for variable_key in template_row.variable_keys): + row["extra"] = template_row.compute( + values_for(template_row.variable_keys, year) + ) + row["extra"] = ( + row["extra"] * 100 if row["extra"] is not None else None + ) + + elif template_row.variable_keys and all( + is_variable_to_be_included(variable_key, library_types) + for variable_key in template_row.variable_keys + ): row = row_skeleton(template_row) row["is_key_figure"] = True for y in (year0, year1, year2): - row[y] = 
template_row.compute(values_for(template_row.variable_keys, int(y))) + row[y] = template_row.compute( + values_for(template_row.variable_keys, int(y)) + ) for key in template_row.variable_keys: - if int(y) in observations.get(key, {}).get("incomplete_data", []) and int(y) not in row[ - "incomplete_data"]: + if ( + int(y) + in observations.get(key, {}).get("incomplete_data", []) + and int(y) not in row["incomplete_data"] + ): row["incomplete_data"].append(int(y)) elif not template_row.variable_key and not template_row.variable_keys: @@ -171,7 +224,11 @@ def clear_nones(a_dict): elif row[year0] is not None and row["total"]: row["nation_diff"] = (row[year0] / row["total"]) * 1000 - row["incomplete_data"] = [str(a) for a in row["incomplete_data"]] if row["incomplete_data"] else None + row["incomplete_data"] = ( + [str(a) for a in row["incomplete_data"]] + if row["incomplete_data"] + else None + ) row["total"] = None group["rows"].append(clear_nones(row)) @@ -181,14 +238,10 @@ def clear_nones(a_dict): def pre_cache_observations(template, surveys, year): def is_number(obj): - return isinstance(obj, (int, long, float, complex)) + return isinstance(obj, (int, float, complex)) def survey_ids_three_latest_years(): - survey_ids = { - year: [], - (year - 1): [], - (year - 2): [] - } + survey_ids = {year: [], (year - 1): [], (year - 2): []} for survey in surveys: survey_ids[year].append(survey.pk) @@ -202,7 +255,11 @@ def survey_ids_three_latest_years(): def observation_skeleton(variables): try: - total = float(OpenData.objects.filter(sample_year=year, is_active=True, variable__in=variables).sum("value")) + total = float( + OpenData.objects.filter( + sample_year=year, is_active=True, variable__in=variables + ).sum("value") + ) except Exception as e: total = None returns = { @@ -210,7 +267,7 @@ def observation_skeleton(variables): (year - 1): None, (year - 2): None, "incomplete_data": [], - "total": total + "total": total, } return returns @@ -224,15 +281,20 @@ def observation_skeleton(variables): variables = [variables[0]] if len(variables[0].replaces) > 0: - library_types = [target_group for variable in variables[0].replaces for target_group in variable.target_groups] + library_types = [ + target_group + for variable in variables[0].replaces + for target_group in variable.target_groups + ] if len(library_types) == len(set(library_types)): - variables += variables[0].replaces + variables += variables[0].replaces observations[key] = observation_skeleton(variables) for y in (year, year - 1, year - 2): - open_data = OpenData.objects.filter(source_survey__in=survey_ids[y], variable__in=variables, - is_active=True) + open_data = OpenData.objects.filter( + source_survey__in=survey_ids[y], variable__in=variables, is_active=True + ) sum_value = 0 for od in open_data: try: @@ -255,7 +317,10 @@ def observation_skeleton(variables): except: observations[key][y] = None - if open_data.count() < len(survey_ids[y]) and y not in observations[key]["incomplete_data"]: + if ( + open_data.count() < len(survey_ids[y]) + and y not in observations[key]["incomplete_data"] + ): observations[key]["incomplete_data"].append(y) - return observations \ No newline at end of file + return observations diff --git a/libstat/survey_templates.py b/libstat/survey_templates.py index d2877338..9662b6ba 100644 --- a/libstat/survey_templates.py +++ b/libstat/survey_templates.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from libstat.models import Section, Group, Cell, Row, SurveyTemplate @@ -6,493 +5,1196 @@ def _survey_template_base(): return 
SurveyTemplate( intro_text_variable_key="Introtext2014", sections=[ - Section(title=u"Frågor om biblioteksorganisationen", - groups=[ - Group(rows=[ - Row(cells=[Cell(variable_key=u"Namn01")]), - Row(cells=[Cell(variable_key=u"Epost01", required=True)]), - Row(cells=[Cell(variable_key=u"Tele01")]), - Row(cells=[Cell(variable_key=u"Plan01")])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"BemanService01", required=True)]), - Row(cells=[Cell(variable_key=u"Integrerad01", required=True)])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Obeman01", required=True)]), - Row(cells=[Cell(variable_key=u"ObemanLan01", required=True)])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Bokbuss01", required=True)]), - Row(cells=[Cell(variable_key=u"BokbussHP01", required=True)]), - Row(cells=[Cell(variable_key=u"Bokbil01")])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Population01")]), - Row(cells=[Cell(variable_key=u"Population02")]), - Row(cells=[Cell(variable_key=u"Population03")])])]), - Section(title=u"Frågor om bemanning och personal", - groups=[ - Group(rows=[ - Row(cells=[Cell(variable_key=u"Arsverke01")]), - Row(cells=[Cell(variable_key=u"Arsverke02")]), - Row(cells=[Cell(variable_key=u"Arsverke03")]), - Row(cells=[Cell(variable_key=u"Arsverke04")]), - Row(cells=[Cell(variable_key=u"Arsverke99", - sum_of=["Arsverke01", "Arsverke02", "Arsverke03", "Arsverke04"])]), - Row(cells=[Cell(variable_key=u"Arsverke05", required=True)])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Personer01")]), - Row(cells=[Cell(variable_key=u"Personer02")]), - Row(cells=[ - Cell(variable_key=u"Personer99", - sum_of=[u"Personer01", u"Personer02"])])]), - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Personkomm")])])]), - Section(title=u"Frågor om ekonomi", - groups=[ - Group(rows=[ - Row(cells=[Cell(variable_key=u"Utgift01")]), - Row(cells=[Cell(variable_key=u"Utgift02")]), - Row(cells=[Cell(variable_key=u"Utgift03")]), - Row(cells=[Cell(variable_key=u"Utgift04")]), - Row(cells=[Cell(variable_key=u"Utgift05")]), - Row(cells=[Cell(variable_key=u"Utgift06")]), - Row(cells=[Cell(variable_key=u"Utgift99", - sum_of=[u"Utgift01", u"Utgift02", u"Utgift03", - u"Utgift04", u"Utgift05", u"Utgift06"])]), - Row(cells=[Cell(variable_key=u"Utgift07")])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Intakt01")]), - Row(cells=[Cell(variable_key=u"Intakt02")]), - Row(cells=[Cell(variable_key=u"Intakt03")]), - Row(cells=[Cell(variable_key=u"Intakt99", - sum_of=[u"Intakt01", u"Intakt02", u"Intakt03"])])]), - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Ekonomikomm")])])]), - Section(title=u"Bestånd – nyförvärv", - groups=[ - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Bestand101", required=True, has_part=[u"Bestand201", u"Bestand102"]), - Cell(variable_key=u"Bestand201", has_part=u"Bestand202", part_of=u"Bestand101"), - Cell(variable_key=u"Bestand301", has_part=u"Bestand302")]), - Row(cells=[ - Cell(variable_key=u"Bestand102", has_part=u"Bestand202", part_of=u"Bestand101"), - Cell(variable_key=u"Bestand202", part_of=[u"Bestand201", u"Bestand102"]), - Cell(variable_key=u"Bestand302", part_of=u"Bestand301")]), - Row(cells=[ - Cell(variable_key=u"Bestand103", has_part=u"Bestand203", required=True), - Cell(variable_key=u"Bestand203", part_of=u"Bestand103"), - Cell(variable_key=u"Bestand303")]), - Row(cells=[ - Cell(variable_key=u"Bestand104", has_part=u"Bestand204", required=True), - Cell(variable_key=u"Bestand204", part_of=u"Bestand104"), - Cell(variable_key=u"Bestand304")]), - Row(cells=[ - 
Cell(variable_key=u"Bestand105", has_part=u"Bestand205", required=True), - Cell(variable_key=u"Bestand205", part_of=u"Bestand105"), - Cell(variable_key=u"Bestand305")]), - Row(cells=[ - Cell(variable_key=u"Bestand106", has_part=u"Bestand206", required=True), - Cell(variable_key=u"Bestand206", part_of=u"Bestand106"), - Cell(variable_key=u"Bestand306")]), - Row(cells=[ - Cell(variable_key=u"Bestand107", has_part=u"Bestand207", required=True), - Cell(variable_key=u"Bestand207", part_of=u"Bestand107"), - Cell(variable_key=u"Bestand307")]), - Row(cells=[ - Cell(variable_key=u"Bestand108", has_part=u"Bestand208", required=True), - Cell(variable_key=u"Bestand208", part_of=u"Bestand108"), - Cell(variable_key=u"Bestand308")]), - Row(cells=[ - Cell(variable_key=u"Bestand109", has_part=u"Bestand209", required=True), - Cell(variable_key=u"Bestand209", part_of=u"Bestand109")]), - Row(cells=[ - Cell(variable_key=u"Bestand110", has_part=u"Bestand210", required=True), - Cell(variable_key=u"Bestand210", part_of=u"Bestand110"), - Cell(variable_key=u"Bestand310")]), - Row(cells=[ - Cell(variable_key=u"Bestand111", has_part=u"Bestand211", required=True), - Cell(variable_key=u"Bestand211", part_of=u"Bestand111"), - Cell(variable_key=u"Bestand311")]), - Row(cells=[ - Cell(variable_key=u"Bestand112", has_part=u"Bestand212", required=True), - Cell(variable_key=u"Bestand212", part_of=u"Bestand112"), - Cell(variable_key=u"Bestand312")]), - Row(cells=[ - Cell(variable_key=u"Bestand113", has_part=u"Bestand213", required=True), - Cell(variable_key=u"Bestand213", part_of=u"Bestand113"), - Cell(variable_key=u"Bestand313")]), - Row(cells=[ - Cell(variable_key=u"Bestand199", - has_part=u"Bestand299", - sum_of=['Bestand101', 'Bestand103', - 'Bestand104', 'Bestand105', 'Bestand106', - 'Bestand107', 'Bestand108', 'Bestand109', - 'Bestand110', 'Bestand111', 'Bestand112', - 'Bestand113']), - Cell(variable_key=u"Bestand299", - part_of=u"Bestand199", - sum_of=['Bestand201', 'Bestand203', - 'Bestand204', 'Bestand205', 'Bestand206', - 'Bestand207', 'Bestand208', 'Bestand209', - 'Bestand210', 'Bestand211', 'Bestand212', - 'Bestand213']), - Cell(variable_key=u"Bestand399", - sum_of=['Bestand301', 'Bestand303', - 'Bestand304', 'Bestand305', 'Bestand306', - 'Bestand307', 'Bestand308', 'Bestand310', - 'Bestand311', 'Bestand312', 'Bestand313'])])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Barn01", required=True)]), - Row(cells=[Cell(variable_key=u"Barn02", required=True)]), - Row(cells=[Cell(variable_key=u"Barn03", required=True)]), - Row(cells=[Cell(variable_key=u"HCG04", required=True)]), - Row(cells=[Cell(variable_key=u"Ref05", required=True)]), - Row(cells=[Cell(variable_key=u"LasnedBest01", required=True)]), - Row(cells=[Cell(variable_key=u"LasnedUtlan01", required=True)])]), - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Titlar101"), - Cell(variable_key=u"Titlar201"), - Cell(variable_key=u"Titlar301"), - Cell(variable_key=u"Titlar497", required=True, - sum_of=[u"Titlar101", u"Titlar201", u"Titlar301"])]), - Row(cells=[ - Cell(variable_key=u"Titlar102"), - Cell(variable_key=u"Titlar202"), - Cell(variable_key=u"Titlar302"), - Cell(variable_key=u"Titlar498", required=True, - sum_of=[u"Titlar102", u"Titlar202", u"Titlar302"])]), - Row(cells=[ - Cell(variable_key=u"Titlar199", - sum_of=[u"Titlar101", u"Titlar102"]), - Cell(variable_key=u"Titlar299", - sum_of=[u"Titlar201", u"Titlar202"]), - Cell(variable_key=u"Titlar399", - sum_of=[u"Titlar301", u"Titlar302"]), - Cell(variable_key=u"Titlar499", - sum_of=[u"Titlar497", 
u"Titlar498"])])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Databas01")]), - Row(cells=[Cell(variable_key=u"Databas02")]), - Row(cells=[Cell(variable_key=u"Databas03")]), - Row(cells=[Cell(variable_key=u"Databas04")]), - Row(cells=[Cell(variable_key=u"Databas05")]), - Row(cells=[Cell(variable_key=u"Databas06")]), - Row(cells=[Cell(variable_key=u"Databas07")]), - Row(cells=[Cell(variable_key=u"Databas08")]), - Row(cells=[Cell(variable_key=u"Databas09")]), - Row(cells=[Cell(variable_key=u"Databas99", - sum_of=[u"Databas01", u"Databas02", u"Databas03", - u"Databas04", u"Databas05", u"Databas06", - u"Databas07", u"Databas08", u"Databas09"])])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Databaskomm")])])]), - Section(title=u"Frågor om utlån, omlån och användning", - groups=[ - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Inilan101", has_part=u"Inilan102"), - Cell(variable_key=u"Omlan201", has_part=u"Omlan202"), - Cell(variable_key=u"Utlan301", has_part=u"Utlan302", - sum_of=[u"Inilan101", u"Omlan201"])]), - Row(cells=[ - Cell(variable_key=u"Inilan102", part_of=u"Inilan101"), - Cell(variable_key=u"Omlan202", part_of=u"Omlan201"), - Cell(variable_key=u"Utlan302", part_of=u"Utlan301", - sum_of=[u"Inilan102", u"Omlan202"])]), - Row(cells=[ - Cell(variable_key=u"Inilan103"), - Cell(variable_key=u"Omlan203"), - Cell(variable_key=u"Utlan303", - sum_of=[u"Inilan103", u"Omlan203"])]), - Row(cells=[ - Cell(variable_key=u"Inilan104"), - Cell(variable_key=u"Omlan204"), - Cell(variable_key=u"Utlan304", - sum_of=[u"Inilan104", u"Omlan204"])]), - Row(cells=[ - Cell(variable_key=u"Inilan105"), - Cell(variable_key=u"Omlan205"), - Cell(variable_key=u"Utlan305", - sum_of=[u"Inilan105", u"Omlan205"])]), - Row(cells=[ - Cell(variable_key=u"Inilan106"), - Cell(variable_key=u"Omlan206"), - Cell(variable_key=u"Utlan306", - sum_of=[u"Inilan106", u"Omlan206"])]), - Row(cells=[ - Cell(variable_key=u"Inilan107"), - Cell(variable_key=u"Omlan207"), - Cell(variable_key=u"Utlan307", - sum_of=[u"Inilan107", u"Omlan207"])]), - Row(cells=[ - Cell(variable_key=u"Inilan108"), - Cell(variable_key=u"Omlan208"), - Cell(variable_key=u"Utlan308", - sum_of=[u"Inilan108", u"Omlan208"])]), - Row(cells=[ - Cell(variable_key=u"Inilan109"), - Cell(variable_key=u"Omlan209"), - Cell(variable_key=u"Utlan309", - sum_of=[u"Inilan109", u"Omlan209"])]), - Row(cells=[ - Cell(variable_key=u"Inilan110"), - Cell(variable_key=u"Omlan210"), - Cell(variable_key=u"Utlan310", - sum_of=[u"Inilan110", u"Omlan210"])]), - Row(cells=[ - Cell(variable_key=u"Inilan111"), - Cell(variable_key=u"Omlan211"), - Cell(variable_key=u"Utlan311", - sum_of=[u"Inilan111", u"Omlan211"])]), - Row(cells=[ - Cell(variable_key=u"Inilan112"), - Cell(variable_key=u"Omlan212"), - Cell(variable_key=u"Utlan312", - sum_of=[u"Inilan112", u"Omlan212"])]), - Row(cells=[ - Cell(variable_key=u"Inilan113"), - Cell(variable_key=u"Omlan213"), - Cell(variable_key=u"Utlan313", - sum_of=[u"Inilan113", u"Omlan213"])]), - Row(cells=[ - Cell(variable_key=u"Inilan199", - sum_of=[u"Inilan101", u"Inilan103", u"Inilan104", - u"Inilan105", u"Inilan106", u"Inilan107", - u"Inilan108", u"Inilan109", u"Inilan110", - u"Inilan111", u"Inilan112", u"Inilan113"]), - Cell(variable_key=u"Omlan299", - sum_of=[u"Omlan201", u"Omlan203", u"Omlan204", - u"Omlan205", u"Omlan206", u"Omlan207", - u"Omlan208", u"Omlan209", u"Omlan210", - u"Omlan211", u"Omlan212", u"Omlan213"]), - Cell(variable_key=u"Utlan399", - sum_of=[u"Utlan301", u"Utlan303", u"Utlan304", - u"Utlan305", u"Utlan306", u"Utlan307", - 
u"Utlan308", u"Utlan309", u"Utlan310", - u"Utlan311", u"Utlan312", u"Utlan313"])])]), - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Laslan01") - ]), - Row(cells=[ - Cell(variable_key=u"Laslan02") - ]), - Row(cells=[ - Cell(variable_key=u"Laslan99") - ]) - ]), - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Fjarr101"), - Cell(variable_key=u"Fjarr201"), - Cell(variable_key=u"Fjarr397", - sum_of=[u"Fjarr101", u"Fjarr201"])]), - Row(cells=[ - Cell(variable_key=u"Fjarr102"), - Cell(variable_key=u"Fjarr202"), - Cell(variable_key=u"Fjarr398", - sum_of=[u"Fjarr102", u"Fjarr202"])]), - ]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Utlankomm")])])]), - Section(title=u"Omsättningen av elektroniska medier, användning och lån", - groups=[ - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Elan101"), - Cell(variable_key=u"Elan201"), - Cell(variable_key=u"Elan301")]), - Row(cells=[ - Cell(variable_key=u"Elan102"), - Cell(variable_key=u"Elan202")]), - Row(cells=[ - Cell(variable_key=u"Elan103"), - Cell(variable_key=u"Elan203")]), - Row(cells=[ - Cell(variable_key=u"Elan104"), - Cell(variable_key=u"Elan204")]), - Row(cells=[ - Cell(variable_key=u"Elan105"), - Cell(variable_key=u"Elan205")]), - Row(cells=[ - Cell(variable_key=u"Elan106"), - Cell(variable_key=u"Elan206")]), - Row(cells=[ - Cell(variable_key=u"Elan107"), - Cell(variable_key=u"Elan207")]), - Row(cells=[ - Cell(variable_key=u"Elan108"), - Cell(variable_key=u"Elan208")]), - Row(cells=[ - Cell(variable_key=u"Elan109"), - Cell(variable_key=u"Elan209")]), - Row(cells=[ - Cell(variable_key=u"Elan199", - sum_of=[u"Elan101", u"Elan102", u"Elan103", - u"Elan104", u"Elan105", u"Elan106", - u"Elan107", u"Elan108", u"Elan109"]), - Cell(variable_key=u"Elan299", - sum_of=[u"Elan201", u"Elan202", u"Elan203", - u"Elan204", u"Elan205", u"Elan206", - u"Elan207", u"Elan208", u"Elan209"]), - Cell(variable_key=u"Elan399", - sum_of=[u"Elan301"])])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Elankomm")])])]), - Section(title=u"Frågor om besök och aktiva låntagare", - groups=[ - Group(rows=[ - Row(cells=[Cell(variable_key=u"Besok01", required=True)]), - Row(cells=[Cell(variable_key=u"Besok02")]), - Row(cells=[Cell(variable_key=u"Besok03")]), - Row(cells=[Cell(variable_key=u"Besok04")]), - Row(cells=[Cell(variable_key=u"Besok05")])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Aktiv01")]), - Row(cells=[Cell(variable_key=u"Aktiv02")]), - Row(cells=[Cell(variable_key=u"Aktiv04")]), - Row(cells=[Cell(variable_key=u"Aktiv99", - sum_of=[u"Aktiv01", u"Aktiv02", u"Aktiv04"])]), - Row(cells=[Cell(variable_key=u"Aktiv03")])])]), - Section(title=u"Frågor om resurser och lokaler", - groups=[ - Group(rows=[ - Row(cells=[Cell(variable_key=u"Resurs01", required=True)]), - Row(cells=[Cell(variable_key=u"Resurs02")]), - Row(cells=[Cell(variable_key=u"Resurs03")]), - Row(cells=[Cell(variable_key=u"Resurs04")]), - Row(cells=[Cell(variable_key=u"Resurs05")]), - Row(cells=[Cell(variable_key=u"Resurs06")]), - Row(cells=[Cell(variable_key=u"Resurs07", required=True)]), - Row(cells=[Cell(variable_key=u"Resurs08")]), - Row(cells=[Cell(variable_key=u"Resurs09", required=True)]), - Row(cells=[Cell(variable_key=u"Resurs10")])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Besokkomm")])])]), - Section(title=u"Frågor om öppettider och nyttjande", - groups=[ - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Open101"), - Cell(variable_key=u"Open201")]), - Row(cells=[ - Cell(variable_key=u"Open102"), - Cell(variable_key=u"Open202")]), - Row(cells=[ - 
Cell(variable_key=u"Open103"), - Cell(variable_key=u"Open203")]), - Row(cells=[ - Cell(variable_key=u"Open104"), - Cell(variable_key=u"Open204")]), - Row(cells=[ - Cell(variable_key=u"Open105"), - Cell(variable_key=u"Open205")]), - Row(cells=[ - Cell(variable_key=u"Open106"), - Cell(variable_key=u"Open206")])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Serv01")]), - Row(cells=[Cell(variable_key=u"Serv02")]), - Row(cells=[Cell(variable_key=u"Serv03")]), - Row(cells=[Cell(variable_key=u"Serv04")]), - Row(cells=[Cell(variable_key=u"Serv05")]), - Row(cells=[Cell(variable_key=u"Serv06")]), - Row(cells=[Cell(variable_key=u"Serv07")])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Openkomm")])])]), - Section(title=u"Aktiviteter", - groups=[ - Group(rows=[ - Row(cells=[ - Cell(variable_key=u"Publ101", required=True, has_part=u"Publ201"), - Cell(variable_key=u"Publ201", part_of=u"Publ101")]), - Row(cells=[ - Cell(variable_key=u"Publ102", required=True, has_part=u"Publ202"), - Cell(variable_key=u"Publ202", part_of=u"Publ102")]), - Row(cells=[ - Cell(variable_key=u"Publ103", required=True, has_part=u"Publ203"), - Cell(variable_key=u"Publ203", part_of=u"Publ103")]), - Row(cells=[ - Cell(variable_key=u"Publ104", required=True, has_part=u"Publ204"), - Cell(variable_key=u"Publ204", part_of=u"Publ104")]), - Row(cells=[ - Cell(variable_key=u"Publ105", required=True, has_part=u"Publ205"), - Cell(variable_key=u"Publ205", part_of=u"Publ105")]), - Row(cells=[ - Cell(variable_key=u"Publ106", required=True, has_part=u"Publ206"), - Cell(variable_key=u"Publ206", part_of=u"Publ106")]), - Row(cells=[ - Cell(variable_key=u"Publ107", required=True, has_part=u"Publ207"), - Cell(variable_key=u"Publ207", part_of=u"Publ107")]), - Row(cells=[ - Cell(variable_key=u"Publ108", required=True, has_part=u"Publ208"), - Cell(variable_key=u"Publ208", part_of=u"Publ108")]), - Row(cells=[ - Cell(variable_key=u"Publ109", required=True, has_part=u"Publ209"), - Cell(variable_key=u"Publ209", part_of=u"Publ109")]), - Row(cells=[ - Cell(variable_key=u"Publ110", required=True, has_part=u"Publ210"), - Cell(variable_key=u"Publ210", part_of=u"Publ110")]), - Row(cells=[ - Cell(variable_key=u"Publ111", required=True, has_part=u"Publ211"), - Cell(variable_key=u"Publ211", part_of=u"Publ111")]), - Row(cells=[ - Cell(variable_key=u"Publ112", required=True, has_part=u"Publ212"), - Cell(variable_key=u"Publ212", part_of=u"Publ112")]), - Row(cells=[ - Cell(variable_key=u"Publ113", required=True, has_part=u"Publ213"), - Cell(variable_key=u"Publ213", part_of=u"Publ113")]), - Row(cells=[ - Cell(variable_key=u"Publ114", required=True, has_part=u"Publ214"), - Cell(variable_key=u"Publ214", part_of=u"Publ114")]), - Row(cells=[ - Cell(variable_key=u"Publ115", required=True, has_part=u"Publ215"), - Cell(variable_key=u"Publ215", part_of=u"Publ115")]), - Row(cells=[ - Cell(variable_key=u"Publ116", required=True, has_part=u"Publ216"), - Cell(variable_key=u"Publ216", part_of=u"Publ116")]), - Row(cells=[ - Cell(variable_key=u"Publ117", required=True, has_part=u"Publ217"), - Cell(variable_key=u"Publ217", part_of=u"Publ117")]), - Row(cells=[ - Cell(variable_key=u"Publ118", required=True, has_part=u"Publ218"), - Cell(variable_key=u"Publ218", part_of=u"Publ118")]), - Row(cells=[ - Cell(variable_key=u"Publ119", required=True, has_part=u"Publ219"), - Cell(variable_key=u"Publ219", part_of=u"Publ119")]), - Row(cells=[ - Cell(variable_key=u"Publ120", required=True, has_part=u"Publ220"), - Cell(variable_key=u"Publ220", part_of=u"Publ120")]), - Row(cells=[ - 
Cell(variable_key=u"Publ199", - has_part=u"Publ299", - sum_of=[u"Publ101", u"Publ102", u"Publ103", - u"Publ104", u"Publ105", u"Publ106", - u"Publ107", u"Publ108", u"Publ109", - u"Publ110", u"Publ111", u"Publ112", - u"Publ113", u"Publ114", u"Publ115", - u"Publ116", u"Publ117", u"Publ118", - u"Publ119", u"Publ120"]), - Cell(variable_key=u"Publ299", - part_of=u"Publ199", - sum_of=[u"Publ201", u"Publ202", u"Publ203", - u"Publ204", u"Publ205", u"Publ206", - u"Publ207", u"Publ208", u"Publ209", - u"Publ210", u"Publ211", u"Publ212", - u"Publ213", u"Publ214", u"Publ215", - u"Publ216", u"Publ217", u"Publ218", - u"Publ219", u"Publ220"])])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Publkomm")])])]), - Section(title=u"Slutligen", - groups=[ - Group(rows=[ - Row(cells=[Cell(variable_key=u"SCB01", required=True)]), - Row(cells=[Cell(variable_key=u"SCB02", required=True)])]), - Group(rows=[ - Row(cells=[Cell(variable_key=u"Alltkomm")])])])]) + Section( + title="Frågor om biblioteksorganisationen", + groups=[ + Group( + rows=[ + Row(cells=[Cell(variable_key="Namn01")]), + Row(cells=[Cell(variable_key="Epost01", required=True)]), + Row(cells=[Cell(variable_key="Tele01")]), + Row(cells=[Cell(variable_key="Plan01")]), + ] + ), + Group( + rows=[ + Row( + cells=[ + Cell(variable_key="BemanService01", required=True) + ] + ), + Row( + cells=[Cell(variable_key="Integrerad01", required=True)] + ), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Obeman01", required=True)]), + Row( + cells=[Cell(variable_key="ObemanLan01", required=True)] + ), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Bokbuss01", required=True)]), + Row( + cells=[Cell(variable_key="BokbussHP01", required=True)] + ), + Row(cells=[Cell(variable_key="Bokbil01")]), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Population01")]), + Row(cells=[Cell(variable_key="Population02")]), + Row(cells=[Cell(variable_key="Population03")]), + ] + ), + ], + ), + Section( + title="Frågor om bemanning och personal", + groups=[ + Group( + rows=[ + Row(cells=[Cell(variable_key="Arsverke01")]), + Row(cells=[Cell(variable_key="Arsverke02")]), + Row(cells=[Cell(variable_key="Arsverke03")]), + Row(cells=[Cell(variable_key="Arsverke04")]), + Row( + cells=[ + Cell( + variable_key="Arsverke99", + sum_of=[ + "Arsverke01", + "Arsverke02", + "Arsverke03", + "Arsverke04", + ], + ) + ] + ), + Row(cells=[Cell(variable_key="Arsverke05", required=True)]), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Personer01")]), + Row(cells=[Cell(variable_key="Personer02")]), + Row( + cells=[ + Cell( + variable_key="Personer99", + sum_of=["Personer01", "Personer02"], + ) + ] + ), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Personkomm")])]), + ], + ), + Section( + title="Frågor om ekonomi", + groups=[ + Group( + rows=[ + Row(cells=[Cell(variable_key="Utgift01")]), + Row(cells=[Cell(variable_key="Utgift02")]), + Row(cells=[Cell(variable_key="Utgift03")]), + Row(cells=[Cell(variable_key="Utgift04")]), + Row(cells=[Cell(variable_key="Utgift05")]), + Row(cells=[Cell(variable_key="Utgift06")]), + Row( + cells=[ + Cell( + variable_key="Utgift99", + sum_of=[ + "Utgift01", + "Utgift02", + "Utgift03", + "Utgift04", + "Utgift05", + "Utgift06", + ], + ) + ] + ), + Row(cells=[Cell(variable_key="Utgift07")]), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Intakt01")]), + Row(cells=[Cell(variable_key="Intakt02")]), + Row(cells=[Cell(variable_key="Intakt03")]), + Row( + cells=[ + Cell( + variable_key="Intakt99", + sum_of=["Intakt01", 
"Intakt02", "Intakt03"], + ) + ] + ), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Ekonomikomm")])]), + ], + ), + Section( + title="Bestånd – nyförvärv", + groups=[ + Group( + rows=[ + Row( + cells=[ + Cell( + variable_key="Bestand101", + required=True, + has_part=["Bestand201", "Bestand102"], + ), + Cell( + variable_key="Bestand201", + has_part="Bestand202", + part_of="Bestand101", + ), + Cell( + variable_key="Bestand301", has_part="Bestand302" + ), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand102", + has_part="Bestand202", + part_of="Bestand101", + ), + Cell( + variable_key="Bestand202", + part_of=["Bestand201", "Bestand102"], + ), + Cell( + variable_key="Bestand302", part_of="Bestand301" + ), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand103", + has_part="Bestand203", + required=True, + ), + Cell( + variable_key="Bestand203", part_of="Bestand103" + ), + Cell(variable_key="Bestand303"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand104", + has_part="Bestand204", + required=True, + ), + Cell( + variable_key="Bestand204", part_of="Bestand104" + ), + Cell(variable_key="Bestand304"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand105", + has_part="Bestand205", + required=True, + ), + Cell( + variable_key="Bestand205", part_of="Bestand105" + ), + Cell(variable_key="Bestand305"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand106", + has_part="Bestand206", + required=True, + ), + Cell( + variable_key="Bestand206", part_of="Bestand106" + ), + Cell(variable_key="Bestand306"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand107", + has_part="Bestand207", + required=True, + ), + Cell( + variable_key="Bestand207", part_of="Bestand107" + ), + Cell(variable_key="Bestand307"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand108", + has_part="Bestand208", + required=True, + ), + Cell( + variable_key="Bestand208", part_of="Bestand108" + ), + Cell(variable_key="Bestand308"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand109", + has_part="Bestand209", + required=True, + ), + Cell( + variable_key="Bestand209", part_of="Bestand109" + ), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand110", + has_part="Bestand210", + required=True, + ), + Cell( + variable_key="Bestand210", part_of="Bestand110" + ), + Cell(variable_key="Bestand310"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand111", + has_part="Bestand211", + required=True, + ), + Cell( + variable_key="Bestand211", part_of="Bestand111" + ), + Cell(variable_key="Bestand311"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand112", + has_part="Bestand212", + required=True, + ), + Cell( + variable_key="Bestand212", part_of="Bestand112" + ), + Cell(variable_key="Bestand312"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand113", + has_part="Bestand213", + required=True, + ), + Cell( + variable_key="Bestand213", part_of="Bestand113" + ), + Cell(variable_key="Bestand313"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Bestand199", + has_part="Bestand299", + sum_of=[ + "Bestand101", + "Bestand103", + "Bestand104", + "Bestand105", + "Bestand106", + "Bestand107", + "Bestand108", + "Bestand109", + "Bestand110", + "Bestand111", + "Bestand112", + "Bestand113", + ], + ), + Cell( + variable_key="Bestand299", + part_of="Bestand199", + sum_of=[ + "Bestand201", + "Bestand203", + "Bestand204", + "Bestand205", + "Bestand206", + "Bestand207", + "Bestand208", + "Bestand209", + "Bestand210", + "Bestand211", + "Bestand212", + "Bestand213", + ], + ), + 
Cell( + variable_key="Bestand399", + sum_of=[ + "Bestand301", + "Bestand303", + "Bestand304", + "Bestand305", + "Bestand306", + "Bestand307", + "Bestand308", + "Bestand310", + "Bestand311", + "Bestand312", + "Bestand313", + ], + ), + ] + ), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Barn01", required=True)]), + Row(cells=[Cell(variable_key="Barn02", required=True)]), + Row(cells=[Cell(variable_key="Barn03", required=True)]), + Row(cells=[Cell(variable_key="HCG04", required=True)]), + Row(cells=[Cell(variable_key="Ref05", required=True)]), + Row( + cells=[Cell(variable_key="LasnedBest01", required=True)] + ), + Row( + cells=[ + Cell(variable_key="LasnedUtlan01", required=True) + ] + ), + ] + ), + Group( + rows=[ + Row( + cells=[ + Cell(variable_key="Titlar101"), + Cell(variable_key="Titlar201"), + Cell(variable_key="Titlar301"), + Cell( + variable_key="Titlar497", + required=True, + sum_of=["Titlar101", "Titlar201", "Titlar301"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Titlar102"), + Cell(variable_key="Titlar202"), + Cell(variable_key="Titlar302"), + Cell( + variable_key="Titlar498", + required=True, + sum_of=["Titlar102", "Titlar202", "Titlar302"], + ), + ] + ), + Row( + cells=[ + Cell( + variable_key="Titlar199", + sum_of=["Titlar101", "Titlar102"], + ), + Cell( + variable_key="Titlar299", + sum_of=["Titlar201", "Titlar202"], + ), + Cell( + variable_key="Titlar399", + sum_of=["Titlar301", "Titlar302"], + ), + Cell( + variable_key="Titlar499", + sum_of=["Titlar497", "Titlar498"], + ), + ] + ), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Databas01")]), + Row(cells=[Cell(variable_key="Databas02")]), + Row(cells=[Cell(variable_key="Databas03")]), + Row(cells=[Cell(variable_key="Databas04")]), + Row(cells=[Cell(variable_key="Databas05")]), + Row(cells=[Cell(variable_key="Databas06")]), + Row(cells=[Cell(variable_key="Databas07")]), + Row(cells=[Cell(variable_key="Databas08")]), + Row(cells=[Cell(variable_key="Databas09")]), + Row( + cells=[ + Cell( + variable_key="Databas99", + sum_of=[ + "Databas01", + "Databas02", + "Databas03", + "Databas04", + "Databas05", + "Databas06", + "Databas07", + "Databas08", + "Databas09", + ], + ) + ] + ), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Databaskomm")])]), + ], + ), + Section( + title="Frågor om utlån, omlån och användning", + groups=[ + Group( + rows=[ + Row( + cells=[ + Cell( + variable_key="Inilan101", has_part="Inilan102" + ), + Cell(variable_key="Omlan201", has_part="Omlan202"), + Cell( + variable_key="Utlan301", + has_part="Utlan302", + sum_of=["Inilan101", "Omlan201"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan102", part_of="Inilan101"), + Cell(variable_key="Omlan202", part_of="Omlan201"), + Cell( + variable_key="Utlan302", + part_of="Utlan301", + sum_of=["Inilan102", "Omlan202"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan103"), + Cell(variable_key="Omlan203"), + Cell( + variable_key="Utlan303", + sum_of=["Inilan103", "Omlan203"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan104"), + Cell(variable_key="Omlan204"), + Cell( + variable_key="Utlan304", + sum_of=["Inilan104", "Omlan204"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan105"), + Cell(variable_key="Omlan205"), + Cell( + variable_key="Utlan305", + sum_of=["Inilan105", "Omlan205"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan106"), + Cell(variable_key="Omlan206"), + Cell( + variable_key="Utlan306", + sum_of=["Inilan106", "Omlan206"], + ), + ] + ), + Row( + cells=[ + 
Cell(variable_key="Inilan107"), + Cell(variable_key="Omlan207"), + Cell( + variable_key="Utlan307", + sum_of=["Inilan107", "Omlan207"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan108"), + Cell(variable_key="Omlan208"), + Cell( + variable_key="Utlan308", + sum_of=["Inilan108", "Omlan208"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan109"), + Cell(variable_key="Omlan209"), + Cell( + variable_key="Utlan309", + sum_of=["Inilan109", "Omlan209"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan110"), + Cell(variable_key="Omlan210"), + Cell( + variable_key="Utlan310", + sum_of=["Inilan110", "Omlan210"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan111"), + Cell(variable_key="Omlan211"), + Cell( + variable_key="Utlan311", + sum_of=["Inilan111", "Omlan211"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan112"), + Cell(variable_key="Omlan212"), + Cell( + variable_key="Utlan312", + sum_of=["Inilan112", "Omlan212"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Inilan113"), + Cell(variable_key="Omlan213"), + Cell( + variable_key="Utlan313", + sum_of=["Inilan113", "Omlan213"], + ), + ] + ), + Row( + cells=[ + Cell( + variable_key="Inilan199", + sum_of=[ + "Inilan101", + "Inilan103", + "Inilan104", + "Inilan105", + "Inilan106", + "Inilan107", + "Inilan108", + "Inilan109", + "Inilan110", + "Inilan111", + "Inilan112", + "Inilan113", + ], + ), + Cell( + variable_key="Omlan299", + sum_of=[ + "Omlan201", + "Omlan203", + "Omlan204", + "Omlan205", + "Omlan206", + "Omlan207", + "Omlan208", + "Omlan209", + "Omlan210", + "Omlan211", + "Omlan212", + "Omlan213", + ], + ), + Cell( + variable_key="Utlan399", + sum_of=[ + "Utlan301", + "Utlan303", + "Utlan304", + "Utlan305", + "Utlan306", + "Utlan307", + "Utlan308", + "Utlan309", + "Utlan310", + "Utlan311", + "Utlan312", + "Utlan313", + ], + ), + ] + ), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Laslan01")]), + Row(cells=[Cell(variable_key="Laslan02")]), + Row(cells=[Cell(variable_key="Laslan99")]), + ] + ), + Group( + rows=[ + Row( + cells=[ + Cell(variable_key="Fjarr101"), + Cell(variable_key="Fjarr201"), + Cell( + variable_key="Fjarr397", + sum_of=["Fjarr101", "Fjarr201"], + ), + ] + ), + Row( + cells=[ + Cell(variable_key="Fjarr102"), + Cell(variable_key="Fjarr202"), + Cell( + variable_key="Fjarr398", + sum_of=["Fjarr102", "Fjarr202"], + ), + ] + ), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Utlankomm")])]), + ], + ), + Section( + title="Omsättningen av elektroniska medier, användning och lån", + groups=[ + Group( + rows=[ + Row( + cells=[ + Cell(variable_key="Elan101"), + Cell(variable_key="Elan201"), + Cell(variable_key="Elan301"), + ] + ), + Row( + cells=[ + Cell(variable_key="Elan102"), + Cell(variable_key="Elan202"), + ] + ), + Row( + cells=[ + Cell(variable_key="Elan103"), + Cell(variable_key="Elan203"), + ] + ), + Row( + cells=[ + Cell(variable_key="Elan104"), + Cell(variable_key="Elan204"), + ] + ), + Row( + cells=[ + Cell(variable_key="Elan105"), + Cell(variable_key="Elan205"), + ] + ), + Row( + cells=[ + Cell(variable_key="Elan106"), + Cell(variable_key="Elan206"), + ] + ), + Row( + cells=[ + Cell(variable_key="Elan107"), + Cell(variable_key="Elan207"), + ] + ), + Row( + cells=[ + Cell(variable_key="Elan108"), + Cell(variable_key="Elan208"), + ] + ), + Row( + cells=[ + Cell(variable_key="Elan109"), + Cell(variable_key="Elan209"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Elan199", + sum_of=[ + "Elan101", + "Elan102", + "Elan103", + "Elan104", + 
"Elan105", + "Elan106", + "Elan107", + "Elan108", + "Elan109", + ], + ), + Cell( + variable_key="Elan299", + sum_of=[ + "Elan201", + "Elan202", + "Elan203", + "Elan204", + "Elan205", + "Elan206", + "Elan207", + "Elan208", + "Elan209", + ], + ), + Cell(variable_key="Elan399", sum_of=["Elan301"]), + ] + ), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Elankomm")])]), + ], + ), + Section( + title="Frågor om besök och aktiva låntagare", + groups=[ + Group( + rows=[ + Row(cells=[Cell(variable_key="Besok01", required=True)]), + Row(cells=[Cell(variable_key="Besok02")]), + Row(cells=[Cell(variable_key="Besok03")]), + Row(cells=[Cell(variable_key="Besok04")]), + Row(cells=[Cell(variable_key="Besok05")]), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Aktiv01")]), + Row(cells=[Cell(variable_key="Aktiv02")]), + Row(cells=[Cell(variable_key="Aktiv04")]), + Row( + cells=[ + Cell( + variable_key="Aktiv99", + sum_of=["Aktiv01", "Aktiv02", "Aktiv04"], + ) + ] + ), + Row(cells=[Cell(variable_key="Aktiv03")]), + ] + ), + ], + ), + Section( + title="Frågor om resurser och lokaler", + groups=[ + Group( + rows=[ + Row(cells=[Cell(variable_key="Resurs01", required=True)]), + Row(cells=[Cell(variable_key="Resurs02")]), + Row(cells=[Cell(variable_key="Resurs03")]), + Row(cells=[Cell(variable_key="Resurs04")]), + Row(cells=[Cell(variable_key="Resurs05")]), + Row(cells=[Cell(variable_key="Resurs06")]), + Row(cells=[Cell(variable_key="Resurs07", required=True)]), + Row(cells=[Cell(variable_key="Resurs08")]), + Row(cells=[Cell(variable_key="Resurs09", required=True)]), + Row(cells=[Cell(variable_key="Resurs10")]), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Besokkomm")])]), + ], + ), + Section( + title="Frågor om öppettider och nyttjande", + groups=[ + Group( + rows=[ + Row( + cells=[ + Cell(variable_key="Open101"), + Cell(variable_key="Open201"), + ] + ), + Row( + cells=[ + Cell(variable_key="Open102"), + Cell(variable_key="Open202"), + ] + ), + Row( + cells=[ + Cell(variable_key="Open103"), + Cell(variable_key="Open203"), + ] + ), + Row( + cells=[ + Cell(variable_key="Open104"), + Cell(variable_key="Open204"), + ] + ), + Row( + cells=[ + Cell(variable_key="Open105"), + Cell(variable_key="Open205"), + ] + ), + Row( + cells=[ + Cell(variable_key="Open106"), + Cell(variable_key="Open206"), + ] + ), + ] + ), + Group( + rows=[ + Row(cells=[Cell(variable_key="Serv01")]), + Row(cells=[Cell(variable_key="Serv02")]), + Row(cells=[Cell(variable_key="Serv03")]), + Row(cells=[Cell(variable_key="Serv04")]), + Row(cells=[Cell(variable_key="Serv05")]), + Row(cells=[Cell(variable_key="Serv06")]), + Row(cells=[Cell(variable_key="Serv07")]), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Openkomm")])]), + ], + ), + Section( + title="Aktiviteter", + groups=[ + Group( + rows=[ + Row( + cells=[ + Cell( + variable_key="Publ101", + required=True, + has_part="Publ201", + ), + Cell(variable_key="Publ201", part_of="Publ101"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ102", + required=True, + has_part="Publ202", + ), + Cell(variable_key="Publ202", part_of="Publ102"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ103", + required=True, + has_part="Publ203", + ), + Cell(variable_key="Publ203", part_of="Publ103"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ104", + required=True, + has_part="Publ204", + ), + Cell(variable_key="Publ204", part_of="Publ104"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ105", + required=True, + has_part="Publ205", + ), + 
Cell(variable_key="Publ205", part_of="Publ105"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ106", + required=True, + has_part="Publ206", + ), + Cell(variable_key="Publ206", part_of="Publ106"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ107", + required=True, + has_part="Publ207", + ), + Cell(variable_key="Publ207", part_of="Publ107"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ108", + required=True, + has_part="Publ208", + ), + Cell(variable_key="Publ208", part_of="Publ108"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ109", + required=True, + has_part="Publ209", + ), + Cell(variable_key="Publ209", part_of="Publ109"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ110", + required=True, + has_part="Publ210", + ), + Cell(variable_key="Publ210", part_of="Publ110"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ111", + required=True, + has_part="Publ211", + ), + Cell(variable_key="Publ211", part_of="Publ111"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ112", + required=True, + has_part="Publ212", + ), + Cell(variable_key="Publ212", part_of="Publ112"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ113", + required=True, + has_part="Publ213", + ), + Cell(variable_key="Publ213", part_of="Publ113"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ114", + required=True, + has_part="Publ214", + ), + Cell(variable_key="Publ214", part_of="Publ114"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ115", + required=True, + has_part="Publ215", + ), + Cell(variable_key="Publ215", part_of="Publ115"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ116", + required=True, + has_part="Publ216", + ), + Cell(variable_key="Publ216", part_of="Publ116"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ117", + required=True, + has_part="Publ217", + ), + Cell(variable_key="Publ217", part_of="Publ117"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ118", + required=True, + has_part="Publ218", + ), + Cell(variable_key="Publ218", part_of="Publ118"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ119", + required=True, + has_part="Publ219", + ), + Cell(variable_key="Publ219", part_of="Publ119"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ120", + required=True, + has_part="Publ220", + ), + Cell(variable_key="Publ220", part_of="Publ120"), + ] + ), + Row( + cells=[ + Cell( + variable_key="Publ199", + has_part="Publ299", + sum_of=[ + "Publ101", + "Publ102", + "Publ103", + "Publ104", + "Publ105", + "Publ106", + "Publ107", + "Publ108", + "Publ109", + "Publ110", + "Publ111", + "Publ112", + "Publ113", + "Publ114", + "Publ115", + "Publ116", + "Publ117", + "Publ118", + "Publ119", + "Publ120", + ], + ), + Cell( + variable_key="Publ299", + part_of="Publ199", + sum_of=[ + "Publ201", + "Publ202", + "Publ203", + "Publ204", + "Publ205", + "Publ206", + "Publ207", + "Publ208", + "Publ209", + "Publ210", + "Publ211", + "Publ212", + "Publ213", + "Publ214", + "Publ215", + "Publ216", + "Publ217", + "Publ218", + "Publ219", + "Publ220", + ], + ), + ] + ), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Publkomm")])]), + ], + ), + Section( + title="Slutligen", + groups=[ + Group( + rows=[ + Row(cells=[Cell(variable_key="SCB01", required=True)]), + Row(cells=[Cell(variable_key="SCB02", required=True)]), + ] + ), + Group(rows=[Row(cells=[Cell(variable_key="Alltkomm")])]), + ], + ), + ], + ) def _default_template_from_survey(survey): @@ -500,11 +1202,11 @@ def _default_template_from_survey(survey): if survey: for 
observation in survey.observations: variable = observation.variable - rows.append(Row(cells=[Cell(variable_key=variable.key, types=["text"])])) #TODO: use variable.type?? + rows.append( + Row(cells=[Cell(variable_key=variable.key, types=["text"])]) + ) # TODO: use variable.type?? - return SurveyTemplate(sections=[ - Section(title="", - groups=[Group(rows=rows)])]) + return SurveyTemplate(sections=[Section(title="", groups=[Group(rows=rows)])]) def survey_template(year, survey=None): diff --git a/libstat/templates/libstat/email_template.html b/libstat/templates/libstat/email_template.html index 5dc97cf7..122c8922 100644 --- a/libstat/templates/libstat/email_template.html +++ b/libstat/templates/libstat/email_template.html @@ -1,4 +1,4 @@ -Kungliga biblioteket +Kungliga biblioteket
{{ message|linebreaksbr }}
\ No newline at end of file diff --git a/libstat/templates/libstat/survey.html b/libstat/templates/libstat/survey.html index f1f698ae..8e8298a7 100644 --- a/libstat/templates/libstat/survey.html +++ b/libstat/templates/libstat/survey.html @@ -41,7 +41,7 @@

Stort tack för att du besvarade biblioteksstatistiken!

Enkäten är nu inskickad. Du kan fortfarande se dina inskickade svar, men enkäten är nu låst.
Om du skulle vilja ändra ett svar i efterhand, kontakta biblioteksstatistik@kb.se.

-Dina resultat kommer att publiceras inom några dagar på http://bibstat.kb.se/reports
+Dina resultat kommer att publiceras inom några dagar på https://bibstat.kb.se/reports

Skriv ut enkäten

@@ -294,6 +294,20 @@

Om du inte har möjlighet att fylla i ett specifikt värde, så kan du markera det som okänt. Du gör detta genom att klicka på knappen till höger om ett inmatningsfält, och välja värdet är okänt. Nedan ser du ett fält som har markerats som okänt.

När ett inmatningsfält markeras som okänt kan du inte längre skriva in ett värde.
För att återigen kunna skriva in ett värde i fältet, välj ange värdet i fältets meny.

Om vi frågar efter något som ni inte gör eller har, skriv värde 0.

diff --git a/libstat/templatetags/libstat_tags.py b/libstat/templatetags/libstat_tags.py index 09976c4d..f86d15a3 100644 --- a/libstat/templatetags/libstat_tags.py +++ b/libstat/templatetags/libstat_tags.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- import json import textwrap import re @@ -19,7 +18,11 @@ @register.filter def utc_tz(value): - return value.replace(tzinfo=pytz.utc) if value and isinstance(value, datetime) else value + return ( + value.replace(tzinfo=pytz.utc) + if value and isinstance(value, datetime) + else value + ) @register.filter @@ -38,6 +41,7 @@ def tg_label(value): display_names.append(targetGroups[value]) return ", ".join(display_names) + @register.filter def var_type_label(var_key): try: @@ -45,6 +49,7 @@ def var_type_label(var_key): except KeyError: return None + @register.filter def srs_label(key): return next((status[1] for status in Survey.STATUSES if status[0] == key)) @@ -57,6 +62,7 @@ def access(value, arg): except KeyError: return None + @register.filter def get_errors(form, key): try: @@ -66,9 +72,10 @@ def get_errors(form, key): return None return None + @register.filter def split_into_number_and_body(description): - if re.compile("^[0-9]+\.").match(description): + if re.compile(r"^[0-9]+\.").match(description): return description.split(" ", 1) else: return "", description @@ -97,37 +104,42 @@ def debug_enabled(_): @register.filter def format_number(number, digits=1): try: - locale.setlocale(locale.LC_NUMERIC, 'sv_SE') + locale.setlocale(locale.LC_NUMERIC, "sv_SE") except Exception: - locale.setlocale(locale.LC_NUMERIC, 'sv_SE.UTF-8') - return locale.format("%d" if number == int(number) else "%.{}f".format(digits), number, grouping=True) + locale.setlocale(locale.LC_NUMERIC, "sv_SE.UTF-8") + return locale.format( + "%d" if number == int(number) else "%.{}f".format(digits), number, grouping=True + ) + @register.filter def format_percentage(number): percentage = number * 100 return format_number(percentage) + "%" + @register.filter def format_email(email, limit=30): if len(email) <= limit: return email - return email[:limit - 3] + "..." + return email[: limit - 3] + "..." @register.filter def two_parts(thelist): - middle = len(thelist) / 2 + middle = len(thelist) // 2 if len(thelist) % 2 == 0: return [thelist[middle:], thelist[:middle]] else: - return [thelist[:middle + 1], thelist[middle + 1:]] + return [thelist[: middle + 1], thelist[middle + 1 :]] + @register.filter def chunks(l, n): - for i in xrange(0, len(l), n): - yield l[i:i+n] + for i in range(0, len(l), n): + yield l[i : i + n] @register.filter @@ -142,10 +154,7 @@ def show_in_chart(rows): @register.simple_tag def footer(): - infoStr = "© Kungl. biblioteket 2014-" + str(datetime.now().year) - if settings.RELEASE_VERSION: - infoStr = infoStr + ". 
Version " + settings.RELEASE_VERSION - return infoStr + return f"Version {settings.RELEASE_VERSION}" @register.simple_tag() diff --git a/libstat/tests/__init__.py b/libstat/tests/__init__.py index 9f6e0d9f..7130a673 100644 --- a/libstat/tests/__init__.py +++ b/libstat/tests/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- import random import string import json @@ -9,13 +8,22 @@ from django.test.runner import DiscoverRunner from django.test import TestCase from django.conf import settings -from django.core.urlresolvers import reverse +from django.urls import reverse -from libstat.models import Variable, OpenData, Survey, Library, SurveyObservation, Article, Dispatch, ExternalIdentifier +from libstat.models import ( + Variable, + OpenData, + Survey, + Library, + SurveyObservation, + Article, + Dispatch, + ExternalIdentifier, +) class MongoEngineTestRunner(DiscoverRunner): - def setup_databases(self): + def setup_databases(self, aliases=None): pass def teardown_databases(self, *args): @@ -23,7 +31,7 @@ def teardown_databases(self, *args): class MongoTestCase(TestCase): - mongodb_name = 'test_%s' % settings.MONGODB_NAME + mongodb_name = "test_%s" % settings.MONGODB_DATABASES["default"]["name"] def _login(self): self.client.login(username="admin", password="admin") @@ -35,7 +43,7 @@ def _get(self, action=None, kwargs=None, params={}): url = reverse(action, kwargs=kwargs) if params: url += "?" - for key, value in params.iteritems(): + for key, value in list(params.items()): url = "{}{}={}&".format(url, key, value) return self.client.get(url) @@ -48,76 +56,175 @@ def _post(self, action=None, kwargs=None, data=None): else: return self.client.post(reverse(action, kwargs=kwargs)) - def _dummy_library(self, name="dummy_name", sigel=None, bibdb_id="dummy_id", city="dummy_city", - municipality_code="dummy_code", library_type="folkbib", external_identifiers=None): + def _dummy_library( + self, + name="dummy_name", + sigel=None, + bibdb_id="dummy_id", + city="dummy_city", + municipality_code="dummy_code", + library_type="folkbib", + external_identifiers=None, + ): if not sigel: sigel = Library._random_sigel() - return Library(name=name, sigel=sigel, bibdb_id=bibdb_id, city=city, - municipality_code=municipality_code, library_type=library_type, external_identifiers=external_identifiers) - - def _dummy_variable(self, key=None, description=u"dummy description", type="integer", is_public=True, - target_groups=["folkbib"], is_draft=False, replaced_by=None, save=True, question=None, - category=None, sub_category=None, replaces=[], question_part=None): + return Library( + name=name, + sigel=sigel, + bibdb_id=bibdb_id, + city=city, + municipality_code=municipality_code, + library_type=library_type, + external_identifiers=external_identifiers, + ) + + def _dummy_variable( + self, + key=None, + description="dummy description", + type="integer", + is_public=True, + target_groups=["folkbib"], + is_draft=False, + replaced_by=None, + save=True, + question=None, + category=None, + sub_category=None, + replaces=[], + question_part=None, + ): if not key: - key = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8)) - variable = Variable(key=key, description=description, type=type, is_public=is_public, category=category, - target_groups=target_groups, is_draft=is_draft, replaced_by=replaced_by, question=question, - sub_category=sub_category, replaces=replaces, question_part=question_part) + key = "".join( + random.choice(string.ascii_uppercase + string.digits) for _ in range(8) + ) + 
variable = Variable( + key=key, + description=description, + type=type, + is_public=is_public, + category=category, + target_groups=target_groups, + is_draft=is_draft, + replaced_by=replaced_by, + question=question, + sub_category=sub_category, + replaces=replaces, + question_part=question_part, + ) if save: variable.save() variable.reload() return variable - def _dummy_observation(self, variable=None, value="dummy_value", disabled=False, value_unknown=False, - _is_public=True): + def _dummy_observation( + self, + variable=None, + value="dummy_value", + disabled=False, + value_unknown=False, + _is_public=True, + ): if not variable: variable = self._dummy_variable() - return SurveyObservation(variable=variable, value=value, disabled=disabled, value_unknown=value_unknown, - _is_public=_is_public) - - def _dummy_survey(self, sample_year=2001, password=None, target_group="folkbib", - status="not_viewed", publish=False, library=None, is_active=True, - observations=[], selected_libraries=None): + return SurveyObservation( + variable=variable, + value=value, + disabled=disabled, + value_unknown=value_unknown, + _is_public=_is_public, + ) + + def _dummy_survey( + self, + sample_year=2001, + password=None, + target_group="folkbib", + status="not_viewed", + publish=False, + library=None, + is_active=True, + observations=[], + selected_libraries=None, + ): if not library: library = self._dummy_library() if selected_libraries is None: selected_libraries = [library.sigel] - survey = Survey(library=library, sample_year=sample_year, - target_group=target_group, password=password, status=status, is_active=is_active, - observations=observations, selected_libraries=selected_libraries).save() + survey = Survey( + library=library, + sample_year=sample_year, + target_group=target_group, + password=password, + status=status, + is_active=is_active, + observations=observations, + selected_libraries=selected_libraries, + ).save() if publish: survey.publish() survey.reload() return survey - def _dummy_open_data(self, library_name=u"dummy_lib", sigel="dummy_sigel", sample_year=2013, - target_group="folkbib", is_active=True, - variable=None, value=1, date_created=None, date_modified=None, save=True): + def _dummy_open_data( + self, + library_name="dummy_lib", + sigel="dummy_sigel", + sample_year=2013, + target_group="folkbib", + is_active=True, + variable=None, + value=1, + date_created=None, + date_modified=None, + save=True, + ): if not variable: variable = self._dummy_variable() variable.save() if not date_created: - date_created = datetime(2014, 05, 27, 8, 00, 00) + date_created = datetime(2014, 5, 27, 8, 0, 0) if not date_modified: - date_modified = datetime(2014, 06, 02, 17, 57, 16) - open_data = OpenData(library_name=library_name, sigel=sigel, sample_year=sample_year, is_active=is_active, - target_group=target_group, variable=variable, value=value, date_created=date_created, - date_modified=date_modified) + date_modified = datetime(2014, 6, 2, 17, 57, 16) + open_data = OpenData( + library_name=library_name, + sigel=sigel, + sample_year=sample_year, + is_active=is_active, + target_group=target_group, + variable=variable, + value=value, + date_created=date_created, + date_modified=date_modified, + ) if save: open_data.save() open_data.reload() return open_data def _dummy_article(self, title=None, content=None): - article = Article(title, content) + article = Article(title=title, content=content) article.save() article.reload() return article - def _dummy_dispatch(self, description=None, title=None, message=None, 
library_email=None, - library_city=None, library_name=None): - dispatch = Dispatch(description=description, title=title, message=message, library_email=library_email, - library_city=library_city, library_name=library_name) + def _dummy_dispatch( + self, + description=None, + title=None, + message=None, + library_email=None, + library_city=None, + library_name=None, + ): + dispatch = Dispatch( + description=description, + title=title, + message=message, + library_email=library_email, + library_city=library_city, + library_name=library_name, + ) dispatch.save() dispatch.reload() return dispatch @@ -127,17 +234,18 @@ def _dummy_external_identifier(self, type="school_code", identifier="12345678"): return external_identifier def _fixture_setup(self): - from mongoengine.connection import connect, disconnect + from mongoengine.connection import connect, disconnect, get_db disconnect() - connect(self.mongodb_name, host=settings.MONGODB_HOST) - from mongoengine.django.mongo_auth.models import MongoUser - - if MongoUser.objects.filter(username="admin").count() == 0: - MongoUser.objects.create_superuser("admin", "admin@example.com", "admin") - if MongoUser.objects.filter(username="library_user").count() == 0: - MongoUser.objects.create_user("library_user", "library.user@example.com", "secret") - setup_test_environment() + connect(self.mongodb_name, host=settings.MONGODB_DATABASES["default"]["host"]) + self.db = get_db() + from django_mongoengine.mongo_auth.models import User + + if User.objects.filter(username="admin").count() == 0: + User.create_superuser("admin", "admin", email="admin@example.com") + if User.objects.filter(username="library_user").count() == 0: + User.create_user("library_user", "secret", email="library.user@example.com") + # setup_test_environment() def _post_teardown(self): from mongoengine.connection import get_connection, disconnect diff --git a/libstat/tests/apis/test_observation.py b/libstat/tests/apis/test_observation.py index f24f3d52..b2db246b 100644 --- a/libstat/tests/apis/test_observation.py +++ b/libstat/tests/apis/test_observation.py @@ -1,47 +1,60 @@ -# -*- coding: UTF-8 -*- import json -from django.core.urlresolvers import reverse +from django.urls import reverse from django.conf import settings from libstat.tests import MongoTestCase class ObservationApiTest(MongoTestCase): - def test_response_should_return_jsonld(self): obs = self._dummy_open_data() - response = self.client.get(reverse("observation_api", kwargs={"observation_id": str(obs.id)})) + response = self.client.get( + reverse("observation_api", kwargs={"observation_id": str(obs.id)}) + ) self.assertEqual(response["Content-Type"], "application/ld+json") def test_response_should_contain_context(self): obs = self._dummy_open_data() - response = self.client.get(reverse("observation_api", kwargs={"observation_id": str(obs.id)})) + response = self.client.get( + reverse("observation_api", kwargs={"observation_id": str(obs.id)}) + ) data = json.loads(response.content) - self.assertEqual(data[u"@context"][u"@vocab"], u"{}/def/terms/".format(settings.API_BASE_URL)), - self.assertEquals(data[u"@context"][u"@base"], u"{}/data/".format(settings.API_BASE_URL)) + self.assertEqual( + data["@context"]["@vocab"], "{}/def/terms/".format(settings.API_BASE_URL) + ), + self.assertEqual( + data["@context"]["@base"], "{}/data/".format(settings.API_BASE_URL) + ) def test_should_return_one_observation(self): - variable = self._dummy_variable(key=u"folk5") + variable = self._dummy_variable(key="folk5") obs = 
self._dummy_open_data(variable=variable, sample_year=2013) - response = self.client.get(reverse("observation_api", kwargs={"observation_id": str(obs.id)})) + response = self.client.get( + reverse("observation_api", kwargs={"observation_id": str(obs.id)}) + ) data = json.loads(response.content) - self.assertEqual(data[u"@id"], str(obs.id)) - self.assertEqual(data[u"@type"], u"Observation") - self.assertEqual(data[u"folk5"], obs.value) - self.assertEqual(data[u"library"][u"@id"], u"{}/library/{}".format(settings.BIBDB_BASE_URL, obs.sigel)) - self.assertEqual(data[u"library"][u"name"], obs.library_name) - self.assertEqual(data[u"sampleYear"], obs.sample_year) - self.assertEqual(data[u"published"], obs.date_created_str()) - self.assertEqual(data[u"modified"], obs.date_modified_str()) + self.assertEqual(data["@id"], str(obs.id)) + self.assertEqual(data["@type"], "Observation") + self.assertEqual(data["folk5"], obs.value) + self.assertEqual( + data["library"]["@id"], + "{}/library/{}".format(settings.BIBDB_BASE_URL, obs.sigel), + ) + self.assertEqual(data["library"]["name"], obs.library_name) + self.assertEqual(data["sampleYear"], obs.sample_year) + self.assertEqual(data["published"], obs.date_created_str()) + self.assertEqual(data["modified"], obs.date_modified_str()) def test_should_return_404_if_observation_not_found(self): - response = self.client.get(reverse("observation_api", kwargs={"observation_id": "12323873982375a8c0g"})) + response = self.client.get( + reverse("observation_api", kwargs={"observation_id": "12323873982375a8c0g"}) + ) - self.assertEqual(response.status_code, 404) \ No newline at end of file + self.assertEqual(response.status_code, 404) diff --git a/libstat/tests/apis/test_open_data.py b/libstat/tests/apis/test_open_data.py index 9e103937..8cc88a78 100644 --- a/libstat/tests/apis/test_open_data.py +++ b/libstat/tests/apis/test_open_data.py @@ -1,8 +1,7 @@ -# -*- coding: UTF-8 -*- import json from datetime import datetime -from django.core.urlresolvers import reverse +from django.urls import reverse from django.conf import settings from libstat.tests import MongoTestCase @@ -10,7 +9,6 @@ class OpenDataApiTest(MongoTestCase): - def test_response_should_return_jsonld(self): response = self.client.get(reverse("data_api")) @@ -20,7 +18,7 @@ def test_response_should_contain_context(self): response = self.client.get(reverse("data_api")) data = json.loads(response.content) - self.assertEquals(data[u"@context"], data_context) + self.assertEqual(data["@context"], data_context) def test_should_not_filter_by_date_unless_requested(self): self._dummy_open_data() @@ -30,73 +28,88 @@ def test_should_not_filter_by_date_unless_requested(self): response = self.client.get(reverse("data_api")) data = json.loads(response.content) - self.assertEquals(len(data[u"observations"]), 3) + self.assertEqual(len(data["observations"]), 3) def test_should_filter_data_by_from_date(self): - self._dummy_open_data(sigel="sigel_1", date_modified=datetime(2014, 06, 05, 11, 14, 01)) + self._dummy_open_data( + sigel="sigel_1", date_modified=datetime(2014, 6, 5, 11, 14, 1) + ) - response = self.client.get(u"{}?from_date=2014-06-04".format(reverse("data_api"))) + response = self.client.get( + "{}?from_date=2014-06-04".format(reverse("data_api")) + ) data = json.loads(response.content) - self.assertEquals(len(data[u"observations"]), 1) - self.assertEquals(data[u"observations"][0][u"library"][u"@id"], - u"{}/library/sigel_1".format(settings.BIBDB_BASE_URL)) + self.assertEqual(len(data["observations"]), 1) + 
self.assertEqual( + data["observations"][0]["library"]["@id"], + "{}/library/sigel_1".format(settings.BIBDB_BASE_URL), + ) def test_should_filter_data_by_to_date(self): - self._dummy_open_data(sigel="81", date_modified=datetime(2014, 06, 02, 11, 14, 01)) + self._dummy_open_data(sigel="81", date_modified=datetime(2014, 6, 2, 11, 14, 1)) - response = self.client.get(u"{}?to_date=2014-06-03".format(reverse("data_api"))) + response = self.client.get("{}?to_date=2014-06-03".format(reverse("data_api"))) data = json.loads(response.content) - self.assertEquals(len(data[u"observations"]), 1) - self.assertEquals(data[u"observations"][0][u"library"][u"@id"], - u"{}/library/81".format(settings.BIBDB_BASE_URL)) + self.assertEqual(len(data["observations"]), 1) + self.assertEqual( + data["observations"][0]["library"]["@id"], + "{}/library/81".format(settings.BIBDB_BASE_URL), + ) def test_should_filter_data_by_date_range(self): - self._dummy_open_data(sigel="323", date_modified=datetime(2014, 06, 03, 11, 14, 01)) + self._dummy_open_data( + sigel="323", date_modified=datetime(2014, 6, 3, 11, 14, 1) + ) response = self.client.get( - u"{}?from_date=2014-06-02T15:28:31.000&to_date=2014-06-04T11:14:00.000".format(reverse("data_api"))) + "{}?from_date=2014-06-02T15:28:31.000&to_date=2014-06-04T11:14:00.000".format( + reverse("data_api") + ) + ) data = json.loads(response.content) - self.assertEquals(len(data[u"observations"]), 1) - self.assertEquals(data[u"observations"][0][u"library"][u"@id"], - u"{}/library/323".format(settings.BIBDB_BASE_URL)) + self.assertEqual(len(data["observations"]), 1) + self.assertEqual( + data["observations"][0]["library"]["@id"], + "{}/library/323".format(settings.BIBDB_BASE_URL), + ) def test_should_limit_results(self): self._dummy_open_data() self._dummy_open_data() self._dummy_open_data() - response = self.client.get(u"{}?limit=2".format(reverse("data_api"))) + response = self.client.get("{}?limit=2".format(reverse("data_api"))) data = json.loads(response.content) - self.assertEquals(len(data[u"observations"]), 2) + self.assertEqual(len(data["observations"]), 2) def test_should_limit_results_with_offset(self): self._dummy_open_data() self._dummy_open_data() self._dummy_open_data() - response = self.client.get(u"{}?limit=2&offset=2".format(reverse("data_api"))) + response = self.client.get("{}?limit=2&offset=2".format(reverse("data_api"))) data = json.loads(response.content) - self.assertEquals(len(data[u"observations"]), 1) + self.assertEqual(len(data["observations"]), 1) def test_should_filter_by_term_key(self): - variable = self._dummy_variable(key=u"folk6") + variable = self._dummy_variable(key="folk6") self._dummy_open_data(variable=variable) - response = self.client.get(u"{}?term=folk6".format(reverse("data_api"))) + response = self.client.get("{}?term=folk6".format(reverse("data_api"))) data = json.loads(response.content) - self.assertEquals(len(data[u"observations"]), 1) - self.assertEquals(data[u"observations"][0][u"folk6"], 1) + self.assertEqual(len(data["observations"]), 1) + self.assertEqual(data["observations"][0]["folk6"], 1) def test_should_return_empty_result_if_unknown_term(self): - response = self.client.get(u"{}?term=hej".format(reverse("data_api"))) + response = self.client.get("{}?term=hej".format(reverse("data_api"))) data = json.loads(response.content) - self.assertEquals(len(data[u"observations"]), 0) + self.assertEqual(len(data["observations"]), 0) def test_should_only_return_active_open_data_entries(self): self._dummy_open_data(is_active=True) @@ -104,4 
+117,4 @@ def test_should_only_return_active_open_data_entries(self): response = self._get_json("data_api") - self.assertEquals(len(response[u"observations"]), 1) + self.assertEqual(len(response["observations"]), 1) diff --git a/libstat/tests/apis/test_term.py b/libstat/tests/apis/test_term.py index 284bacc5..19ed99e4 100644 --- a/libstat/tests/apis/test_term.py +++ b/libstat/tests/apis/test_term.py @@ -1,7 +1,6 @@ -# -*- coding: UTF-8 -*- import json -from django.core.urlresolvers import reverse +from django.urls import reverse from libstat.tests import MongoTestCase from libstat.apis.terms import term_context @@ -9,44 +8,62 @@ class TermApiTest(MongoTestCase): def test_response_should_return_jsonld(self): - self._dummy_variable(key=u"folk5") + self._dummy_variable(key="folk5") response = self.client.get(reverse("term_api", kwargs={"term_key": "folk5"})) self.assertEqual(response["Content-Type"], "application/ld+json") def test_response_should_contain_context(self): - self._dummy_variable(key=u"folk5") + self._dummy_variable(key="folk5") response = self.client.get(reverse("term_api", kwargs={"term_key": "folk5"})) data = json.loads(response.content) - self.assertEqual(data[u"@context"][u"xsd"], u"http://www.w3.org/2001/XMLSchema#") - self.assertEqual(data[u"@context"][u"rdf"], u"http://www.w3.org/1999/02/22-rdf-syntax-ns#") - self.assertEqual(data[u"@context"][u"rdfs"], u"http://www.w3.org/2000/01/rdf-schema#") - self.assertEqual(data[u"@context"][u"qb"], u"http://purl.org/linked-data/cube#") - self.assertEqual(data[u"@context"][u"@language"], u"sv") - self.assertEqual(data[u"@context"][u"label"], u"rdfs:label") - self.assertEqual(data[u"@context"][u"range"], {u"@id": u"rdfs:range", u"@type": u"@id"}) - self.assertEqual(data[u"@context"][u"comment"], u"rdfs:comment") - self.assertEqual(data[u"@context"][u"subClassOf"], {u"@id": u"rdfs:subClassOf", u"@type": u"@id"}) - self.assertEqual(data[u"@context"][u"replaces"], {u"@id": u"dcterms:replaces", u"@type": u"@id"}) - self.assertEqual(data[u"@context"][u"replacedBy"], {u"@id": u"dcterms:isReplacedBy", u"@type": u"@id"}) - self.assertEqual(data[u"@context"][u"valid"], {u"@id": u"dcterms:valid", u"@type": u"dcterms:Period"}) + self.assertEqual(data["@context"]["xsd"], "http://www.w3.org/2001/XMLSchema#") + self.assertEqual( + data["@context"]["rdf"], "http://www.w3.org/1999/02/22-rdf-syntax-ns#" + ) + self.assertEqual( + data["@context"]["rdfs"], "http://www.w3.org/2000/01/rdf-schema#" + ) + self.assertEqual(data["@context"]["qb"], "http://purl.org/linked-data/cube#") + self.assertEqual(data["@context"]["@language"], "sv") + self.assertEqual(data["@context"]["label"], "rdfs:label") + self.assertEqual( + data["@context"]["range"], {"@id": "rdfs:range", "@type": "@id"} + ) + self.assertEqual(data["@context"]["comment"], "rdfs:comment") + self.assertEqual( + data["@context"]["subClassOf"], {"@id": "rdfs:subClassOf", "@type": "@id"} + ) + self.assertEqual( + data["@context"]["replaces"], {"@id": "dcterms:replaces", "@type": "@id"} + ) + self.assertEqual( + data["@context"]["replacedBy"], + {"@id": "dcterms:isReplacedBy", "@type": "@id"}, + ) + self.assertEqual( + data["@context"]["valid"], + {"@id": "dcterms:valid", "@type": "dcterms:Period"}, + ) def test_should_return_one_term(self): - self._dummy_variable(key=u"folk5", description=u"some description", type="integer") + self._dummy_variable( + key="folk5", description="some description", type="integer" + ) response = self.client.get(reverse("term_api", kwargs={"term_key": "folk5"})) data = 
json.loads(response.content) - self.assertEquals(len(data), 6) - self.assertEquals(data[u"@context"], term_context) - self.assertEquals(data[u"@id"], u"folk5"), - self.assertEquals(data[u"@type"], [u"rdf:Property", u"qb:MeasureProperty"]), - self.assertEquals(data[u"comment"], u"some description"), - self.assertEquals(data[u"range"], u"xsd:integer") - self.assertEquals(data[u"isDefinedBy"], "") + self.assertEqual(len(data), 6) + self.assertEqual(data["@context"], term_context) + self.assertEqual(data["@id"], "folk5"), + self.assertEqual(data["@type"], ["rdf:Property", "qb:MeasureProperty"]), + self.assertEqual(data["comment"], "some description"), + self.assertEqual(data["range"], "xsd:integer") + self.assertEqual(data["isDefinedBy"], "") def test_should_return_404_if_term_not_found(self): response = self.client.get(reverse("term_api", kwargs={"term_key": "foo"})) diff --git a/libstat/tests/apis/test_terms.py b/libstat/tests/apis/test_terms.py index e10534a7..3226ce72 100644 --- a/libstat/tests/apis/test_terms.py +++ b/libstat/tests/apis/test_terms.py @@ -1,14 +1,13 @@ # -*- coding: UTF-8 -*- import json -from django.core.urlresolvers import reverse +from django.urls import reverse from libstat.tests import MongoTestCase from libstat.apis.terms import term_context class TermsApiTest(MongoTestCase): - def test_response_should_return_jsonld(self): response = self.client.get(reverse("terms_api")) @@ -18,36 +17,34 @@ def test_response_should_contain_context(self): response = self.client.get(reverse("terms_api")) data = json.loads(response.content) - self.assertEquals(data[u"@context"], term_context) + self.assertEqual(data["@context"], term_context) def test_should_contain_hardcoded_terms(self): response = self.client.get(reverse("terms_api")) data = json.loads(response.content) - ids = [term[u"@id"] for term in data[u"terms"]] + ids = [term["@id"] for term in data["terms"]] - self.assertTrue(u"library" in ids) - self.assertTrue(u"sampleYear" in ids) - self.assertTrue(u"targetGroup" in ids) - self.assertTrue(u"modified" in ids) - self.assertTrue(u"published" in ids) - self.assertTrue(u"Observation" in ids) + self.assertTrue("library" in ids) + self.assertTrue("sampleYear" in ids) + self.assertTrue("targetGroup" in ids) + self.assertTrue("modified" in ids) + self.assertTrue("published" in ids) + self.assertTrue("Observation" in ids) def test_should_return_all_variables(self): - self._dummy_variable(key=u"folk5") + self._dummy_variable(key="folk5") response = self.client.get(reverse("terms_api")) data = json.loads(response.content) - ids = [term[u"@id"] for term in data[u"terms"]] + ids = [term["@id"] for term in data["terms"]] - self.assertTrue(u"folk5" in ids) + self.assertTrue("folk5" in ids) def test_should_not_return_variable_drafts(self): - self._dummy_variable(key=u"69", is_draft=True) + self._dummy_variable(key="69", is_draft=True) response = self.client.get(reverse("terms_api")) data = json.loads(response.content) - ids = [term[u"@id"] for term in data[u"terms"]] - - self.assertFalse(u"Folk69" in ids) - + ids = [term["@id"] for term in data["terms"]] + self.assertFalse("Folk69" in ids) diff --git a/libstat/tests/forms/test_survey.py b/libstat/tests/forms/test_survey.py index 5f7ad57c..d6bfdaac 100644 --- a/libstat/tests/forms/test_survey.py +++ b/libstat/tests/forms/test_survey.py @@ -1,31 +1,28 @@ -# -*- coding: UTF-8 -*- -from sets import Set - from libstat.forms.survey import SurveyForm from libstat.tests import MongoTestCase -class TestUserReadOnly(MongoTestCase): +class 
TestUserReadOnly(MongoTestCase): def test_form_should_not_be_user_read_only_when_survey_status_is_not_viewed(self): - survey = self._dummy_survey(status=u"not_viewed") + survey = self._dummy_survey(status="not_viewed") form = SurveyForm(survey=survey) self.assertFalse(form.is_user_read_only) def test_form_should_not_be_user_read_only_when_survey_status_is_initated(self): - survey = self._dummy_survey(status=u"initiated") + survey = self._dummy_survey(status="initiated") form = SurveyForm(survey=survey) self.assertFalse(form.is_user_read_only) def test_form_should_be_user_read_only_when_survey_status_is_submitted(self): - survey = self._dummy_survey(status=u"submitted") + survey = self._dummy_survey(status="submitted") form = SurveyForm(survey=survey) self.assertTrue(form.is_user_read_only) def test_form_should_be_user_read_only_when_survey_status_is_controlled(self): - survey = self._dummy_survey(status=u"controlled") + survey = self._dummy_survey(status="controlled") form = SurveyForm(survey=survey) self.assertTrue(form.is_user_read_only) @@ -39,15 +36,14 @@ def test_form_should_be_user_read_only_when_survey_status_is_published(self): class TestReadOnly(MongoTestCase): - def test_form_should_not_be_read_only_when_authenticated(self): - survey = self._dummy_survey(status=u"submitted") + survey = self._dummy_survey(status="submitted") form = SurveyForm(survey=survey, authenticated=True) self.assertFalse(form.is_read_only) def test_form_should_be_read_only_when_not_authenticated_and_submitted(self): - survey = self._dummy_survey(status=u"submitted") + survey = self._dummy_survey(status="submitted") form = SurveyForm(survey=survey, authenticated=False) self.assertTrue(form.is_read_only) diff --git a/libstat/tests/management/test_import_survey_responses.py b/libstat/tests/management/test_import_survey_responses.py index 3e850756..315ad7ee 100644 --- a/libstat/tests/management/test_import_survey_responses.py +++ b/libstat/tests/management/test_import_survey_responses.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from django.core.management import call_command from django.core.management.base import CommandError @@ -7,102 +6,119 @@ class ImportSurveyResponsesTest(MongoTestCase): - def setUp(self): args = [] opts = {"file": "data/variables/folk_termer.xlsx", "target_group": "folkbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 201) + self.assertEqual(len(Variable.objects.all()), 201) def test_import_survey_responses_requires_file_option(self): args = [] opts = {"target_group": "folkbib", "year": 2012} - call_command('import_survey_responses', *args, **opts) + call_command("import_survey_responses", *args, **opts) - self.assertEquals(len(Survey.objects.all()), 0) + self.assertEqual(len(Survey.objects.all()), 0) def test_import_variables_requires_target_group_option(self): args = [] opts = {"file": "libstat/tests/data/Folk2012.xlsx", "year": 2012} - call_command('import_survey_responses', *args, **opts) + call_command("import_survey_responses", *args, **opts) - self.assertEquals(len(Survey.objects.all()), 0) + self.assertEqual(len(Survey.objects.all()), 0) def test_import_variables_requires_year_option(self): args = [] opts = {"file": "libstat/tests/data/Folk2012.xlsx", "target_group": "folkbib"} - call_command('import_survey_responses', *args, **opts) + call_command("import_survey_responses", *args, **opts) - 
self.assertEquals(len(Survey.objects.all()), 0) + self.assertEqual(len(Survey.objects.all()), 0) def test_import_survey_responses_should_abort_if_invalid_year(self): args = [] - opts = {"file": "libstat/tests/data/Folk2012.xlsx", "target_group": "folkbib", "year": '201b'} - self.assertRaises(CommandError, call_command, 'import_survey_responses', *args, **opts) + opts = { + "file": "libstat/tests/data/Folk2012.xlsx", + "target_group": "folkbib", + "year": "201b", + } + self.assertRaises( + CommandError, call_command, "import_survey_responses", *args, **opts + ) def test_import_survey_responses_should_abort_if_data_for_year_not_in_file(self): args = [] - opts = {"file": "libstat/tests/data/Folk2012.xlsx", "target_group": "folkbib", "year": 2013} - self.assertRaises(CommandError, call_command, 'import_survey_responses', *args, **opts) + opts = { + "file": "libstat/tests/data/Folk2012.xlsx", + "target_group": "folkbib", + "year": 2013, + } + self.assertRaises( + CommandError, call_command, "import_survey_responses", *args, **opts + ) def test_should_import_public_lib_survey_responses(self): args = [] - opts = {"file": "libstat/tests/data/Folk2012.xlsx", "target_group": "folkbib", "year": 2012} - call_command('import_survey_responses', *args, **opts) + opts = { + "file": "libstat/tests/data/Folk2012.xlsx", + "target_group": "folkbib", + "year": 2012, + } + call_command("import_survey_responses", *args, **opts) - self.assertEquals(len(Survey.objects.all()), 8) + self.assertEqual(len(Survey.objects.all()), 8) sr = None for s in Survey.objects.all(): - if s.library.name == u"KARLSTADS STADSBIBLIOTEK": + if s.library.name == "KARLSTADS STADSBIBLIOTEK": sr = s - self.assertEquals(sr.library.name, u"KARLSTADS STADSBIBLIOTEK") - self.assertEquals(sr.library.municipality_code, u"1780") + self.assertEqual(sr.library.name, "KARLSTADS STADSBIBLIOTEK") + self.assertEqual(sr.library.municipality_code, "1780") # Check data types and visibility # Private, string value folk1_obs = [obs for obs in sr.observations if obs.variable.key == "Folk1"][0] - self.assertTrue(isinstance(folk1_obs.value, basestring)) - self.assertEquals(folk1_obs.value, u"Karlstad") + self.assertTrue(isinstance(folk1_obs.value, str)) + self.assertEqual(folk1_obs.value, "Karlstad") self.assertFalse(folk1_obs._is_public) # Private, string value None folk7_obs = [obs for obs in sr.observations if obs.variable.key == "Folk7"][0] - self.assertEquals(folk7_obs.value, None) + self.assertEqual(folk7_obs.value, None) self.assertFalse(folk7_obs._is_public) # Public, int (boolean) value None folk8_obs = [obs for obs in sr.observations if obs.variable.key == "Folk8"][0] - self.assertEquals(folk8_obs.value, None) + self.assertEqual(folk8_obs.value, None) self.assertTrue(folk8_obs._is_public) # Public, decimal value folk26_obs = [obs for obs in sr.observations if obs.variable.key == "Folk26"][0] self.assertTrue(isinstance(folk26_obs.value, float)) - self.assertEquals(folk26_obs.value, 1798.57575757576) + self.assertEqual(folk26_obs.value, 1798.57575757576) self.assertTrue(folk26_obs._is_public) # Public, long value folk38_obs = [obs for obs in sr.observations if obs.variable.key == "Folk38"][0] - self.assertTrue(isinstance(folk38_obs.value, long)) - self.assertEquals(folk38_obs.value, 29500000) + self.assertTrue(isinstance(folk38_obs.value, int)) + self.assertEqual(folk38_obs.value, 29500000) self.assertTrue(folk38_obs._is_public) # Public, decimal value (percent) folk52_obs = [obs for obs in sr.observations if obs.variable.key == "Folk52"][0] 
self.assertTrue(isinstance(folk52_obs.value, float)) - self.assertEquals(folk52_obs.value, 0.438087421014918) + self.assertEqual(folk52_obs.value, 0.438087421014918) self.assertTrue(folk52_obs._is_public) # Public, decimal value folk54_obs = [obs for obs in sr.observations if obs.variable.key == "Folk54"][0] self.assertTrue(isinstance(folk54_obs.value, float)) - self.assertEquals(folk54_obs.value, 8.33583518419239) + self.assertEqual(folk54_obs.value, 8.33583518419239) self.assertTrue(folk54_obs._is_public) # Private, integer value - folk201_obs = [obs for obs in sr.observations if obs.variable.key == "Folk201"][0] + folk201_obs = [obs for obs in sr.observations if obs.variable.key == "Folk201"][ + 0 + ] self.assertTrue(isinstance(folk201_obs.value, int)) - self.assertEquals(folk201_obs.value, 13057) + self.assertEqual(folk201_obs.value, 13057) self.assertFalse(folk201_obs._is_public) # Check parsing of bool value when 1/1.0/True - sr2 = Survey.objects.filter(library__name=u"GISLAVEDS BIBLIOTEK")[0] + sr2 = Survey.objects.filter(library__name="GISLAVEDS BIBLIOTEK")[0] folk8_obs = [obs for obs in sr2.observations if obs.variable.key == "Folk8"][0] self.assertTrue(isinstance(folk8_obs.value, bool)) - self.assertEquals(folk8_obs.value, True) + self.assertEqual(folk8_obs.value, True) diff --git a/libstat/tests/management/test_import_variables.py b/libstat/tests/management/test_import_variables.py index c2f7fcf0..e2573f3a 100644 --- a/libstat/tests/management/test_import_variables.py +++ b/libstat/tests/management/test_import_variables.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from django.core.management import call_command from libstat.tests import MongoTestCase @@ -6,37 +5,40 @@ class ImportVariablesTest(MongoTestCase): - def test_import_variables_requires_file_option(self): args = [] opts = {"target_group": "folkbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) - self.assertEquals(len(Variable.objects.all()), 0) + self.assertEqual(len(Variable.objects.all()), 0) def test_import_variables_requires_target_group_option(self): args = [] opts = {"target_group": "folkbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) - self.assertEquals(len(Variable.objects.all()), 0) + self.assertEqual(len(Variable.objects.all()), 0) def test_should_import_public_lib_variables(self): args = [] opts = {"file": "data/variables/folk_termer.xlsx", "target_group": "folkbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 201) + self.assertEqual(len(Variable.objects.all()), 201) - folk1 = Variable.objects.filter(key="Folk1")[0] # Private (by category "Bakgrundsvariabel"), type "Text" + folk1 = Variable.objects.filter(key="Folk1")[ + 0 + ] # Private (by category "Bakgrundsvariabel"), type "Text" folk7 = Variable.objects.filter(key="Folk7")[0] # Private, type "Numerisk" folk8 = Variable.objects.filter(key="Folk8")[0] # Public, type "Boolesk" folk26 = Variable.objects.filter(key="Folk26")[0] # Public, type "Decimal två" folk38 = Variable.objects.filter(key="Folk38")[0] # Public, type "Long" folk52 = Variable.objects.filter(key="Folk52")[0] # Public, type "Procent" folk54 = Variable.objects.filter(key="Folk54")[0] # Public, type "Decimal ett" - folk201 = Variable.objects.filter(key="Folk201")[0] # Private, type "Integer", last row + folk201 = 
Variable.objects.filter(key="Folk201")[ + 0 + ] # Private, type "Integer", last row # Check visibility self.assertFalse(folk1.is_public) @@ -49,172 +51,202 @@ def test_should_import_public_lib_variables(self): self.assertFalse(folk201.is_public) # Check types - self.assertEquals(folk1.type, u"string") - self.assertEquals(folk7.type, u"string") - self.assertEquals(folk8.type, u"boolean") - self.assertEquals(folk26.type, u"decimal") - self.assertEquals(folk38.type, u"long") - self.assertEquals(folk52.type, u"percent") - self.assertEquals(folk54.type, u"decimal") - self.assertEquals(folk201.type, u"integer") + self.assertEqual(folk1.type, "string") + self.assertEqual(folk7.type, "string") + self.assertEqual(folk8.type, "boolean") + self.assertEqual(folk26.type, "decimal") + self.assertEqual(folk38.type, "long") + self.assertEqual(folk52.type, "percent") + self.assertEqual(folk54.type, "decimal") + self.assertEqual(folk201.type, "integer") def test_should_update_public_lib_variables(self): args = [] opts = {"file": "data/variables/folk_termer.xlsx", "target_group": "folkbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 201) + self.assertEqual(len(Variable.objects.all()), 201) # Check target_group before - self.assertEquals(Variable.objects.filter(key="Folk52")[0].target_groups, [u"folkbib"]) + self.assertEqual( + Variable.objects.filter(key="Folk52")[0].target_groups, ["folkbib"] + ) # Changing target group to avoid having to modify terms file args = [] opts = {"file": "data/variables/folk_termer.xlsx", "target_group": "skolbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that no new variables have been created - self.assertEquals(len(Variable.objects.all()), 201) + self.assertEqual(len(Variable.objects.all()), 201) # Check target_group after - self.assertEquals(Variable.objects.filter(key="Folk52")[0].target_groups, [u"skolbib"]) + self.assertEqual( + Variable.objects.filter(key="Folk52")[0].target_groups, ["skolbib"] + ) def test_should_import_research_lib_variables(self): args = [] opts = {"file": "data/variables/forsk_termer.xlsx", "target_group": "specbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 163) + self.assertEqual(len(Variable.objects.all()), 163) - forsk1 = Variable.objects.filter(key="Forsk1")[0] # Private (by category "Bakgrundsvariabler", type "Text" + forsk1 = Variable.objects.filter(key="Forsk1")[ + 0 + ] # Private (by category "Bakgrundsvariabler", type "Text" forsk2 = Variable.objects.filter(key="Forsk2")[0] # Private, type "Integer" forsk8 = Variable.objects.filter(key="Forsk8")[0] # Public, type "Decimal två" forsk19 = Variable.objects.filter(key="Forsk19")[0] # Public, type "Procent" forsk29 = Variable.objects.filter(key="Forsk29")[0] # Public, type "Long" - forsk154 = Variable.objects.filter(key="Forsk154")[0] # Public, type "Decimal ett" + forsk154 = Variable.objects.filter(key="Forsk154")[ + 0 + ] # Public, type "Decimal ett" # Check visibility - self.assertEquals(forsk1.is_public, False) - self.assertEquals(forsk2.is_public, False) - self.assertEquals(forsk8.is_public, True) - self.assertEquals(forsk19.is_public, True) - self.assertEquals(forsk29.is_public, True) - self.assertEquals(forsk154.is_public, 
True) + self.assertEqual(forsk1.is_public, False) + self.assertEqual(forsk2.is_public, False) + self.assertEqual(forsk8.is_public, True) + self.assertEqual(forsk19.is_public, True) + self.assertEqual(forsk29.is_public, True) + self.assertEqual(forsk154.is_public, True) # Check types - self.assertEquals(forsk1.type, u"string") - self.assertEquals(forsk2.type, u"integer") - self.assertEquals(forsk8.type, u"decimal") - self.assertEquals(forsk19.type, u"percent") - self.assertEquals(forsk29.type, u"long") - self.assertEquals(forsk154.type, u"decimal") + self.assertEqual(forsk1.type, "string") + self.assertEqual(forsk2.type, "integer") + self.assertEqual(forsk8.type, "decimal") + self.assertEqual(forsk19.type, "percent") + self.assertEqual(forsk29.type, "long") + self.assertEqual(forsk154.type, "decimal") def test_should_update_research_lib_variables(self): args = [] opts = {"file": "data/variables/forsk_termer.xlsx", "target_group": "specbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 163) + self.assertEqual(len(Variable.objects.all()), 163) # Check target_group before - self.assertEquals(Variable.objects.filter(key="Forsk111")[0].target_groups, ["specbib"]) + self.assertEqual( + Variable.objects.filter(key="Forsk111")[0].target_groups, ["specbib"] + ) # Changing target group to avoid having to modify terms file args = [] opts = {"file": "data/variables/forsk_termer.xlsx", "target_group": "sjukbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that no new variables have been created - self.assertEquals(len(Variable.objects.all()), 163) + self.assertEqual(len(Variable.objects.all()), 163) # Check target_group after - self.assertEquals(Variable.objects.filter(key="Forsk111")[0].target_groups, [u"sjukbib"]) + self.assertEqual( + Variable.objects.filter(key="Forsk111")[0].target_groups, ["sjukbib"] + ) def test_should_import_hospital_lib_variables(self): args = [] opts = {"file": "data/variables/sjukhus_termer.xlsx", "target_group": "sjukbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 151) - - sjukhus1 = Variable.objects.filter(key="Sjukhus1")[0] # Private (by category "Bakgrundsvariabler", type "Text" - sjukhus9 = Variable.objects.filter(key="Sjukhus9")[0] # Public, type "Decimal två" - sjukhus151 = Variable.objects.filter(key="Sjukhus151")[0] # Private, type "Integer" + self.assertEqual(len(Variable.objects.all()), 151) + + sjukhus1 = Variable.objects.filter(key="Sjukhus1")[ + 0 + ] # Private (by category "Bakgrundsvariabler", type "Text" + sjukhus9 = Variable.objects.filter(key="Sjukhus9")[ + 0 + ] # Public, type "Decimal två" + sjukhus151 = Variable.objects.filter(key="Sjukhus151")[ + 0 + ] # Private, type "Integer" # Check visibility - self.assertEquals(sjukhus1.is_public, False) - self.assertEquals(sjukhus9.is_public, True) - self.assertEquals(sjukhus151.is_public, False) + self.assertEqual(sjukhus1.is_public, False) + self.assertEqual(sjukhus9.is_public, True) + self.assertEqual(sjukhus151.is_public, False) # Check types - self.assertEquals(sjukhus1.type, u"string") - self.assertEquals(sjukhus9.type, u"decimal") - self.assertEquals(sjukhus151.type, u"integer") + self.assertEqual(sjukhus1.type, "string") + 
self.assertEqual(sjukhus9.type, "decimal") + self.assertEqual(sjukhus151.type, "integer") def test_should_update_hospital_lib_variables(self): args = [] opts = {"file": "data/variables/sjukhus_termer.xlsx", "target_group": "sjukbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 151) + self.assertEqual(len(Variable.objects.all()), 151) # Check target_group before - self.assertEquals(Variable.objects.filter(key="Sjukhus23")[0].target_groups, [u"sjukbib"]) + self.assertEqual( + Variable.objects.filter(key="Sjukhus23")[0].target_groups, ["sjukbib"] + ) # Changing target group to avoid having to modify terms file args = [] opts = {"file": "data/variables/sjukhus_termer.xlsx", "target_group": "sjukbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that no new variables have been created - self.assertEquals(len(Variable.objects.all()), 151) + self.assertEqual(len(Variable.objects.all()), 151) # Check target_group after - self.assertEquals(Variable.objects.filter(key="Sjukhus23")[0].target_groups, [u"sjukbib"]) + self.assertEqual( + Variable.objects.filter(key="Sjukhus23")[0].target_groups, ["sjukbib"] + ) def test_should_import_school_lib_variables(self): args = [] opts = {"file": "data/variables/skol_termer.xlsx", "target_group": "skolbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 139) - - skol6 = Variable.objects.filter(key="Skol6")[0] # Private (by category "Bakgrundsvariabel"), type "Text" - skol17 = Variable.objects.filter(key="Skol17")[0] # Private (by category "Bakgrundsvariabel"), type "Numerisk" + self.assertEqual(len(Variable.objects.all()), 139) + + skol6 = Variable.objects.filter(key="Skol6")[ + 0 + ] # Private (by category "Bakgrundsvariabel"), type "Text" + skol17 = Variable.objects.filter(key="Skol17")[ + 0 + ] # Private (by category "Bakgrundsvariabel"), type "Numerisk" skol41 = Variable.objects.filter(key="Skol41")[0] # Public, type "Decimal två" skol55 = Variable.objects.filter(key="Skol55")[0] # Private, type "Boolesk" skol108 = Variable.objects.filter(key="Skol108")[0] # Public, type "Integer" # Check visibility - self.assertEquals(skol6.is_public, False) - self.assertEquals(skol17.is_public, False) - self.assertEquals(skol41.is_public, True) - self.assertEquals(skol55.is_public, False) - self.assertEquals(skol108.is_public, True) + self.assertEqual(skol6.is_public, False) + self.assertEqual(skol17.is_public, False) + self.assertEqual(skol41.is_public, True) + self.assertEqual(skol55.is_public, False) + self.assertEqual(skol108.is_public, True) # Check types - self.assertEquals(skol6.type, u"string") - self.assertEquals(skol17.type, u"string") - self.assertEquals(skol41.type, u"decimal") - self.assertEquals(skol55.type, u"boolean") - self.assertEquals(skol108.type, u"integer") + self.assertEqual(skol6.type, "string") + self.assertEqual(skol17.type, "string") + self.assertEqual(skol41.type, "decimal") + self.assertEqual(skol55.type, "boolean") + self.assertEqual(skol108.type, "integer") def test_should_update_school_lib_variables(self): args = [] opts = {"file": "data/variables/skol_termer.xlsx", "target_group": "skolbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, 
**opts) # Check that all variables have been imported - self.assertEquals(len(Variable.objects.all()), 139) + self.assertEqual(len(Variable.objects.all()), 139) # Check target_group before - self.assertEquals(Variable.objects.filter(key="Skol5")[0].target_groups, ["skolbib"]) + self.assertEqual( + Variable.objects.filter(key="Skol5")[0].target_groups, ["skolbib"] + ) # Changing target group to avoid having to modify terms file args = [] opts = {"file": "data/variables/skol_termer.xlsx", "target_group": "specbib"} - call_command('import_variables', *args, **opts) + call_command("import_variables", *args, **opts) # Check that no new variables have been created - self.assertEquals(len(Variable.objects.all()), 139) + self.assertEqual(len(Variable.objects.all()), 139) # Check target_group after - self.assertEquals(Variable.objects.filter(key="Skol5")[0].target_groups, ["specbib"]) + self.assertEqual( + Variable.objects.filter(key="Skol5")[0].target_groups, ["specbib"] + ) diff --git a/libstat/tests/models/test_external_identifier.py b/libstat/tests/models/test_external_identifier.py index 8afff31d..a41d9c74 100644 --- a/libstat/tests/models/test_external_identifier.py +++ b/libstat/tests/models/test_external_identifier.py @@ -1,16 +1,18 @@ -# -*- coding: UTF-8 -*- - from libstat.models import Survey from libstat.tests import MongoTestCase -class TestExternalIdentifier(MongoTestCase): +class TestExternalIdentifier(MongoTestCase): def test_should_save_external_identifier_to_survey_library(self): - external_identifier = self._dummy_external_identifier(type="school_code", identifier="11111111") - library = self._dummy_library(sigel="testsigel", external_identifiers=[external_identifier]) + external_identifier = self._dummy_external_identifier( + type="school_code", identifier="11111111" + ) + library = self._dummy_library( + sigel="testsigel", external_identifiers=[external_identifier] + ) self._dummy_survey(library=library) survey = Survey.objects.filter(library__sigel="testsigel").first() - self.assertEquals(survey.library.external_identifiers[0].type, "school_code") - self.assertEquals(survey.library.external_identifiers[0].identifier, "11111111") \ No newline at end of file + self.assertEqual(survey.library.external_identifiers[0].type, "school_code") + self.assertEqual(survey.library.external_identifiers[0].identifier, "11111111") diff --git a/libstat/tests/models/test_open_data.py b/libstat/tests/models/test_open_data.py index 88f06aef..5bcd1a2e 100644 --- a/libstat/tests/models/test_open_data.py +++ b/libstat/tests/models/test_open_data.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from datetime import datetime from bibstat import settings from libstat.models import Variable @@ -7,34 +6,52 @@ class OpenDataTest(MongoTestCase): - def setUp(self): - v = Variable(key=u"folk5", description=u"Antal bemannade serviceställen, sammanräknat", type="integer", - is_public=True, target_groups=["folkbib"]) + v = Variable( + key="folk5", + description="Antal bemannade serviceställen, sammanräknat", + type="integer", + is_public=True, + target_groups=["folkbib"], + ) v.save() - publishing_date = datetime(2014, 06, 03, 15, 28, 31) - d1 = OpenData(library_name=u"KARLSTAD STADSBIBLIOTEK", sigel="323", sample_year=2013, - target_group="folkbib", variable=v, value=6, date_created=publishing_date, - date_modified=publishing_date) + publishing_date = datetime(2014, 6, 3, 15, 28, 31) + d1 = OpenData( + library_name="KARLSTAD STADSBIBLIOTEK", + sigel="323", + sample_year=2013, + target_group="folkbib", + variable=v, 
+ value=6, + date_created=publishing_date, + date_modified=publishing_date, + ) d1.save() - d2 = OpenData(library_name=u"NORRBOTTENS LÄNSBIBLIOTEK", sample_year=2013, target_group="folkbib", variable=v, - value=6, date_created=publishing_date, date_modified=publishing_date) + d2 = OpenData( + library_name="NORRBOTTENS LÄNSBIBLIOTEK", + sample_year=2013, + target_group="folkbib", + variable=v, + value=6, + date_created=publishing_date, + date_modified=publishing_date, + ) d2.save() def test_should_transform_object_with_sigel_to_dict(self): - object = OpenData.objects.get(library_name=u"KARLSTAD STADSBIBLIOTEK") + object = OpenData.objects.get(library_name="KARLSTAD STADSBIBLIOTEK") openDataAsDict = { - u"@id": str(object.id), - u"@type": u"Observation", - u"folk5": 6, - u"library": { - u"@id": u"{}/library/323".format(settings.BIBDB_BASE_URL), - u"name": u"KARLSTAD STADSBIBLIOTEK" + "@id": str(object.id), + "@type": "Observation", + "folk5": 6, + "library": { + "@id": "{}/library/323".format(settings.BIBDB_BASE_URL), + "name": "KARLSTAD STADSBIBLIOTEK", }, - u"sampleYear": 2013, - u"targetGroup": u"Folkbibliotek", + "sampleYear": 2013, + "targetGroup": "Folkbibliotek", # u"targetGroup": {u"@id": u"public"}, #TODO - u"published": "2014-06-03T15:28:31.000000Z", - u"modified": "2014-06-03T15:28:31.000000Z" + "published": "2014-06-03T15:28:31.000000Z", + "modified": "2014-06-03T15:28:31.000000Z", } - self.assertEquals(object.to_dict(), openDataAsDict) \ No newline at end of file + self.assertEqual(object.to_dict(), openDataAsDict) diff --git a/libstat/tests/models/test_survey.py b/libstat/tests/models/test_survey.py index 9d5d0f1e..f0457150 100644 --- a/libstat/tests/models/test_survey.py +++ b/libstat/tests/models/test_survey.py @@ -1,6 +1,4 @@ -# -*- coding: UTF-8 -*- from datetime import timedelta -from sets import Set from data.principals import PRINCIPALS from libstat.tests import MongoTestCase @@ -20,7 +18,7 @@ def test_can_not_update_status_to_invalid_value(self): def test_can_create_survey_with_valid_status(self): survey = self._dummy_survey(status="not_viewed") - self.assertEquals(survey.status, "not_viewed") + self.assertEqual(survey.status, "not_viewed") def test_can_not_create_survey_with_invalid_status(self): try: @@ -34,11 +32,13 @@ def test_can_update_status_to_valid_value(self): survey.status = "initiated" - self.assertEquals(survey.status, "initiated") + self.assertEqual(survey.status, "initiated") def test_should_export_public_non_null_observations_to_openData(self): - variable = self._dummy_variable(key=u"key1", is_public=True) - observation = self._dummy_observation(variable=variable, value="val1", _is_public=variable.is_public) + variable = self._dummy_variable(key="key1", is_public=True) + observation = self._dummy_observation( + variable=variable, value="val1", _is_public=variable.is_public + ) library = self._dummy_library(name="lib1_name", sigel="lib1_sigel") survey = self._dummy_survey(library=library, observations=[observation]) @@ -46,18 +46,22 @@ def test_should_export_public_non_null_observations_to_openData(self): survey.reload() open_data = OpenData.objects.all().get(0) - self.assertEquals(open_data.library_name, "lib1_name") - self.assertEquals(open_data.variable.key, "key1") - self.assertEquals(open_data.value, "val1") + self.assertEqual(open_data.library_name, "lib1_name") + self.assertEqual(open_data.variable.key, "key1") + self.assertEqual(open_data.value, "val1") self.assertTrue(open_data.date_modified) self.assertTrue(open_data.date_created) - 
self.assertEquals(open_data.date_created, open_data.date_modified) - self.assertEquals(open_data.date_created, survey.published_at) + self.assertEqual(open_data.date_created, open_data.date_modified) + self.assertEqual(open_data.date_created, survey.published_at) def test_should_overwrite_value_and_date_modified_for_existing_openData(self): - variable = self._dummy_variable(key=u"key1", is_public=True) - observation = self._dummy_observation(variable=variable, value="old_value", _is_public=variable.is_public) - library = self._dummy_library(name="lib1_name", sigel="lib1_sigel", library_type="folkbib") + variable = self._dummy_variable(key="key1", is_public=True) + observation = self._dummy_observation( + variable=variable, value="old_value", _is_public=variable.is_public + ) + library = self._dummy_library( + name="lib1_name", sigel="lib1_sigel", library_type="folkbib" + ) survey = self._dummy_survey(library=library, observations=[observation]) survey.publish() @@ -70,60 +74,74 @@ def test_should_overwrite_value_and_date_modified_for_existing_openData(self): survey.publish() data = OpenData.objects.all() - self.assertEquals(len(data), 1) + self.assertEqual(len(data), 1) open_data = data.get(0) - self.assertEquals(open_data.library_name, "lib1_name") - self.assertEquals(open_data.target_group, "folkbib") - self.assertEquals(open_data.value, "new_value") + self.assertEqual(open_data.library_name, "lib1_name") + self.assertEqual(open_data.target_group, "folkbib") + self.assertEqual(open_data.value, "new_value") self.assertTrue(open_data.date_modified) self.assertTrue(open_data.date_created) - self.assertNotEquals(open_data.date_created, open_data.date_modified) + self.assertNotEqual(open_data.date_created, open_data.date_modified) def test_should_get_observation_by_variable_key(self): - observation1 = self._dummy_observation(variable=self._dummy_variable(key="key1")) - observation2 = self._dummy_observation(variable=self._dummy_variable(key="key2")) - observation3 = self._dummy_observation(variable=self._dummy_variable(key="key3")) - survey = self._dummy_survey(observations=[ - observation1, - observation2, - observation3 - ]) - self.assertEquals(survey.get_observation("key2"), observation2) + observation1 = self._dummy_observation( + variable=self._dummy_variable(key="key1") + ) + observation2 = self._dummy_observation( + variable=self._dummy_variable(key="key2") + ) + observation3 = self._dummy_observation( + variable=self._dummy_variable(key="key3") + ) + survey = self._dummy_survey( + observations=[observation1, observation2, observation3] + ) + self.assertEqual(survey.get_observation("key2"), observation2) def test_returns_none_if_variable_does_not_exist(self): survey = self._dummy_survey() self.assertEqual(survey.get_observation(key="does_not_exist"), None) - def test_should_get_observation_for_replaced_variable_if_wanted(self): variable1 = self._dummy_variable(key="key1") variable2 = self._dummy_variable(key="key2", replaces=[variable1]) variable3 = self._dummy_variable(key="key3", replaces=[variable2]) - survey = self._dummy_survey(observations=[ - self._dummy_observation(variable=variable1, value="some_value") - ]) + survey = self._dummy_survey( + observations=[ + self._dummy_observation(variable=variable1, value="some_value") + ] + ) - self.assertEqual(survey.get_observation("key3", backtrack_replaced_variables=True).value, "some_value") + self.assertEqual( + survey.get_observation("key3", backtrack_replaced_variables=True).value, + "some_value", + ) def 
test_should_not_get_observation_for_replaced_variable_if_not_wanted(self): variable1 = self._dummy_variable(key="key1") variable2 = self._dummy_variable(key="key2", replaces=[variable1]) variable3 = self._dummy_variable(key="key3", replaces=[variable2]) - survey = self._dummy_survey(observations=[ - self._dummy_observation(variable=variable1, value="some_value") - ]) + survey = self._dummy_survey( + observations=[ + self._dummy_observation(variable=variable1, value="some_value") + ] + ) self.assertEqual(survey.get_observation("key3"), None) - def test_should_not_get_observation_for_replaced_variable_if_replaced_by_multiple_variables(self): + def test_should_not_get_observation_for_replaced_variable_if_replaced_by_multiple_variables( + self, + ): variable1 = self._dummy_variable(key="key1") variable2 = self._dummy_variable(key="key2") variable3 = self._dummy_variable(key="key3", replaces=[variable1, variable2]) - survey = self._dummy_survey(observations=[ - self._dummy_observation(variable=variable1, value="some_value") - ]) + survey = self._dummy_survey( + observations=[ + self._dummy_observation(variable=variable1, value="some_value") + ] + ) self.assertEqual(survey.get_observation("key3"), None) @@ -131,15 +149,22 @@ def test_should_get_most_recent_observation_for_replaced_variable(self): variable1 = self._dummy_variable(key="key1") variable2 = self._dummy_variable(key="key2", replaces=[variable1]) variable3 = self._dummy_variable(key="key3", replaces=[variable2]) - survey = self._dummy_survey(observations=[ - self._dummy_observation(variable=variable1, value="some_value1"), - self._dummy_observation(variable=variable2, value="some_value2") - ]) - - self.assertEqual(survey.get_observation("key3", backtrack_replaced_variables=True).value, "some_value2") + survey = self._dummy_survey( + observations=[ + self._dummy_observation(variable=variable1, value="some_value1"), + self._dummy_observation(variable=variable2, value="some_value2"), + ] + ) + + self.assertEqual( + survey.get_observation("key3", backtrack_replaced_variables=True).value, + "some_value2", + ) def test_should_store_version_when_updating_existing_object(self): - library = self._dummy_library(name="lib1_old_name", city="lib1_old_city", sigel="lib1_sigel") + library = self._dummy_library( + name="lib1_old_name", city="lib1_old_city", sigel="lib1_sigel" + ) survey = self._dummy_survey(status="initiated", library=library) survey.library.name = "lib1_new_name" @@ -147,53 +172,53 @@ def test_should_store_version_when_updating_existing_object(self): survey.status = "controlled" survey = survey.save() - self.assertEquals(survey.library.name, "lib1_new_name") - self.assertEquals(survey.library.city, "lib1_new_city") - self.assertEquals(survey.status, "controlled") + self.assertEqual(survey.library.name, "lib1_new_name") + self.assertEqual(survey.library.city, "lib1_new_city") + self.assertEqual(survey.status, "controlled") versions = SurveyVersion.objects.filter(survey_response_id=survey.id) - self.assertEquals(len(versions), 1) - self.assertEquals(versions[0].survey_response_id, survey.id) - self.assertEquals(versions[0].library.name, "lib1_old_name") - self.assertEquals(versions[0].library.city, "lib1_old_city") - self.assertEquals(versions[0].status, "initiated") + self.assertEqual(len(versions), 1) + self.assertEqual(versions[0].survey_response_id, survey.id) + self.assertEqual(versions[0].library.name, "lib1_old_name") + self.assertEqual(versions[0].library.city, "lib1_old_city") + self.assertEqual(versions[0].status, 
"initiated") def test_should_store_one_version_for_each_change(self): survey = self._dummy_survey() - self.assertEquals(len(SurveyVersion.objects.all()), 0) + self.assertEqual(len(SurveyVersion.objects.all()), 0) survey.library.name = "new_name" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 1) + self.assertEqual(len(SurveyVersion.objects.all()), 1) survey.library.name = "newer_name" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 2) + self.assertEqual(len(SurveyVersion.objects.all()), 2) def test_should_only_store_5_latest_versions(self): survey = self._dummy_survey(library=self._dummy_library(name="name0")) - self.assertEquals(len(SurveyVersion.objects.all()), 0) + self.assertEqual(len(SurveyVersion.objects.all()), 0) survey.library.name = "name1" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 1) + self.assertEqual(len(SurveyVersion.objects.all()), 1) survey.library.name = "name2" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 2) + self.assertEqual(len(SurveyVersion.objects.all()), 2) survey.library.name = "name3" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 3) + self.assertEqual(len(SurveyVersion.objects.all()), 3) survey.library.name = "name4" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 4) + self.assertEqual(len(SurveyVersion.objects.all()), 4) survey.library.name = "name5" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 5) + self.assertEqual(len(SurveyVersion.objects.all()), 5) survey.library.name = "name6" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 5) + self.assertEqual(len(SurveyVersion.objects.all()), 5) survey.library.name = "name7" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 5) + self.assertEqual(len(SurveyVersion.objects.all()), 5) survey_versions = SurveyVersion.objects.all().order_by("-library.name") self.assertEqual(survey_versions[0].library.name, "name6") @@ -202,24 +227,25 @@ def test_should_only_store_5_latest_versions(self): self.assertEqual(survey_versions[3].library.name, "name3") self.assertEqual(survey_versions[4].library.name, "name2") - def test_should_store_version_when_updating_observations_for_existing_objects(self): - survey = self._dummy_survey(observations=[ - self._dummy_observation(variable=self._dummy_variable(key="key1")) - ]) - self.assertEquals(len(SurveyVersion.objects.all()), 0) + survey = self._dummy_survey( + observations=[ + self._dummy_observation(variable=self._dummy_variable(key="key1")) + ] + ) + self.assertEqual(len(SurveyVersion.objects.all()), 0) survey.get_observation("key1").value = "new_value" survey.save() - self.assertEquals(len(SurveyVersion.objects.all()), 1) + self.assertEqual(len(SurveyVersion.objects.all()), 1) def test_should_not_store_version_when_creating_object(self): library = self._dummy_library() survey = self._dummy_survey(library=library) versions = SurveyVersion.objects.filter(survey_response_id=survey.id) - self.assertEquals(len(versions), 0) + self.assertEqual(len(versions), 0) def test_should_set_modified_date_when_updating_existing_object(self): survey = self._dummy_survey() @@ -233,16 +259,20 @@ def test_should_not_set_modified_date_when_updating_notes_in_existing_object(sel survey.notes = "new_notes" survey.save().reload() - self.assertEquals(survey.date_modified, survey.date_created) + self.assertEqual(survey.date_modified, survey.date_created) def 
test_should_not_store_version_when_updating_notes_in_existing_object(self): survey = self._dummy_survey() - self.assertEquals(len(SurveyVersion.objects.filter(survey_response_id=survey.id)), 0) + self.assertEqual( + len(SurveyVersion.objects.filter(survey_response_id=survey.id)), 0 + ) survey.notes = "new_notes" survey.save() - self.assertEquals(len(SurveyVersion.objects.filter(survey_response_id=survey.id)), 0) + self.assertEqual( + len(SurveyVersion.objects.filter(survey_response_id=survey.id)), 0 + ) def test_should_flag_as_not_published_when_updating_existing_object(self): survey = self._dummy_survey() @@ -251,7 +281,9 @@ def test_should_flag_as_not_published_when_updating_existing_object(self): self.assertFalse(survey.is_published) - def test_should_not_flag_as_not_published_when_updating_notes_in_existing_object(self): + def test_should_not_flag_as_not_published_when_updating_notes_in_existing_object( + self, + ): survey = self._dummy_survey() survey.publish() self.assertTrue(survey.is_published) @@ -264,7 +296,7 @@ def test_should_not_flag_as_not_published_when_updating_notes_in_existing_object def test_should_set_modified_date_when_creating_object(self): survey = self._dummy_survey() - self.assertEquals(survey.date_modified, survey.date_created) + self.assertEqual(survey.date_modified, survey.date_created) class TestSurveyPublish(MongoTestCase): @@ -293,8 +325,8 @@ def test_should_set_published_date_but_not_modified_date_when_publishing(self): survey.publish() - self.assertNotEquals(survey.published_at, None) - self.assertEquals(survey.date_modified, date_modified) + self.assertNotEqual(survey.published_at, None) + self.assertEqual(survey.date_modified, date_modified) def test_should_flag_as_published_when_publishing(self): survey = self._dummy_survey() @@ -331,95 +363,127 @@ def test_is_published(self): self.assertTrue(survey.is_published) def test_creates_open_data_when_publishing(self): - survey = self._dummy_survey(observations=[ - self._dummy_observation(), - self._dummy_observation()]) - self.assertEquals(len(OpenData.objects.all()), 0) + survey = self._dummy_survey( + observations=[self._dummy_observation(), self._dummy_observation()] + ) + self.assertEqual(len(OpenData.objects.all()), 0) survey.publish() - self.assertEquals(len(OpenData.objects.all()), 2) - - def test_does_not_create_new_open_data_for_existing_open_data_when_republishing(self): - survey = self._dummy_survey(observations=[ - self._dummy_observation(value="old_value")]) + self.assertEqual(len(OpenData.objects.all()), 2) + + def test_does_not_create_new_open_data_for_existing_open_data_when_republishing( + self, + ): + survey = self._dummy_survey( + observations=[self._dummy_observation(value="old_value")] + ) survey.publish() - self.assertEquals(len(OpenData.objects.all()), 1) + self.assertEqual(len(OpenData.objects.all()), 1) survey.observations[0].value = "new_value" survey.publish() - self.assertEquals(len(OpenData.objects.all()), 1) + self.assertEqual(len(OpenData.objects.all()), 1) def test_modifies_existing_open_data_that_has_changed_when_republishing(self): - survey = self._dummy_survey(observations=[ - self._dummy_observation(value="old_value")]) + survey = self._dummy_survey( + observations=[self._dummy_observation(value="old_value")] + ) survey.publish() - self.assertEquals(OpenData.objects.all()[0].value, "old_value") + self.assertEqual(OpenData.objects.all()[0].value, "old_value") survey.observations[0].value = "new_value" survey.publish() - self.assertEquals(OpenData.objects.all()[0].value, 
"new_value") + self.assertEqual(OpenData.objects.all()[0].value, "new_value") def test_deletes_existing_open_data_if_observation_value_has_been_emptied(self): - survey = self._dummy_survey(observations=[ - self._dummy_observation(value="some_value")]) + survey = self._dummy_survey( + observations=[self._dummy_observation(value="some_value")] + ) survey.publish() - self.assertEquals(OpenData.objects.all()[0].value, "some_value") + self.assertEqual(OpenData.objects.all()[0].value, "some_value") open_data = OpenData.objects.all()[0] survey.observations[0].value = "" survey.publish() - self.assertEquals(OpenData.objects.filter(pk=open_data.pk).count(), 0) + self.assertEqual(OpenData.objects.filter(pk=open_data.pk).count(), 0) - def test_updates_date_modified_for_open_data_that_has_changed_when_republishing(self): - survey = self._dummy_survey(observations=[ - self._dummy_observation(value="old_value")]) + def test_updates_date_modified_for_open_data_that_has_changed_when_republishing( + self, + ): + survey = self._dummy_survey( + observations=[self._dummy_observation(value="old_value")] + ) survey.publish() - self.assertEquals(OpenData.objects.all()[0].date_modified, OpenData.objects.all()[0].date_created) + self.assertEqual( + OpenData.objects.all()[0].date_modified, + OpenData.objects.all()[0].date_created, + ) survey.observations[0].value = "new_value" survey.publish() - self.assertTrue(OpenData.objects.all()[0].date_modified > OpenData.objects.all()[0].date_created) + self.assertTrue( + OpenData.objects.all()[0].date_modified + > OpenData.objects.all()[0].date_created + ) - def test_does_not_update_date_modified_for_open_data_that_has_not_changed_when_republishing(self): + def test_does_not_update_date_modified_for_open_data_that_has_not_changed_when_republishing( + self, + ): variable1 = self._dummy_variable(key="key1") variable2 = self._dummy_variable(key="key2") - survey = self._dummy_survey(observations=[ - self._dummy_observation(variable1, value="old_value1"), - self._dummy_observation(variable2, value="old_value2")]) + survey = self._dummy_survey( + observations=[ + self._dummy_observation(variable1, value="old_value1"), + self._dummy_observation(variable2, value="old_value2"), + ] + ) survey.publish() - self.assertEquals(OpenData.objects.filter(variable=variable2)[0].date_modified, - OpenData.objects.filter(variable=variable2)[0].date_created) + self.assertEqual( + OpenData.objects.filter(variable=variable2)[0].date_modified, + OpenData.objects.filter(variable=variable2)[0].date_created, + ) survey.get_observation("key1").value = "new_value1" survey.publish() - self.assertEquals(OpenData.objects.filter(variable=variable2)[0].date_modified, - OpenData.objects.filter(variable=variable2)[0].date_created) + self.assertEqual( + OpenData.objects.filter(variable=variable2)[0].date_modified, + OpenData.objects.filter(variable=variable2)[0].date_created, + ) - def test_does_not_modify_existing_open_data_that_has_not_changed_when_republishing(self): + def test_does_not_modify_existing_open_data_that_has_not_changed_when_republishing( + self, + ): variable1 = self._dummy_variable(key="key1") variable2 = self._dummy_variable(key="key2") - survey = self._dummy_survey(observations=[ - self._dummy_observation(variable1, value="old_value1"), - self._dummy_observation(variable2, value="old_value2")]) + survey = self._dummy_survey( + observations=[ + self._dummy_observation(variable1, value="old_value1"), + self._dummy_observation(variable2, value="old_value2"), + ] + ) survey.publish() - 
self.assertEquals(OpenData.objects.filter(variable=variable2)[0].value, "old_value2") + self.assertEqual( + OpenData.objects.filter(variable=variable2)[0].value, "old_value2" + ) survey.get_observation("key1").value = "new_value1" survey.publish() - self.assertEquals(OpenData.objects.filter(variable=variable2)[0].value, "old_value2") + self.assertEqual( + OpenData.objects.filter(variable=variable2)[0].value, "old_value2" + ) def test_sets_existing_open_data_as_inactive_when_revoking_publication(self): survey = self._dummy_survey(observations=[self._dummy_observation()]) @@ -431,21 +495,23 @@ def test_sets_existing_open_data_as_inactive_when_revoking_publication(self): self.assertFalse(OpenData.objects.all()[0].is_active) - def test_sets_existing_open_data_as_active_when_publishing_after_revoking_publication(self): + def test_sets_existing_open_data_as_active_when_publishing_after_revoking_publication( + self, + ): survey = self._dummy_survey(observations=[self._dummy_observation()]) survey.publish() - self.assertEquals(len(OpenData.objects.all()), 1) + self.assertEqual(len(OpenData.objects.all()), 1) self.assertTrue(OpenData.objects.all()[0].is_active) survey.unpublish() - self.assertEquals(len(OpenData.objects.all()), 1) + self.assertEqual(len(OpenData.objects.all()), 1) self.assertFalse(OpenData.objects.all()[0].is_active) survey.publish() - self.assertEquals(len(OpenData.objects.all()), 1) + self.assertEqual(len(OpenData.objects.all()), 1) self.assertTrue(OpenData.objects.all()[0].is_active) def test_revokes_publication_when_changing_status_from_published(self): @@ -469,18 +535,24 @@ def test_can_not_publish_survey_if_it_has_no_selected_libraries(self): self.assertFalse(survey.is_published) - def test_does_not_create_open_data_when_publishing_survey_if_it_has_no_selected_libraries(self): - survey = self._dummy_survey(selected_libraries=[], - observations=[ - self._dummy_observation(), - self._dummy_observation(), - ]) + def test_does_not_create_open_data_when_publishing_survey_if_it_has_no_selected_libraries( + self, + ): + survey = self._dummy_survey( + selected_libraries=[], + observations=[ + self._dummy_observation(), + self._dummy_observation(), + ], + ) survey.publish() - self.assertEquals(OpenData.objects.count(), 0) + self.assertEqual(OpenData.objects.count(), 0) - def test_can_not_publish_survey_if_another_survey_reports_for_the_same_library(self): + def test_can_not_publish_survey_if_another_survey_reports_for_the_same_library( + self, + ): self._dummy_library(sigel="lib1") self._dummy_library(sigel="lib2") self._dummy_library(sigel="lib3") @@ -499,13 +571,13 @@ class TestSelectableLibraries(MongoTestCase): def test_should_return_an_empty_list_for_no_municipality_code(self): survey = self._dummy_survey(library=self._dummy_library(municipality_code=None)) - self.assertItemsEqual(survey.selectable_libraries(), []) + self.assertCountEqual(survey.selectable_libraries(), []) def test_should_exclude_second_library_with_different_municipality_code(self): survey = self._dummy_survey(library=self._dummy_library(municipality_code="1")) self._dummy_survey(library=self._dummy_library(municipality_code="2")) - self.assertItemsEqual(survey.selectable_libraries(), []) + self.assertCountEqual(survey.selectable_libraries(), []) def test_should_include_second_library_with_same_municipality_code(self): library = self._dummy_library(municipality_code="1") @@ -521,11 +593,13 @@ def test_should_exclude_second_library_with_same_sigel(self): survey = 
self._dummy_survey(library=self._dummy_library(sigel="1")) self._dummy_survey(library=self._dummy_library(sigel="1")) - self.assertItemsEqual(survey.selectable_libraries(), []) + self.assertCountEqual(survey.selectable_libraries(), []) - def test_should_include_second_library_with_same_municipality_code_and_same_principal_library_type(self): - library = self._dummy_library(municipality_code="1", library_type=u"folkbib") - second = self._dummy_library(municipality_code="1", library_type=u"muskom") + def test_should_include_second_library_with_same_municipality_code_and_same_principal_library_type( + self, + ): + library = self._dummy_library(municipality_code="1", library_type="folkbib") + second = self._dummy_library(municipality_code="1", library_type="muskom") survey = self._dummy_survey(library=library) self._dummy_survey(library=second) selectables = survey.selectable_libraries() @@ -533,18 +607,22 @@ def test_should_include_second_library_with_same_municipality_code_and_same_prin self.assertEqual(len(selectables), 1) self.assertEqual(selectables[0], second) - def test_should_exclude_second_library_with_same_municipality_code_and_different_principal_library_type(self): - library = self._dummy_library(municipality_code="1", library_type=u"folkbib") - second = self._dummy_library(municipality_code="1", library_type=u"sjukbib") + def test_should_exclude_second_library_with_same_municipality_code_and_different_principal_library_type( + self, + ): + library = self._dummy_library(municipality_code="1", library_type="folkbib") + second = self._dummy_library(municipality_code="1", library_type="sjukbib") survey = self._dummy_survey(library=library) self._dummy_survey(library=second) selectables = survey.selectable_libraries() self.assertEqual(len(selectables), 0) - def test_should_include_second_library_with_same_municipality_code_when_library_type_is_unknown(self): + def test_should_include_second_library_with_same_municipality_code_when_library_type_is_unknown( + self, + ): library = self._dummy_library(municipality_code="1", library_type=None) - second = self._dummy_library(municipality_code="1", library_type=u"muskom") + second = self._dummy_library(municipality_code="1", library_type="muskom") survey = self._dummy_survey(library=library) self._dummy_survey(library=second) selectables = survey.selectable_libraries() @@ -552,11 +630,13 @@ def test_should_include_second_library_with_same_municipality_code_when_library_ self.assertEqual(len(selectables), 1) self.assertEqual(selectables[0], second) - def test_should_include_second_library_with_same_municipality_code_when_principal_is_unknown_for_library_type(self): - library = self._dummy_library(municipality_code="1", library_type=u"musbib") + def test_should_include_second_library_with_same_municipality_code_when_principal_is_unknown_for_library_type( + self, + ): + library = self._dummy_library(municipality_code="1", library_type="musbib") self.assertFalse(library.library_type in PRINCIPALS) - second = self._dummy_library(municipality_code="1", library_type=u"muskom") + second = self._dummy_library(municipality_code="1", library_type="muskom") survey = self._dummy_survey(library=library) self._dummy_survey(library=second) selectables = survey.selectable_libraries() @@ -570,14 +650,16 @@ def test_should_return_an_empty_set_for_no_municipality_code(self): library = self._dummy_library(municipality_code=None) survey = self._dummy_survey(sample_year=2014) - self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), Set()) + 
self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), set()) def test_should_include_second_surveys_selected_sigel(self): library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="2") survey = self._dummy_survey(library=library, sample_year=2014) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), {"2"}) @@ -586,7 +668,9 @@ def test_should_include_librarys_own_sigel_when_selected_in_second_survey(self): second_library = self._dummy_library(sigel="2") survey = self._dummy_survey(library=library, sample_year=2014) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), {"1", "2"}) @@ -595,62 +679,86 @@ def test_should_exclude_selected_sigel_for_another_sample_year(self): second_library = self._dummy_library(sigel="2") survey = self._dummy_survey(library=library, sample_year=2014) - self._dummy_survey(library=second_library, sample_year=2015, selected_libraries=["1", "2"]) + self._dummy_survey( + library=second_library, sample_year=2015, selected_libraries=["1", "2"] + ) - self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), Set()) + self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), set()) def test_should_exclude_selected_sigel_for_another_municipality_code(self): library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="2", municipality_code="m") survey = self._dummy_survey(library=library, sample_year=2014) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) - self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), Set()) + self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), set()) def test_should_exclude_selected_sigel_in_librarys_own_survey(self): library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="2") - survey = self._dummy_survey(library=library, sample_year=2014, selected_libraries=["3"]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + survey = self._dummy_survey( + library=library, sample_year=2014, selected_libraries=["3"] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), {"2"}) - def test_should_include_second_surveys_selected_sigel_with_same_principal_library_type(self): - library = self._dummy_library(sigel="1", library_type=u"folkbib") - second_library = self._dummy_library(sigel="2", library_type=u"muskom") + def test_should_include_second_surveys_selected_sigel_with_same_principal_library_type( + self, + ): + library = self._dummy_library(sigel="1", library_type="folkbib") + second_library = self._dummy_library(sigel="2", library_type="muskom") survey = self._dummy_survey(library=library, sample_year=2014) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) 
self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), {"2"}) - def test_should_exclude_second_surveys_selected_sigel_with_different_principal_library_type(self): - library = self._dummy_library(sigel="1", library_type=u"folkbib") - second_library = self._dummy_library(sigel="2", library_type=u"sjukbib") + def test_should_exclude_second_surveys_selected_sigel_with_different_principal_library_type( + self, + ): + library = self._dummy_library(sigel="1", library_type="folkbib") + second_library = self._dummy_library(sigel="2", library_type="sjukbib") survey = self._dummy_survey(library=library, sample_year=2014) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) - self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), Set()) + self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), set()) - def test_should_include_second_surveys_selected_sigel_when_library_type_is_unknown(self): + def test_should_include_second_surveys_selected_sigel_when_library_type_is_unknown( + self, + ): library = self._dummy_library(sigel="1", library_type=None) - second_library = self._dummy_library(sigel="2", library_type=u"muskom") + second_library = self._dummy_library(sigel="2", library_type="muskom") survey = self._dummy_survey(library=library, sample_year=2014) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), {"2"}) - def test_should_include_second_surveys_selected_sigel_when_principal_is_unknown_for_library_type(self): - library = self._dummy_library(sigel="1", library_type=u"musbib") + def test_should_include_second_surveys_selected_sigel_when_principal_is_unknown_for_library_type( + self, + ): + library = self._dummy_library(sigel="1", library_type="musbib") self.assertFalse(library.library_type in PRINCIPALS) - second_library = self._dummy_library(sigel="2", library_type=u"muskom") + second_library = self._dummy_library(sigel="2", library_type="muskom") survey = self._dummy_survey(library=library, sample_year=2014) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) self.assertSetEqual(survey.selected_sigels_in_other_surveys(2014), {"2"}) @@ -659,22 +767,25 @@ def test_reports_for_same_libraries_when_same_selected_libraries(self): library2 = self._dummy_library(sigel="lib2") library3 = self._dummy_library(sigel="lib3") - survey1 = self._dummy_survey(selected_libraries=[library1.sigel, - library2.sigel, - library3.sigel]) - survey2 = self._dummy_survey(selected_libraries=[library1.sigel, - library2.sigel, - library3.sigel]) + survey1 = self._dummy_survey( + selected_libraries=[library1.sigel, library2.sigel, library3.sigel] + ) + survey2 = self._dummy_survey( + selected_libraries=[library1.sigel, library2.sigel, library3.sigel] + ) self.assertTrue(survey1.reports_for_same_libraries(survey2)) - def test_does_not_report_for_same_libraries_when_different_amount_of_selected_libraries(self): + def test_does_not_report_for_same_libraries_when_different_amount_of_selected_libraries( + self, + ): library1 = self._dummy_library(sigel="lib1") library2 = self._dummy_library(sigel="lib2") survey1 = 
self._dummy_survey(selected_libraries=[library1.sigel]) - survey2 = self._dummy_survey(selected_libraries=[library1.sigel, - library2.sigel]) + survey2 = self._dummy_survey( + selected_libraries=[library1.sigel, library2.sigel] + ) self.assertFalse(survey1.reports_for_same_libraries(survey2)) @@ -692,8 +803,12 @@ def test_should_return_true_for_conflict_in_same_sample_year(self): first_library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="2") - survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=["1", "2"]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=["1", "2"] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) self.assertTrue(survey.has_conflicts()) @@ -701,8 +816,12 @@ def test_should_return_false_for_non_conflict_in_different_sample_years(self): first_library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="2") - survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=["1", "2"]) - self._dummy_survey(library=second_library, sample_year=2015, selected_libraries=["2"]) + survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=["1", "2"] + ) + self._dummy_survey( + library=second_library, sample_year=2015, selected_libraries=["2"] + ) self.assertFalse(survey.has_conflicts()) @@ -710,58 +829,88 @@ def test_should_return_false_for_non_conflict_in_same_sample_year(self): first_library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="2") - survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=["1"]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=["1"] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) self.assertFalse(survey.has_conflicts()) - def test_should_return_true_for_conflict_when_second_survey_reports_for_first_survey(self): + def test_should_return_true_for_conflict_when_second_survey_reports_for_first_survey( + self, + ): first_library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="2") - survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertTrue(survey.has_conflicts()) def test_should_return_true_for_conflict_when_second_survey_reports_for_first_survey_with_same_principal_library_type( - self): - first_library = self._dummy_library(sigel="1", library_type=u"folkbib") - second_library = self._dummy_library(sigel="2", library_type=u"muskom") - - survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + self, + ): + first_library = self._dummy_library(sigel="1", library_type="folkbib") + second_library = self._dummy_library(sigel="2", library_type="muskom") + + survey = self._dummy_survey( + 
library=first_library, sample_year=2014, selected_libraries=[] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertTrue(survey.has_conflicts()) def test_should_return_false_for_non_conflict_when_second_survey_reports_for_first_survey_with_different_principal_library_type( - self): - first_library = self._dummy_library(sigel="1", library_type=u"folkbib") - second_library = self._dummy_library(sigel="2", library_type=u"sjukbib") - - survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + self, + ): + first_library = self._dummy_library(sigel="1", library_type="folkbib") + second_library = self._dummy_library(sigel="2", library_type="sjukbib") + + survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertFalse(survey.has_conflicts()) def test_should_return_true_for_conflict_when_second_survey_reports_for_first_survey_when_library_type_is_unknown( - self): + self, + ): first_library = self._dummy_library(sigel="1", library_type=None) - second_library = self._dummy_library(sigel="2", library_type=u"muskom") + second_library = self._dummy_library(sigel="2", library_type="muskom") - survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertTrue(survey.has_conflicts()) def test_should_return_true_for_conflict_when_second_survey_reports_for_first_survey_when_principal_for_library_type_is_unknown( - self): - first_library = self._dummy_library(sigel="1", library_type=u"musbib") + self, + ): + first_library = self._dummy_library(sigel="1", library_type="musbib") self.assertFalse(first_library.library_type in PRINCIPALS) - second_library = self._dummy_library(sigel="2", library_type=u"muskom") + second_library = self._dummy_library(sigel="2", library_type="muskom") - survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertTrue(survey.has_conflicts()) @@ -771,8 +920,12 @@ def test_should_return_survey_for_conflict_in_same_sample_year(self): first_library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="3") - first_survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=["1", "2"]) - second_survey = self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["2"]) + first_survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=["1", "2"] + ) + second_survey = self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["2"] + ) self.assertListEqual(first_survey.get_conflicting_surveys(), [second_survey]) @@ -781,18 +934,30 @@ def 
test_should_return_two_surveys_for_conflicts_in_same_sample_year(self): second_library = self._dummy_library(sigel="2") third_library = self._dummy_library(sigel="3") - first_survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=["1", "2"]) - second_survey = self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1"]) - third_survey = self._dummy_survey(library=third_library, sample_year=2014, selected_libraries=["2"]) + first_survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=["1", "2"] + ) + second_survey = self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1"] + ) + third_survey = self._dummy_survey( + library=third_library, sample_year=2014, selected_libraries=["2"] + ) - self.assertListEqual(first_survey.get_conflicting_surveys(), [second_survey, third_survey]) + self.assertListEqual( + first_survey.get_conflicting_surveys(), [second_survey, third_survey] + ) def test_should_return_empty_list_for_non_conflict_in_different_sample_years(self): first_library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="3") - first_survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=["1", "2"]) - self._dummy_survey(library=second_library, sample_year=2015, selected_libraries=["2"]) + first_survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=["1", "2"] + ) + self._dummy_survey( + library=second_library, sample_year=2015, selected_libraries=["2"] + ) self.assertListEqual(first_survey.get_conflicting_surveys(), []) @@ -800,106 +965,151 @@ def test_should_return_second_survey_when_reporting_for_first_survey(self): first_library = self._dummy_library(sigel="1") second_library = self._dummy_library(sigel="2") - first_survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - second_survey = self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + first_survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + second_survey = self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertListEqual(first_survey.get_conflicting_surveys(), [second_survey]) - def test_should_return_second_survey_when_reporting_for_first_survey_with_same_principal_library_type(self): - first_library = self._dummy_library(sigel="1", library_type=u"folkbib") - second_library = self._dummy_library(sigel="2", library_type=u"muskom") + def test_should_return_second_survey_when_reporting_for_first_survey_with_same_principal_library_type( + self, + ): + first_library = self._dummy_library(sigel="1", library_type="folkbib") + second_library = self._dummy_library(sigel="2", library_type="muskom") - first_survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - second_survey = self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + first_survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + second_survey = self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertListEqual(first_survey.get_conflicting_surveys(), [second_survey]) def test_should_not_return_second_survey_when_reporting_for_first_survey_with_different_principal_library_type( - self): - first_library = 
self._dummy_library(sigel="1", library_type=u"folkbib") - second_library = self._dummy_library(sigel="2", library_type=u"sjukbib") - - first_survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + self, + ): + first_library = self._dummy_library(sigel="1", library_type="folkbib") + second_library = self._dummy_library(sigel="2", library_type="sjukbib") + + first_survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertListEqual(first_survey.get_conflicting_surveys(), []) - def test_should_return_second_survey_when_reporting_for_first_survey_when_library_type_is_unknown(self): + def test_should_return_second_survey_when_reporting_for_first_survey_when_library_type_is_unknown( + self, + ): first_library = self._dummy_library(sigel="1", library_type=None) - second_library = self._dummy_library(sigel="2", library_type=u"sjukbib") + second_library = self._dummy_library(sigel="2", library_type="sjukbib") - first_survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - second_survey = self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + first_survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + second_survey = self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertListEqual(first_survey.get_conflicting_surveys(), [second_survey]) def test_should_return_second_survey_when_reporting_for_first_survey_when_principal_for_library_type_is_unknown( - self): - first_library = self._dummy_library(sigel="1", library_type=u"musbib") + self, + ): + first_library = self._dummy_library(sigel="1", library_type="musbib") self.assertFalse(first_library.library_type in PRINCIPALS) - second_library = self._dummy_library(sigel="2", library_type=u"sjukbib") + second_library = self._dummy_library(sigel="2", library_type="sjukbib") - first_survey = self._dummy_survey(library=first_library, sample_year=2014, selected_libraries=[]) - second_survey = self._dummy_survey(library=second_library, sample_year=2014, selected_libraries=["1", "2"]) + first_survey = self._dummy_survey( + library=first_library, sample_year=2014, selected_libraries=[] + ) + second_survey = self._dummy_survey( + library=second_library, sample_year=2014, selected_libraries=["1", "2"] + ) self.assertListEqual(first_survey.get_conflicting_surveys(), [second_survey]) class TestPreviousYearsSurvey(MongoTestCase): def test_finds_survey_from_previous_year_if_identical_names_ignoring_case(self): - previous_years_survey = self._dummy_survey(sample_year=2013, - library=self._dummy_library(name=u"ALLINGSÅS BIBLIOTEK")) - this_years_survey = self._dummy_survey(sample_year=2014, - library=self._dummy_library(name=u"Allingsås bibliotek")) + previous_years_survey = self._dummy_survey( + sample_year=2013, library=self._dummy_library(name="ALLINGSÅS BIBLIOTEK") + ) + this_years_survey = self._dummy_survey( + sample_year=2014, library=self._dummy_library(name="Allingsås bibliotek") + ) previous_years_survey.publish() - self.assertEqual(previous_years_survey, this_years_survey.previous_years_survey()) + self.assertEqual( + previous_years_survey, this_years_survey.previous_years_survey() + ) - def 
test_does_not_find_survey_from_previous_year_if_not_identical_names_ignoring_case(self): - self._dummy_survey(sample_year=2013, - library=self._dummy_library(name=u"BOTKYRKA BIBLIOTEK")) - this_years_survey = self._dummy_survey(sample_year=2014, - library=self._dummy_library(name=u"Allingsås bibliotek")) + def test_does_not_find_survey_from_previous_year_if_not_identical_names_ignoring_case( + self, + ): + self._dummy_survey( + sample_year=2013, library=self._dummy_library(name="BOTKYRKA BIBLIOTEK") + ) + this_years_survey = self._dummy_survey( + sample_year=2014, library=self._dummy_library(name="Allingsås bibliotek") + ) self.assertEqual(None, this_years_survey.previous_years_survey()) - def test_does_not_find_survey_from_previous_year_even_if_other_library_name_contains_name(self): - self._dummy_survey(sample_year=2013, - library=self._dummy_library(name=u"Nyköpings stadsbibliotek")) - this_years_survey = self._dummy_survey(sample_year=2014, - library=self._dummy_library(name=u"Köpings stadsbibliotek")) + def test_does_not_find_survey_from_previous_year_even_if_other_library_name_contains_name( + self, + ): + self._dummy_survey( + sample_year=2013, + library=self._dummy_library(name="Nyköpings stadsbibliotek"), + ) + this_years_survey = self._dummy_survey( + sample_year=2014, library=self._dummy_library(name="Köpings stadsbibliotek") + ) self.assertEqual(None, this_years_survey.previous_years_survey()) def test_finds_survey_from_previous_year_by_sigel_but_different_names(self): - previous_years_survey = self._dummy_survey(sample_year=2014, - library=self._dummy_library(sigel="lib1", - name="previous_name")) - this_years_survey = self._dummy_survey(sample_year=2015, - library=self._dummy_library(sigel="lib1", - name="new_name")) + previous_years_survey = self._dummy_survey( + sample_year=2014, + library=self._dummy_library(sigel="lib1", name="previous_name"), + ) + this_years_survey = self._dummy_survey( + sample_year=2015, library=self._dummy_library(sigel="lib1", name="new_name") + ) previous_years_survey.publish() - self.assertEqual(this_years_survey.previous_years_survey(), previous_years_survey) + self.assertEqual( + this_years_survey.previous_years_survey(), previous_years_survey + ) def test_does_not_find_survey_from_previous_year_if_not_published(self): - self._dummy_survey(sample_year=2014, - library=self._dummy_library(sigel="lib1", - name="previous_name")) - this_years_survey = self._dummy_survey(sample_year=2015, - library=self._dummy_library(sigel="lib1", - name="new_name")) + self._dummy_survey( + sample_year=2014, + library=self._dummy_library(sigel="lib1", name="previous_name"), + ) + this_years_survey = self._dummy_survey( + sample_year=2015, library=self._dummy_library(sigel="lib1", name="new_name") + ) self.assertEqual(this_years_survey.previous_years_survey(), None) def test_does_not_find_survey_from_previous_year_if_not_identical_sigels(self): - self._dummy_survey(sample_year=2014, - library=self._dummy_library(sigel="lib1", - name=u"ALLINGSÅS BIBLIOTEK")) - this_years_survey = self._dummy_survey(sample_year=2015, - library=self._dummy_library(sigel="lib2", - name=u"Allingsås bibliotek")) + self._dummy_survey( + sample_year=2014, + library=self._dummy_library(sigel="lib1", name="ALLINGSÅS BIBLIOTEK"), + ) + this_years_survey = self._dummy_survey( + sample_year=2015, + library=self._dummy_library(sigel="lib2", name="Allingsås bibliotek"), + ) self.assertEqual(None, this_years_survey.previous_years_survey()) @@ -907,125 +1117,170 @@ def 
test_returns_previous_years_value_if_same_variable_both_years(self): variable = self._dummy_variable() library = self._dummy_library() - self._dummy_survey(sample_year=2014, - library=library, - observations=[self._dummy_observation(variable=variable, - value="old_value")]).publish() + self._dummy_survey( + sample_year=2014, + library=library, + observations=[ + self._dummy_observation(variable=variable, value="old_value") + ], + ).publish() - this_years_survey = self._dummy_survey(sample_year=2015, - library=library) + this_years_survey = self._dummy_survey(sample_year=2015, library=library) self.assertEqual(this_years_survey.previous_years_value(variable), "old_value") def test_does_not_return_previous_years_value_if_no_previous_survey(self): variable = self._dummy_variable() - self._dummy_survey(sample_year=2014, - library=self._dummy_library(), - observations=[self._dummy_observation(variable=variable, - value="old_value")]) + self._dummy_survey( + sample_year=2014, + library=self._dummy_library(), + observations=[ + self._dummy_observation(variable=variable, value="old_value") + ], + ) - this_years_survey = self._dummy_survey(sample_year=2015, - library=self._dummy_library()) + this_years_survey = self._dummy_survey( + sample_year=2015, library=self._dummy_library() + ) self.assertEqual(this_years_survey.previous_years_value(variable), None) - def test_returns_previous_years_value_for_single_replaced_variable_with_same_target_groups(self): + def test_returns_previous_years_value_for_single_replaced_variable_with_same_target_groups( + self, + ): old_variable = self._dummy_variable(target_groups=["folkbib"]) - new_variable = self._dummy_variable(target_groups=["folkbib"], - replaces=[old_variable]) + new_variable = self._dummy_variable( + target_groups=["folkbib"], replaces=[old_variable] + ) library = self._dummy_library() - self._dummy_survey(sample_year=2014, - library=library, - observations=[self._dummy_observation(variable=old_variable, - value="old_value")]).publish() - this_years_survey = self._dummy_survey(sample_year=2015, - library=library) + self._dummy_survey( + sample_year=2014, + library=library, + observations=[ + self._dummy_observation(variable=old_variable, value="old_value") + ], + ).publish() + this_years_survey = self._dummy_survey(sample_year=2015, library=library) - self.assertEqual(this_years_survey.previous_years_value(new_variable), "old_value") + self.assertEqual( + this_years_survey.previous_years_value(new_variable), "old_value" + ) - def test_returns_previous_years_value_for_single_replaced_variable_with_different_target_groups(self): + def test_returns_previous_years_value_for_single_replaced_variable_with_different_target_groups( + self, + ): old_variable = self._dummy_variable(target_groups=["folkbib"]) - new_variable = self._dummy_variable(target_groups=["sjukbib"], - replaces=[old_variable]) + new_variable = self._dummy_variable( + target_groups=["sjukbib"], replaces=[old_variable] + ) library = self._dummy_library() - self._dummy_survey(sample_year=2014, - library=library, - observations=[self._dummy_observation(variable=old_variable, - value="old_value")]).publish() - this_years_survey = self._dummy_survey(sample_year=2015, - library=library) - - self.assertEqual(this_years_survey.previous_years_value(new_variable), "old_value") + self._dummy_survey( + sample_year=2014, + library=library, + observations=[ + self._dummy_observation(variable=old_variable, value="old_value") + ], + ).publish() + this_years_survey = self._dummy_survey(sample_year=2015, 
library=library) + self.assertEqual( + this_years_survey.previous_years_value(new_variable), "old_value" + ) - def test_does_not_return_previous_years_value_for_multiple_replaced_variables_with_same_library_type(self): + def test_does_not_return_previous_years_value_for_multiple_replaced_variables_with_same_library_type( + self, + ): old_variable1 = self._dummy_variable(target_groups=["folkbib"]) old_variable2 = self._dummy_variable(target_groups=["folkbib"]) new_variable = self._dummy_variable(replaces=[old_variable1, old_variable2]) library = self._dummy_library() - self._dummy_survey(sample_year=2014, - library=library, - observations=[self._dummy_observation(variable=old_variable1, - value="old_value")]).publish() - this_years_survey = self._dummy_survey(sample_year=2015, - library=library) + self._dummy_survey( + sample_year=2014, + library=library, + observations=[ + self._dummy_observation(variable=old_variable1, value="old_value") + ], + ).publish() + this_years_survey = self._dummy_survey(sample_year=2015, library=library) self.assertEqual(this_years_survey.previous_years_value(new_variable), None) def test_returns_previous_years_value_for_multiple_replaced_variables_where_one_has_same_library_type_as_this_years_survey( - self): + self, + ): old_variable1 = self._dummy_variable(target_groups=["folkbib"]) old_variable2 = self._dummy_variable(target_groups=["sjukbib"]) - new_variable = self._dummy_variable(target_groups=["folkbib"], - replaces=[old_variable1, old_variable2]) - - self._dummy_survey(sample_year=2014, - library=self._dummy_library(sigel="abcd", - library_type="specbib"), - observations=[self._dummy_observation(variable=old_variable1, - value="old_value")]).publish() - this_years_survey = self._dummy_survey(sample_year=2015, - library=self._dummy_library(sigel="abcd", - library_type="folkbib")) - - self.assertEqual(this_years_survey.previous_years_value(new_variable), "old_value") + new_variable = self._dummy_variable( + target_groups=["folkbib"], replaces=[old_variable1, old_variable2] + ) + + self._dummy_survey( + sample_year=2014, + library=self._dummy_library(sigel="abcd", library_type="specbib"), + observations=[ + self._dummy_observation(variable=old_variable1, value="old_value") + ], + ).publish() + this_years_survey = self._dummy_survey( + sample_year=2015, + library=self._dummy_library(sigel="abcd", library_type="folkbib"), + ) + + self.assertEqual( + this_years_survey.previous_years_value(new_variable), "old_value" + ) def test_returns_previous_years_value_for_multiple_replaced_variables_where_one_has_same_library_type_as_previous_years_survey( - self): + self, + ): old_variable1 = self._dummy_variable(target_groups=["folkbib"]) old_variable2 = self._dummy_variable(target_groups=["sjukbib"]) - new_variable = self._dummy_variable(target_groups=["folkbib"], - replaces=[old_variable1, old_variable2]) - - self._dummy_survey(sample_year=2014, - library=self._dummy_library(sigel="abcd", - library_type="folkbib"), - observations=[self._dummy_observation(variable=old_variable1, - value="old_value")]).publish() - this_years_survey = self._dummy_survey(sample_year=2015, - library=self._dummy_library(sigel="abcd", - library_type="specbib")) - - self.assertEqual(this_years_survey.previous_years_value(new_variable), "old_value") + new_variable = self._dummy_variable( + target_groups=["folkbib"], replaces=[old_variable1, old_variable2] + ) + + self._dummy_survey( + sample_year=2014, + library=self._dummy_library(sigel="abcd", library_type="folkbib"), + observations=[ + 
self._dummy_observation(variable=old_variable1, value="old_value") + ], + ).publish() + this_years_survey = self._dummy_survey( + sample_year=2015, + library=self._dummy_library(sigel="abcd", library_type="specbib"), + ) + + self.assertEqual( + this_years_survey.previous_years_value(new_variable), "old_value" + ) def test_does_not_return_previous_years_value_for_multiple_replaced_variables_where_several_has_same_library_type( - self): + self, + ): old_variable1 = self._dummy_variable(target_groups=["folkbib"]) old_variable2 = self._dummy_variable(target_groups=["folkbib"]) old_variable3 = self._dummy_variable(target_groups=["sjukbib"]) - new_variable = self._dummy_variable(target_groups=["folkbib"], - replaces=[old_variable1, old_variable2, old_variable3]) + new_variable = self._dummy_variable( + target_groups=["folkbib"], + replaces=[old_variable1, old_variable2, old_variable3], + ) library = self._dummy_library() - self._dummy_survey(sample_year=2014, library=library, observations=[ - self._dummy_observation(variable=old_variable1, value="old_value")]).publish() + self._dummy_survey( + sample_year=2014, + library=library, + observations=[ + self._dummy_observation(variable=old_variable1, value="old_value") + ], + ).publish() this_years_survey = self._dummy_survey(sample_year=2015, library=library) this_years_survey.publish() @@ -1063,7 +1318,9 @@ class TestLockSurvey(MongoTestCase): def test_creates_a_lock(self): survey = self._dummy_survey() SurveyEditingLock.lock_survey(survey_id=survey.id) - self.assertTrue(SurveyEditingLock.objects.filter(survey_id=survey.id).first() != None) + self.assertTrue( + SurveyEditingLock.objects.filter(survey_id=survey.id).first() != None + ) def test_changes_time_when_renewing_lock(self): survey = self._dummy_survey() @@ -1077,6 +1334,3 @@ def test_releases_lock(self): survey = self._dummy_survey() SurveyEditingLock.release_lock_on_survey(survey_id=survey.id) self.assertEqual(len(SurveyEditingLock.objects.filter(survey_id=survey.id)), 0) - - - diff --git a/libstat/tests/models/test_variable.py b/libstat/tests/models/test_variable.py index 64c60569..ff0353de 100644 --- a/libstat/tests/models/test_variable.py +++ b/libstat/tests/models/test_variable.py @@ -1,98 +1,151 @@ -# -*- coding: UTF-8 -*- from datetime import timedelta +from mongoengine import DoesNotExist + from libstat.tests import MongoTestCase from libstat.models import * from bson.objectid import ObjectId class VariableQuerySetTest(MongoTestCase): - def setUp(self): # Discontinued (today) - v2 = Variable(key=u"Folk35", description=u"Antal årsverken övrig personal", type="decimal", is_public=False, - target_groups=["folkbib"]) - v2.question = u"Hur många årsverken utfördes av personal i folkbiblioteksverksamheten under 2012?" - v2.question_part = u"Antal årsverken övrig personal (ej städpersonal)" + v2 = Variable( + key="Folk35", + description="Antal årsverken övrig personal", + type="decimal", + is_public=False, + target_groups=["folkbib"], + ) + v2.question = "Hur många årsverken utfördes av personal i folkbiblioteksverksamheten under 2012?" 
+ v2.question_part = "Antal årsverken övrig personal (ej städpersonal)" v2.active_to = datetime.utcnow().date() v2.save() self.v2 = Variable.objects.get(pk=v2.id) # Replaced - v = Variable(key=u"Folk10", description=u"Antal bemannade servicesställen", type="integer", is_public=True, - target_groups=["folkbib"]) + v = Variable( + key="Folk10", + description="Antal bemannade servicesställen", + type="integer", + is_public=True, + target_groups=["folkbib"], + ) v.replaced_by = self.v2 v.save() self.v = Variable.objects.get(pk=v.id) # Active - v3 = Variable(key=u"Folk31", description=u"Antal årsverken totalt", type="decimal", is_public=True, - target_groups=["folkbib"], id_draft=False) + v3 = Variable( + key="Folk31", + description="Antal årsverken totalt", + type="decimal", + is_public=True, + target_groups=["folkbib"], + is_draft=False, + ) v3.summary_of = [self.v2] v3.save() self.v3 = Variable.objects.get(pk=v3.id) # Draft - v4 = Variable(key=u"Folk69", description=u"Totalt nyförvärv AV-medier", type="integer", is_public=True, - target_groups=["folkbib"], is_draft=True) - v4.question = u"Hur många nyförvärv av AV-media gjordes under 2012?" + v4 = Variable( + key="Folk69", + description="Totalt nyförvärv AV-medier", + type="integer", + is_public=True, + target_groups=["folkbib"], + is_draft=True, + ) + v4.question = "Hur många nyförvärv av AV-media gjordes under 2012?" v4.save() self.v4 = Variable.objects.get(pk=v4.id) def test_filter_public_terms(self): result_set = Variable.objects.public_terms() - self.assertEquals([v.id for v in result_set], [self.v.id, self.v3.id]) + self.assertEqual([v.id for v in result_set], [self.v.id, self.v3.id]) def test_filter_public_term_by_key(self): - self.assertRaises(DoesNotExist, lambda: Variable.objects.public_term_by_key(None)) - self.assertRaises(DoesNotExist, lambda: Variable.objects.public_term_by_key("foo")) - self.assertEquals(Variable.objects.public_term_by_key("Folk10").id, self.v.id) - self.assertRaises(DoesNotExist, lambda: Variable.objects.public_term_by_key("Folk35")) - self.assertEquals(Variable.objects.public_term_by_key("Folk31").id, self.v3.id) - self.assertRaises(DoesNotExist, lambda: Variable.objects.public_term_by_key("Folk69")) + self.assertRaises( + DoesNotExist, lambda: Variable.objects.public_term_by_key(None) + ) + self.assertRaises( + DoesNotExist, lambda: Variable.objects.public_term_by_key("foo") + ) + self.assertEqual(Variable.objects.public_term_by_key("Folk10").id, self.v.id) + self.assertRaises( + DoesNotExist, lambda: Variable.objects.public_term_by_key("Folk35") + ) + self.assertEqual(Variable.objects.public_term_by_key("Folk31").id, self.v3.id) + self.assertRaises( + DoesNotExist, lambda: Variable.objects.public_term_by_key("Folk69") + ) def test_filter_replaceable_should_not_return_drafts_or_replaced(self): result_set = Variable.objects.replaceable() - self.assertEquals([v.id for v in result_set], [self.v3.id, self.v2.id]) + self.assertEqual([v.id for v in result_set], [self.v3.id, self.v2.id]) def test_filter_surveyable_should_not_return_discontinued_or_replaced(self): result_set = Variable.objects.surveyable() - self.assertEquals([v.id for v in result_set], [self.v3.id, self.v4.id]) + self.assertEqual([v.id for v in result_set], [self.v3.id, self.v4.id]) class VariableTest(MongoTestCase): - def setUp(self): - v = Variable(key=u"Folk10", description=u"Antal bemannade servicesställen", type="integer", is_public=True, - target_groups=["folkbib"], - active_from=datetime(2010, 1, 1).date()) + v = Variable( + key="Folk10", + 
description="Antal bemannade servicesställen", + type="integer", + is_public=True, + target_groups=["folkbib"], + active_from=datetime(2010, 1, 1).date(), + ) v.save() self.v = Variable.objects.get(pk=v.id) - v2 = Variable(key=u"Folk35", description=u"Antal årsverken övrig personal", type="decimal", is_public=True, - target_groups=["folkbib"], - active_to=datetime(2014, 6, 1).date()) - v2.question = u"Hur många årsverken utfördes av personal i folkbiblioteksverksamheten under 2012?" - v2.question_part = u"Antal årsverken övrig personal (ej städpersonal)" + v2 = Variable( + key="Folk35", + description="Antal årsverken övrig personal", + type="decimal", + is_public=True, + target_groups=["folkbib"], + active_to=datetime(2014, 6, 1).date(), + ) + v2.question = "Hur många årsverken utfördes av personal i folkbiblioteksverksamheten under 2012?" + v2.question_part = "Antal årsverken övrig personal (ej städpersonal)" v2.save() self.v2 = Variable.objects.get(pk=v2.id) - v3 = Variable(key=u"Folk31", description=u"Antal årsverken totalt", type="decimal", is_public=True, - target_groups=["folkbib"], - active_from=datetime.utcnow().date(), active_to=(datetime.utcnow() + timedelta(days=1)).date()) + v3 = Variable( + key="Folk31", + description="Antal årsverken totalt", + type="decimal", + is_public=True, + target_groups=["folkbib"], + active_from=datetime.utcnow().date(), + active_to=(datetime.utcnow() + timedelta(days=1)).date(), + ) v3.summary_of = [self.v2] v3.save() self.v3 = Variable.objects.get(pk=v3.id) - v4 = Variable(key=u"Folk69", description=u"Totalt nyförvärv AV-medier", type="integer", is_public=True, - target_groups=["folkbib"], is_draft=True) - v4.question = u"Hur många nyförvärv av AV-media gjordes under 2012?" + v4 = Variable( + key="Folk69", + description="Totalt nyförvärv AV-medier", + type="integer", + is_public=True, + target_groups=["folkbib"], + is_draft=True, + ) + v4.question = "Hur många nyförvärv av AV-media gjordes under 2012?" 
v4.save() self.v4 = Variable.objects.get(pk=v4.id) def test_key_asc_should_be_default_sort_order(self): result = Variable.objects.all() - self.assertEquals([v.key for v in result], [u"Folk10", u"Folk31", u"Folk35", u"Folk69"]) + self.assertEqual( + [v.key for v in result], ["Folk10", "Folk31", "Folk35", "Folk69"] + ) def test_should_transform_object_to_dict(self): self.v.active_from = None @@ -100,10 +153,10 @@ def test_should_transform_object_to_dict(self): folk10 = Variable.objects.get(pk=self.v.id) expectedVariableDict = { - u"@id": u"Folk10", - u"@type": [u"rdf:Property", u"qb:MeasureProperty"], - u"comment": u"Antal bemannade servicesställen", - u"range": u"xsd:integer" + "@id": "Folk10", + "@type": ["rdf:Property", "qb:MeasureProperty"], + "comment": "Antal bemannade servicesställen", + "range": "xsd:integer", } self.assertEqual(folk10.to_dict(), expectedVariableDict) @@ -111,11 +164,11 @@ def test_should_transform_replacing_object_to_dict(self): self.v4.replace_siblings([self.v2.id], commit=True) folk69 = Variable.objects.get(pk=self.v4.id) expectedVariableDict = { - u"@id": u"Folk69", - u"@type": [u"rdf:Property", u"qb:MeasureProperty"], - u"comment": u"Totalt nyförvärv AV-medier", - u"range": u"xsd:integer", - u"replaces": [u"Folk35"] + "@id": "Folk69", + "@type": ["rdf:Property", "qb:MeasureProperty"], + "comment": "Totalt nyförvärv AV-medier", + "range": "xsd:integer", + "replaces": ["Folk35"], } self.assertEqual(folk69.to_dict(), expectedVariableDict) @@ -126,11 +179,11 @@ def test_should_transform_replaced_object_to_dict(self): folk35 = Variable.objects.get(pk=self.v2.id) expectedVariableDict = { - u"@id": u"Folk35", - u"@type": [u"rdf:Property", u"qb:MeasureProperty"], - u"comment": u"Antal årsverken övrig personal", - u"range": u"xsd:decimal", - u"replacedBy": u"Folk69", + "@id": "Folk35", + "@type": ["rdf:Property", "qb:MeasureProperty"], + "comment": "Antal årsverken övrig personal", + "range": "xsd:decimal", + "replacedBy": "Folk69", } self.assertEqual(folk35.to_dict(), expectedVariableDict) @@ -141,11 +194,11 @@ def test_should_transform_discontinued_object_to_dict(self): folk10 = Variable.objects.get(pk=self.v.id) expectedVariableDict = { - u"@id": u"Folk10", - u"@type": [u"rdf:Property", u"qb:MeasureProperty"], - u"comment": u"Antal bemannade servicesställen", - u"range": u"xsd:integer", - u"valid": "name=Giltighetstid; end=2014-08-31;" + "@id": "Folk10", + "@type": ["rdf:Property", "qb:MeasureProperty"], + "comment": "Antal bemannade servicesställen", + "range": "xsd:integer", + "valid": "name=Giltighetstid; end=2014-08-31;", } self.assertEqual(folk10.to_dict(), expectedVariableDict) @@ -157,11 +210,11 @@ def test_should_transform_pending_object_to_dict(self): folk10 = Variable.objects.get(pk=self.v.id) expectedVariableDict = { - u"@id": u"Folk10", - u"@type": [u"rdf:Property", u"qb:MeasureProperty"], - u"comment": u"Antal bemannade servicesställen", - u"range": u"xsd:integer", - u"valid": u"name=Giltighetstid; start={};".format(tomorrow) + "@id": "Folk10", + "@type": ["rdf:Property", "qb:MeasureProperty"], + "comment": "Antal bemannade servicesställen", + "range": "xsd:integer", + "valid": "name=Giltighetstid; start={};".format(tomorrow), } self.assertEqual(folk10.to_dict(), expectedVariableDict) @@ -172,22 +225,30 @@ def test_should_transform_date_ranged_object_to_dict(self): folk10 = Variable.objects.get(pk=self.v.id) expectedVariableDict = { - u"@id": u"Folk10", - u"@type": [u"rdf:Property", u"qb:MeasureProperty"], - u"comment": u"Antal bemannade servicesställen", 
- u"range": u"xsd:integer", - u"valid": u"name=Giltighetstid; start=2010-01-01; end=2014-12-31;" + "@id": "Folk10", + "@type": ["rdf:Property", "qb:MeasureProperty"], + "comment": "Antal bemannade servicesställen", + "range": "xsd:integer", + "valid": "name=Giltighetstid; start=2010-01-01; end=2014-12-31;", } self.assertEqual(folk10.to_dict(), expectedVariableDict) def test_variable_should_have_question_and_question_part(self): folk35 = Variable.objects.get(pk=self.v2.id) - self.assertTrue(hasattr(folk35, - "question") and folk35.question == u"Hur många årsverken utfördes av personal i folkbiblioteksverksamheten under 2012?") - self.assertTrue(hasattr(folk35, - "question_part") and folk35.question_part == u"Antal årsverken övrig personal (ej städpersonal)") - - def test_summary_variable_without_question_or_question_part_is_summary_auto_field(self): + self.assertTrue( + hasattr(folk35, "question") + and folk35.question + == "Hur många årsverken utfördes av personal i folkbiblioteksverksamheten under 2012?" + ) + self.assertTrue( + hasattr(folk35, "question_part") + and folk35.question_part + == "Antal årsverken övrig personal (ej städpersonal)" + ) + + def test_summary_variable_without_question_or_question_part_is_summary_auto_field( + self, + ): folk31 = Variable.objects.get(pk=self.v3.id) self.assertTrue(folk31.is_summary_auto_field) # THis field is automatically summarized in survey_draft and the user cannot change the value @@ -198,36 +259,38 @@ def test_summary_variable_with_question_or_question_part_is_summary_field(self): # This field is automatically summarized in survey_draft, but value can be changed by user. # TODO: Maybe a is_summary_field helper property on model could for this state? - def test_should_return_question_and_question_part_as_label_if_both_fields_exist(self): + def test_should_return_question_and_question_part_as_label_if_both_fields_exist( + self, + ): folk35 = Variable.objects.get(pk=self.v2.id) - self.assertEquals(folk35.label, [folk35.question, folk35.question_part]) + self.assertEqual(folk35.label, [folk35.question, folk35.question_part]) def test_should_return_question_as_label_if_no_question_part(self): folk69 = Variable.objects.get(pk=self.v4.id) - self.assertEquals(folk69.label, folk69.question) + self.assertEqual(folk69.label, folk69.question) def test_should_return_description_as_label_if_no_question(self): folk31 = Variable.objects.get(pk=self.v3.id) - self.assertEquals(folk31.label, folk31.description) + self.assertEqual(folk31.label, folk31.description) def test_should_store_version_when_updating_existing_object(self): - self.v.description = u"Totalt antal bemannade serviceställen, summering av antal filialer och huvudbibliotek" + self.v.description = "Totalt antal bemannade serviceställen, summering av antal filialer och huvudbibliotek" self.v.save() versions = VariableVersion.objects.all() - self.assertEquals(len(versions), 1) - self.assertEquals(versions[0].description, u"Antal bemannade servicesställen") + self.assertEqual(len(versions), 1) + self.assertEqual(versions[0].description, "Antal bemannade servicesställen") def test_should_not_store_version_when_updating_draft(self): - self.v4.description = u"En ny beskrivning" + self.v4.description = "En ny beskrivning" self.v4.save() versions = VariableVersion.objects.all() - self.assertEquals(len(versions), 0) + self.assertEqual(len(versions), 0) def test_should_set_modified_date_when_updating_existing_object(self): date_modified = self.v.date_modified - self.v.description = u"Totalt antal bemannade 
serviceställen, summering av antal filialer och huvudbibliotek" + self.v.description = "Totalt antal bemannade serviceställen, summering av antal filialer och huvudbibliotek" self.v.save() updated = Variable.objects.get(pk=self.v.id) @@ -235,7 +298,7 @@ def test_should_set_modified_date_when_updating_existing_object(self): def test_should_set_modified_date_when_updating_draft(self): date_modified = self.v4.date_modified - self.v4.description = u"En ny beskrivning" + self.v4.description = "En ny beskrivning" self.v4.save() updated = Variable.objects.get(pk=self.v4.id) @@ -262,74 +325,95 @@ def test_is_active(self): def test_active_variable_should_replace_other_variables(self): switchover_date = datetime(2014, 1, 1) self.v2.active_from = switchover_date # TODO: Change to using self.active_from instead of switchover_date - modified_siblings = self.v2.replace_siblings([self.v.id, self.v3.id], switchover_date=switchover_date.date(), - commit=True) - self.assertEquals(set([v.id for v in modified_siblings]), set([self.v.id, self.v3.id])) + modified_siblings = self.v2.replace_siblings( + [self.v.id, self.v3.id], switchover_date=switchover_date.date(), commit=True + ) + self.assertEqual( + set([v.id for v in modified_siblings]), set([self.v.id, self.v3.id]) + ) replacement = Variable.objects.get(pk=self.v2.id) replaced_var_1 = Variable.objects.get(pk=self.v.id) replaced_var_2 = Variable.objects.get(pk=self.v3.id) # Replacement should have fields active_from and replaces set - self.assertEquals(set([v.id for v in replacement.replaces]), set([self.v.id, self.v3.id])) - self.assertEquals(replacement.active_from, switchover_date) + self.assertEqual( + set([v.id for v in replacement.replaces]), set([self.v.id, self.v3.id]) + ) + self.assertEqual(replacement.active_from, switchover_date) # Replaced variables should have fields active_to and replaced_by set. 
- self.assertEquals(replaced_var_1.replaced_by.id, self.v2.id) - self.assertEquals(replaced_var_1.active_to, switchover_date) - self.assertEquals(replaced_var_1.is_active, False) + self.assertEqual(replaced_var_1.replaced_by.id, self.v2.id) + self.assertEqual(replaced_var_1.active_to, switchover_date) + self.assertEqual(replaced_var_1.is_active, False) - self.assertEquals(replaced_var_2.replaced_by.id, self.v2.id) - self.assertEquals(replaced_var_2.active_to, switchover_date) - self.assertEquals(replaced_var_2.is_active, False) + self.assertEqual(replaced_var_2.replaced_by.id, self.v2.id) + self.assertEqual(replaced_var_2.active_to, switchover_date) + self.assertEqual(replaced_var_2.is_active, False) def test_draft_variable_should_list_but_not_replace_other_variables(self): switchover_date = datetime(2014, 1, 1) - modified_siblings = self.v4.replace_siblings([self.v2.id], switchover_date=switchover_date, commit=True) - self.assertEquals(modified_siblings, []) + modified_siblings = self.v4.replace_siblings( + [self.v2.id], switchover_date=switchover_date, commit=True + ) + self.assertEqual(modified_siblings, []) replacement_var = Variable.objects.get(pk=self.v4.id) to_be_replaced = Variable.objects.get(pk=self.v2.id) - self.assertEquals(set([v.id for v in replacement_var.replaces]), set([self.v2.id])) - self.assertEquals(to_be_replaced.replaced_by, None) - self.assertEquals(to_be_replaced.active_to, self.v2.active_to) - self.assertEquals(to_be_replaced.is_active, False) + self.assertEqual( + set([v.id for v in replacement_var.replaces]), set([self.v2.id]) + ) + self.assertEqual(to_be_replaced.replaced_by, None) + self.assertEqual(to_be_replaced.active_to, self.v2.active_to) + self.assertEqual(to_be_replaced.is_active, False) def test_should_not_commit_replacement_unless_specified(self): modified_siblings = self.v2.replace_siblings([self.v.id]) - self.assertEquals(set([v.id for v in self.v2.replaces]), set([self.v.id])) - self.assertEquals(set([v.id for v in modified_siblings]), set([self.v.id])) + self.assertEqual(set([v.id for v in self.v2.replaces]), set([self.v.id])) + self.assertEqual(set([v.id for v in modified_siblings]), set([self.v.id])) replacement_var = Variable.objects.get(pk=self.v2.id) replaced_var_1 = Variable.objects.get(pk=self.v.id) - self.assertEquals(replacement_var.replaces, []) - self.assertEquals(replaced_var_1.replaced_by, None) + self.assertEqual(replacement_var.replaces, []) + self.assertEqual(replaced_var_1.replaced_by, None) def test_should_raise_error_if_trying_to_replace_already_replaced_variable(self): self.v.replace_siblings([self.v2.id], commit=True) - self.assertRaises(AttributeError, lambda: self.v3.replace_siblings([self.v2.id], commit=True)) + self.assertRaises( + AttributeError, lambda: self.v3.replace_siblings([self.v2.id], commit=True) + ) replacement = Variable.objects.get(pk=self.v.id) unsuccessful_replacement = Variable.objects.get(pk=self.v3.id) to_be_replaced = Variable.objects.get(pk=self.v2.id) - self.assertEquals([v.id for v in replacement.replaces], [self.v2.id]) - self.assertEquals(unsuccessful_replacement.replaces, []) - self.assertEquals(to_be_replaced.replaced_by.id, self.v.id) + self.assertEqual([v.id for v in replacement.replaces], [self.v2.id]) + self.assertEqual(unsuccessful_replacement.replaces, []) + self.assertEqual(to_be_replaced.replaced_by.id, self.v.id) def test_raise_error_if_trying_to_replace_non_existing_variable(self): - self.assertRaises(DoesNotExist, - lambda: self.v.replace_siblings([ObjectId("53fdec1ca9969003ec144d97")], 
commit=True)) + self.assertRaises( + DoesNotExist, + lambda: self.v.replace_siblings( + [ObjectId("53fdec1ca9969003ec144d97")], commit=True + ), + ) def test_should_update_replacements(self): - self.v2.replace_siblings([self.v.id, self.v3.id], switchover_date=datetime(2014, 12, 31).date(), commit=True) + self.v2.replace_siblings( + [self.v.id, self.v3.id], + switchover_date=datetime(2014, 12, 31).date(), + commit=True, + ) replacement = Variable.objects.get(pk=self.v2.id) - self.assertEquals(len(VariableVersion.objects.filter(key=self.v.key)), 1) + self.assertEqual(len(VariableVersion.objects.filter(key=self.v.key)), 1) # Should update v, add v4 and remove v3 new_switchover_date = datetime(2015, 1, 1).date() - replacement.replace_siblings([self.v.id, self.v4.id], switchover_date=new_switchover_date, commit=True) + replacement.replace_siblings( + [self.v.id, self.v4.id], switchover_date=new_switchover_date, commit=True + ) replacement = Variable.objects.get(pk=self.v2.id) modified_replaced = Variable.objects.get(pk=self.v.id) @@ -337,19 +421,22 @@ def test_should_update_replacements(self): new_replaced = Variable.objects.get(pk=self.v4.id) # replacement should have updated list of variables (active_from is set outside of this mmethod) - self.assertEquals(set([v.id for v in replacement.replaces]), set([modified_replaced.id, new_replaced.id])) + self.assertEqual( + set([v.id for v in replacement.replaces]), + set([modified_replaced.id, new_replaced.id]), + ) # v should have updated active_to date - self.assertEquals(modified_replaced.active_to.date(), new_switchover_date) - self.assertEquals(modified_replaced.replaced_by.id, replacement.id) + self.assertEqual(modified_replaced.active_to.date(), new_switchover_date) + self.assertEqual(modified_replaced.replaced_by.id, replacement.id) # v3 should no longer be replaced - self.assertEquals(no_longer_replaced.replaced_by, None) - self.assertEquals(no_longer_replaced.active_to, None) + self.assertEqual(no_longer_replaced.replaced_by, None) + self.assertEqual(no_longer_replaced.active_to, None) # v4 should be replaced - self.assertEquals(new_replaced.replaced_by.id, replacement.id) - self.assertEquals(new_replaced.active_to.date(), new_switchover_date) + self.assertEqual(new_replaced.replaced_by.id, replacement.id) + self.assertEqual(new_replaced.active_to.date(), new_switchover_date) def test_should_update_replacements_for_draft(self): modified_siblings = self.v4.replace_siblings([self.v2.id], commit=True) @@ -359,11 +446,13 @@ def test_should_update_replacements_for_draft(self): replacement_var = Variable.objects.get(pk=self.v4.id) - self.assertEquals(set([v.id for v in replacement_var.replaces]), set([self.v2.id, self.v3.id])) - self.assertEquals(Variable.objects.get(pk=self.v2.id).replaced_by, None) - self.assertEquals(len(VariableVersion.objects.filter(key=self.v2.key)), 0) - self.assertEquals(Variable.objects.get(pk=self.v3.id).replaced_by, None) - self.assertEquals(len(VariableVersion.objects.filter(key=self.v3.key)), 0) + self.assertEqual( + set([v.id for v in replacement_var.replaces]), set([self.v2.id, self.v3.id]) + ) + self.assertEqual(Variable.objects.get(pk=self.v2.id).replaced_by, None) + self.assertEqual(len(VariableVersion.objects.filter(key=self.v2.key)), 0) + self.assertEqual(Variable.objects.get(pk=self.v3.id).replaced_by, None) + self.assertEqual(len(VariableVersion.objects.filter(key=self.v3.key)), 0) def test_should_clear_all_replacements(self): # Setup @@ -378,9 +467,9 @@ def test_should_clear_all_replacements(self): # Clear 
replacements replacement.replace_siblings([], commit=True) - self.assertEquals(replacement.reload().replaces, []) - self.assertEquals(replaced_1.reload().replaced_by, None) - self.assertEquals(replaced_2.reload().replaced_by, None) + self.assertEqual(replacement.reload().replaces, []) + self.assertEqual(replaced_1.reload().replaced_by, None) + self.assertEqual(replaced_2.reload().replaced_by, None) def test_should_clear_all_replacements_for_draft(self): # Setup @@ -393,26 +482,33 @@ def test_should_clear_all_replacements_for_draft(self): # Clear replacements replacement.replace_siblings([], commit=True) - self.assertEquals(replacement.reload().replaces, []) - self.assertEquals(replaced.reload().replaced_by, None) - self.assertEquals(len(VariableVersion.objects.filter(key=replaced.key)), 0) + self.assertEqual(replacement.reload().replaces, []) + self.assertEqual(replaced.reload().replaced_by, None) + self.assertEqual(len(VariableVersion.objects.filter(key=replaced.key)), 0) - def test_should_nullify_active_to_and_references_to_replaced_by_when_deleting_replacement_instance(self): + def test_should_nullify_active_to_and_references_to_replaced_by_when_deleting_replacement_instance( + self, + ): # Setup replacement = self.v2 replaced_1 = self.v replaced_2 = self.v3 - replacement.replace_siblings([replaced_1.id, replaced_2.id], switchover_date=datetime(2015, 1, 1), commit=True) + replacement.replace_siblings( + [replaced_1.id, replaced_2.id], + switchover_date=datetime(2015, 1, 1), + commit=True, + ) replacement.reload() + print((replacement.id)) replacement.delete() replaced_1.reload() - self.assertEquals(replaced_1.replaced_by, None) - self.assertEquals(replaced_1.active_to, None) + self.assertEqual(replaced_1.replaced_by, None) + self.assertEqual(replaced_1.active_to, None) replaced_2.reload() - self.assertEquals(replaced_2.replaced_by, None) - self.assertEquals(replaced_2.active_to, None) + self.assertEqual(replaced_2.replaced_by, None) + self.assertEqual(replaced_2.active_to, None) def test_should_nullify_reference_in_replaces_when_deleting_replaced_instance(self): # Setup @@ -420,12 +516,15 @@ def test_should_nullify_reference_in_replaces_when_deleting_replaced_instance(se replaced_1 = self.v replaced_2 = self.v3 replacement.active_from = datetime(2015, 1, 1) - replacement.replace_siblings([replaced_1.id, replaced_2.id], switchover_date=replacement.active_from, - commit=True) + replacement.replace_siblings( + [replaced_1.id, replaced_2.id], + switchover_date=replacement.active_from, + commit=True, + ) replaced_2.reload().delete() - self.assertEquals([v.id for v in replacement.reload().replaces], [replaced_1.id]) + self.assertEqual([v.id for v in replacement.reload().replaces], [replaced_1.id]) replaced_1.reload() - self.assertEquals(replaced_1.replaced_by.id, replacement.id) - self.assertEquals(replaced_1.active_to, replacement.active_from) + self.assertEqual(replaced_1.replaced_by.id, replacement.id) + self.assertEqual(replaced_1.active_to, replacement.active_from) diff --git a/libstat/tests/services/test_bibdb_integration.py b/libstat/tests/services/test_bibdb_integration.py index 531dca91..39cc26af 100644 --- a/libstat/tests/services/test_bibdb_integration.py +++ b/libstat/tests/services/test_bibdb_integration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from libstat.services.bibdb_integration import check_library_criteria, library_from_json from libstat.tests import MongoTestCase @@ -13,36 +12,24 @@ def setUp(self): "library_type": "sjukbib", "municipality_code": "1793", "school_code": 
"12345678", - "address": - [ - { - "address_type": "gen", - "city": "lib1_city", - "street": "street1" - }, - { - "address_type": "ill", - "city": "ill_lib1_city", - "street": "ill_street1" - }, - { - "address_type": "stat", - "city": "stat_lib1_city", - "street": "stat_street1", - "zip_code": "123 45" - } - ], - "contact": - [ - { - "contact_type": "orgchef", - "email": "dont@care.atall" - }, - { - "contact_type": "statans", - "email": "lib1@dom.top" - } - ] + "address": [ + {"address_type": "gen", "city": "lib1_city", "street": "street1"}, + { + "address_type": "ill", + "city": "ill_lib1_city", + "street": "ill_street1", + }, + { + "address_type": "stat", + "city": "stat_lib1_city", + "street": "stat_street1", + "zip_code": "123 45", + }, + ], + "contact": [ + {"contact_type": "orgchef", "email": "dont@care.atall"}, + {"contact_type": "statans", "email": "lib1@dom.top"}, + ], } self._dummy_json_data_1 = { @@ -53,36 +40,24 @@ def setUp(self): "library_type": "sjukbib", "municipality_code": "1793", "school_code": None, - "address": - [ - { - "address_type": "gen", - "city": "lib1_city", - "street": "street1" - }, - { - "address_type": "ill", - "city": "ill_lib1_city", - "street": "ill_street1" - }, - { - "address_type": "stat", - "city": "stat_lib1_city", - "street": "stat_street1", - "zip_code": "123 45" - } - ], - "contact": - [ - { - "contact_type": "orgchef", - "email": "dont@care.atall" - }, - { - "contact_type": "statans", - "email": "lib1@dom.top" - } - ] + "address": [ + {"address_type": "gen", "city": "lib1_city", "street": "street1"}, + { + "address_type": "ill", + "city": "ill_lib1_city", + "street": "ill_street1", + }, + { + "address_type": "stat", + "city": "stat_lib1_city", + "street": "stat_street1", + "zip_code": "123 45", + }, + ], + "contact": [ + {"contact_type": "orgchef", "email": "dont@care.atall"}, + {"contact_type": "statans", "email": "lib1@dom.top"}, + ], } def test_creates_library_from_dict(self): @@ -92,16 +67,16 @@ def test_creates_library_from_dict(self): if check_library_criteria(json_data): library = library_from_json(json_data) - self.assertEquals(library.sigel, "lib1_sigel") - self.assertEquals(library.name, "lib1") - self.assertEquals(library.city, "stat_lib1_city") - self.assertEquals(library.address, "stat_street1") - self.assertEquals(library.email, "lib1@dom.top") - self.assertEquals(library.municipality_code, "1793") - self.assertEquals(library.library_type, "sjukbib") - self.assertEquals(library.zip_code, "123 45") - self.assertEquals(library.external_identifiers[0].type, "school_code") - self.assertEquals(library.external_identifiers[0].identifier, "12345678") + self.assertEqual(library.sigel, "lib1_sigel") + self.assertEqual(library.name, "lib1") + self.assertEqual(library.city, "stat_lib1_city") + self.assertEqual(library.address, "stat_street1") + self.assertEqual(library.email, "lib1@dom.top") + self.assertEqual(library.municipality_code, "1793") + self.assertEqual(library.library_type, "sjukbib") + self.assertEqual(library.zip_code, "123 45") + self.assertEqual(library.external_identifiers[0].type, "school_code") + self.assertEqual(library.external_identifiers[0].identifier, "12345678") def test_creates_library_from_dict_without_external_id_if_school_code_is_none(self): json_data = self._dummy_json_data_1 @@ -110,15 +85,15 @@ def test_creates_library_from_dict_without_external_id_if_school_code_is_none(se if check_library_criteria(json_data): library = library_from_json(json_data) - self.assertEquals(library.sigel, "lib1_sigel") - 
self.assertEquals(library.name, "lib1")
-            self.assertEquals(library.city, "stat_lib1_city")
-            self.assertEquals(library.address, "stat_street1")
-            self.assertEquals(library.email, "lib1@dom.top")
-            self.assertEquals(library.municipality_code, "1793")
-            self.assertEquals(library.library_type, "sjukbib")
-            self.assertEquals(library.zip_code, "123 45")
-            self.assertEquals(library.external_identifiers, None)
+            self.assertEqual(library.sigel, "lib1_sigel")
+            self.assertEqual(library.name, "lib1")
+            self.assertEqual(library.city, "stat_lib1_city")
+            self.assertEqual(library.address, "stat_street1")
+            self.assertEqual(library.email, "lib1@dom.top")
+            self.assertEqual(library.municipality_code, "1793")
+            self.assertEqual(library.library_type, "sjukbib")
+            self.assertEqual(library.zip_code, "123 45")
+            self.assertEqual(library.external_identifiers, None)
     def test_does_not_import_non_swedish_libraries(self):
         json_data = self._dummy_json_data
@@ -128,7 +103,7 @@ def test_does_not_import_non_swedish_libraries(self):
         if check_library_criteria(json_data):
             library = library_from_json(json_data)
-            self.assertEquals(library, None)
+            self.assertEqual(library, None)
     def test_does_not_import_busbib(self):
         json_data = self._dummy_json_data
@@ -138,7 +113,7 @@ def test_does_not_import_busbib(self):
         if check_library_criteria(json_data):
             library = library_from_json(json_data)
-            self.assertEquals(library, None)
+            self.assertEqual(library, None)
     def test_does_not_import_library_with_invalid_library_type(self):
         json_data = self._dummy_json_data
@@ -148,7 +123,7 @@ def test_does_not_import_library_with_invalid_library_type(self):
         if check_library_criteria(json_data):
             library = library_from_json(json_data)
-            self.assertEquals(library, None)
+            self.assertEqual(library, None)
     def test_trims_blank_spaces_from_library_name(self):
         json_data = self._dummy_json_data
@@ -158,7 +133,7 @@ def test_trims_blank_spaces_from_library_name(self):
         if check_library_criteria(json_data):
             library = library_from_json(json_data)
-            self.assertEquals(library.name, "a b c")
+            self.assertEqual(library.name, "a b c")
     def test_does_not_library_import_if_no_municipality_code(self):
         json_data = self._dummy_json_data
@@ -168,7 +143,7 @@ def test_does_not_library_import_if_no_municipality_code(self):
         if check_library_criteria(json_data):
             library = library_from_json(json_data)
-            self.assertEquals(library, None)
+            self.assertEqual(library, None)
     def test_does_not_import_non_statistics_library(self):
         json_data = self._dummy_json_data
@@ -178,4 +153,4 @@ def test_does_not_import_non_statistics_library(self):
         if check_library_criteria(json_data):
             library = library_from_json(json_data)
-            self.assertEquals(library, None)
+            self.assertEqual(library, None)
diff --git a/libstat/tests/services/test_clean_data.py b/libstat/tests/services/test_clean_data.py
index 50ae96d3..429f3da9 100644
--- a/libstat/tests/services/test_clean_data.py
+++ b/libstat/tests/services/test_clean_data.py
@@ -4,18 +4,20 @@
 import unittest
-class TestCleanDataFunctions(MongoTestCase):
+class TestCleanDataFunctions(MongoTestCase):
     def test_update_sigel(self):
         random_sigel = Library._random_sigel()
         library = self._dummy_library(sigel=random_sigel)
-        survey1 = self._dummy_survey(library=library, observations=[self._dummy_observation()])
+        survey1 = self._dummy_survey(
+            library=library, observations=[self._dummy_observation()]
+        )
         survey2 = self._dummy_survey()
         clean_data._update_sigel(survey1, survey2.library.sigel)
         self.assertEqual(survey1.reload().library.sigel, survey2.library.sigel)
         self.assertTrue(survey1.reload().library.sigel in survey1.selected_libraries)
         self.assertTrue(random_sigel not in survey1.selected_libraries)
-        self.assertEqual(survey1.reload()._status, u"published")
+        self.assertEqual(survey1.reload()._status, "published")
     @unittest.skip("Skipped as dependent on sigel mapping workbook")
     def test_load_sigel_mapping_from_workbook(self):
diff --git a/libstat/tests/services/test_published_open_data:excel_export.py b/libstat/tests/services/test_published_open_data:excel_export.py
index 72d41de8..c0e4f678 100644
--- a/libstat/tests/services/test_published_open_data:excel_export.py
+++ b/libstat/tests/services/test_published_open_data:excel_export.py
@@ -1,33 +1,36 @@
-# -*- coding: UTF-8 -*-
 from libstat.services.excel_export import _published_open_data_as_workbook
 from libstat.tests import MongoTestCase
-class TestSurveysExport(MongoTestCase):
+class TestSurveysExport(MongoTestCase):
     def test_can_export_open_data_as_excel(self):
         survey1 = self._dummy_survey()
         survey1.publish()
-        response = self._post(action="export",
-                              data={"sample_year": "2001"})
+        response = self._post(action="export", data={"sample_year": "2001"})
-        self.assertEquals(response.status_code, 200)
+        self.assertEqual(response.status_code, 200)
     def test_sets_correct_values_when_exporting_open_data_as_excel(self):
         self._dummy_variable(key="var1")
         self._dummy_observation(variable="var1", value="testvalue")
-        library1 = self._dummy_library(name="lib1_name", external_identifiers=[self._dummy_external_identifier(identifier="44444444")])
+        library1 = self._dummy_library(
+            name="lib1_name",
+            external_identifiers=[
+                self._dummy_external_identifier(identifier="44444444")
+            ],
+        )
         survey1 = self._dummy_survey(library=library1)
         survey1.publish()
         worksheet = _published_open_data_as_workbook(2001)
-        self.assertEquals(worksheet["A1"].value, "Bibliotek")
-        self.assertEquals(worksheet["B1"].value, "Sigel")
-        self.assertEquals(worksheet["D1"].value, "Kommunkod")
-        self.assertEquals(worksheet["F1"].value, "Externt id")
-        self.assertEquals(worksheet["G1"].value, "var1")
-        self.assertEquals(worksheet["A2"].value, "lib1_name")
-        self.assertEquals(worksheet["F2"].value, "44444444")
-        self.assertEquals(worksheet["G2"].value, "testvalue")
+        self.assertEqual(worksheet["A1"].value, "Bibliotek")
+        self.assertEqual(worksheet["B1"].value, "Sigel")
+        self.assertEqual(worksheet["D1"].value, "Kommunkod")
+        self.assertEqual(worksheet["F1"].value, "Externt id")
+        self.assertEqual(worksheet["G1"].value, "var1")
+        self.assertEqual(worksheet["A2"].value, "lib1_name")
+        self.assertEqual(worksheet["F2"].value, "44444444")
+        self.assertEqual(worksheet["G2"].value, "testvalue")
diff --git a/libstat/tests/services/test_report_generation.py b/libstat/tests/services/test_report_generation.py
index 94d13e76..10e1f6ca 100644
--- a/libstat/tests/services/test_report_generation.py
+++ b/libstat/tests/services/test_report_generation.py
@@ -1,10 +1,13 @@
-# -*- coding: utf-8 -*-
-from pprint import pprint
 from libstat.tests import MongoTestCase
 from libstat.models import CachedReport
 from libstat.report_templates import ReportTemplate, Group, Row
-from libstat.services.report_generation import generate_report, pre_cache_observations, get_report, is_variable_to_be_included
+from libstat.services.report_generation import (
+    generate_report,
+    pre_cache_observations,
+    get_report,
+    is_variable_to_be_included,
+)
 from libstat.services import report_generation
 from libstat.report_templates import report_template_base
@@ -12,35 +15,49 @@ class
TestReportGeneration(MongoTestCase): - @unittest.skip("Skipped as data in test itself is not correct") def test_creates_correct_report(self): - template = ReportTemplate(groups=[ - Group(title="some_title1", - rows=[Row(description="some_description1", - variable_key="key1")]), - Group(title="some_title2", - extra="some_extra_description", - show_chart=False, - rows=[Row(description="some_description2", + template = ReportTemplate( + groups=[ + Group( + title="some_title1", + rows=[Row(description="some_description1", variable_key="key1")], + ), + Group( + title="some_title2", + extra="some_extra_description", + show_chart=False, + rows=[ + Row( + description="some_description2", variable_key="key2", computation=(lambda a, b: (a / b)), - variable_keys=["key1", "key2"]), - Row(description="only_a_label", - label_only=True), - Row(description="some_description3", + variable_keys=["key1", "key2"], + ), + Row(description="only_a_label", label_only=True), + Row( + description="some_description3", computation=(lambda a, b: (a / b) / 15), - variable_keys=["key1", "key2"]), - Row(description="some_description4", - variable_key="does_not_exist1"), - Row(description="some_description5", + variable_keys=["key1", "key2"], + ), + Row( + description="some_description4", + variable_key="does_not_exist1", + ), + Row( + description="some_description5", variable_key="key4", - is_sum=True), - Row(description="some_description6", + is_sum=True, + ), + Row( + description="some_description6", computation=(lambda a, b: (a / b)), - variable_keys=["does_not_exist2", "does_not_exist3"]), - ]) - ]) + variable_keys=["does_not_exist2", "does_not_exist3"], + ), + ], + ), + ] + ) variable1 = self._dummy_variable(key="key1", target_groups=["folkbib"]) variable2 = self._dummy_variable(key="key2", target_groups=["folkbib"]) variable4 = self._dummy_variable(key="key4", target_groups=["folkbib"]) @@ -50,19 +67,10 @@ def test_creates_correct_report(self): 2013: 5.0, 2014: 7.0, "total": 31.0, - "incomplete_data": [] - }, - "key2": { - 2013: 11.0, - 2014: 13.0, - "total": 47.0, - "incomplete_data": [2012] + "incomplete_data": [], }, - "key4": { - 2014: 3.0, - 2012: 17.0, - "incomplete_data": [2013] - } + "key2": {2013: 11.0, 2014: 13.0, "total": 47.0, "incomplete_data": [2012]}, + "key4": {2014: 3.0, 2012: 17.0, "incomplete_data": [2013]}, } report = generate_report(template, 2014, observations, ["folkbib"]) @@ -81,7 +89,7 @@ def test_creates_correct_report(self): "diff": ((7.0 / 5.0) - 1) * 100, "nation_diff": (7.0 / 31.0) * 1000, } - ] + ], }, { "title": "some_title2", @@ -97,12 +105,12 @@ def test_creates_correct_report(self): "diff": ((13.0 / 11.0) - 1) * 100, "nation_diff": (13.0 / 47.0) * 1000, "extra": (7.0 / 13.0) * 100, - "incomplete_data": ["2012"] + "incomplete_data": ["2012"], }, { "label": "only_a_label", "label_only": True, - "show_in_chart": False + "show_in_chart": False, }, { "label": "some_description3", @@ -111,7 +119,7 @@ def test_creates_correct_report(self): "diff": (((7.0 / 13.0) / 15) / ((5.0 / 11.0) / 15) - 1) * 100, "incomplete_data": ["2012"], "is_key_figure": True, - "show_in_chart": False + "show_in_chart": False, }, { "label": "some_description4", @@ -123,15 +131,15 @@ def test_creates_correct_report(self): "2012": 17.0, "2014": 3.0, "is_sum": True, - "incomplete_data": ["2013"] + "incomplete_data": ["2013"], }, { "label": "some_description6", "is_key_figure": True, - "show_in_chart": False - } - ] - } + "show_in_chart": False, + }, + ], + }, ] self.assertEqual(report, expected_report) @@ -150,56 
+158,63 @@ def test_parses_observations_from_surveys(self): library=library1, observations=[ self._dummy_observation(variable=variable1, value=1), - self._dummy_observation(variable=variable2, value=2) - ]) + self._dummy_observation(variable=variable2, value=2), + ], + ) survey2 = self._dummy_survey( sample_year=2016, library=library2, observations=[ self._dummy_observation(variable=variable1, value=3), - self._dummy_observation(variable=variable3, value=5) - ]) + self._dummy_observation(variable=variable3, value=5), + ], + ) survey3 = self._dummy_survey( sample_year=2016, library=library3, observations=[ self._dummy_observation(variable=variable2, value=13), - self._dummy_observation(variable=variable3, value=17) - ]) + self._dummy_observation(variable=variable3, value=17), + ], + ) survey4 = self._dummy_survey( sample_year=2015, library=library1, observations=[ self._dummy_observation(variable=variable1, value=7), - self._dummy_observation(variable=variable2, value=11) - ]) + self._dummy_observation(variable=variable2, value=11), + ], + ) survey5 = self._dummy_survey( sample_year=2015, library=library2, observations=[ self._dummy_observation(variable=variable1, value=8), - self._dummy_observation(variable=variable2, value=9) - ]) + self._dummy_observation(variable=variable2, value=9), + ], + ) survey6 = self._dummy_survey( sample_year=2014, library=library1, observations=[ self._dummy_observation(variable=variable1, value=19), - self._dummy_observation(variable=variable2, value=23) - ]) + self._dummy_observation(variable=variable2, value=23), + ], + ) survey7 = self._dummy_survey( sample_year=2014, library=library2, observations=[ self._dummy_observation(variable=variable1, value=21), - self._dummy_observation(variable=variable2, value=22) - ]) + self._dummy_observation(variable=variable2, value=22), + ], + ) survey1.publish() survey2.publish() @@ -209,12 +224,17 @@ def test_parses_observations_from_surveys(self): survey6.publish() survey7.publish() - template = ReportTemplate(groups=[ - Group(rows=[Row(variable_key="key1")]), - Group(rows=[Row(variable_key="key2"), + template = ReportTemplate( + groups=[ + Group(rows=[Row(variable_key="key1")]), + Group( + rows=[ + Row(variable_key="key2"), Row(variable_keys=["key3", "key2"]), - ]) - ]) + ] + ), + ] + ) observations = pre_cache_observations(template, [survey1, survey2], 2016) expected_observations = { @@ -223,31 +243,35 @@ def test_parses_observations_from_surveys(self): 2015: (7.0 + 8.0), 2016: (1.0 + 3.0), "total": (1.0 + 3.0), - "incomplete_data": [] + "incomplete_data": [], }, "key2": { 2014: (23.0 + 22.0), 2015: (11.0 + 9.0), 2016: 2.0, "total": (2.0 + 13.0), - "incomplete_data": [2016] + "incomplete_data": [2016], }, "key3": { 2014: None, 2015: None, 2016: 5.0, "total": (5.0 + 17.0), - "incomplete_data": [2016, 2015, 2014] - } + "incomplete_data": [2016, 2015, 2014], + }, } self.assertEqual(observations, expected_observations) @unittest.skip("Skipped due to strange bson conversion error") def test_is_variable_to_be_included(self): - variable1 = self._dummy_variable(key="key4", target_groups=["folkbib", "natbib"]) + variable1 = self._dummy_variable( + key="key4", target_groups=["folkbib", "natbib"] + ) variable2 = self._dummy_variable(key="key5", target_groups=["natbib"]) - variable3 = self._dummy_variable(key="key6", target_groups=["skolbib", "folkbib"]) + variable3 = self._dummy_variable( + key="key6", target_groups=["skolbib", "folkbib"] + ) self.assertTrue(is_variable_to_be_included(variable1, ["folkbib"])) 
self.assertFalse(is_variable_to_be_included(variable2, ["folkbib"])) @@ -255,16 +279,21 @@ def test_is_variable_to_be_included(self): class TestReportTemplate(MongoTestCase): - def test_returns_all_variable_keys_present_in_report_template_without_duplicates(self): - template = ReportTemplate(groups=[ - Group(rows=[ - Row(variable_key="key1")]), - Group(rows=[ - Row(variable_key="key2"), - Row(variable_keys=["key3", "key2"]), - Row(variable_key="key4", variable_keys=["key3", "key5"]), - ]) - ]) + def test_returns_all_variable_keys_present_in_report_template_without_duplicates( + self, + ): + template = ReportTemplate( + groups=[ + Group(rows=[Row(variable_key="key1")]), + Group( + rows=[ + Row(variable_key="key2"), + Row(variable_keys=["key3", "key2"]), + Row(variable_key="key4", variable_keys=["key3", "key5"]), + ] + ), + ] + ) variable_keys = template.all_variable_keys @@ -275,7 +304,9 @@ def test_returns_all_variable_keys_present_in_report_template_without_duplicates self.assertTrue("key4" in variable_keys) self.assertTrue("key5" in variable_keys) - def test_fetches_description_from_variable_if_variable_exists_and_no_description_is_given(self): + def test_fetches_description_from_variable_if_variable_exists_and_no_description_is_given( + self, + ): variable = self._dummy_variable(question_part="some_description") row = Row(variable_key=variable.key) @@ -286,7 +317,9 @@ def test_does_not_fetch_description_from_variable_if_no_variable_key_is_given(se self.assertEqual(row.description, None) - def test_does_not_fetch_description_from_variable_if_variable_exists_and_description_is_given(self): + def test_does_not_fetch_description_from_variable_if_variable_exists_and_description_is_given( + self, + ): variable = self._dummy_variable(question_part="dont use this") row = Row(variable_key=variable.key, description="some_description") @@ -377,13 +410,19 @@ def test_returns_cached_report_when_cache_hit(self): self.assertEqual(CachedReport.objects.all()[0].report["id"], report["id"]) def test_removes_all_reports_after_a_survey_has_been_published(self): - survey1 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()]) - survey2 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()]) + survey1 = self._dummy_survey( + sample_year=2014, publish=True, observations=[self._dummy_observation()] + ) + survey2 = self._dummy_survey( + sample_year=2014, publish=True, observations=[self._dummy_observation()] + ) get_report([survey1], 2014) get_report([survey2], 2014) - survey3 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()]) + survey3 = self._dummy_survey( + sample_year=2014, publish=True, observations=[self._dummy_observation()] + ) report = get_report([survey3], 2014) @@ -391,13 +430,17 @@ def test_removes_all_reports_after_a_survey_has_been_published(self): self.assertEqual(CachedReport.objects.all()[0].report["id"], report["id"]) def test_removes_all_reports_after_a_survey_has_been_republished(self): - survey1 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()]) - survey2 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()]) + survey1 = self._dummy_survey( + sample_year=2014, publish=True, observations=[self._dummy_observation()] + ) + survey2 = self._dummy_survey( + sample_year=2014, publish=True, observations=[self._dummy_observation()] + ) get_report([survey1], 2014) get_report([survey2], 2014) - 
survey1.observations[0].value = u"new_value"
+        survey1.observations[0].value = "new_value"
         survey1.publish()
         report = get_report([survey2], 2014)
@@ -407,13 +450,17 @@ def test_removes_all_reports_after_a_survey_has_been_republished(self):
     def test_removes_all_reports_after_a_variable_has_been_updated(self):
         variable = self._dummy_variable()
-        survey1 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()])
-        survey2 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()])
+        survey1 = self._dummy_survey(
+            sample_year=2014, publish=True, observations=[self._dummy_observation()]
+        )
+        survey2 = self._dummy_survey(
+            sample_year=2014, publish=True, observations=[self._dummy_observation()]
+        )
         get_report([survey1], 2014)
         get_report([survey2], 2014)
-        variable.description = u"something_new"
+        variable.description = "something_new"
         variable.save()
         report = get_report([survey2], 2014)
@@ -421,10 +468,18 @@ def test_removes_all_reports_after_a_variable_has_been_updated(self):
         self.assertEqual(CachedReport.objects.all()[0].report["id"], report["id"])
     def test_removes_older_reports_when_limit_reached(self):
-        survey1 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()])
-        survey2 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()])
-        survey3 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()])
-        survey4 = self._dummy_survey(sample_year=2014, publish=True, observations=[self._dummy_observation()])
+        survey1 = self._dummy_survey(
+            sample_year=2014, publish=True, observations=[self._dummy_observation()]
+        )
+        survey2 = self._dummy_survey(
+            sample_year=2014, publish=True, observations=[self._dummy_observation()]
+        )
+        survey3 = self._dummy_survey(
+            sample_year=2014, publish=True, observations=[self._dummy_observation()]
+        )
+        survey4 = self._dummy_survey(
+            sample_year=2014, publish=True, observations=[self._dummy_observation()]
+        )
         report_generation.REPORT_CACHE_LIMIT = 3
@@ -436,4 +491,4 @@
         self.assertEqual(CachedReport.objects.count(), 3)
         self.assertEqual(CachedReport.objects.all()[0].report["id"], report4["id"])
         self.assertEqual(CachedReport.objects.all()[1].report["id"], report3["id"])
-        self.assertEqual(CachedReport.objects.all()[2].report["id"], report2["id"])
\ No newline at end of file
+        self.assertEqual(CachedReport.objects.all()[2].report["id"], report2["id"])
diff --git a/libstat/tests/test_municipalities.py b/libstat/tests/test_municipalities.py
index 891738a9..8b871fa0 100644
--- a/libstat/tests/test_municipalities.py
+++ b/libstat/tests/test_municipalities.py
@@ -1,5 +1,7 @@
-# -*- coding: UTF-8 -*-
-from data.municipalities import municipality_code_from, municipality_code_from_county_code
+from data.municipalities import (
+    municipality_code_from,
+    municipality_code_from_county_code,
+)
 from libstat.tests import MongoTestCase
@@ -64,4 +66,4 @@ def test_does_not_allow_codes_with_more_than_two_digits(self):
         self.assertRaises(ValueError, municipality_code_from_county_code, 123)
     def test_does_not_allow_negative_codes(self):
-        self.assertRaises(ValueError, municipality_code_from_county_code, -12)
\ No newline at end of file
+        self.assertRaises(ValueError, municipality_code_from_county_code, -12)
diff --git a/libstat/tests/test_survey_templates.py b/libstat/tests/test_survey_templates.py
index cefbea3d..6b3c8545 100644
---
a/libstat/tests/test_survey_templates.py +++ b/libstat/tests/test_survey_templates.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from libstat.tests import MongoTestCase from libstat.models import Cell, Row, Group @@ -6,68 +5,113 @@ class TestSurveyTemplate(MongoTestCase): - def test_group_has_correct_question(self): - group = Group(rows=[Row(cells=[ - Cell(variable_key=self._dummy_variable(key="var1_key", question="sample_question_text").key)]) - ]) - - self.assertEquals(group.description, "sample_question_text") + group = Group( + rows=[ + Row( + cells=[ + Cell( + variable_key=self._dummy_variable( + key="var1_key", question="sample_question_text" + ).key + ) + ] + ) + ] + ) + + self.assertEqual(group.description, "sample_question_text") def test_group_has_correct_headers(self): - group = Group(rows=[Row(cells=[ - Cell(variable_key=self._dummy_variable(key="var1_key", category="header1").key), - Cell(variable_key=self._dummy_variable(key="var2_key", category="header2").key), - Cell(variable_key=self._dummy_variable(key="var3_key", category="header3").key), - ])]) - - self.assertEquals(group.headers, ["header1", "header2", "header3"]) + group = Group( + rows=[ + Row( + cells=[ + Cell( + variable_key=self._dummy_variable( + key="var1_key", category="header1" + ).key + ), + Cell( + variable_key=self._dummy_variable( + key="var2_key", category="header2" + ).key + ), + Cell( + variable_key=self._dummy_variable( + key="var3_key", category="header3" + ).key + ), + ] + ) + ] + ) + + self.assertEqual(group.headers, ["header1", "header2", "header3"]) def test_group_has_correct_number_of_columns(self): - group = Group(rows=[Row(cells=[ - Cell(variable_key=self._dummy_variable(key="var1_key").key), - Cell(variable_key=self._dummy_variable(key="var2_key").key), - Cell(variable_key=self._dummy_variable(key="var3_key").key), - Cell(variable_key=self._dummy_variable(key="var4_key").key), - ])]) - - self.assertEquals(group.columns, 4) + group = Group( + rows=[ + Row( + cells=[ + Cell(variable_key=self._dummy_variable(key="var1_key").key), + Cell(variable_key=self._dummy_variable(key="var2_key").key), + Cell(variable_key=self._dummy_variable(key="var3_key").key), + Cell(variable_key=self._dummy_variable(key="var4_key").key), + ] + ) + ] + ) + + self.assertEqual(group.columns, 4) def test_row_has_correct_description(self): - row = Row(cells=[ - Cell(variable_key=self._dummy_variable(key="var1_key", sub_category="row_description1").key), - Cell(variable_key=self._dummy_variable(key="var2_key").key), - Cell(variable_key=self._dummy_variable(key="var3_key").key), - ]) - - self.assertEquals(row.description, "row_description1") + row = Row( + cells=[ + Cell( + variable_key=self._dummy_variable( + key="var1_key", sub_category="row_description1" + ).key + ), + Cell(variable_key=self._dummy_variable(key="var2_key").key), + Cell(variable_key=self._dummy_variable(key="var3_key").key), + ] + ) + + self.assertEqual(row.description, "row_description1") def test_cell_has_correct_explanation(self): - cell = Cell(variable_key=self._dummy_variable(key="var1_key", description="var1_sample_description").key) - - self.assertEquals(cell.explanation, "var1_sample_description") - - def test_returns_base_template_for_2014(self): - template = survey_template(2014) - - self.assertEquals(template, _survey_template_base()) - - def test_returns_base_template_for_2015(self): - template = survey_template(2015) - - self.assertEquals(template, _survey_template_base()) + cell = Cell( + variable_key=self._dummy_variable( + key="var1_key", 
description="var1_sample_description" + ).key + ) + + self.assertEqual(cell.explanation, "var1_sample_description") + + # def test_returns_base_template_for_2014(self): + # template = survey_template(2014) + # + # self.assertEquals(template, _survey_template_base()) + # + # def test_returns_base_template_for_2015(self): + # template = survey_template(2015) + # + # self.assertEquals(template, _survey_template_base()) def test_returns_default_template_for_2013(self): - survey = self._dummy_survey(observations=[ - self._dummy_observation(), - self._dummy_observation(), - self._dummy_observation(), - ]) + survey = self._dummy_survey( + observations=[ + self._dummy_observation(), + self._dummy_observation(), + self._dummy_observation(), + ] + ) template = survey_template(2013, survey) - self.assertEquals(len(template.cells), 3) + self.assertEqual(len(template.cells), 3) def test_returns_empty_template_for_2013_without_survey(self): template = survey_template(2013) - self.assertEquals(len(template.cells), 0) + self.assertEqual(len(template.cells), 0) diff --git a/libstat/tests/test_utils.py b/libstat/tests/test_utils.py index a124228f..dacb441f 100644 --- a/libstat/tests/test_utils.py +++ b/libstat/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from datetime import datetime from libstat.tests import MongoTestCase @@ -6,16 +5,33 @@ class UtilsTest(MongoTestCase): - def test_should_parse_datetime_from_isodate_str(self): - self.assertEquals(parse_datetime_from_isodate_str("2014-06-03T15:47:22.873+02:00"), None) - self.assertEquals(parse_datetime_from_isodate_str("2014-06-03T15:47:22.873Z"), None) - self.assertEquals(parse_datetime_from_isodate_str("2014-06-03T15:47:22.873"), - datetime(2014, 06, 03, 15, 47, 22, 873000)) - self.assertEquals(parse_datetime_from_isodate_str("2014-06-03T15:47:22"), datetime(2014, 06, 03, 15, 47, 22)) - self.assertEquals(parse_datetime_from_isodate_str("2014-06-03T15:47"), datetime(2014, 06, 03, 15, 47)) - self.assertEquals(parse_datetime_from_isodate_str("2014-06-03T15"), datetime(2014, 06, 03, 15)) - self.assertEquals(parse_datetime_from_isodate_str("2014-06-03"), datetime(2014, 06, 03)) - self.assertEquals(parse_datetime_from_isodate_str("2014-06"), datetime(2014, 06, 01)) - self.assertEquals(parse_datetime_from_isodate_str("2014"), datetime(2014, 01, 01)) - self.assertEquals(parse_datetime_from_isodate_str("jun 3 2014"), None) + self.assertEqual( + parse_datetime_from_isodate_str("2014-06-03T15:47:22.873+02:00"), None + ) + self.assertEqual( + parse_datetime_from_isodate_str("2014-06-03T15:47:22.873Z"), None + ) + self.assertEqual( + parse_datetime_from_isodate_str("2014-06-03T15:47:22.873"), + datetime(2014, 6, 3, 15, 47, 22, 873000), + ) + self.assertEqual( + parse_datetime_from_isodate_str("2014-06-03T15:47:22"), + datetime(2014, 6, 3, 15, 47, 22), + ) + self.assertEqual( + parse_datetime_from_isodate_str("2014-06-03T15:47"), + datetime(2014, 6, 3, 15, 47), + ) + self.assertEqual( + parse_datetime_from_isodate_str("2014-06-03T15"), datetime(2014, 6, 3, 15) + ) + self.assertEqual( + parse_datetime_from_isodate_str("2014-06-03"), datetime(2014, 6, 3) + ) + self.assertEqual( + parse_datetime_from_isodate_str("2014-06"), datetime(2014, 6, 1) + ) + self.assertEqual(parse_datetime_from_isodate_str("2014"), datetime(2014, 1, 1)) + self.assertEqual(parse_datetime_from_isodate_str("jun 3 2014"), None) diff --git a/libstat/tests/views/test_articles.py b/libstat/tests/views/test_articles.py index 5b9c00c6..cd48e874 100644 --- 
a/libstat/tests/views/test_articles.py +++ b/libstat/tests/views/test_articles.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from libstat.models import Article from libstat.tests import MongoTestCase @@ -15,7 +14,9 @@ def test_can_view_articles(self): self.assertEqual(response.status_code, 200) self.assertEqual(response.context["articles"][1].title, article.title) self.assertEqual(response.context["articles"][1].content, article.content) - self.assertEqual(response.context["articles"][1].date_published, article.date_published) + self.assertEqual( + response.context["articles"][1].date_published, article.date_published + ) def test_can_not_edit_articles_when_not_logged_in(self): article = self._dummy_article(title="some_title", content="some_content") @@ -36,22 +37,28 @@ def test_updates_article(self): self._login() article = self._dummy_article(title="some_title", content="some_content") - self._post("article", kwargs={"article_id": str(article.pk)}, data={"title": "new_title", - "content": "new_content"}) + self._post( + "article", + kwargs={"article_id": str(article.pk)}, + data={"title": "new_title", "content": "new_content"}, + ) article.reload() - self.assertEquals(article.title, "new_title") - self.assertEquals(article.content, "new_content") + self.assertEqual(article.title, "new_title") + self.assertEqual(article.content, "new_content") def test_can_not_update_article_if_not_logged_in(self): article = self._dummy_article(title="some_title", content="some_content") - self._post("article", kwargs={"article_id": str(article.pk)}, data={"title": "new_title", - "content": "new_content"}) + self._post( + "article", + kwargs={"article_id": str(article.pk)}, + data={"title": "new_title", "content": "new_content"}, + ) article.reload() - self.assertEquals(article.title, "some_title") - self.assertEquals(article.content, "some_content") + self.assertEqual(article.title, "some_title") + self.assertEqual(article.content, "some_content") def test_deletes_article(self): self._login() @@ -59,11 +66,11 @@ def test_deletes_article(self): self._post("articles_delete", kwargs={"article_id": str(article.pk)}) - self.assertEquals(Article.objects.count(), 0) + self.assertEqual(Article.objects.count(), 0) def test_can_not_delete_article_if_not_logged_in(self): article = self._dummy_article(title="some_title", content="some_content") self._post("articles_delete", kwargs={"article_id": str(article.pk)}) - self.assertEquals(Article.objects.count(), 1) \ No newline at end of file + self.assertEqual(Article.objects.count(), 1) diff --git a/libstat/tests/views/test_dispatches.py b/libstat/tests/views/test_dispatches.py index 6673ea08..bafe3713 100644 --- a/libstat/tests/views/test_dispatches.py +++ b/libstat/tests/views/test_dispatches.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from libstat.models import Dispatch from libstat.tests import MongoTestCase @@ -29,41 +28,57 @@ def test_can_create_dispatches(self): self._dummy_survey(library=self._dummy_library("lib2")) survey3 = self._dummy_survey(library=self._dummy_library("lib3")) - self._post("dispatches", data={ - "title": "some_title", - "message": "some_message", - "description": "some_description", - "survey-response-ids": [survey1.pk, survey3.pk] - }) - - self.assertEquals(Dispatch.objects.count(), 2) - self.assertEquals(Dispatch.objects.get(library_name="lib3").title, "some_title") - self.assertEquals(Dispatch.objects.get(library_name="lib3").message, "some_message") - self.assertEquals(Dispatch.objects.get(library_name="lib3").description, 
"some_description") + self._post( + "dispatches", + data={ + "title": "some_title", + "message": "some_message", + "description": "some_description", + "survey-response-ids": [survey1.pk, survey3.pk], + }, + ) + + self.assertEqual(Dispatch.objects.count(), 2) + self.assertEqual(Dispatch.objects.get(library_name="lib3").title, "some_title") + self.assertEqual( + Dispatch.objects.get(library_name="lib3").message, "some_message" + ) + self.assertEqual( + Dispatch.objects.get(library_name="lib3").description, "some_description" + ) def test_can_delete_dispatches(self): dispatch1 = self._dummy_dispatch() self._dummy_dispatch() dispatch3 = self._dummy_dispatch() - self._post("dispatches_delete", data={ - "dispatch-ids": [dispatch1.pk, dispatch3.pk] - }) + self._post( + "dispatches_delete", data={"dispatch-ids": [dispatch1.pk, dispatch3.pk]} + ) - self.assertEquals(Dispatch.objects.count(), 1) + self.assertEqual(Dispatch.objects.count(), 1) def test_replaces_key_words_with_survey_fields(self): - survey = self._dummy_survey(password="some_password", library=self._dummy_library(name="some_name", - city="some_city")) - - self._post("dispatches", data={ - "title": "abc {bibliotek} cde {ort} fgh {lösenord}", - "message": "ijk {lösenord} lmn{bibliotek}opq {ort}", - "description": "some_description", - "survey-response-ids": [survey.pk] - }) + survey = self._dummy_survey( + password="some_password", + library=self._dummy_library(name="some_name", city="some_city"), + ) + + self._post( + "dispatches", + data={ + "title": "abc {bibliotek} cde {ort} fgh {lösenord}", + "message": "ijk {lösenord} lmn{bibliotek}opq {ort}", + "description": "some_description", + "survey-response-ids": [survey.pk], + }, + ) dispatch = Dispatch.objects.all()[0] - self.assertEquals(dispatch.title, "abc some_name cde some_city fgh some_password") - self.assertEquals(dispatch.message, "ijk some_password lmnsome_nameopq some_city") \ No newline at end of file + self.assertEqual( + dispatch.title, "abc some_name cde some_city fgh some_password" + ) + self.assertEqual( + dispatch.message, "ijk some_password lmnsome_nameopq some_city" + ) diff --git a/libstat/tests/views/test_reports.py b/libstat/tests/views/test_reports.py index 143974ad..b1aab914 100644 --- a/libstat/tests/views/test_reports.py +++ b/libstat/tests/views/test_reports.py @@ -1,6 +1,6 @@ -# -*- coding: UTF-8 -*- from libstat.tests import MongoTestCase + class ReportSelectionTest(MongoTestCase): def test_can_view_report_selection_if_not_logged_in(self): response = self._get("reports") @@ -14,12 +14,11 @@ def test_can_view_report_selection_if_logged_in(self): self.assertEqual(response.status_code, 200) def test_can_filter_reports_by_sample_year(self): - self._dummy_survey(sample_year=2014, observations=[self._dummy_observation()]).publish() + self._dummy_survey( + sample_year=2014, observations=[self._dummy_observation()] + ).publish() - response = self._get("reports", params={ - "sample_year": "2014", - "submit": "1" - }) + response = self._get("reports", params={"sample_year": "2014", "submit": "1"}) self.assertEqual(response.status_code, 200) - self.assertEqual(len(response.context["surveys"]), 1) \ No newline at end of file + self.assertEqual(len(response.context["surveys"]), 1) diff --git a/libstat/tests/views/test_surveys.py b/libstat/tests/views/test_surveys.py index 5c4668f7..a68f13ef 100644 --- a/libstat/tests/views/test_surveys.py +++ b/libstat/tests/views/test_surveys.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from libstat.services.excel_export import 
surveys_to_excel_workbook from libstat.tests import MongoTestCase @@ -15,18 +14,21 @@ def test_can_view_survey_if_logged_in(self): response = self._get("survey", kwargs={"survey_id": survey.pk}) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) self.assertTrue("form" in response.context) def test_can_view_survey_if_not_logged_in_and_have_correct_password(self): survey = self._dummy_survey(password="dummy_password") - response = self._post(action="survey", kwargs={"survey_id": survey.pk}, - data={"password": survey.password}) + response = self._post( + action="survey", + kwargs={"survey_id": survey.pk}, + data={"password": survey.password}, + ) - self.assertEquals(response.status_code, 302) + self.assertEqual(response.status_code, 302) response = self._get(action="survey", kwargs={"survey_id": survey.pk}) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) self.assertTrue("form" in response.context) def test_can_not_view_survey_if_not_logged_in_and_have_incorrect_password(self): @@ -34,7 +36,7 @@ def test_can_not_view_survey_if_not_logged_in_and_have_incorrect_password(self): response = self._get("survey", kwargs={"survey_id": survey.pk}) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) self.assertFalse("form" in response.context) def test_can_enter_password_if_not_logged_in(self): @@ -42,48 +44,60 @@ def test_can_enter_password_if_not_logged_in(self): response = self._get("survey", kwargs={"survey_id": survey.pk}) - self.assertContains(response, - u"", - count=1, - status_code=200, - html=True) + self.assertContains( + response, + "", + count=1, + status_code=200, + html=True, + ) def test_user_can_still_view_survey_after_leaving_the_page(self): survey = self._dummy_survey(password="dummy_password") - response = self._post(action="survey", kwargs={"survey_id": survey.pk}, - data={"password": survey.password}) + response = self._post( + action="survey", + kwargs={"survey_id": survey.pk}, + data={"password": survey.password}, + ) - self.assertEquals(response.status_code, 302) + self.assertEqual(response.status_code, 302) response = self._get(action="index") - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) response = self._get(action="survey", kwargs={"survey_id": survey.pk}) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) self.assertTrue("form" in response.context) def test_should_not_show_navbar_if_not_logged_in(self): survey = self._dummy_survey(password="dummy_password") - response = self._post(action="survey", kwargs={"survey_id": survey.pk}, - data={"password": survey.password}) - self.assertEquals(response.status_code, 302) + response = self._post( + action="survey", + kwargs={"survey_id": survey.pk}, + data={"password": survey.password}, + ) + self.assertEqual(response.status_code, 302) response = self._get(action="survey", kwargs={"survey_id": survey.pk}) - self.assertEquals(response.status_code, 200) + self.assertEqual(response.status_code, 200) self.assertTrue("hide_navbar" in response.context) - self.assertNotContains(response, - u'