diff --git a/.travis.yml b/.travis.yml
index 03297afa..5f109b13 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,22 +1,44 @@
-language: python
-
-# the new trusty images of Travis cause build errors with psycopg2, see https://github.com/travis-ci/travis-ci/issues/8897
dist: trusty
-group: deprecated-2017Q4
-python:
- - "2.7"
-env:
- # - CKANVERSION=master
- - CKANVERSION=2.8
- - CKANVERSION=2.7
+os: linux
+language: python
+
install:
- bash bin/travis-build.bash
services:
- - postgresql
- redis
-addons:
- postgresql: "9.3"
-script: sh bin/travis-run.sh
-after_success: coveralls
-sudo: required
+ - postgresql
+
+script: bash bin/travis-run.bash
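+# Coverage reports are uploaded to Codecov, replacing the previous Coveralls upload.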
+before_install:
+ - pip install codecov
+after_success:
+ - codecov
+
+jobs:
+ include:
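+    # Flake8 linting runs as its own stage; the test stage covers CKAN master (Python 2.7 and 3.6) plus CKAN 2.8 and 2.7.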
+ - stage: Flake8
+ python: 2.7
+ env: FLAKE8=True
+ install:
+ - pip install flake8==3.5.0
+ - pip install pycodestyle==2.3.0
+ script:
+ - flake8 --version
+ # stop the build if there are Python syntax errors or undefined names
+ - flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics --exclude ckan
+ # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+ # - flake8 . --count --max-line-length=127 --statistics --exclude ckan --exit-zero
+ - stage: Tests
+ python: "2.7"
+ env: CKANVERSION=master
+ - python: "3.6"
+ env: CKANVERSION=master
+ - python: "2.7"
+ env: CKANVERSION=2.8
+ - python: "2.7"
+ env: CKANVERSION=2.7
+
+cache:
+ directories:
+ - $HOME/.cache/pip
diff --git a/bin/travis-run.sh b/bin/travis-run.bash
similarity index 58%
rename from bin/travis-run.sh
rename to bin/travis-run.bash
index 880eb676..7a4e5aca 100644
--- a/bin/travis-run.sh
+++ b/bin/travis-run.bash
@@ -5,14 +5,7 @@ flake8 --version
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics --exclude ckan,ckanext-xloader
-nosetests --ckan \
- --nologcapture \
- --with-pylons=subdir/test.ini \
- --with-coverage \
- --cover-package=ckanext.xloader \
- --cover-inclusive \
- --cover-erase \
- --cover-tests
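+# pytest (with pytest-cov providing --cov) replaces the nosetests run; --ckan-ini points at the same test config previously passed via --with-pylons.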
+pytest --ckan-ini subdir/test.ini --cov=ckanext.xloader ckanext/xloader/tests
# strict linting
flake8 . --count --max-complexity=27 --max-line-length=127 --statistics --exclude ckan,ckanext-xloader
diff --git a/ckanext/xloader/action.py b/ckanext/xloader/action.py
index 62cf20a5..f5f6609d 100644
--- a/ckanext/xloader/action.py
+++ b/ckanext/xloader/action.py
@@ -1,20 +1,23 @@
# encoding: utf-8
+from __future__ import absolute_import
+from builtins import str
import logging
import json
import datetime
from dateutil.parser import parse as parse_date
+import ckan.model as model
import ckan.lib.navl.dictization_functions
import ckan.logic as logic
import ckan.plugins as p
from ckan.logic import side_effect_free
import ckanext.xloader.schema
-import interfaces as xloader_interfaces
-import jobs
-import db
+from . import interfaces as xloader_interfaces
+from . import jobs
+from . import db
try:
enqueue_job = p.toolkit.enqueue_job
except AttributeError:
@@ -108,7 +111,7 @@ def xloader_submit(context, data_dict):
if existing_task.get('state') == 'pending':
import re # here because it takes a moment to load
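+            # match both the Python 2 u'...' and Python 3 '...' reprs of resource_id in the job description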
queued_res_ids = [
- re.search(r"'resource_id': u'([^']+)'",
+ re.search(r"'resource_id': u?'([^']+)'",
job.description).groups()[0]
for job in get_queue().get_jobs()
if 'xloader_to_datastore' in str(job) # filter out test_job etc
@@ -143,6 +146,10 @@ def xloader_submit(context, data_dict):
context['ignore_auth'] = True
context['user'] = '' # benign - needed for ckan 2.5
+
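+    # Swap in a fresh local session for the task_status_update call; the original session is restored below before returning.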
+ model = context['model']
+ original_session = model.Session
+ model.Session = model.meta.create_local_session()
p.toolkit.get_action('task_status_update')(context, task)
data = {
@@ -168,6 +175,7 @@ def xloader_submit(context, data_dict):
job = _enqueue(jobs.xloader_data_into_datastore, [data], timeout=timeout)
except Exception:
log.exception('Unable to enqueued xloader res_id=%s', res_id)
+ model.Session = original_session
return False
log.debug('Enqueued xloader job=%s res_id=%s', job.id, res_id)
@@ -177,6 +185,7 @@ def xloader_submit(context, data_dict):
task['state'] = 'pending'
task['last_updated'] = str(datetime.datetime.utcnow()),
p.toolkit.get_action('task_status_update')(context, task)
+ model.Session = original_session
return True
diff --git a/ckanext/xloader/cli.py b/ckanext/xloader/cli.py
index 54fb43a0..b5a347b1 100644
--- a/ckanext/xloader/cli.py
+++ b/ckanext/xloader/cli.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
import sys
import logging
@@ -62,7 +63,7 @@ def __init__(self, name):
def command(self):
if not self.args:
- print self.usage
+ print(self.usage)
sys.exit(1)
if self.args[0] == 'submit':
if len(self.args) < 2:
@@ -115,7 +116,7 @@ def _confirm_or_abort(self):
)
answer = cli.query_yes_no(question, default=None)
if not answer == 'yes':
- print "Aborting..."
+ print("Aborting...")
sys.exit(0)
def _submit_all_existing(self):
@@ -281,7 +282,7 @@ def command(self):
def _migrate_all(self):
session = model.Session
resource_count = session.query(model.Resource).filter_by(state='active').count()
- print "Updating {} resource(s)".format(resource_count)
+ print("Updating {} resource(s)".format(resource_count))
resources_done = 0
for resource in session.query(model.Resource).filter_by(state='active'):
resources_done += 1
@@ -289,15 +290,15 @@ def _migrate_all(self):
prefix='[{}/{}]: '.format(resources_done,
resource_count))
if resources_done % 100 == 0:
- print "[{}/{}] done".format(resources_done, resource_count)
- print "[{}/{}] done".format(resources_done, resource_count)
+ print("[{}/{}] done".format(resources_done, resource_count))
+ print("[{}/{}] done".format(resources_done, resource_count))
def _migrate_resource(self, resource_id, prefix=''):
data_dict = h.datastore_dictionary(resource_id)
def print_status(status):
if self.options.verbose:
- print "{}{}: {}".format(prefix, resource_id, status)
+ print("{}{}: {}".format(prefix, resource_id, status))
if not data_dict:
print_status("not found")
@@ -333,9 +334,9 @@ def print_status(status):
'fields': fields
})
print_status("updated")
- except Exception, e:
+ except Exception as e:
self.error_occured = True
- print "{}: failed, {}".format(resource_id, e)
+ print("{}: failed, {}".format(resource_id, e))
def _handle_command_status(self):
if self.error_occured:
diff --git a/ckanext/xloader/controllers.py b/ckanext/xloader/controllers.py
index e7395c38..a1ab3c3f 100644
--- a/ckanext/xloader/controllers.py
+++ b/ckanext/xloader/controllers.py
@@ -1,50 +1,7 @@
import ckan.plugins as p
-
-_ = p.toolkit._
+import ckanext.xloader.utils as utils
class ResourceDataController(p.toolkit.BaseController):
-
def resource_data(self, id, resource_id):
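+        # Delegate to ckanext.xloader.utils.resource_data, which now holds the page logic.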
-
- if p.toolkit.request.method == 'POST':
- try:
- p.toolkit.c.pkg_dict = \
- p.toolkit.get_action('xloader_submit')(
- None, {'resource_id': resource_id}
- )
- except p.toolkit.ValidationError:
- pass
-
- p.toolkit.redirect_to(
- controller='ckanext.xloader.controllers:ResourceDataController',
- action='resource_data',
- id=id,
- resource_id=resource_id
- )
-
- try:
- p.toolkit.c.pkg_dict = p.toolkit.get_action('package_show')(
- None, {'id': id}
- )
- p.toolkit.c.resource = p.toolkit.get_action('resource_show')(
- None, {'id': resource_id}
- )
- except (p.toolkit.ObjectNotFound, p.toolkit.NotAuthorized):
- p.toolkit.abort(404, _('Resource not found'))
-
- try:
- xloader_status = p.toolkit.get_action('xloader_status')(
- None, {'resource_id': resource_id}
- )
- except p.toolkit.ObjectNotFound:
- xloader_status = {}
- except p.toolkit.NotAuthorized:
- p.toolkit.abort(403, _('Not authorized to see this page'))
-
- return p.toolkit.render('xloader/resource_data.html',
- extra_vars={
- 'status': xloader_status,
- 'resource': p.toolkit.c.resource,
- 'pkg_dict': p.toolkit.c.pkg_dict,
- })
+ return utils.resource_data(id, resource_id)
diff --git a/ckanext/xloader/db.py b/ckanext/xloader/db.py
index 8d7742f5..6b033b0b 100644
--- a/ckanext/xloader/db.py
+++ b/ckanext/xloader/db.py
@@ -8,6 +8,7 @@
import datetime
import json
+import six
import sqlalchemy
@@ -108,7 +109,7 @@ def get_job(job_id):
# Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
# warnings.
if job_id:
- job_id = unicode(job_id)
+ job_id = six.text_type(job_id)
result = ENGINE.execute(
JOBS_TABLE.select().where(JOBS_TABLE.c.job_id == job_id)).first()
@@ -118,7 +119,7 @@ def get_job(job_id):
# Turn the result into a dictionary representation of the job.
result_dict = {}
- for field in result.keys():
+ for field in list(result.keys()):
value = getattr(result, field)
if value is None:
result_dict[field] = value
@@ -127,7 +128,7 @@ def get_job(job_id):
elif isinstance(value, datetime.datetime):
result_dict[field] = value.isoformat()
else:
- result_dict[field] = unicode(value)
+ result_dict[field] = six.text_type(value)
result_dict['metadata'] = _get_metadata(job_id)
result_dict['logs'] = _get_logs(job_id)
@@ -178,14 +179,14 @@ def add_pending_job(job_id, job_type, api_key,
# Turn strings into unicode to stop SQLAlchemy
# "Unicode type received non-unicode bind param value" warnings.
if job_id:
- job_id = unicode(job_id)
+ job_id = six.text_type(job_id)
if job_type:
- job_type = unicode(job_type)
+ job_type = six.text_type(job_type)
if result_url:
- result_url = unicode(result_url)
+ result_url = six.text_type(result_url)
if api_key:
- api_key = unicode(api_key)
- data = unicode(data)
+ api_key = six.text_type(api_key)
+ data = six.text_type(data)
if not metadata:
metadata = {}
@@ -205,16 +206,16 @@ def add_pending_job(job_id, job_type, api_key,
# Insert any (key, value) metadata pairs that the job has into the
# metadata table.
inserts = []
- for key, value in metadata.items():
+ for key, value in list(metadata.items()):
type_ = 'string'
- if not isinstance(value, basestring):
+ if not isinstance(value, six.string_types):
value = json.dumps(value)
type_ = 'json'
# Turn strings into unicode to stop SQLAlchemy
# "Unicode type received non-unicode bind param value" warnings.
- key = unicode(key)
- value = unicode(value)
+ key = six.text_type(key)
+ value = six.text_type(value)
inserts.append(
{"job_id": job_id,
@@ -261,12 +262,12 @@ def _validate_error(error):
"""
if error is None:
return None
- elif isinstance(error, basestring):
+ elif isinstance(error, six.string_types):
return {"message": error}
else:
try:
message = error["message"]
- if isinstance(message, basestring):
+ if isinstance(message, six.string_types):
return error
else:
raise InvalidErrorObjectError(
@@ -291,19 +292,19 @@ def _update_job(job_id, job_dict):
# Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
# warnings.
if job_id:
- job_id = unicode(job_id)
+ job_id = six.text_type(job_id)
if "error" in job_dict:
job_dict["error"] = _validate_error(job_dict["error"])
job_dict["error"] = json.dumps(job_dict["error"])
# Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
# warnings.
- job_dict["error"] = unicode(job_dict["error"])
+ job_dict["error"] = six.text_type(job_dict["error"])
# Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
# warnings.
if "data" in job_dict:
- job_dict["data"] = unicode(job_dict["data"])
+ job_dict["data"] = six.text_type(job_dict["data"])
ENGINE.execute(
JOBS_TABLE.update()
@@ -448,7 +449,7 @@ def _get_metadata(job_id):
"""Return any metadata for the given job_id from the metadata table."""
# Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
# warnings.
- job_id = unicode(job_id)
+ job_id = six.text_type(job_id)
results = ENGINE.execute(
METADATA_TABLE.select().where(
@@ -466,7 +467,7 @@ def _get_logs(job_id):
"""Return any logs for the given job_id from the logs table."""
# Avoid SQLAlchemy "Unicode type received non-unicode bind param value"
# warnings.
- job_id = unicode(job_id)
+ job_id = six.text_type(job_id)
results = ENGINE.execute(
LOGS_TABLE.select().where(LOGS_TABLE.c.job_id == job_id)).fetchall()
diff --git a/ckanext/xloader/jobs.py b/ckanext/xloader/jobs.py
index e380d361..e7d30ac1 100644
--- a/ckanext/xloader/jobs.py
+++ b/ckanext/xloader/jobs.py
@@ -1,28 +1,32 @@
+from __future__ import division
+from __future__ import absolute_import
import math
import logging
import hashlib
import time
import tempfile
import json
-import urlparse
import datetime
import traceback
import sys
+import six
+from six.moves.urllib.parse import urlsplit
import requests
from rq import get_current_job
import sqlalchemy as sa
-from ckan.plugins.toolkit import get_action, asbool, ObjectNotFound
+import ckan.model as model
+from ckan.plugins.toolkit import get_action, asbool, ObjectNotFound, c
try:
from ckan.plugins.toolkit import config
except ImportError:
from pylons import config
import ckan.lib.search as search
-import loader
-import db
-from job_exceptions import JobError, HTTPError, DataTooBigError, FileCouldNotBeLoadedError
+from . import loader
+from . import db
+from .job_exceptions import JobError, HTTPError, DataTooBigError, FileCouldNotBeLoadedError
if config.get('ckanext.xloader.ssl_verify') in ['False', 'FALSE', '0', False, 0]:
SSL_VERIFY = False
@@ -84,7 +88,7 @@ def xloader_data_into_datastore(input):
errored = True
except Exception as e:
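+        # sys.exc_traceback was removed in Python 3; sys.exc_info()[2] returns the traceback on both versions.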
db.mark_job_as_errored(
- job_id, traceback.format_tb(sys.exc_traceback)[-1] + repr(e))
+ job_id, traceback.format_tb(sys.exc_info()[2])[-1] + repr(e))
job_dict['status'] = 'error'
job_dict['error'] = str(e)
log = logging.getLogger(__name__)
@@ -135,13 +139,12 @@ def xloader_data_into_datastore_(input, job_dict):
ckan_url = data['ckan_url']
resource_id = data['resource_id']
api_key = input.get('api_key')
-
try:
- resource, dataset = get_resource_and_dataset(resource_id)
+ resource, dataset = get_resource_and_dataset(resource_id, api_key)
except (JobError, ObjectNotFound) as e:
# try again in 5 seconds just in case CKAN is slow at adding resource
time.sleep(5)
- resource, dataset = get_resource_and_dataset(resource_id)
+ resource, dataset = get_resource_and_dataset(resource_id, api_key)
resource_ckan_url = '/dataset/{}/resource/{}' \
.format(dataset['name'], resource['id'])
logger.info('Express Load starting: {}'.format(resource_ckan_url))
@@ -244,7 +247,7 @@ def _download_resource_data(resource, data, api_key, logger):
'''
# check scheme
url = resource.get('url')
- scheme = urlparse.urlsplit(url).scheme
+ scheme = urlsplit(url).scheme
if scheme not in ('http', 'https', 'ftp'):
raise JobError(
'Only http, https, and ftp resources may be fetched.'
@@ -482,12 +485,18 @@ def update_resource(resource, patch_only=False):
get_action(action)(context, resource)
-def get_resource_and_dataset(resource_id):
+def get_resource_and_dataset(resource_id, api_key):
"""
Gets available information about the resource and its dataset from CKAN
"""
- res_dict = get_action('resource_show')(None, {'id': resource_id})
- pkg_dict = get_action('package_show')(None, {'id': res_dict['package_id']})
+ user = model.Session.query(model.User).filter_by(
+ apikey=api_key).first()
+ if user is not None:
+ context = {'user': user.name}
+ else:
+ context = None
+ res_dict = get_action('resource_show')(context, {'id': resource_id})
+ pkg_dict = get_action('package_show')(context, {'id': res_dict['package_id']})
return res_dict, pkg_dict
@@ -495,7 +504,7 @@ def get_url(action, ckan_url):
"""
Get url for ckan action
"""
- if not urlparse.urlsplit(ckan_url).scheme:
+ if not urlsplit(ckan_url).scheme:
ckan_url = 'http://' + ckan_url.lstrip('/')
ckan_url = ckan_url.rstrip('/')
return '{ckan_url}/api/3/action/{action}'.format(
@@ -552,10 +561,10 @@ def emit(self, record):
try:
# Turn strings into unicode to stop SQLAlchemy
# "Unicode type received non-unicode bind param value" warnings.
- message = unicode(record.getMessage())
- level = unicode(record.levelname)
- module = unicode(record.module)
- funcName = unicode(record.funcName)
+ message = six.text_type(record.getMessage())
+ level = six.text_type(record.levelname)
+ module = six.text_type(record.module)
+ funcName = six.text_type(record.funcName)
conn.execute(db.LOGS_TABLE.insert().values(
job_id=self.task_id,
@@ -584,5 +593,5 @@ def printable_file_size(size_bytes):
size_name = ('bytes', 'KB', 'MB', 'GB', 'TB')
i = int(math.floor(math.log(size_bytes, 1024)))
p = math.pow(1024, i)
- s = round(size_bytes / p, 1)
+ s = round(float(size_bytes) / p, 1)
return "%s %s" % (s, size_name[i])
diff --git a/ckanext/xloader/loader.py b/ckanext/xloader/loader.py
index 8cbd304e..e70730a6 100644
--- a/ckanext/xloader/loader.py
+++ b/ckanext/xloader/loader.py
@@ -1,16 +1,20 @@
'Load a CSV into postgres'
+from __future__ import absolute_import
+from builtins import zip
+from builtins import str
import os
import os.path
import tempfile
import itertools
import csv
+import six
import psycopg2
import messytables
from unidecode import unidecode
import ckan.plugins as p
-from job_exceptions import LoaderError, FileCouldNotBeLoadedError
+from .job_exceptions import LoaderError, FileCouldNotBeLoadedError
import ckan.plugins.toolkit as tk
try:
from ckan.plugins.toolkit import config
@@ -61,17 +65,19 @@ def load_csv(csv_filepath, resource_id, mimetype='text/csv', logger=None):
if not table_set.tables:
raise LoaderError('Could not detect tabular data in this file')
row_set = table_set.tables.pop()
- header_offset, headers = messytables.headers_guess(row_set.sample)
-
+ try:
+ header_offset, headers = messytables.headers_guess(row_set.sample)
+ except messytables.ReadError as e:
+ raise LoaderError('Messytables error: {}'.format(e))
# Some headers might have been converted from strings to floats and such.
headers = encode_headers(headers)
# Guess the delimiter used in the file
- with open(csv_filepath, 'r') as f:
+ with open(csv_filepath, 'rb') as f:
header_line = f.readline()
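+        # header_line is bytes because the file is opened in 'rb' mode; six.ensure_text decodes it for csv.Sniffer below.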
try:
sniffer = csv.Sniffer()
- delimiter = sniffer.sniff(header_line).delimiter
+ delimiter = sniffer.sniff(six.ensure_text(header_line)).delimiter
except csv.Error:
logger.warning('Could not determine delimiter from file, use default ","')
delimiter = ','
@@ -351,7 +357,7 @@ def row_iterator():
h['info'] = existing_info[h['id']]
# create columns with types user requested
type_override = existing_info[h['id']].get('type_override')
- if type_override in _TYPE_MAPPING.values():
+ if type_override in list(_TYPE_MAPPING.values()):
h['type'] = type_override
logger.info('Determined headers and types: {headers}'.format(
diff --git a/ckanext/xloader/plugin.py b/ckanext/xloader/plugin.py
index a595eba0..7efa8a17 100644
--- a/ckanext/xloader/plugin.py
+++ b/ckanext/xloader/plugin.py
@@ -43,10 +43,25 @@ class xloaderPlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IResourceUrlChange)
plugins.implements(plugins.IActions)
plugins.implements(plugins.IAuthFunctions)
- plugins.implements(plugins.IRoutes, inherit=True)
plugins.implements(plugins.ITemplateHelpers)
plugins.implements(plugins.IResourceController, inherit=True)
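+    # CKAN 2.9 and later serve pages through Flask blueprints; older releases still use the Pylons IRoutes mapping.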
+ if toolkit.check_ckan_version('2.9'):
+ plugins.implements(plugins.IBlueprint)
+        # IBlueprint
+        def get_blueprint(self):
+            from ckanext.xloader.views import get_blueprints
+            return get_blueprints()
+    else:
+        plugins.implements(plugins.IRoutes, inherit=True)
+        # IRoutes
+        def before_map(self, m):
+            m.connect(
+                'xloader.resource_data', '/dataset/{id}/resource_data/{resource_id}',
+                controller='ckanext.xloader.controllers:ResourceDataController',
+                action='resource_data', ckan_icon='cloud-upload')
+            return m
+
# IResourceController
def before_show(self, resource_dict):
@@ -112,13 +127,13 @@ def notify(self, entity, operation=None):
'would be circular.'.format(r=entity))
return
- # try:
- # task = p.toolkit.get_action('task_status_show')(
- # context, {
- # 'entity_id': entity.id,
- # 'task_type': 'datapusher',
- # 'key': 'datapusher'}
- # )
+ try:
+ task = p.toolkit.get_action('task_status_show')(
+ context, {
+ 'entity_id': entity.id,
+ 'task_type': 'xloader',
+ 'key': 'xloader'}
+ )
# if task.get('state') == 'pending':
# # There already is a pending DataPusher submission,
# # skip this one ...
@@ -126,8 +141,8 @@ def notify(self, entity, operation=None):
# 'Skipping DataPusher submission for '
# 'resource {0}'.format(entity.id))
# return
- # except p.toolkit.ObjectNotFound:
- # pass
+ except p.toolkit.ObjectNotFound:
+ pass
try:
log.debug('Submitting resource {0} to be xloadered'
@@ -159,15 +174,6 @@ def get_auth_functions(self):
'xloader_status': auth.xloader_status,
}
- # IRoutes
-
- def before_map(self, m):
- m.connect(
- 'resource_data_xloader', '/dataset/{id}/resource_data/{resource_id}',
- controller='ckanext.xloader.controllers:ResourceDataController',
- action='resource_data', ckan_icon='cloud-upload')
- return m
-
# ITemplateHelpers
def get_helpers(self):
diff --git a/ckanext/xloader/schema.py b/ckanext/xloader/schema.py
index ad1029b1..773fae5a 100644
--- a/ckanext/xloader/schema.py
+++ b/ckanext/xloader/schema.py
@@ -16,7 +16,7 @@
def xloader_submit_schema():
schema = {
- 'resource_id': [not_missing, not_empty, unicode],
+ 'resource_id': [not_missing, not_empty, str],
'id': [ignore_missing],
'set_url_type': [ignore_missing, boolean_validator],
'ignore_hash': [ignore_missing, boolean_validator],
diff --git a/ckanext/xloader/templates-bs2/package/resource_edit_base.html b/ckanext/xloader/templates-bs2/package/resource_edit_base.html
index 73b6f776..34403521 100644
--- a/ckanext/xloader/templates-bs2/package/resource_edit_base.html
+++ b/ckanext/xloader/templates-bs2/package/resource_edit_base.html
@@ -1,6 +1,6 @@
{% ckan_extends %}
{% block inner_primary_nav %}
- {{ super() }}
- {{ h.build_nav_icon('resource_data_xloader', _('DataStore'), id=pkg.name, resource_id=res.id) }}
+ {{ super() }}
+ {{ h.build_nav_icon('xloader.resource_data', _('DataStore'), id=pkg.name, resource_id=res.id) }}
{% endblock %}
diff --git a/ckanext/xloader/templates-bs2/xloader/resource_data.html b/ckanext/xloader/templates-bs2/xloader/resource_data.html
index 698ea85e..ace37859 100644
--- a/ckanext/xloader/templates-bs2/xloader/resource_data.html
+++ b/ckanext/xloader/templates-bs2/xloader/resource_data.html
@@ -25,7 +25,7 @@
{{ _('Error:') }} {{ status.task_info.error }}
{% elif status.task_info.error is mapping %}
{{ _('Error:') }} {{ status.task_info.error.message }}
- {% for error_key, error_value in status.task_info.error.iteritems() %}
+ {% for error_key, error_value in status.task_info.error.items() %}
{% if error_key != "message" and error_value %}
{{ error_key }}:
@@ -73,7 +73,7 @@