
Commit

Merge pull request #25 from qld-gov-au/QOL-7596-ckan-2.9
[QOL-7596] use 'six' instead of assuming features will exist
ThrawnCA authored Feb 3, 2021
2 parents f3b8d50 + 778f008 commit 5bc867a
Showing 2 changed files with 14 additions and 16 deletions.
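
The change swaps imports from the `builtins` backport module (supplied on Python 2 by the `future` package, which is not guaranteed to be installed) for their `six` equivalents, which assume only `six` itself. A minimal sketch of the two substitutions, illustrative only and not part of the diff:

    import six

    # six.text_type is unicode on Python 2 and str on Python 3, so it
    # stands in for the backported 'from builtins import str'.
    label = six.text_type(42)    # u'42' on Python 2, '42' on Python 3

    # six.moves.zip is itertools.izip on Python 2 and the builtin zip on
    # Python 3; both yield pairs lazily.
    pairs = six.moves.zip(['id', 'size'], ['text', 'numeric'])
    print(list(pairs))           # [('id', 'text'), ('size', 'numeric')]
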
10 changes: 5 additions & 5 deletions ckanext/xloader/action.py
@@ -1,10 +1,10 @@
 # encoding: utf-8
 
 from __future__ import absolute_import
-from builtins import str
 import logging
 import json
 import datetime
+import six
 
 from dateutil.parser import parse as parse_date
 
@@ -91,7 +91,7 @@ def xloader_submit(context, data_dict):
         'entity_id': res_id,
         'entity_type': 'resource',
         'task_type': 'xloader',
-        'last_updated': str(datetime.datetime.utcnow()),
+        'last_updated': six.text_type(datetime.datetime.utcnow()),
         'state': 'submitting',
         'key': 'xloader',
         'value': '{}',
@@ -114,7 +114,7 @@ def xloader_submit(context, data_dict):
             re.search(r"'resource_id': u?'([^']+)'",
                       job.description).groups()[0]
             for job in get_queue().get_jobs()
-            if 'xloader_to_datastore' in str(job)  # filter out test_job etc
+            if 'xloader_to_datastore' in six.text_type(job)  # filter out test_job etc
         ]
         updated = datetime.datetime.strptime(
             existing_task['last_updated'], '%Y-%m-%dT%H:%M:%S.%f')
@@ -183,7 +183,7 @@ def xloader_submit(context, data_dict):
 
     task['value'] = value
     task['state'] = 'pending'
-    task['last_updated'] = str(datetime.datetime.utcnow()),
+    task['last_updated'] = six.text_type(datetime.datetime.utcnow()),
     p.toolkit.get_action('task_status_update')(context, task)
     model.Session = original_session
 
@@ -256,7 +256,7 @@ def xloader_hook(context, data_dict):
         })
 
     task['state'] = status
-    task['last_updated'] = str(datetime.datetime.utcnow())
+    task['last_updated'] = six.text_type(datetime.datetime.utcnow())
     task['error'] = data_dict.get('error')
 
     resubmit = False
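
In action.py the substitution only changes how task timestamps and job descriptions are rendered as text: `six.text_type` behaves like Python 3's `str` while staying unicode-safe on Python 2. A quick illustration of the stored `last_updated` value, using CPython's default datetime rendering:

    import datetime
    import six

    now = datetime.datetime(2021, 2, 3, 10, 30, 0, 123456)
    print(six.text_type(now))    # 2021-02-03 10:30:00.123456

Note that the trailing comma on the `last_updated` assignment in the `@@ -183` hunk is carried over from the old line; it makes the assigned value a one-element tuple rather than a plain string.
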
20 changes: 9 additions & 11 deletions ckanext/xloader/loader.py
@@ -1,7 +1,5 @@
 'Load a CSV into postgres'
 from __future__ import absolute_import
-from builtins import zip
-from builtins import str
 import os
 import os.path
 import tempfile
@@ -90,8 +88,8 @@ def load_csv(csv_filepath, resource_id, mimetype='text/csv', logger=None):
         # types = messytables.type_guess(row_set.sample, types=TYPES, strict=True)
 
         headers = [header.strip()[:MAX_COLUMN_LENGTH] for header in headers if header.strip()]
-        # headers_dicts = [dict(id=field[0], type=TYPE_MAPPING[str(field[1])])
-        #                 for field in zip(headers, types)]
+        # headers_dicts = [dict(id=field[0], type=TYPE_MAPPING[six.text_type(field[1])])
+        #                 for field in six.moves.zip(headers, types)]
 
         # TODO worry about csv header name problems
         # e.g. duplicate names
@@ -169,7 +167,7 @@ def load_csv(csv_filepath, resource_id, mimetype='text/csv', logger=None):
         else:
             raise LoaderError(
                 'Validation error when creating the database table: {}'
-                .format(str(e)))
+                .format(six.text_type(e)))
     except Exception as e:
         raise LoaderError('Could not create the database table: {}'
                           .format(e))
@@ -227,7 +225,7 @@ def load_csv(csv_filepath, resource_id, mimetype='text/csv', logger=None):
                 # e is a str but with foreign chars e.g.
                 # 'extra data: "paul,pa\xc3\xbcl"\n'
                 # but logging and exceptions need a normal (7 bit) str
-                error_str = str(e)
+                error_str = six.text_type(e)
                 logger.warning(error_str)
                 raise LoaderError('Error during the load into PostgreSQL:'
                                   ' {}'.format(error_str))
@@ -319,7 +317,7 @@ def load_table(table_filepath, resource_id, mimetype='text/csv', logger=None):
                   'numeric': messytables.DecimalType(),
                   'timestamp': messytables.DateUtilType(),
                   }.get(existing_info.get(h, {}).get('type_override'), t)
-                 for t, h in zip(types, headers)]
+                 for t, h in six.moves.zip(types, headers)]
 
     row_set.register_processor(messytables.types_processor(types))
 
@@ -347,8 +345,8 @@ def row_iterator():
                res_id=resource_id))
         delete_datastore_resource(resource_id)
 
-    headers_dicts = [dict(id=field[0], type=TYPE_MAPPING[str(field[1])])
-                     for field in zip(headers, types)]
+    headers_dicts = [dict(id=field[0], type=TYPE_MAPPING[six.text_type(field[1])])
+                     for field in six.moves.zip(headers, types)]
 
     # Maintain data dictionaries from matching column names
     if existing_info:
@@ -412,7 +410,7 @@ def encode_headers(headers):
         try:
             encoded_headers.append(unidecode(header))
         except AttributeError:
-            encoded_headers.append(unidecode(str(header)))
+            encoded_headers.append(unidecode(six.text_type(header)))
 
     return encoded_headers

@@ -446,7 +444,7 @@ def send_resource_to_datastore(resource_id, headers, records):
         p.toolkit.get_action('datastore_create')(context, request)
     except p.toolkit.ValidationError as e:
         raise LoaderError('Validation error writing rows to db: {}'
-                          .format(str(e)))
+                          .format(six.text_type(e)))
 
 
 def datastore_resource_exists(resource_id):
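
In loader.py the same pattern builds the datastore field definitions by pairing each header with its guessed column type. A minimal sketch of the rewritten comprehension, with hypothetical sample data (this `TYPE_MAPPING` is a stand-in for the module's real mapping):

    import six

    # Stand-ins for the module's TYPE_MAPPING and the messytables-guessed
    # types, for illustration only.
    TYPE_MAPPING = {'String': 'text', 'Decimal': 'numeric'}
    headers = ['name', 'price']
    types = ['String', 'Decimal']

    headers_dicts = [dict(id=field[0], type=TYPE_MAPPING[six.text_type(field[1])])
                     for field in six.moves.zip(headers, types)]
    print(headers_dicts)
    # [{'id': 'name', 'type': 'text'}, {'id': 'price', 'type': 'numeric'}]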
