diff --git a/.circleci/config.yml b/.circleci/config.yml
index 46c3312f1..4ff7f4156 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -97,6 +97,9 @@ jobs:
       - run:
           name: Setup Python environment
           command: |
+            echo "Installing libcurl4-gnutls-dev because it is required for pycurl"
+            sudo apt-get update
+            sudo apt-get install -y libcurl4-gnutls-dev
             pip install virtualenv
             virtualenv pip-compile-env
       - restore_cache:
@@ -137,6 +140,9 @@ jobs:
       - run:
           name: Setup Python environment
           command: |
+            echo "Installing libcurl4-gnutls-dev because it is required for pycurl"
+            sudo apt-get update
+            sudo apt-get install -y libcurl4-gnutls-dev
             sudo apt-get update && sudo apt-get install -y libpython2.7 firefox
             pip install virtualenv
             virtualenv env
diff --git a/Dockerfile b/Dockerfile
index 77f889170..89f4b397a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -15,7 +15,7 @@ RUN adduser -D app && \
 # To install pip dependencies
 RUN apk add --no-cache \
     build-base \
-    curl \
+    curl curl-dev \
     git \
     libxml2-dev \
     libxslt-dev \
diff --git a/cla_backend/apps/cla_eventlog/management/commands/remove_same_day_consecutive_outcome_codes.py b/cla_backend/apps/cla_eventlog/management/commands/remove_same_day_consecutive_outcome_codes.py
index 12b367612..b47e80b9d 100644
--- a/cla_backend/apps/cla_eventlog/management/commands/remove_same_day_consecutive_outcome_codes.py
+++ b/cla_backend/apps/cla_eventlog/management/commands/remove_same_day_consecutive_outcome_codes.py
@@ -3,7 +3,6 @@
 from datetime import datetime, time
 from itertools import groupby
 
-import boto
 from django.conf import settings
 from django.core import serializers
 from django.core.management.base import BaseCommand
@@ -12,7 +11,7 @@
 from cla_eventlog.constants import LOG_LEVELS, LOG_TYPES
 from cla_eventlog.models import Log
 
-from reports.utils import get_s3_connection
+from cla_backend.libs.aws.s3 import ReportsS3, ClientError
 
 
 logger = logging.getLogger(__name__)
@@ -75,7 +74,7 @@ def remove_same_day_consecutive_outcome_codes(self):
             dupes_to_remove = Log.objects.filter(id__in=same_day_consecutive_outcome_log_ids)
             try:
                 self.write_queryset_to_s3(dupes_to_remove)
-            except boto.exception.S3ResponseError as e:
+            except ClientError as e:
                 logger.error(
                     "LGA-125: Could not get bucket {}: {}".format(settings.AWS_DELETED_OBJECTS_BUCKET_NAME, e)
                 )
@@ -86,20 +85,7 @@
             logger.info("LGA-125: No dupe logs to remove")
 
     def write_queryset_to_s3(self, queryset):
-        bucket = self.get_or_create_s3_bucket()
-        key = bucket.new_key("deleted-log-objects-{}".format(now().isoformat()))
-        serialized_queryset = serializers.serialize("json", queryset)
-        key.set_contents_from_string(serialized_queryset)
-        # Restore with:
-        # for restored_log_object in serializers.deserialize('json', serialized_queryset):
-        #     restored_log_object.save()
-
-    @staticmethod
-    def get_or_create_s3_bucket():
-        conn = get_s3_connection()
         bucket_name = settings.AWS_DELETED_OBJECTS_BUCKET_NAME
-        try:
-            return conn.get_bucket(bucket_name)
-        except boto.exception.S3ResponseError:
-            conn.create_bucket(bucket_name, location=settings.AWS_S3_REGION_NAME)
-            return conn.get_bucket(bucket_name)
+        key = "deleted-log-objects-{}".format(now().isoformat())
+        serialized_queryset = serializers.serialize("json", queryset)
+        ReportsS3.save_data_to_bucket(bucket_name, key, serialized_queryset)
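Note: the rewrite above drops the inline "Restore with:" comment from write_queryset_to_s3. For reference, a minimal sketch of that restore path, assuming the serialized JSON has already been fetched back out of the deleted-objects bucket:

```python
# Sketch only: restore Log rows from the JSON string that
# ReportsS3.save_data_to_bucket() wrote. The serializer calls are Django's
# real API; fetching the JSON back out of S3 is left to the caller.
from django.core import serializers

def restore_deleted_logs(serialized_queryset):
    # serialized_queryset: the string produced by
    # serializers.serialize("json", queryset) in write_queryset_to_s3
    for restored_log_object in serializers.deserialize("json", serialized_queryset):
        restored_log_object.save()
```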
diff --git a/cla_backend/apps/reports/models.py b/cla_backend/apps/reports/models.py
index 37b87e6bf..d4052e8ae 100644
--- a/cla_backend/apps/reports/models.py
+++ b/cla_backend/apps/reports/models.py
@@ -5,7 +5,7 @@
 from model_utils.models import TimeStampedModel
 
 from reports.constants import EXPORT_STATUS
-from reports.utils import get_s3_connection
+from cla_backend.libs.aws.s3 import ReportsS3
 
 
 class Export(TimeStampedModel):
@@ -24,12 +24,10 @@ def link(self):
 
 def delete_export_file(sender, instance=None, **kwargs):
     # check if there is a connection to aws, otherwise delete locally
    if settings.AWS_REPORTS_STORAGE_BUCKET_NAME:
-        conn = get_s3_connection()
-        bucket = conn.lookup(settings.AWS_REPORTS_STORAGE_BUCKET_NAME)
         try:
-            k = bucket.get_key(settings.EXPORT_DIR + os.path.basename(instance.path))
-            bucket.delete_key(k)
+            key = settings.EXPORT_DIR + os.path.basename(instance.path)
+            ReportsS3.delete_file(settings.AWS_REPORTS_STORAGE_BUCKET_NAME, key)
         except (ValueError, AttributeError):
             pass
     else:
diff --git a/cla_backend/apps/reports/tasks.py b/cla_backend/apps/reports/tasks.py
index 8f384a9fb..6f3f7fb69 100644
--- a/cla_backend/apps/reports/tasks.py
+++ b/cla_backend/apps/reports/tasks.py
@@ -18,12 +18,14 @@
 from dateutil.relativedelta import relativedelta
 from django.conf import settings
 
-from .utils import OBIEEExporter, get_s3_connection
+from .utils import OBIEEExporter
+from cla_backend.libs.aws.s3 import ReportsS3
 from .models import Export
 from .constants import EXPORT_STATUS
 from core.utils import remember_cwd
 from checker.models import ReasonForContacting
 from urlparse import urlparse
+from cla_backend.celery import app
 
 
 logger = logging.getLogger(__name__)
@@ -79,20 +81,14 @@ def on_failure(self, exc, task_id, args, kwargs, einfo):
         self.export.save()
 
     def send_to_s3(self):
-        conn = get_s3_connection()
-        try:
-            bucket = conn.get_bucket(settings.AWS_REPORTS_STORAGE_BUCKET_NAME)
-        except Exception as e:
-            logger.error(
-                "Reports bucket couldn't be fetched. Ensure s3 credentials set. You may need the S3_USE_SIGV4 env var"
-            )
-            raise e
-        k = bucket.new_key(settings.EXPORT_DIR + os.path.basename(self.filepath))
-        k.set_contents_from_filename(self.filepath)
+        key = settings.EXPORT_DIR + os.path.basename(self.filepath)
+        ReportsS3.save_file(settings.AWS_REPORTS_STORAGE_BUCKET_NAME, key, self.filepath)
         shutil.rmtree(self.filepath, ignore_errors=True)
 
 
 class ExportTask(ExportTaskBase):
+    name = "exporttask"
+
     def run(self, user_id, filename, form_class_name, post_data, *args, **kwargs):
         self.user = User.objects.get(pk=user_id)
         self._create_export()
@@ -118,6 +114,8 @@
 
 
 class OBIEEExportTask(ExportTaskBase):
+    name = "obieeexporttask"
+
     def run(self, user_id, filename, form_class_name, post_data, *args, **kwargs):
         """
         Export a full dump of the db for OBIEE export and make it available
@@ -158,6 +156,8 @@
 
 
 class ReasonForContactingExportTask(ExportTaskBase):
+    name = "reasonforcontactingexport"
+
    def run(self, user_id, filename, form_class_name, post_data, *args, **kwargs):
         """
         Export csv files for each of the referrers from reason for contacting
@@ -229,3 +229,11 @@ def generate_rfc_zip(self):
         with ZipFile(self.filepath, "w") as refer_zip:
             for csv_file in glob.glob("*.csv"):
                 refer_zip.write(csv_file)
+
+
+# The Task base class no longer automatically registers tasks
+# https://docs.celeryq.dev/en/v4.0.0/whatsnew-4.0.html#the-task-base-class-no-longer-automatically-register-tasks
+# https://github.com/celery/celery/issues/5992
+tasks = [ExportTask(), OBIEEExportTask(), ReasonForContactingExportTask()]
+for task in tasks:
+    app.tasks.register(task)
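Note: tasks.py now registers task instances by hand because Celery 4 dropped the metaclass that auto-registered Task subclasses. A self-contained toy of the pattern (app and task names invented):

```python
# Toy sketch of explicit registration under Celery >= 4: declare a `name` on
# the class-based task (as the diff adds for each task) and register an
# *instance* against the app, not the class.
from celery import Celery, Task

app = Celery("example")

class ExampleExportTask(Task):
    name = "exampleexporttask"  # explicit name, mirroring the diff's additions

    def run(self, *args, **kwargs):
        return "exported"

app.tasks.register(ExampleExportTask())  # an instance, not the class
```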
diff --git a/cla_backend/apps/reports/tests/test_models.py b/cla_backend/apps/reports/tests/test_models.py
index 7c340a3f3..f47c9bd9e 100644
--- a/cla_backend/apps/reports/tests/test_models.py
+++ b/cla_backend/apps/reports/tests/test_models.py
@@ -5,7 +5,7 @@
 
 
 class DeleteExportFile(TestCase):
-    @patch("reports.models.get_s3_connection")
+    @patch("cla_backend.libs.aws.s3.ReportsS3.get_s3_connection")
     def test_delete_export_file_no_aws(self, mock_s3):
         with patch("os.remove") as mock_remove:
             settings.AWS_REPORTS_STORAGE_BUCKET_NAME = ""
@@ -16,11 +16,11 @@ def test_delete_export_file_no_aws(self, mock_s3):
             assert mock_remove.called
             assert not mock_s3.called
 
-    @patch("reports.models.get_s3_connection", return_value=MagicMock())
+    @patch("cla_backend.libs.aws.s3.ReportsS3.get_s3_connection", return_value=MagicMock())
     def test_delete_export_file_with_aws(self, mock_s3):
         settings.AWS_REPORTS_STORAGE_BUCKET_NAME = "AWS_TEST"
         sender = MagicMock()
-        instance = None
-        # delete_export_file(sender, instance=None, **kwargs)
+        instance = MagicMock()
+        instance.path = "/tmp/test.txt"
         delete_export_file(sender, instance)
         assert mock_s3.called
diff --git a/cla_backend/apps/reports/tests/test_utils.py b/cla_backend/apps/reports/tests/test_utils.py
deleted file mode 100644
index 3effc13f1..000000000
--- a/cla_backend/apps/reports/tests/test_utils.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import mock
-import os
-
-from boto.s3.connection import S3Connection
-from django.test import TestCase, override_settings
-from reports.utils import get_s3_connection
-
-
-class UtilsTestCase(TestCase):
-    @override_settings(
-        AWS_ACCESS_KEY_ID="000000000001",
-        AWS_SECRET_ACCESS_KEY="000000000002",
-        AWS_S3_HOST="s3.eu-west-2.amazonaws.com",
-    )
-    def test_get_s3_connection(self):
-        envs = {"S3_USE_SIGV4": "True"}
-        with mock.patch.dict(os.environ, envs):
-            conn = get_s3_connection()
-            self.assertIsInstance(conn, S3Connection)
diff --git a/cla_backend/apps/reports/tests/test_views.py b/cla_backend/apps/reports/tests/test_views.py
index a95262524..0e5d1bca7 100644
--- a/cla_backend/apps/reports/tests/test_views.py
+++ b/cla_backend/apps/reports/tests/test_views.py
@@ -6,7 +6,7 @@
 
 
 class DownloadFileTestCase(TestCase):
-    @patch("reports.views.get_s3_connection")
+    @patch("cla_backend.libs.aws.s3.ReportsS3.get_s3_connection")
     def test_download_no_aws(self, mock_s3):
         # mock pythons open()
         with patch("__builtin__.open", mock_open(read_data="data")) as mock_file:
@@ -24,7 +24,7 @@ def test_download_no_aws(self, mock_s3):
             # built in Open method is called in views.py
             mock_file.assert_called_with(file_path, "r")
 
-    @patch("reports.views.get_s3_connection", return_value=MagicMock())
+    @patch("cla_backend.libs.aws.s3.ReportsS3.get_s3_connection", return_value=MagicMock())
     def test_download_with_aws(self, mock_s3):
         mock_request = MagicMock()
         # if file_name contains string "schedule"
diff --git a/cla_backend/apps/reports/utils.py b/cla_backend/apps/reports/utils.py
index b4b9009bc..b3d14df92 100644
--- a/cla_backend/apps/reports/utils.py
+++ b/cla_backend/apps/reports/utils.py
@@ -4,7 +4,6 @@
 import tempfile
 from datetime import date, datetime, time, timedelta
 
-import boto
 import pyminizip
 from django.core.exceptions import ImproperlyConfigured as DjangoImproperlyConfigured
 from django.conf import settings
@@ -159,7 +158,3 @@ def close(self):
             os.remove(self.full_path)
         if os.path.exists(self.tmp_export_path):
             shutil.rmtree(self.tmp_export_path)
-
-
-def get_s3_connection():
-    return boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, host=settings.AWS_S3_HOST)
diff --git a/cla_backend/apps/reports/views.py b/cla_backend/apps/reports/views.py
index 97d87322b..51c41cefb 100644
--- a/cla_backend/apps/reports/views.py
+++ b/cla_backend/apps/reports/views.py
@@ -34,7 +34,7 @@
 from reports.models import Export
 
 from .tasks import ExportTask, OBIEEExportTask, ReasonForContactingExportTask
-from reports.utils import get_s3_connection
+from cla_backend.libs.aws.s3 import ReportsS3
 
 
 def report_view(request, form_class, title, template="case_report", success_task=ExportTask, file_name=None):
@@ -51,13 +51,15 @@ def report_view(request, form_class, title, template="case_report", success_task
         success_task().delay(request.user.pk, filename, form_class.__name__, json.dumps(request.POST))
         messages.info(request, u"Your export is being processed. It will show up in the downloads tab shortly.")
-    return render(request, tmpl, {'has_permission': admin_site_instance.has_permission(request), "title": title, "form": form})
+    return render(
+        request, tmpl, {"has_permission": admin_site_instance.has_permission(request), "title": title, "form": form}
+    )
 
 
 def scheduled_report_view(request, title):
     tmpl = "admin/reports/case_report.html"
     admin_site_instance = AdminSite()
-    return render(request, tmpl, {"title": title, 'has_permission': admin_site_instance.has_permission(request)})
+    return render(request, tmpl, {"title": title, "has_permission": admin_site_instance.has_permission(request)})
 
 
 def valid_submit(request, form):
@@ -201,18 +203,16 @@ def reasons_for_contacting(request):
 def download_file(request, file_name="", *args, **kwargs):
     # check if there is a connection to aws, otherwise download from local TEMP_DIR
     if settings.AWS_REPORTS_STORAGE_BUCKET_NAME:
-        conn = get_s3_connection()
-        bucket = conn.lookup(settings.AWS_REPORTS_STORAGE_BUCKET_NAME)
-        k = bucket.get_key(settings.EXPORT_DIR + file_name)
+        bucket_name = settings.AWS_REPORTS_STORAGE_BUCKET_NAME
+        key = settings.EXPORT_DIR + file_name
+        obj = ReportsS3.download_file(bucket_name, key)
 
-        if k is None:
+        if obj is None:
             raise Http404("Export does not exist")
 
-        k.open_read()
-        headers = dict(k.resp.getheaders())
-        response = HttpResponse(k)
+        response = HttpResponse(obj["body"])
 
-        for key, val in headers.items():
+        for key, val in obj["headers"].items():
             response[key] = val
     else:
         # only do this locally if debugging
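Note: a condensed sketch of the contract download_file now relies on (bucket name invented; this mirrors the view logic above rather than adding behaviour):

```python
# ReportsS3.download_file returns None for a missing key, or a dict of
# {"headers": ..., "body": file-like} that maps directly onto HttpResponse.
from django.http import Http404, HttpResponse

from cla_backend.libs.aws.s3 import ReportsS3

def build_export_response(file_name):
    obj = ReportsS3.download_file("my-reports-bucket", "exports/" + file_name)
    if obj is None:
        raise Http404("Export does not exist")
    response = HttpResponse(obj["body"])
    for header, value in obj["headers"].items():
        response[header] = value
    return response
```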
diff --git a/cla_backend/libs/aws/s3.py b/cla_backend/libs/aws/s3.py
index 362c4a0eb..721e4d3e5 100644
--- a/cla_backend/libs/aws/s3.py
+++ b/cla_backend/libs/aws/s3.py
@@ -1,5 +1,42 @@
+from tempfile import NamedTemporaryFile
 from storages.backends.s3boto3 import S3Boto3Storage
+from botocore.exceptions import ClientError
 
 
 class StaticS3Storage(S3Boto3Storage):
     default_acl = "public-read"
+
+
+class ReportsS3:
+    @classmethod
+    def clean_name(cls, name):
+        return name.strip("/")
+
+    @classmethod
+    def get_s3_connection(cls, bucket_name):
+        return S3Boto3Storage(bucket=bucket_name)
+
+    @classmethod
+    def download_file(cls, bucket_name, key):
+        try:
+            obj = cls.get_s3_connection(bucket_name).bucket.Object(cls.clean_name(key))
+            data = NamedTemporaryFile()
+            obj.download_fileobj(data)
+            # This is required, otherwise any file reads will start at the end, which
+            # leads to an empty file being downloaded (zero bytes)
+            data.seek(0)
+            return {"headers": {"Content-Type": obj.content_type}, "body": data}
+        except ClientError:
+            return None
+
+    @classmethod
+    def save_file(cls, bucket_name, key, path):
+        cls.get_s3_connection(bucket_name).bucket.Object(cls.clean_name(key)).upload_file(path)
+
+    @classmethod
+    def delete_file(cls, bucket_name, key):
+        cls.get_s3_connection(bucket_name).delete(cls.clean_name(key))
+
+    @classmethod
+    def save_data_to_bucket(cls, bucket_name, key, content):
+        cls.get_s3_connection(bucket_name).bucket.Object(key).put(Body=content)
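Note: hypothetical usage of the new facade (bucket and key names invented). download_file deliberately swallows botocore's ClientError, which the module re-exports for callers, and signals failure with None:

```python
from cla_backend.libs.aws.s3 import ReportsS3

ReportsS3.save_data_to_bucket("my-private-bucket", "exports/audit.json", '{"ok": true}')
obj = ReportsS3.download_file("my-private-bucket", "exports/audit.json")
if obj is not None:  # None means the key was missing or another ClientError occurred
    print(obj["headers"]["Content-Type"])
    print(obj["body"].read())
ReportsS3.delete_file("my-private-bucket", "exports/audit.json")
```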
diff --git a/cla_backend/settings/base.py b/cla_backend/settings/base.py
index 384e47809..b3a37c8a0 100644
--- a/cla_backend/settings/base.py
+++ b/cla_backend/settings/base.py
@@ -3,13 +3,11 @@
 import os
 
 import sentry_sdk
-from boto.s3.connection import NoHostProvided
 from cla_common.call_centre_availability import OpeningHours
 from cla_common.services import CacheAdapter
 from collections import defaultdict
-from kombu import transport
 from sentry_sdk.integrations.django import DjangoIntegration
-from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
+from kombu import transport
 
 from cla_backend.sqs import CLASQSChannel
 
@@ -113,15 +111,9 @@ def env_var_truthy_intention(name):
 STATICFILES_STORAGE = "cla_backend.libs.aws.s3.StaticS3Storage"
 
 AWS_S3_REGION_NAME = os.environ.get("AWS_S3_REGION_NAME", "eu-west-1")
-AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
-AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
 AWS_DEFAULT_ACL = None
 AWS_QUERYSTRING_AUTH = False
 
-# Annoyingly the host parameter boto.s3.connection.S3Connection needs to be host string if it's not the default
-# value of boto.s3.connection.NoHostProvided class reference and not None
-AWS_S3_HOST = os.environ.get("AWS_S3_HOST", NoHostProvided)
-
 # This bucket needs to a private bucket as it will contain sensitive reports
 AWS_REPORTS_STORAGE_BUCKET_NAME = os.environ.get("AWS_REPORTS_STORAGE_BUCKET_NAME")
 # This bucket needs to a public bucket as it will serve public assets such as css,images and js
@@ -402,24 +394,11 @@ def traces_sampler(sampling_context):
 OBIEE_EMAIL_TO = os.environ.get("OBIEE_EMAIL_TO", DEFAULT_EMAIL_TO)
 OBIEE_ZIP_PASSWORD = os.environ.get("OBIEE_ZIP_PASSWORD")
 
-# celery
-if all([env_var_truthy_intention("SQS_ACCESS_KEY"), env_var_truthy_intention("SQS_SECRET_KEY")]):
-    import urllib
-
-    BROKER_URL = "sqs://{access_key}:{secret_key}@".format(
-        access_key=urllib.quote(os.environ.get("SQS_ACCESS_KEY"), safe=""),
-        secret_key=urllib.quote(os.environ.get("SQS_SECRET_KEY"), safe=""),
-    )
-else:
-    # if no BROKER_URL specified then don't try to use celery
-    # because it'll just cause errors
-    CELERY_ALWAYS_EAGER = True
-
 CLA_ENV = os.environ.get("CLA_ENV", "local")
 
 BROKER_TRANSPORT_OPTIONS = {
     "polling_interval": 10,
-    "region": os.environ.get("SQS_REGION", "eu-west-1"),
+    "region": os.environ.get("SQS_REGION", "eu-west-2"),
     "wait_time_seconds": 20,
 }
 
@@ -428,11 +407,14 @@
     # This is to stop actions such as ListQueues being triggered
     # which we do not have on the cloud platform environments
     transport.SQS.Transport.Channel = CLASQSChannel
-
+    BROKER_URL = "sqs://"
     predefined_queue_url = os.environ.get("CELERY_PREDEFINED_QUEUE_URL")
     CELERY_DEFAULT_QUEUE = predefined_queue_url.split("/")[-1]
     BROKER_TRANSPORT_OPTIONS["predefined_queue_url"] = predefined_queue_url
 else:
+    # if no BROKER_URL specified then don't try to use celery
+    # because it'll just cause errors
+    CELERY_ALWAYS_EAGER = True
     BROKER_TRANSPORT_OPTIONS["queue_name_prefix"] = "env-%(env)s-" % {"env": CLA_ENV}
 
 CELERY_ACCEPT_CONTENT = ["yaml"]  # because json serializer doesn't support dates
@@ -447,6 +429,7 @@
 CELERY_TIMEZONE = "UTC"
 # apps with celery tasks
 CELERY_IMPORTS = ["reports.tasks", "notifications.tasks"]
+CELERY_TASK_PROTOCOL = 1
 
 CONTRACT_2018_ENABLED = os.environ.get("CONTRACT_2018_ENABLED", "True") == "True"
 PING_JSON_KEYS["CONTRACT_2018_ENABLED_key"] = "CONTRACT_2018_ENABLED"
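Note: a worked example of the CELERY_DEFAULT_QUEUE derivation above (queue URL invented):

```python
# The default queue name is simply the last path segment of the predefined
# SQS queue URL supplied via CELERY_PREDEFINED_QUEUE_URL.
predefined_queue_url = "https://sqs.eu-west-2.amazonaws.com/123456789012/env-prod-cla-backend"
assert predefined_queue_url.split("/")[-1] == "env-prod-cla-backend"
```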
diff --git a/cla_backend/sqs.py b/cla_backend/sqs.py
index 0970f81e4..6bf67ce73 100644
--- a/cla_backend/sqs.py
+++ b/cla_backend/sqs.py
@@ -1,39 +1,11 @@
-import collections
-
-from boto.sqs.queue import Queue
-from boto.sqs.message import Message
-
 from kombu.transport.SQS import Channel
-from kombu.transport import virtual
-from kombu.utils import cached_property
 
 
 class CLASQSChannel(Channel):
-    @cached_property
-    def predefined_queues(self):
-        # We are using a strict sqs setup which we are given only list of predefined queues and
+    def _update_queue_cache(self, queue_name_prefix):
         url = self.transport_options.get("predefined_queue_url", None)
-        q = Queue(connection=self.sqs, url=url, message_class=Message)
-        return [q]
-
-    def __init__(self, *args, **kwargs):
-        # CLA Change - On cloud platforms we don't have permissions to perform actions such as ListQueues
-        # So instead lets use a list of predefined queue names
-        # Remove call to direct parent as that will perform the ListQueues action
-        # super(CLASQSChannel, self).__init__(*args, **kwargs)
-        virtual.Channel.__init__(self, *args, **kwargs)
-
-        queues = self.predefined_queues  # self.sqs.get_all_queues(prefix=self.queue_name_prefix)
-
-        for queue in queues:
-            self._queue_cache[queue.name] = queue
-        self._fanout_queues = set()
-
-        # The drain_events() method stores extra messages in a local
-        # Deque object. This allows multiple messages to be requested from
-        # SQS at once for performance, but maintains the same external API
-        # to the caller of the drain_events() method.
-        self._queue_message_cache = collections.deque()
+        queue_name = url.split("/")[-1]
+        self._queue_cache[queue_name] = url
 
     def _new_queue(self, queue, **kwargs):
         # Translate to SQS name for consistency with initial
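Note: the override above seeds kombu's queue cache directly instead of listing queues; a toy illustration of what it stores (URL invented):

```python
# One name -> URL entry derived from the predefined queue URL, so the channel
# never needs to call the SQS ListQueues action, which the cloud platform
# environments do not permit.
url = "https://sqs.eu-west-2.amazonaws.com/123456789012/env-prod-cla-backend"
queue_cache = {url.split("/")[-1]: url}
assert queue_cache == {"env-prod-cla-backend": url}
```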
diff --git a/helm_deploy/cla-backend/templates/_helpers.tpl b/helm_deploy/cla-backend/templates/_helpers.tpl
index 4ef2fbcaa..263d79ece 100644
--- a/helm_deploy/cla-backend/templates/_helpers.tpl
+++ b/helm_deploy/cla-backend/templates/_helpers.tpl
@@ -68,7 +68,7 @@ Create the name of the service account to use
 {{- if .Values.serviceAccount.create -}}
     {{ default (include "cla-backend.fullname" .) .Values.serviceAccount.name }}
 {{- else -}}
-    {{ default "default" .Values.serviceAccount.name }}
+    {{ .Release.Namespace }}-{{ .Values.serviceAccount.name }}
 {{- end -}}
 {{- end -}}
diff --git a/helm_deploy/cla-backend/templates/collect-static-job.yml b/helm_deploy/cla-backend/templates/collect-static-job.yml
index 5ba7a229c..a1f4e1f6b 100644
--- a/helm_deploy/cla-backend/templates/collect-static-job.yml
+++ b/helm_deploy/cla-backend/templates/collect-static-job.yml
@@ -20,6 +20,7 @@ spec:
         helm.sh/chart: "{{ .Chart.Name }}-{{ .Chart.Version }}"
     spec:
       restartPolicy: Never
+      serviceAccountName: {{ include "cla-backend.serviceAccountName" . }}
       containers:
       - name: collect-static
         image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
diff --git a/helm_deploy/cla-backend/templates/deployment-worker.yaml b/helm_deploy/cla-backend/templates/deployment-worker.yaml
index 127edb0e9..9dcf00929 100644
--- a/helm_deploy/cla-backend/templates/deployment-worker.yaml
+++ b/helm_deploy/cla-backend/templates/deployment-worker.yaml
@@ -13,6 +13,7 @@ spec:
     labels:
       app: {{ include "cla-backend.fullname" . }}-worker
   spec:
+    serviceAccountName: {{ include "cla-backend.serviceAccountName" . }}
     containers:
     - name: {{ include "cla-backend.fullname" . }}-worker
       image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
diff --git a/helm_deploy/cla-backend/values.yaml b/helm_deploy/cla-backend/values.yaml
index b5f6eebfd..a9704d013 100644
--- a/helm_deploy/cla-backend/values.yaml
+++ b/helm_deploy/cla-backend/values.yaml
@@ -20,7 +20,7 @@ serviceAccount:
   annotations: {}
   # The name of the service account to use.
   # If not set and create is true, a name is generated using the fullname template
-  name:
+  name: irsa-service-account
 
 podSecurityContext: {}
   # fsGroup: 2000
@@ -156,14 +156,6 @@ envVars:
     value: "True"
   SQS_REGION:
     value: "eu-west-2"
-  SQS_ACCESS_KEY:
-    secret:
-      name: sqs
-      key: access_key_id
-  SQS_SECRET_KEY:
-    secret:
-      name: sqs
-      key: secret_access_key
   OBIEE_EMAIL_TO:
     secret:
       name: obiee
@@ -182,14 +174,6 @@ envVars:
     secret:
       name: s3
       key: static_files_bucket_name
-  AWS_ACCESS_KEY_ID:
-    secret:
-      name: s3
-      key: access_key_id
-  AWS_SECRET_ACCESS_KEY:
-    secret:
-      name: s3
-      key: secret_access_key
   AWS_DELETED_OBJECTS_BUCKET_NAME:
     secret:
       name: s3
diff --git a/requirements/generated/requirements-dev.txt b/requirements/generated/requirements-dev.txt
index db6d16c52..68ee35d73 100644
--- a/requirements/generated/requirements-dev.txt
+++ b/requirements/generated/requirements-dev.txt
@@ -4,25 +4,23 @@
 #
 # pip-compile --output-file=requirements/generated/requirements-dev.txt requirements/source/requirements-dev.in
 #
-amqp==1.4.9
-    # via kombu
-anyjson==0.3.3
+amqp==2.6.1
     # via kombu
 aspy.yaml==1.3.0
     # via pre-commit
-billiard==3.3.0.23
+billiard==3.5.0.5
     # via celery
 bleach==3.3.0
     # via -r requirements/source/requirements-base.in
 boto3==1.17.112
     # via -r requirements/source/requirements-base.in
-boto==2.39.0
-    # via -r requirements/source/requirements-base.in
+boto==2.49.0
+    # via celery
 botocore==1.20.112
     # via
     #   boto3
     #   s3transfer
-celery==3.1.18
+celery[sqs]==4.0.2
     # via -r requirements/source/requirements-base.in
 certifi==2021.10.8
     # via
@@ -179,8 +177,10 @@ jsonpatch==1.9
     # via -r requirements/source/requirements-base.in
 jsonpointer==2.3
     # via jsonpatch
-kombu==3.0.37
-    # via celery
+kombu==4.1.0
+    # via
+    #   -r requirements/source/requirements-base.in
+    #   celery
 logstash-formatter==0.5.9
     # via -r requirements/source/requirements-base.in
 lxml==4.9.1
@@ -222,6 +222,8 @@ psycopg2==2.7.5
     # via -r requirements/source/requirements-base.in
 pycparser==2.21
     # via cffi
+pycurl==7.43.0.5
+    # via celery
 pyjwt==1.7.1
     # via notifications-python-client
 pyminizip==0.2.3
@@ -310,6 +312,8 @@ urllib3[secure]==1.26.14
     #   sentry-sdk
 uwsgi==2.0.18
     # via -r requirements/source/requirements-base.in
+vine==1.3.0
+    # via amqp
 virtualenv==20.15.1
     # via pre-commit
 webdriver-manager==1.6.2
diff --git a/requirements/generated/requirements-docs.txt b/requirements/generated/requirements-docs.txt
index 445b0acb1..ccc3c468b 100644
--- a/requirements/generated/requirements-docs.txt
+++ b/requirements/generated/requirements-docs.txt
@@ -6,25 +6,23 @@
 #
 alabaster==0.7.12
     # via sphinx
-amqp==1.4.9
-    # via kombu
-anyjson==0.3.3
+amqp==2.6.1
     # via kombu
 babel==2.9.1
     # via sphinx
-billiard==3.3.0.23
+billiard==3.5.0.5
     # via celery
 bleach==3.3.0
     # via -r requirements/source/requirements-base.in
 boto3==1.17.112
     # via -r requirements/source/requirements-base.in
-boto==2.39.0
-    # via -r requirements/source/requirements-base.in
+boto==2.49.0
+    # via celery
 botocore==1.20.112
     # via
     #   boto3
     #   s3transfer
-celery==3.1.18
+celery[sqs]==4.0.2
     # via -r requirements/source/requirements-base.in
 certifi==2021.10.8
     # via
@@ -133,8 +131,10 @@ jsonpatch==1.9
     # via -r requirements/source/requirements-base.in
 jsonpointer==2.3
     # via jsonpatch
-kombu==3.0.37
-    # via celery
+kombu==4.1.0
+    # via
+    #   -r requirements/source/requirements-base.in
+    #   celery
 logstash-formatter==0.5.9
     # via -r requirements/source/requirements-base.in
 lxml==4.9.1
@@ -163,6 +163,8 @@ polib==1.0.6
     # via -r requirements/source/requirements-base.in
 psycopg2==2.7.5
     # via -r requirements/source/requirements-base.in
+pycurl==7.43.0.5
+    # via celery
 pygments==2.5.2
     # via sphinx
 pyjwt==1.7.1
@@ -232,6 +234,8 @@ urllib3==1.26.14
     #   sentry-sdk
 uwsgi==2.0.18
     # via -r requirements/source/requirements-base.in
+vine==1.3.0
+    # via amqp
 webencodings==0.5.1
     # via bleach
 xlrd==2.0.1
diff --git a/requirements/generated/requirements-production.txt b/requirements/generated/requirements-production.txt
index 69230a7de..4f384be69 100644
--- a/requirements/generated/requirements-production.txt
+++ b/requirements/generated/requirements-production.txt
@@ -4,23 +4,21 @@
 #
 # pip-compile --output-file=requirements/generated/requirements-production.txt requirements/source/requirements-production.in
 #
-amqp==1.4.9
+amqp==2.6.1
     # via kombu
-anyjson==0.3.3
-    # via kombu
-billiard==3.3.0.23
+billiard==3.5.0.5
     # via celery
 bleach==3.3.0
     # via -r requirements/source/requirements-base.in
 boto3==1.17.112
     # via -r requirements/source/requirements-base.in
-boto==2.39.0
-    # via -r requirements/source/requirements-base.in
+boto==2.49.0
+    # via celery
 botocore==1.20.112
     # via
     #   boto3
     #   s3transfer
-celery==3.1.18
+celery[sqs]==4.0.2
     # via -r requirements/source/requirements-base.in
 certifi==2021.10.8
     # via
@@ -119,8 +117,10 @@ jsonpatch==1.9
     # via -r requirements/source/requirements-base.in
 jsonpointer==2.3
     # via jsonpatch
-kombu==3.0.37
-    # via celery
+kombu==4.1.0
+    # via
+    #   -r requirements/source/requirements-base.in
+    #   celery
 logstash-formatter==0.5.9
     # via -r requirements/source/requirements-base.in
 lxml==4.9.1
@@ -145,6 +145,8 @@ polib==1.0.6
     # via -r requirements/source/requirements-base.in
 psycopg2==2.7.5
     # via -r requirements/source/requirements-base.in
+pycurl==7.43.0.5
+    # via celery
 pyjwt==1.7.1
     # via notifications-python-client
 pyminizip==0.2.3
@@ -199,6 +201,8 @@ urllib3==1.26.14
     #   sentry-sdk
 uwsgi==2.0.18
     # via -r requirements/source/requirements-base.in
+vine==1.3.0
+    # via amqp
 webencodings==0.5.1
     # via bleach
 xlrd==2.0.1
diff --git a/requirements/generated/requirements-testing.txt b/requirements/generated/requirements-testing.txt
index 58b3bedfb..cafedc835 100644
--- a/requirements/generated/requirements-testing.txt
+++ b/requirements/generated/requirements-testing.txt
@@ -4,23 +4,21 @@
 #
 # pip-compile --output-file=requirements/generated/requirements-testing.txt requirements/source/requirements-testing.in
 #
-amqp==1.4.9
+amqp==2.6.1
     # via kombu
-anyjson==0.3.3
-    # via kombu
-billiard==3.3.0.23
+billiard==3.5.0.5
     # via celery
 bleach==3.3.0
     # via -r requirements/source/requirements-base.in
 boto3==1.17.112
     # via -r requirements/source/requirements-base.in
-boto==2.39.0
-    # via -r requirements/source/requirements-base.in
+boto==2.49.0
+    # via celery
 botocore==1.20.112
     # via
     #   boto3
     #   s3transfer
-celery==3.1.18
+celery[sqs]==4.0.2
     # via -r requirements/source/requirements-base.in
 certifi==2021.10.8
     # via
@@ -152,8 +150,10 @@ jsonpatch==1.9
     # via -r requirements/source/requirements-base.in
 jsonpointer==2.3
     # via jsonpatch
-kombu==3.0.37
-    # via celery
+kombu==4.1.0
+    # via
+    #   -r requirements/source/requirements-base.in
+    #   celery
 logstash-formatter==0.5.9
     # via -r requirements/source/requirements-base.in
 lxml==4.9.1
@@ -184,6 +184,8 @@ psycopg2==2.7.5
     # via -r requirements/source/requirements-base.in
 pycparser==2.21
     # via cffi
+pycurl==7.43.0.5
+    # via celery
 pyjwt==1.7.1
     # via notifications-python-client
 pyminizip==0.2.3
@@ -256,6 +258,8 @@ urllib3[secure]==1.26.14
     #   sentry-sdk
 uwsgi==2.0.18
     # via -r requirements/source/requirements-base.in
+vine==1.3.0
+    # via amqp
 webdriver-manager==1.6.2
     # via -r requirements/source/requirements-testing.in
 webencodings==0.5.1
diff --git a/requirements/source/requirements-base.in b/requirements/source/requirements-base.in
index 2e8c4850c..27db4a830 100644
--- a/requirements/source/requirements-base.in
+++ b/requirements/source/requirements-base.in
@@ -40,8 +40,8 @@ polib==1.0.6
 # Fork PgFulltext - PR added
 https://github.com/ministryofjustice/djorm-ext-pgfulltext/archive/refs/tags/0.1.0.tar.gz
 
-celery==3.1.18
-boto==2.39.0
+celery[sqs]==4.0.2
+kombu==4.1.0
 
 PyYAML==5.4
 pyminizip==0.2.3
@@ -60,4 +60,3 @@ django-session-security==2.2.3
 
 # Govuk Notify Service
 notifications-python-client==6.0.0
-