diff --git a/.github/workflows/bcol-api-ci.yml b/.github/workflows/bcol-api-ci.yml
index 24bbc9143..ad004ae26 100644
--- a/.github/workflows/bcol-api-ci.yml
+++ b/.github/workflows/bcol-api-ci.yml
@@ -4,7 +4,7 @@ on:
pull_request:
branches:
- main
- - queue_python_upgrade
+ - feature-queue-python-upgrade
paths:
- "bcol-api/**"
diff --git a/.github/workflows/events-listener-ci.yml b/.github/workflows/events-listener-ci.yml
deleted file mode 100644
index 5ab9ccc96..000000000
--- a/.github/workflows/events-listener-ci.yml
+++ /dev/null
@@ -1,111 +0,0 @@
-name: Events Listener Queue CI
-
-on:
- pull_request:
- branches:
- - main
- paths:
- - "queue_services/events-listener/**"
- - "pay-api/src/pay_api/models/**"
-
-defaults:
- run:
- shell: bash
- working-directory: ./queue_services/events-listener
-
-jobs:
- setup-job:
- runs-on: ubuntu-20.04
-
- if: github.repository == 'bcgov/sbc-pay'
-
- steps:
- - uses: actions/checkout@v3
- - run: "true"
-
- linting:
- needs: setup-job
- runs-on: ubuntu-20.04
-
- strategy:
- matrix:
- python-version: [3.12]
-
- steps:
- - uses: actions/checkout@v3
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v1
- with:
- python-version: ${{ matrix.python-version }}
- - name: Install dependencies
- run: |
- make setup
- - name: Lint with pylint
- id: pylint
- run: |
- make pylint
- - name: Lint with flake8
- id: flake8
- run: |
- make flake8
-
- testing:
- needs: setup-job
- env:
- DATABASE_TEST_URL: "postgresql://postgres:postgres@localhost:5432/postgres"
- TEST_NATS_DOCKER: "YES"
- STAN_CLUSTER_NAME: "test-cluster"
-
- runs-on: ubuntu-20.04
-
- strategy:
- matrix:
- python-version: [3.12]
-
- services:
- postgres:
- image: postgres:12
- env:
- POSTGRES_USER: postgres
- POSTGRES_PASSWORD: postgres
- POSTGRES_DB: postgres
- ports:
- - 5432:5432
- # needed because the postgres container does not provide a healthcheck
- options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
-
- steps:
- - uses: actions/checkout@v3
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v1
- with:
- python-version: ${{ matrix.python-version }}
- - name: Install dependencies
- run: |
- make setup
- - name: Test with pytest
- id: test
- run: |
- make test
- - name: Upload coverage to Codecov
- uses: codecov/codecov-action@v3
- with:
- file: ./queue_services/events-listener/coverage.xml
- flags: eventlistenerqueue
- name: codecov-events-listener
- fail_ci_if_error: true
-
- build-check:
- needs: setup-job
- runs-on: ubuntu-20.04
-
- strategy:
- matrix:
- python-version: [3.12]
-
- steps:
- - uses: actions/checkout@v3
- - name: build to check strictness
- id: build
- run: |
- make build-nc
diff --git a/.github/workflows/ftp-poller-ci.yml b/.github/workflows/ftp-poller-ci.yml
index 14a9c3c09..53e90abb1 100644
--- a/.github/workflows/ftp-poller-ci.yml
+++ b/.github/workflows/ftp-poller-ci.yml
@@ -4,7 +4,7 @@ on:
pull_request:
branches:
- main
- - queue_python_upgrade
+ - feature-queue-python-upgrade
paths:
- "jobs/ftp-poller/**"
@@ -53,10 +53,6 @@ jobs:
needs: setup-job
env:
DATABASE_TEST_URL: "postgresql://postgres:postgres@localhost:5432/pay-test"
- NATS_QUEUE: "account-worker"
- NATS_CLUSTER_ID: "test-cluster"
- NATS_CLIENT_NAME: "account.events.worker"
- NATS_SUBJECT: "account.events"
USE_DOCKER_MOCK: "YES"
JWT_OIDC_ISSUER: "http://localhost:8081/auth/realms/demo"
SBC_AUTH_ADMIN_CLIENT_ID: "sbc-auth-admin"
diff --git a/.github/workflows/pay-admin-ci.yml b/.github/workflows/pay-admin-ci.yml
index 0a6b544f2..a7dac379c 100644
--- a/.github/workflows/pay-admin-ci.yml
+++ b/.github/workflows/pay-admin-ci.yml
@@ -4,7 +4,7 @@ on:
pull_request:
branches:
- main
- - queue_python_upgrade
+ - feature-queue-python-upgrade
paths:
- "pay-admin/**"
- "pay-api/src/pay_api/models/**"
diff --git a/.github/workflows/pay-api-ci.yml b/.github/workflows/pay-api-ci.yml
index 0a6971992..b21549e25 100644
--- a/.github/workflows/pay-api-ci.yml
+++ b/.github/workflows/pay-api-ci.yml
@@ -4,7 +4,7 @@ on:
pull_request:
branches:
- main
- - queue_python_upgrade
+ - feature-queue-python-upgrade
paths:
- "pay-api/**"
@@ -55,7 +55,6 @@ jobs:
FLASK_ENV: "testing"
# Needs different database than POSTGRES otherwise dropping database doesn't work
DATABASE_TEST_URL: "postgresql://postgres:postgres@localhost:5432/pay-test"
- TEST_NATS_DOCKER: "YES"
USE_TEST_KEYCLOAK_DOCKER: "YES"
USE_DOCKER_MOCK: "YES"
@@ -66,9 +65,6 @@ jobs:
JWT_OIDC_TEST_CLIENT_SECRET: "1111111111"
JWT_OIDC_TEST_JWKS_CACHE_TIMEOUT: "6000"
- NATS_QUEUE: "test-worker"
- NATS_SUBJECT: "entity.payment.test"
-
SBC_AUTH_ADMIN_CLIENT_ID: "sbc-auth-admin"
SBC_AUTH_ADMIN_CLIENT_SECRET: "2222222222"
diff --git a/.github/workflows/events-listener-cd.yml b/.github/workflows/pay-queue-cd.yml
similarity index 89%
rename from .github/workflows/events-listener-cd.yml
rename to .github/workflows/pay-queue-cd.yml
index 9ae30be64..8461d258e 100644
--- a/.github/workflows/events-listener-cd.yml
+++ b/.github/workflows/pay-queue-cd.yml
@@ -1,12 +1,13 @@
-name: Events Listener Queue CD
+name: Pay Queue CD
on:
push:
branches:
- main
paths:
- - "queue_services/events-listener/**"
+ - "pay-queue/**"
- "pay-api/src/pay_api/models/**"
+ - "pay-api/src/pay_api/services/cfs_service.py"
workflow_dispatch:
inputs:
environment:
@@ -17,14 +18,14 @@ on:
defaults:
run:
shell: bash
- working-directory: ./queue_services/events-listener
+ working-directory: ./pay-queue
env:
- APP_NAME: "events-listener"
+ APP_NAME: "pay-queue"
TAG_NAME: "dev"
jobs:
- events-listener-cd-by-push:
+ pay-queue-cd-by-push:
runs-on: ubuntu-20.04
if: github.event_name == 'push' && github.repository == 'bcgov/sbc-pay'
@@ -61,13 +62,13 @@ jobs:
if: failure()
with:
type: ${{ job.status }}
- job_name: "*Events Listener Queue Built and Deployed to ${{env.TAG_NAME}}*"
+ job_name: "*Pay Queue Built and Deployed to ${{env.TAG_NAME}}*"
channel: "#registries-bot"
url: ${{ secrets.ROCKETCHAT_WEBHOOK }}
commit: true
token: ${{ secrets.GITHUB_TOKEN }}
- events-listener-cd-by-dispatch:
+ pay-queue-cd-by-dispatch:
runs-on: ubuntu-20.04
if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/sbc-pay'
@@ -107,7 +108,7 @@ jobs:
if: failure()
with:
type: ${{ job.status }}
- job_name: "*Events Listener Queue Built and Deployed to ${{env.TAG_NAME}}*"
+ job_name: "*Pay Queue Built and Deployed to ${{env.TAG_NAME}}*"
channel: "#registries-bot"
url: ${{ secrets.ROCKETCHAT_WEBHOOK }}
commit: true
diff --git a/.github/workflows/payment-reconciliations-ci.yml b/.github/workflows/pay-queue-ci.yml
similarity index 88%
rename from .github/workflows/payment-reconciliations-ci.yml
rename to .github/workflows/pay-queue-ci.yml
index 415889d0a..c399fae26 100644
--- a/.github/workflows/payment-reconciliations-ci.yml
+++ b/.github/workflows/pay-queue-ci.yml
@@ -1,18 +1,19 @@
-name: Payment Reconciliations Queue CI
+name: Pay Queue CI
on:
pull_request:
branches:
- main
+ - feature-queue-python-upgrade
paths:
- - "queue_services/payment-reconciliations/**"
+ - "pay-queue/**"
- "pay-api/src/pay_api/models/**"
- "pay-api/src/pay_api/services/cfs_service.py"
defaults:
run:
shell: bash
- working-directory: ./queue_services/payment-reconciliations
+ working-directory: ./pay-queue
jobs:
setup-job:
@@ -53,10 +54,8 @@ jobs:
testing:
needs: setup-job
env:
- DATABASE_TEST_URL: "postgresql://postgres:postgres@localhost:5432/postgres"
- TEST_NATS_DOCKER: "YES"
+ DATABASE_TEST_URL: "postgresql://postgres:postgres@localhost:5432/pay-test"
USE_DOCKER_MOCK: "YES"
- STAN_CLUSTER_NAME: "test-cluster"
MINIO_ENDPOINT: "localhost:9000"
MINIO_ACCESS_KEY: "minio"
MINIO_ACCESS_SECRET: "minio123"
@@ -80,7 +79,7 @@ jobs:
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
- POSTGRES_DB: postgres
+ POSTGRES_DB: pay-test
ports:
- 5432:5432
# needed because the postgres container does not provide a healthcheck
@@ -102,7 +101,7 @@ jobs:
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
-          file: ./queue_services/payment-reconciliations/coverage.xml
-          flags: paymentreconciliationsqueue
-          name: codecov-payment-reconciliations
+          file: ./pay-queue/coverage.xml
+          flags: payqueue
+          name: codecov-pay-queue
fail_ci_if_error: true
diff --git a/.github/workflows/payment-jobs-ci.yml b/.github/workflows/payment-jobs-ci.yml
index 213027b6e..2f3e58c13 100644
--- a/.github/workflows/payment-jobs-ci.yml
+++ b/.github/workflows/payment-jobs-ci.yml
@@ -4,6 +4,7 @@ on:
pull_request:
branches:
- main
+ - feature-queue-python-upgrade
paths:
- "jobs/payment-jobs/**"
- "pay-api/src/pay_api/models/**"
@@ -53,7 +54,7 @@ jobs:
testing:
needs: setup-job
env:
- DATABASE_TEST_URL: "postgresql://postgres:postgres@localhost:5432/postgres"
+ DATABASE_TEST_URL: "postgresql://postgres:postgres@localhost:5432/pay-test"
USE_DOCKER_MOCK: "YES"
JWT_OIDC_ISSUER: "http://localhost:8081/auth/realms/demo"
SBC_AUTH_ADMIN_CLIENT_ID: "sbc-auth-admin"
@@ -71,7 +72,7 @@ jobs:
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
- POSTGRES_DB: postgres
+ POSTGRES_DB: pay-test
ports:
- 5432:5432
# needed because the postgres container does not provide a healthcheck
diff --git a/.github/workflows/payment-reconciliations-cd.yml b/.github/workflows/payment-reconciliations-cd.yml
deleted file mode 100644
index 0914a299f..000000000
--- a/.github/workflows/payment-reconciliations-cd.yml
+++ /dev/null
@@ -1,115 +0,0 @@
-name: Payment Reconciliations Queue CD
-
-on:
- push:
- branches:
- - main
- paths:
- - "queue_services/payment-reconciliations/**"
- - "pay-api/src/pay_api/models/**"
- - "pay-api/src/pay_api/services/cfs_service.py"
- workflow_dispatch:
- inputs:
- environment:
- description: "Environment (dev/test/prod)"
- required: true
- default: "dev"
-
-defaults:
- run:
- shell: bash
- working-directory: ./queue_services/payment-reconciliations
-
-env:
- APP_NAME: "payment-reconciliations"
- TAG_NAME: "dev"
-
-jobs:
- payment-reconciliations-cd-by-push:
- runs-on: ubuntu-20.04
-
- if: github.event_name == 'push' && github.repository == 'bcgov/sbc-pay'
- environment:
- name: "dev"
-
- steps:
- - uses: actions/checkout@v3
-
- - name: Login Openshift
- shell: bash
- run: |
- oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}}
-
- - name: CD Flow
- shell: bash
- env:
- OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }}
- OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }}
- OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }}
- OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }}
- OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }}
- TAG_NAME: ${{ env.TAG_NAME }}
- run: |
- make cd
-
- - name: Watch new rollout (trigger by image change in Openshift)
- shell: bash
- run: |
- oc rollout status dc/${{ env.APP_NAME }}-${{ env.TAG_NAME }} -n ${{ secrets.OPENSHIFT4_REPOSITORY }}-${{ env.TAG_NAME }} -w
-
- - name: Rocket.Chat Notification
- uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master
- if: failure()
- with:
- type: ${{ job.status }}
- job_name: "*Payment Reconciliations Queue Built and Deployed to ${{env.TAG_NAME}}*"
- channel: "#registries-bot"
- url: ${{ secrets.ROCKETCHAT_WEBHOOK }}
- commit: true
- token: ${{ secrets.GITHUB_TOKEN }}
-
- payment-reconciliations-cd-by-dispatch:
- runs-on: ubuntu-20.04
-
- if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/sbc-pay'
- environment:
- name: "${{ github.event.inputs.environment }}"
-
- steps:
- - uses: actions/checkout@v3
- - name: Set env by input
- run: |
- echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV
-
- - name: Login Openshift
- shell: bash
- run: |
- oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}}
-
- - name: CD Flow
- shell: bash
- env:
- OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }}
- OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }}
- OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }}
- OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }}
- OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }}
- TAG_NAME: ${{ env.TAG_NAME }}
- run: |
- make cd
-
- - name: Watch new rollout (trigger by image change in Openshift)
- shell: bash
- run: |
- oc rollout status dc/${{ env.APP_NAME }}-${{ env.TAG_NAME }} -n ${{ secrets.OPENSHIFT4_REPOSITORY }}-${{ env.TAG_NAME }} -w
-
- - name: Rocket.Chat Notification
- uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master
- if: failure()
- with:
- type: ${{ job.status }}
- job_name: "*Payment Reconciliations Queue Built and Deployed to ${{env.TAG_NAME}}*"
- channel: "#registries-bot"
- url: ${{ secrets.ROCKETCHAT_WEBHOOK }}
- commit: true
- token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 27cfab6f5..b3f42e947 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,6 +47,7 @@ coverage.xml
.hypothesis/
.pytest_cache/
pytest.xml
+test_eft_tdi17.txt
# Translations
*.mo
@@ -124,6 +125,7 @@ package-lock.json
cas_settlement_file.csv
jobs/payment-jobs/tests/docker/ftp
ACK.INBOX.F12022020202
-queue_services/payment-reconciliations/ACK.*
-queue_services/payment-reconciliations/FEEDBACK.*
+
+pay-queue/ACK.*
+pay-queue/FEEDBACK.*
jobs/notebook-report/data/
diff --git a/bcol-api/requirements.txt b/bcol-api/requirements.txt
index 18e12c0c5..44c6e1330 100644
--- a/bcol-api/requirements.txt
+++ b/bcol-api/requirements.txt
@@ -16,16 +16,14 @@ flask-jwt-oidc==0.3.0
flask-restx==1.3.0
gunicorn==21.2.0
idna==3.6
-importlib-metadata==7.0.1
-importlib-resources==5.13.0
+importlib_resources==6.1.3
isodate==0.6.1
itsdangerous==2.1.2
jaeger-client==4.8.0
jsonschema==4.17.3
lxml==5.1.0
opentracing==2.4.0
-packaging==23.2
-pkgutil_resolve_name==1.3.10
+packaging==24.0
platformdirs==4.2.0
psycopg2-binary==2.9.9
pyasn1-modules==0.3.0
@@ -40,13 +38,12 @@ requests-file==2.0.0
requests-toolbelt==1.0.0
requests==2.31.0
rsa==4.9
-sentry-sdk==1.40.6
+sentry-sdk==1.41.0
six==1.16.0
threadloop==1.0.2
thrift==0.16.0
tornado==6.4
urllib3==2.2.1
zeep==4.2.1
-zipp==3.17.0
-e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
git+https://github.com/thorwolpert/flask-jwt-oidc.git
diff --git a/bcol-api/setup.cfg b/bcol-api/setup.cfg
index 1fc623352..7b0dffc81 100755
--- a/bcol-api/setup.cfg
+++ b/bcol-api/setup.cfg
@@ -17,7 +17,7 @@ keywords =
[options]
zip_safe = True
-python_requires = >=3.6
+python_requires = >=3.12
include_package_data = True
packages = find:
diff --git a/bcol-api/src/bcol_api/resources/__init__.py b/bcol-api/src/bcol_api/resources/__init__.py
index d094b94c9..28f382357 100755
--- a/bcol-api/src/bcol_api/resources/__init__.py
+++ b/bcol-api/src/bcol_api/resources/__init__.py
@@ -33,7 +33,6 @@
__all__ = ('API_BLUEPRINT', 'OPS_BLUEPRINT')
# This will add the Authorize button to the swagger docs
-# TODO oauth2 & openid may not yet be supported by restplus <- check on this
AUTHORIZATIONS = {'apikey': {'type': 'apiKey', 'in': 'header', 'name': 'Authorization'}}
OPS_BLUEPRINT = Blueprint('API_OPS', __name__, url_prefix='/ops')
diff --git a/bcol-api/src/bcol_api/resources/bcol_payment.py b/bcol-api/src/bcol_api/resources/bcol_payment.py
index dd8536bf7..ddd2d3669 100755
--- a/bcol-api/src/bcol_api/resources/bcol_payment.py
+++ b/bcol-api/src/bcol_api/resources/bcol_payment.py
@@ -24,7 +24,6 @@
from bcol_api.utils.auth import jwt as _jwt
from bcol_api.utils.constants import Role
from bcol_api.utils.errors import Error
-from bcol_api.utils.trace import tracing as _tracing
from bcol_api.utils.util import cors_preflight
@@ -37,7 +36,6 @@ class AccountPayment(Resource):
"""Endpoint resource to manage BCOL Payments."""
@staticmethod
- @_tracing.trace()
@_jwt.requires_auth
@cors.crossdomain(origin='*')
def post():
diff --git a/bcol-api/src/bcol_api/resources/bcol_profile.py b/bcol-api/src/bcol_api/resources/bcol_profile.py
index 36a61a7ff..5b1c565d9 100755
--- a/bcol-api/src/bcol_api/resources/bcol_profile.py
+++ b/bcol-api/src/bcol_api/resources/bcol_profile.py
@@ -23,7 +23,6 @@
from bcol_api.services.bcol_profile import BcolProfile as BcolProfileService
from bcol_api.utils.auth import jwt as _jwt
from bcol_api.utils.errors import Error
-from bcol_api.utils.trace import tracing as _tracing
from bcol_api.utils.util import cors_preflight
@@ -36,7 +35,6 @@ class BcolProfiles(Resource):
"""Endpoint query bcol profile using user id and password."""
@staticmethod
- @_tracing.trace()
@_jwt.requires_auth
@cors.crossdomain(origin='*')
def post():
@@ -61,7 +59,6 @@ class BcolProfile(Resource):
"""Endpoint resource to get bcol profile by user id."""
@staticmethod
- @_tracing.trace()
@_jwt.has_one_of_roles(['system'])
@cors.crossdomain(origin='*')
def get(bcol_user_id: str):
diff --git a/bcol-api/src/bcol_api/utils/logging.py b/bcol-api/src/bcol_api/utils/logging.py
index 8b88ddcf2..8568f87dd 100755
--- a/bcol-api/src/bcol_api/utils/logging.py
+++ b/bcol-api/src/bcol_api/utils/logging.py
@@ -18,12 +18,7 @@
def setup_logging(conf):
- """Create the services logger.
-
- TODO should be reworked to load in the proper loggers and remove others
- """
- # log_file_path = path.join(path.abspath(path.dirname(__file__)), conf)
-
+ """Create the services logger."""
if conf and path.isfile(conf):
logging.config.fileConfig(conf)
print(f'Configure logging, from conf:{conf}', file=sys.stdout)
diff --git a/bcol-api/src/bcol_api/utils/trace.py b/bcol-api/src/bcol_api/utils/trace.py
index 31cc43080..ece264f83 100644
--- a/bcol-api/src/bcol_api/utils/trace.py
+++ b/bcol-api/src/bcol_api/utils/trace.py
@@ -12,11 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bring in the Tracer."""
-from sbc_common_components.tracing.api_tracer import ApiTracer
-from sbc_common_components.tracing.api_tracing import ApiTracing
+# from sbc_common_components.tracing.api_tracer import ApiTracer
+# from sbc_common_components.tracing.api_tracing import ApiTracing
# initialize tracer
-API_TRACER = ApiTracer('BCOL API Services')
-tracing = ApiTracing( # pylint: disable=invalid-name; lower case name as used by convention in most Flask apps
- API_TRACER.tracer)
+# API_TRACER = ApiTracer('BCOL API Services')
+# tracing = ApiTracing( # pylint: disable=invalid-name; lower case name as used by convention in most Flask apps
+# API_TRACER.tracer)
diff --git a/codecov.yaml b/codecov.yaml
index 174116e19..e7b9696ed 100644
--- a/codecov.yaml
+++ b/codecov.yaml
@@ -17,8 +17,8 @@ coverage:
- payapi
- bcolapi
- reportapi
- - eventlistenerqueue
- paymentjobs
+ - payqueue
ignore:
- "^/tests/**/*" # ignore test harness code
@@ -49,11 +49,12 @@ flags:
paths:
- report-api/src/api
carryforward: true
- eventlistenerqueue:
- paths:
- - queue_services/events-listener/src/events_listener
- carryforward: true
paymentjobs:
paths:
- jobs/payment-jobs/tasks
carryforward: true
+ payqueue:
+ paths:
+ - pay-queue/src/pay_queue
+ carryforward: true
+
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 43baaeb8d..b1337ba8e 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -35,21 +35,6 @@ services:
- "6831:6831/udp"
- "6832:6832/udp"
- #################### Nats Service Definition ####################
- nats:
- image: nats-streaming
- restart: always
- mem_limit: 512m
- expose:
- - 4222
- - 8222
- labels:
- - entity.services=nats
- ports:
- - 4222:4222
- - 8222:8222
- tty: true
-
volumes:
db-data:
driver: local
diff --git a/jobs/ftp-poller/README.md b/jobs/ftp-poller/README.md
index bdc63f7ee..e73b38555 100644
--- a/jobs/ftp-poller/README.md
+++ b/jobs/ftp-poller/README.md
@@ -1,7 +1,7 @@
# SFTP Poller project
-Polls the SFTP folder and if a settlement file is found , uploads to minio and sends a NATS message.
+Polls the SFTP folder and, if a settlement file is found, uploads it to minio and sends a pubsub message.
## Openshift commands
@@ -84,4 +84,4 @@ this will print the public key. Store the string after ssh-rsa to the ftp-poller
https://hub.docker.com/r/atmoz/sftp
-docker run -p 22:22 -d atmoz/sftp foo:pass:::upload
\ No newline at end of file
+docker run -p 22:22 -d atmoz/sftp foo:pass:::upload
diff --git a/jobs/ftp-poller/config.py b/jobs/ftp-poller/config.py
index 58fefa141..81a331765 100644
--- a/jobs/ftp-poller/config.py
+++ b/jobs/ftp-poller/config.py
@@ -89,16 +89,6 @@ class _Config(object): # pylint: disable=too-few-public-methods
     CGI_FEEDBACK_FILE_PREFIX = os.getenv('CGI_FEEDBACK_FILE_PREFIX', 'FEEDBACK')
-    CGI_INBOX_FILE_PREFIX = os.getenv('CGI_FEEDBACK_FILE_PREFIX', 'INBOX')
+    CGI_INBOX_FILE_PREFIX = os.getenv('CGI_INBOX_FILE_PREFIX', 'INBOX')
- # NATS Config
- NATS_SERVERS = os.getenv('NATS_SERVERS', 'nats://127.0.0.1:4222').split(',')
- NATS_CLUSTER_ID = os.getenv('NATS_CLUSTER_ID', 'test-cluster')
- NATS_QUEUE = os.getenv('NATS_QUEUE', 'account-worker')
-
- # NATS Config for account events
- NATS_PAYMENT_RECONCILIATIONS_CLIENT_NAME = os.getenv('NATS_PAYMENT_RECONCILIATIONS_CLIENT_NAME',
- 'payment.reconciliations.worker')
- NATS_PAYMENT_RECONCILIATIONS_SUBJECT = os.getenv('NATS_SUBJECT', 'payment.reconciliations')
-
SFTP_CONFIGS = {
'CAS': {
'SFTP_HOST': CAS_SFTP_HOST,
@@ -146,6 +136,12 @@ class _Config(object): # pylint: disable=too-few-public-methods
SENTRY_ENABLE = os.getenv('SENTRY_ENABLE', 'False')
SENTRY_DSN = os.getenv('SENTRY_DSN', None)
+ # GCP PubSub
+ AUDIENCE = os.getenv('AUDIENCE', None)
+ GCP_AUTH_KEY = os.getenv('GCP_AUTH_KEY', None)
+ PUBLISHER_AUDIENCE = os.getenv('PUBLISHER_AUDIENCE', None)
+ FTP_POLLER_TOPIC = os.getenv('FTP_POLLER_TOPIC', None)
+
TESTING = False
DEBUG = True
diff --git a/jobs/ftp-poller/devops/vaults.json b/jobs/ftp-poller/devops/vaults.json
index 1539ff97e..433ed9cbd 100644
--- a/jobs/ftp-poller/devops/vaults.json
+++ b/jobs/ftp-poller/devops/vaults.json
@@ -6,13 +6,6 @@
"payment-reconciliations",
"ftp-poller"
]
- },
- {
- "vault": "nats",
- "application": [
- "base",
- "payment-reconciliations"
- ]
},
{
"vault": "relationship",
diff --git a/jobs/ftp-poller/invoke_jobs.py b/jobs/ftp-poller/invoke_jobs.py
index 6581d2f94..5e18dcccb 100755
--- a/jobs/ftp-poller/invoke_jobs.py
+++ b/jobs/ftp-poller/invoke_jobs.py
@@ -44,7 +44,6 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')):
integrations=[FlaskIntegration()]
)
app.logger.info(f'<<<< Starting Ftp Poller Job >>>>')
- db.init_app(app)
ma.init_app(app)
register_shellcontext(app)
diff --git a/jobs/ftp-poller/requirements.txt b/jobs/ftp-poller/requirements.txt
index f2e779e08..44675066f 100644
--- a/jobs/ftp-poller/requirements.txt
+++ b/jobs/ftp-poller/requirements.txt
@@ -45,6 +45,6 @@ tornado==6.4
typing_extensions==4.10.0
urllib3==2.2.1
-e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
--e git+https://github.com/bcgov/sbc-pay.git@queue_python_upgrade#egg=pay-api&subdirectory=pay-api
+-e git+https://github.com/seeker25/sbc-pay.git@18263#egg=pay-api&subdirectory=pay-api
git+https://github.com/daxiom/simple-cloudevent.py.git
git+https://github.com/thorwolpert/flask-jwt-oidc.git
diff --git a/jobs/ftp-poller/requirements/dev.txt b/jobs/ftp-poller/requirements/dev.txt
index 696e51010..e98fcbca3 100644
--- a/jobs/ftp-poller/requirements/dev.txt
+++ b/jobs/ftp-poller/requirements/dev.txt
@@ -20,7 +20,6 @@ autopep8
coverage
pylint
pylint-flask
-#attrs==19.1.0
# docker
lovely-pytest-docker
diff --git a/jobs/ftp-poller/requirements/repo-libraries.txt b/jobs/ftp-poller/requirements/repo-libraries.txt
index 615ab1796..f71c16b32 100644
--- a/jobs/ftp-poller/requirements/repo-libraries.txt
+++ b/jobs/ftp-poller/requirements/repo-libraries.txt
@@ -1,4 +1,4 @@
-e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
--e git+https://github.com/bcgov/sbc-pay.git@queue_python_upgrade#egg=pay-api&subdirectory=pay-api
+-e git+https://github.com/seeker25/sbc-pay.git@18263#egg=pay-api&subdirectory=pay-api
git+https://github.com/daxiom/simple-cloudevent.py.git
git+https://github.com/thorwolpert/flask-jwt-oidc.git
diff --git a/jobs/ftp-poller/services/sftp.py b/jobs/ftp-poller/services/sftp.py
index 27b91144c..06fe984e0 100644
--- a/jobs/ftp-poller/services/sftp.py
+++ b/jobs/ftp-poller/services/sftp.py
@@ -24,10 +24,10 @@
class SFTPService: # pylint: disable=too-few-public-methods
"""SFTP Service class."""
- DEFAUILT_CONNECT_SERVER = 'CAS'
+ DEFAULT_CONNECT_SERVER = 'CAS'
@staticmethod
- def get_connection(server_name: str = DEFAUILT_CONNECT_SERVER) -> Connection:
+ def get_connection(server_name: str = DEFAULT_CONNECT_SERVER) -> Connection:
"""Return a SFTP connection."""
# pylint: disable=protected-access
return SFTPService._connect(server_name)
@@ -38,7 +38,7 @@ def _connect(server_name: str) -> Connection:
sftp_configs = current_app.config.get('SFTP_CONFIGS')
# if not passed , connect to CAS server always. to make the existing code work
if not server_name or server_name not in sftp_configs.keys():
- server_name = SFTPService.DEFAUILT_CONNECT_SERVER
+ server_name = SFTPService.DEFAULT_CONNECT_SERVER
connect_configs = sftp_configs.get(server_name)
diff --git a/jobs/ftp-poller/setup.cfg b/jobs/ftp-poller/setup.cfg
index 8eb46196f..c06c0dd9b 100755
--- a/jobs/ftp-poller/setup.cfg
+++ b/jobs/ftp-poller/setup.cfg
@@ -17,7 +17,7 @@ keywords =
[options]
zip_safe = True
-python_requires = >=3.6
+python_requires = >=3.12
include_package_data = True
packages = find:
diff --git a/jobs/ftp-poller/tasks/cgi_feeder_poller_task.py b/jobs/ftp-poller/tasks/cgi_feeder_poller_task.py
index f560a11ff..7542a4d84 100644
--- a/jobs/ftp-poller/tasks/cgi_feeder_poller_task.py
+++ b/jobs/ftp-poller/tasks/cgi_feeder_poller_task.py
@@ -17,8 +17,8 @@
from flask import current_app
from paramiko.sftp_attr import SFTPAttributes
+from pay_api.utils.enums import MessageType
from services.sftp import SFTPService
-from utils.constants import CGI_ACK_MESSAGE_TYPE, CGI_FEEDBACK_MESSAGE_TYPE
from utils import utils
@@ -48,18 +48,19 @@ def poll_ftp(cls):
f'Skipping directory {file_name}.')
continue
if cls._is_ack_file(file_name):
- utils.publish_to_queue([file_name], CGI_ACK_MESSAGE_TYPE)
+ utils.publish_to_queue([file_name], MessageType.CGI_ACK_RECEIVED.value)
cls._move_file_to_backup(sftp_client, [file_name])
elif cls._is_feedback_file(file_name):
bucket_name = current_app.config.get('MINIO_CGI_BUCKET_NAME')
utils.upload_to_minio(file, file_full_name, sftp_client, bucket_name)
- utils.publish_to_queue([file_name], CGI_FEEDBACK_MESSAGE_TYPE, location=bucket_name)
+ utils.publish_to_queue([file_name], MessageType.CGI_FEEDBACK_RECEIVED.value,
+ location=bucket_name)
cls._move_file_to_backup(sftp_client, [file_name])
elif cls._is_a_trigger_file(file_name):
cls._remove_file(sftp_client, file_name)
else:
current_app.logger.warning(
- f'File found which is not trigger , ACK or feed back {file_name}.Ignoring')
+ f'Ignoring file found which is not trigger ACK or feedback {file_name}.')
except Exception as e: # NOQA # pylint: disable=broad-except
current_app.logger.error(e)
@@ -74,7 +75,7 @@ def _move_file_to_backup(cls, sftp_client, backup_file_list):
@classmethod
def _remove_file(cls, sftp_client, file_name: str):
ftp_dir: str = current_app.config.get('CGI_SFTP_DIRECTORY')
- current_app.logger.info(f'Removing file:{ftp_dir}/{file_name}')
+ current_app.logger.info(f'Removing file: {ftp_dir}/{file_name}')
sftp_client.remove(ftp_dir + '/' + file_name)
@classmethod
diff --git a/jobs/ftp-poller/tasks/eft_poller_ftp.py b/jobs/ftp-poller/tasks/eft_poller_ftp.py
index 003987a28..10f59a4f4 100644
--- a/jobs/ftp-poller/tasks/eft_poller_ftp.py
+++ b/jobs/ftp-poller/tasks/eft_poller_ftp.py
@@ -17,8 +17,8 @@
from flask import current_app
from paramiko.sftp_attr import SFTPAttributes
+from pay_api.utils.enums import MessageType
from services.sftp import SFTPService
-from utils.constants import EFT_MESSAGE_TYPE
from utils.utils import publish_to_queue, upload_to_minio
@@ -66,7 +66,8 @@ def _post_process(cls, sftp_client, payment_file_list: List[str]):
2.Send a message to queue
"""
cls._move_file_to_backup(sftp_client, payment_file_list)
- publish_to_queue(payment_file_list, EFT_MESSAGE_TYPE, location=current_app.config.get('MINIO_EFT_BUCKET_NAME'))
+ publish_to_queue(payment_file_list, MessageType.EFT_FILE_UPLOADED.value,
+ location=current_app.config.get('MINIO_EFT_BUCKET_NAME'))
@classmethod
def _move_file_to_backup(cls, sftp_client, payment_file_list):
diff --git a/jobs/ftp-poller/tests/docker/docker-compose.yml b/jobs/ftp-poller/tests/docker/docker-compose.yml
index 253f6c0ee..8fb738b62 100644
--- a/jobs/ftp-poller/tests/docker/docker-compose.yml
+++ b/jobs/ftp-poller/tests/docker/docker-compose.yml
@@ -1,19 +1,6 @@
version: "3"
services:
- nats:
- image: nats-streaming
- restart: always
- expose:
- - 4222
- - 8222
- labels:
- - entity.services=nats
- ports:
- - 4222:4222
- - 8222:8222
- tty: true
-
proxy:
image: nginx:alpine
volumes:
diff --git a/jobs/ftp-poller/tests/jobs/conftest.py b/jobs/ftp-poller/tests/jobs/conftest.py
index 4e134a17a..fd4717049 100644
--- a/jobs/ftp-poller/tests/jobs/conftest.py
+++ b/jobs/ftp-poller/tests/jobs/conftest.py
@@ -14,95 +14,19 @@
"""Common setup and fixtures for the py-test suite used by this service."""
-import sys
import time
import pytest
-from flask_migrate import Migrate, upgrade
-from pay_api.models import db as _db
-from sqlalchemy import text
-from sqlalchemy.schema import DropConstraint, MetaData
-
from invoke_jobs import create_app
@pytest.fixture(scope='session')
def app():
"""Return a session-wide application configured in TEST mode."""
- _app = create_app('testing')
-
- return _app
-
-
-@pytest.fixture(scope='function')
-def app_request():
- """Return a session-wide application configured in TEST mode."""
- _app = create_app('testing')
-
- return _app
-
-
-@pytest.fixture(scope='session')
-def client(app): # pylint: disable=redefined-outer-name
- """Return a session-wide Flask test client."""
- return app.test_client()
-
-
-@pytest.fixture(scope='session')
-def client_ctx(app): # pylint: disable=redefined-outer-name
- """Return session-wide Flask test client."""
- with app.test_client() as _client:
- yield _client
-
-
-@pytest.fixture(scope='session')
-def db(app): # pylint: disable=redefined-outer-name, invalid-name
- """Return a session-wide initialised database.
-
- Drops all existing tables - Meta follows Postgres FKs
- """
- with app.app_context():
- # Clear out any existing tables
- metadata = MetaData(_db.engine)
- metadata.reflect()
- for table in metadata.tables.values():
- for fk in table.foreign_keys: # pylint: disable=invalid-name
- _db.engine.execute(DropConstraint(fk.constraint))
- metadata.drop_all()
- _db.drop_all()
-
- sequence_sql = """SELECT sequence_name FROM information_schema.sequences
- WHERE sequence_schema='public'
- """
-
- sess = _db.session()
- for seq in [name for (name,) in sess.execute(text(sequence_sql))]:
- try:
- sess.execute(text(f'DROP SEQUENCE public.{seq} ;'))
- print(f'DROP SEQUENCE public.{seq} ')
- except Exception as err: # NOQA # pylint: disable=broad-except
- print(f'Error: {err}')
- sess.commit()
-
- # ############################################
- # There are 2 approaches, an empty database, or the same one that the app will use
- # create the tables
- # _db.create_all()
- # or
- # Use Alembic to load all of the DB revisions including supporting lookup data
- # This is the path we'll use in legal_api!!
-
- # even though this isn't referenced directly, it sets up the internal configs that upgrade needs
- migrations_path = [folder for folder in sys.path if 'pay-api/pay-api' in folder][0] \
- .replace('/pay-api/src', '/pay-api/migrations')
-
- Migrate(app, _db, directory=migrations_path)
- upgrade()
-
- return _db
+ return create_app('testing')
-@pytest.fixture(scope='function')
+@pytest.fixture(scope='function', autouse=True)
def session(app): # pylint: disable=redefined-outer-name, invalid-name
"""Return a function-scoped session."""
with app.app_context():
@@ -114,7 +38,6 @@ def auto(docker_services, app): # pylint: disable=redefined-outer-name
"""Spin up docker instances."""
if app.config['USE_DOCKER_MOCK']:
docker_services.start('proxy')
- docker_services.start('nats')
docker_services.start('sftp')
time.sleep(2)
diff --git a/jobs/ftp-poller/tests/jobs/test_sftp.py b/jobs/ftp-poller/tests/jobs/test_sftp.py
index 4dcd6e2f6..d88470a8d 100644
--- a/jobs/ftp-poller/tests/jobs/test_sftp.py
+++ b/jobs/ftp-poller/tests/jobs/test_sftp.py
@@ -16,23 +16,20 @@
Test-Suite to ensure that the CreateAccountTask is working as expected.
"""
-# import os
-# from typing import List
-
+import pytest
from flask import current_app
from services.sftp import SFTPService
-# from utils.minio import get_object
-# from tasks.poll_ftp_task import PollFtpTask
+from utils.utils import publish_to_queue
-def test_cget_sftp_connection(session): # pylint:disable=unused-argument
+def test_cget_sftp_connection():
"""Test create account."""
con = SFTPService.get_connection()
assert con
-def test_poll_ftp_task(session): # pylint:disable=unused-argument
+def test_poll_ftp_task():
"""Test Poll."""
con = SFTPService.get_connection()
@@ -40,10 +37,8 @@ def test_poll_ftp_task(session): # pylint:disable=unused-argument
files = con.listdir(ftp_dir)
assert len(files) == 1, 'Files exist in FTP folder'
- # TODO fixed this test case
- # payment_file_list: List[str] = PollFtpTask.poll_ftp()
- # minio_file_content = get_object(payment_file_list[0]).read().decode()
- # full_path = os.path.join(os.path.dirname(__file__), '../docker/ftp/test.txt')
- # sftp_local_file_content = open(full_path, 'r').read()
- # assert minio_file_content == sftp_local_file_content, 'minio upload works fine.' \
- # 'Contents of ftp drive and minio verified'
+
+@pytest.mark.skip(reason='leave this to manually verify pubsub connection; needs env vars')
+def test_queue_message():
+ """Test publishing to topic."""
+ publish_to_queue(['file1.csv'])
diff --git a/jobs/ftp-poller/utils/logger.py b/jobs/ftp-poller/utils/logger.py
index 8b88ddcf2..afa83de96 100755
--- a/jobs/ftp-poller/utils/logger.py
+++ b/jobs/ftp-poller/utils/logger.py
@@ -18,14 +18,9 @@
def setup_logging(conf):
- """Create the services logger.
-
- TODO should be reworked to load in the proper loggers and remove others
- """
- # log_file_path = path.join(path.abspath(path.dirname(__file__)), conf)
-
+ """Create the services logger."""
if conf and path.isfile(conf):
logging.config.fileConfig(conf)
- print(f'Configure logging, from conf:{conf}', file=sys.stdout)
+ print(f'Configure logging, from conf: {conf}', file=sys.stdout)
else:
- print(f'Unable to configure logging, attempted conf:{conf}', file=sys.stderr)
+ print(f'Unable to configure logging, attempted conf: {conf}', file=sys.stderr)
diff --git a/jobs/ftp-poller/utils/utils.py b/jobs/ftp-poller/utils/utils.py
index 1d6e10da2..aa452f5c4 100644
--- a/jobs/ftp-poller/utils/utils.py
+++ b/jobs/ftp-poller/utils/utils.py
@@ -12,19 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Service to manage PAYBC services."""
-from datetime import datetime
from typing import List
from flask import current_app
from paramiko import SFTPFile
-from pay_api.services.queue_publisher import publish_response
+from pay_api.services import gcp_queue_publisher
+from pay_api.services.gcp_queue_publisher import QueueMessage
+from pay_api.utils.enums import MessageType, QueueSources
-from utils.constants import CAS_MESSAGE_TYPE
from utils.minio import put_object
-def publish_to_queue(payment_file_list: List[str], message_type=CAS_MESSAGE_TYPE, location: str = ''
- ):
+def publish_to_queue(payment_file_list: List[str], message_type=MessageType.CAS_UPLOADED.value, location: str = ''):
"""Publish message to the Queue, saying file has been uploaded. Using the event spec."""
queue_data = {
'fileSource': 'MINIO',
@@ -33,20 +32,15 @@ def publish_to_queue(payment_file_list: List[str], message_type=CAS_MESSAGE_TYPE
for file_name in payment_file_list:
queue_data['fileName'] = file_name
- payload = {
- 'specversion': '1.x-wip',
- 'type': message_type,
- 'source': file_name,
- 'id': file_name,
- 'time': f'{datetime.now()}',
- 'datacontenttype': 'application/json',
- 'data': queue_data
- }
-
try:
- publish_response(payload=payload,
- client_name=current_app.config.get('NATS_PAYMENT_RECONCILIATIONS_CLIENT_NAME'),
- subject=current_app.config.get('NATS_PAYMENT_RECONCILIATIONS_SUBJECT'))
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.FTP_POLLER.value,
+ message_type=message_type,
+ payload=queue_data,
+ topic=current_app.config.get('FTP_POLLER_TOPIC')
+ )
+ )
except Exception as e: # NOQA # pylint: disable=broad-except
current_app.logger.error(e)
current_app.logger.warning(
diff --git a/jobs/payment-jobs/config.py b/jobs/payment-jobs/config.py
index e9dcebf61..842feb1ba 100644
--- a/jobs/payment-jobs/config.py
+++ b/jobs/payment-jobs/config.py
@@ -112,18 +112,6 @@ class _Config(object): # pylint: disable=too-few-public-methods
AUTH_WEB_STATEMENT_URL = os.getenv('AUTH_WEB_STATEMENT_URL', 'account/orgId/settings/statements')
REGISTRIES_LOGO_IMAGE_NAME = os.getenv('REGISTRIES_LOGO_IMAGE_NAME', 'bc_logo_for_email.png')
- # NATS Config
- NATS_SERVERS = os.getenv('NATS_SERVERS', 'nats://127.0.0.1:4222').split(',')
- NATS_CLUSTER_ID = os.getenv('NATS_CLUSTER_ID', 'test-cluster')
-
- # NATS Config for account events
- NATS_ACCOUNT_CLIENT_NAME = os.getenv('NATS_ACCOUNT_CLIENT_NAME', 'account.events.worker')
- NATS_ACCOUNT_SUBJECT = os.getenv('NATS_ACCOUNT_SUBJECT', 'account.events')
-
- # NATS Config for transaction events
- NATS_PAYMENT_CLIENT_NAME = os.getenv('NATS_PAYMENT_CLIENT_NAME', 'entity.filing.payment.worker')
- NATS_PAYMENT_SUBJECT = os.getenv('NATS_PAYMENT_SUBJECT', 'entity.filing.payment')
-
# Auth API Endpoint
AUTH_API_ENDPOINT = f'{os.getenv("AUTH_API_URL")}/'
@@ -145,9 +133,6 @@ class _Config(object): # pylint: disable=too-few-public-methods
DEBUG = True
PAD_CONFIRMATION_PERIOD_IN_DAYS = int(os.getenv('PAD_CONFIRMATION_PERIOD_IN_DAYS', '3'))
- NATS_MAILER_CLIENT_NAME = os.getenv('NATS_MAILER_CLIENT_NAME', 'account.mailer.worker')
- NATS_MAILER_SUBJECT = os.getenv('NATS_MAILER_SUBJECT', 'account.mailer')
-
# Secret key for encrypting bank account
ACCOUNT_SECRET_KEY = os.getenv('ACCOUNT_SECRET_KEY')
@@ -206,6 +191,11 @@ class _Config(object): # pylint: disable=too-few-public-methods
EFT_HOLDING_GL = os.getenv('EFT_HOLDING_GL', '')
EFT_TRANSFER_DESC = os.getenv('EFT_TRANSFER_DESC', 'BCREGISTRIES {} {} EFT TRANSFER')
+ # GCP PubSub
+ AUDIENCE = os.getenv('AUDIENCE', None)
+ GCP_AUTH_KEY = os.getenv('GCP_AUTH_KEY', None)
+ PUBLISHER_AUDIENCE = os.getenv('PUBLISHER_AUDIENCE', None)
+ ACCOUNT_MAILER_TOPIC = os.getenv('ACCOUNT_MAILER_TOPIC', None)
class DevConfig(_Config): # pylint: disable=too-few-public-methods
TESTING = False
diff --git a/jobs/payment-jobs/devops/vaults.json b/jobs/payment-jobs/devops/vaults.json
index e43ccdbeb..6cd1c8ebf 100644
--- a/jobs/payment-jobs/devops/vaults.json
+++ b/jobs/payment-jobs/devops/vaults.json
@@ -13,14 +13,6 @@
"sbc-auth-admin"
]
},
- {
- "vault": "nats",
- "application": [
- "base",
- "account-events-listener",
- "payment"
- ]
- },
{
"vault": "payment-external-services",
"application": [
diff --git a/jobs/payment-jobs/invoke_jobs.py b/jobs/payment-jobs/invoke_jobs.py
index edd377da7..47845a253 100755
--- a/jobs/payment-jobs/invoke_jobs.py
+++ b/jobs/payment-jobs/invoke_jobs.py
@@ -39,6 +39,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production'), job_name='unknown'
from pay_api.models import db, ma
app = Flask(__name__)
+ app.env = run_mode
app.config.from_object(config.CONFIGURATION[run_mode])
# Configure Sentry
@@ -49,7 +50,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production'), job_name='unknown'
integrations=[FlaskIntegration()],
release=f'payment-jobs-{job_name}@-',
)
- app.logger.info(f'<<<< Starting Payment Jobs >>>>')
+ app.logger.info('<<<< Starting Payment Jobs >>>>')
db.init_app(app)
if init_oracle:
oracle_db.init_app(app)
diff --git a/jobs/payment-jobs/requirements.txt b/jobs/payment-jobs/requirements.txt
index c36f3d540..0210163c4 100644
--- a/jobs/payment-jobs/requirements.txt
+++ b/jobs/payment-jobs/requirements.txt
@@ -1,11 +1,11 @@
--e git+https://github.com/bcgov/sbc-common-components.git@5807c201b057875f9061f60b408ab765ebaa4235#egg=sbc_common_components&subdirectory=python
--e git+https://github.com/bcgov/sbc-pay.git@e0bc6979b761347b32628a28fa1489e919e5d806#egg=pay_api&subdirectory=pay-api
+-e git+https://github.com/bcgov/sbc-common-components.git@5f99e135214ae949c9af951d4aa0b88b1067d853#egg=sbc_common_components&subdirectory=python
+-e git+https://github.com/seeker25/sbc-pay.git@5bf7ab87481a96a72c59db7d6a88c32f712f1ab6#egg=pay_api&subdirectory=pay-api
-e git+https://github.com/thorwolpert/flask-jwt-oidc.git@40cc811ccf70e838c5f7522fe8d83b7e58853539#egg=flask_jwt_oidc
Flask-Caching==2.1.0
Flask-Cors==4.0.0
-Flask-Migrate==2.7.0
+Flask-Migrate==4.0.7
Flask-Moment==1.0.5
-Flask-OpenTracing==2.0.0
+Flask-OpenTracing==1.1.0
Flask-SQLAlchemy==3.1.1
Flask-Script==2.0.6
Flask==3.0.2
@@ -31,26 +31,23 @@ charset-normalizer==3.3.2
click==8.1.7
croniter==2.0.2
cryptography==42.0.5
-cx-Oracle==8.3.0
+cx_Oracle==8.3.0
dataclass-wizard==0.22.3
dpath==2.1.6
ecdsa==0.18.0
-exceptiongroup==1.2.0
expiringdict==1.2.2
flask-marshmallow==1.2.0
google-api-core==2.17.1
google-auth==2.28.1
google-cloud-pubsub==2.20.0
-googleapis-common-protos==1.62.0
+googleapis-common-protos==1.63.0
greenlet==3.0.3
grpc-google-iam-v1==0.13.0
-grpcio-status==1.62.0
-grpcio==1.62.0
+grpcio-status==1.62.1
+grpcio==1.62.1
gunicorn==21.2.0
holidays==0.37
idna==3.6
-importlib_metadata==7.0.2
-importlib_resources==6.1.3
itsdangerous==2.1.2
jaeger-client==4.8.0
jsonschema==4.17.3
@@ -61,9 +58,8 @@ marshmallow==3.21.1
minio==7.2.5
more-itertools==10.2.0
opentracing==2.4.0
-packaging==23.2
+packaging==24.0
paramiko==3.4.0
-pkgutil_resolve_name==1.3.10
proto-plus==1.23.0
protobuf==4.25.3
psycopg2-binary==2.9.9
@@ -72,6 +68,7 @@ pyasn1-modules==0.3.0
pyasn1==0.5.1
pycparser==2.21
pycryptodome==3.20.0
+pyhumps==3.8.0
pyrsistent==0.20.0
pysftp==0.2.9
python-dateutil==2.9.0.post0
@@ -90,4 +87,3 @@ thrift==0.16.0
tornado==6.4
typing_extensions==4.10.0
urllib3==2.2.1
-zipp==3.17.0
diff --git a/jobs/payment-jobs/requirements/bcregistry-libraries.txt b/jobs/payment-jobs/requirements/bcregistry-libraries.txt
index ba230981f..06d616dd6 100644
--- a/jobs/payment-jobs/requirements/bcregistry-libraries.txt
+++ b/jobs/payment-jobs/requirements/bcregistry-libraries.txt
@@ -1,3 +1,4 @@
-e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
--e git+https://github.com/bcgov/sbc-pay.git@main#egg=pay_api&subdirectory=pay-api
+-e git+https://github.com/seeker25/sbc-pay.git@18263#egg=pay-api&subdirectory=pay-api
+-e git+https://github.com/thorwolpert/flask-jwt-oidc.git#egg=flask-jwt-oidc
git+https://github.com/daxiom/simple-cloudevent.py.git
diff --git a/jobs/payment-jobs/requirements/dev.txt b/jobs/payment-jobs/requirements/dev.txt
index 0fe0a755f..f84689dbe 100644
--- a/jobs/payment-jobs/requirements/dev.txt
+++ b/jobs/payment-jobs/requirements/dev.txt
@@ -10,7 +10,7 @@ pytest-cov
Faker
# Lint and code style
-flake8==5.0.4
+flake8
flake8-blind-except
flake8-debugger
flake8-docstrings
diff --git a/jobs/payment-jobs/requirements/prod.txt b/jobs/payment-jobs/requirements/prod.txt
index bbe762038..06a70bb0d 100644
--- a/jobs/payment-jobs/requirements/prod.txt
+++ b/jobs/payment-jobs/requirements/prod.txt
@@ -1,19 +1,19 @@
gunicorn
Flask
Flask-SQLAlchemy
-SQLAlchemy<1.4
-flask-marshmallow==0.11.0
-marshmallow-sqlalchemy==0.25.0
+SQLAlchemy
+flask-marshmallow
+marshmallow-sqlalchemy
python-dotenv
psycopg2-binary
jsonschema==4.17.3
requests
-Werkzeug<2
+Werkzeug
jaeger-client
minio
pysftp
-Flask-Migrate<3
-itsdangerous==2.0.1
+Flask-Migrate
+itsdangerous
dataclass_wizard
launchdarkly-server-sdk
cx_Oracle
diff --git a/jobs/payment-jobs/services/oracle.py b/jobs/payment-jobs/services/oracle.py
index 08c52095a..6ffc99d80 100644
--- a/jobs/payment-jobs/services/oracle.py
+++ b/jobs/payment-jobs/services/oracle.py
@@ -16,8 +16,8 @@
These will get initialized by the application.
"""
import cx_Oracle
-from flask import _app_ctx_stack, current_app
-
+from flask import current_app
+from flask.globals import app_ctx
class OracleDB:
"""Oracle database connection object for re-use in application."""
@@ -40,7 +40,7 @@ def init_app(self, app):
def teardown(ctx=None):
"""Oracle session pool cleans up after itself."""
if not ctx:
- ctx = _app_ctx_stack.top
+ ctx = app_ctx
if hasattr(ctx, '_oracle_pool'):
ctx._oracle_pool.close() # pylint: disable=protected-access
@@ -81,7 +81,7 @@ def connection(self): # pylint: disable=inconsistent-return-statements
and then return an acquired session
:return: cx_Oracle.connection type
"""
- ctx = _app_ctx_stack.top
+ ctx = app_ctx
if ctx is not None:
if not hasattr(ctx, '_oracle_pool'):
ctx._oracle_pool = self._create_pool() # pylint: disable = protected-access; need this method
diff --git a/jobs/payment-jobs/setup.cfg b/jobs/payment-jobs/setup.cfg
index fa9785f04..f4f4f4e4e 100755
--- a/jobs/payment-jobs/setup.cfg
+++ b/jobs/payment-jobs/setup.cfg
@@ -17,7 +17,7 @@ keywords =
[options]
zip_safe = True
-python_requires = >=3.6
+python_requires = >=3.12
include_package_data = True
packages = find:
diff --git a/jobs/payment-jobs/tasks/cfs_create_account_task.py b/jobs/payment-jobs/tasks/cfs_create_account_task.py
index 939e87b0f..62e128ca3 100644
--- a/jobs/payment-jobs/tasks/cfs_create_account_task.py
+++ b/jobs/payment-jobs/tasks/cfs_create_account_task.py
@@ -21,7 +21,7 @@
from pay_api.models import PaymentAccount as PaymentAccountModel
from pay_api.services.cfs_service import CFSService
from pay_api.services.oauth_service import OAuthService
-from pay_api.utils.constants import RECEIPT_METHOD_PAD_DAILY, RECEIPT_METHOD_EFT_MONTHLY
+from pay_api.utils.constants import RECEIPT_METHOD_EFT_MONTHLY, RECEIPT_METHOD_PAD_DAILY
from pay_api.utils.enums import AuthHeaderType, CfsAccountStatus, ContentType, PaymentMethod
from sentry_sdk import capture_message
from services import routing_slip
@@ -73,13 +73,6 @@ def _get_account_contact(cls, auth_token: str, auth_account_id: str):
@classmethod
def _create_cfs_account(cls, pending_account: CfsAccountModel, pay_account: PaymentAccountModel, auth_token: str):
- # If PAD Account creation in CFS is paused, then just continue
- # TODO Remove once PAD account bugs are fixed and stable on CAS side.
- if current_app.config.get('CFS_STOP_PAD_ACCOUNT_CREATION') and \
- pay_account.payment_method == PaymentMethod.PAD.value:
- current_app.logger.info('Continuing to next record as CFS PAD account creation is stopped.')
- return
-
current_app.logger.info(
f'Creating pay system instance for {pay_account.payment_method} for account {pay_account.id}.')
diff --git a/jobs/payment-jobs/tasks/eft_transfer_task.py b/jobs/payment-jobs/tasks/eft_transfer_task.py
index 8b62c952e..d6f9bdfe9 100644
--- a/jobs/payment-jobs/tasks/eft_transfer_task.py
+++ b/jobs/payment-jobs/tasks/eft_transfer_task.py
@@ -84,18 +84,19 @@ def get_invoices_for_refund_reversal(payment_account_id: int):
@staticmethod
def get_account_ids() -> List[int]:
"""Return account IDs for EFT payments."""
- return db.session.query(func.DISTINCT(InvoiceModel.payment_account_id)) \
+ query = db.session.query(func.DISTINCT(InvoiceModel.payment_account_id)) \
.filter(InvoiceModel.invoice_status_code == InvoiceStatus.PAID.value) \
.filter(InvoiceModel.payment_method_code == PaymentMethod.EFT.value) \
.filter(~exists().where((EFTGLTransferModel.invoice_id == InvoiceModel.id) &
- (EFTGLTransferModel.transfer_type == EFTGlTransferType.TRANSFER.value))).all()
+ (EFTGLTransferModel.transfer_type == EFTGlTransferType.TRANSFER.value)))
+ return db.session.scalars(query).all()
@staticmethod
def create_eft_gl_transfer(eft_holding_gl: str, line_distribution_gl: str, transfer_type: str,
line_item: PaymentLineItemModel, payment_account: PaymentAccountModel):
"""Create EFT GL Transfer record."""
short_name_id = db.session.query(EFTShortnameModel.id) \
- .filter(EFTShortnameModel.auth_account_id == payment_account.auth_account_id).one()
+ .filter(EFTShortnameModel.auth_account_id == payment_account.auth_account_id).one()[0]
source_gl = eft_holding_gl if transfer_type == EFTGlTransferType.TRANSFER.value else line_distribution_gl
target_gl = line_distribution_gl if transfer_type == EFTGlTransferType.TRANSFER.value else eft_holding_gl
now = datetime.now()
@@ -112,8 +113,8 @@ def create_eft_gl_transfer(eft_holding_gl: str, line_distribution_gl: str, trans
)
@classmethod
- def _process_eft_transfer_invoices(cls, invoices: [InvoiceModel], transfer_type: str,
- eft_gl_transfers: dict = None) -> [EFTGLTransferModel]:
+ def _process_eft_transfer_invoices(cls, invoices: List[InvoiceModel], transfer_type: str,
+ eft_gl_transfers: dict = None) -> List[EFTGLTransferModel]:
"""Create EFT GL Transfer for invoice line items."""
eft_holding_gl = current_app.config.get('EFT_HOLDING_GL')
eft_gl_transfers = eft_gl_transfers or {}
@@ -165,7 +166,7 @@ def _process_eft_transfer_invoices(cls, invoices: [InvoiceModel], transfer_type:
return eft_gl_transfers
@staticmethod
- def process_invoice_ejv_links(invoices: [InvoiceModel], ejv_header_model_id: int):
+ def process_invoice_ejv_links(invoices: List[InvoiceModel], ejv_header_model_id: int):
"""Create EJV Invoice Links."""
current_app.logger.info('Creating ejv invoice link records and setting invoice status.')
sequence = 1
@@ -223,7 +224,7 @@ def _create_ejv_file_for_eft_transfer(cls): # pylint:disable=too-many-locals, t
total: float = 0
current_app.logger.info(f'Processing EFT Transfers for account_id: {account_id}.')
- account_transfers: List[EFTGLTransferModel] = transfers[account_id[0]]
+ account_transfers: List[EFTGLTransferModel] = transfers[account_id]
for eft_transfer in account_transfers:
invoice_number = f'#{eft_transfer.invoice_id}'
diff --git a/jobs/payment-jobs/tasks/ejv_partner_distribution_task.py b/jobs/payment-jobs/tasks/ejv_partner_distribution_task.py
index 0fde9b178..cdb0421b5 100644
--- a/jobs/payment-jobs/tasks/ejv_partner_distribution_task.py
+++ b/jobs/payment-jobs/tasks/ejv_partner_distribution_task.py
@@ -251,22 +251,24 @@ def _get_partners_by_batch_type(cls, batch_type) -> List[CorpTypeModel]:
if batch_type == 'GA':
# Rule for GA. Credit is 112 and debit is 112.
- partner_distribution_code_ids: List[int] = query.filter(
+ partner_distribution_code_ids: List[int] = db.session.scalars(query.filter(
DistributionCodeModel.client == bc_reg_client_code
- ).all()
+ )).all()
else:
# Rule for GI. Debit is 112 and credit is not 112.
- partner_distribution_code_ids: List[int] = query.filter(
+ partner_distribution_code_ids: List[int] = db.session.scalars(query.filter(
DistributionCodeModel.client != bc_reg_client_code
- ).all()
+ )).all()
# Find all distribution codes who have these partner distribution codes as disbursement.
- fee_distribution_codes: List[int] = db.session.query(DistributionCodeModel.distribution_code_id).filter(
- DistributionCodeModel.disbursement_distribution_code_id.in_(partner_distribution_code_ids)).all()
+ fee_query = db.session.query(DistributionCodeModel.distribution_code_id).filter(
+ DistributionCodeModel.disbursement_distribution_code_id.in_(partner_distribution_code_ids))
+ fee_distribution_codes: List[int] = db.session.scalars(fee_query).all()
- corp_type_codes: List[str] = db.session.query(FeeScheduleModel.corp_type_code). \
+ corp_type_query = db.session.query(FeeScheduleModel.corp_type_code). \
join(DistributionCodeLinkModel,
- DistributionCodeLinkModel.fee_schedule_id == FeeScheduleModel.fee_schedule_id). \
- filter(DistributionCodeLinkModel.distribution_code_id.in_(fee_distribution_codes)).all()
+ DistributionCodeLinkModel.fee_schedule_id == FeeScheduleModel.fee_schedule_id).\
+ filter(DistributionCodeLinkModel.distribution_code_id.in_(fee_distribution_codes))
+ corp_type_codes: List[str] = db.session.scalars(corp_type_query).all()
return db.session.query(CorpTypeModel).filter(CorpTypeModel.code.in_(corp_type_codes)).all()
diff --git a/jobs/payment-jobs/tasks/ejv_payment_task.py b/jobs/payment-jobs/tasks/ejv_payment_task.py
index 55c15f716..d53d2143b 100644
--- a/jobs/payment-jobs/tasks/ejv_payment_task.py
+++ b/jobs/payment-jobs/tasks/ejv_payment_task.py
@@ -226,18 +226,18 @@ def _get_account_ids_for_payment(cls, batch_type) -> List[int]:
"""Return account IDs for payment."""
# CREDIT : Distribution code against fee schedule
# DEBIT : Distribution code against account.
- bc_reg_client_code = current_app.config.get('CGI_BCREG_CLIENT_CODE') # 112 #TODO
+ bc_reg_client_code = current_app.config.get('CGI_BCREG_CLIENT_CODE')
query = db.session.query(DistributionCodeModel.account_id) \
.filter(DistributionCodeModel.stop_ejv.is_(False) | DistributionCodeModel.stop_ejv.is_(None)) \
.filter(DistributionCodeModel.account_id.isnot(None))
if batch_type == 'GA':
# Rule for GA. Credit is 112 and debit is 112. For BCREG client code is 112
- account_ids: List[int] = query.filter(DistributionCodeModel.client == bc_reg_client_code).all()
+ account_ids: List[int] = query.filter(DistributionCodeModel.client == bc_reg_client_code)
else:
# Rule for GI. Credit is 112 and debit is not 112. For BCREG client code is 112
- account_ids: List[int] = query.filter(DistributionCodeModel.client != bc_reg_client_code).all()
- return account_ids
+ account_ids: List[int] = query.filter(DistributionCodeModel.client != bc_reg_client_code)
+ return db.session.scalars(account_ids).all()
@classmethod
def _get_invoices_for_payment(cls, account_id: int) -> List[InvoiceModel]:
diff --git a/jobs/payment-jobs/tests/docker/docker-compose.yml b/jobs/payment-jobs/tests/docker/docker-compose.yml
index 88aa6a242..dbffdb7ae 100644
--- a/jobs/payment-jobs/tests/docker/docker-compose.yml
+++ b/jobs/payment-jobs/tests/docker/docker-compose.yml
@@ -22,19 +22,6 @@ services:
retries: 10
volumes:
- ./setup:/tmp/keycloak/test/
- nats:
- image: nats-streaming
- restart: always
- expose:
- - 4222
- - 8222
- labels:
- - entity.services=nats
- ports:
- - 4222:4222
- - 8222:8222
- tty: true
-
proxy:
image: nginx:alpine
volumes:
diff --git a/jobs/payment-jobs/tests/docker/nginx.conf b/jobs/payment-jobs/tests/docker/nginx.conf
index d88123d4f..ad848249b 100644
--- a/jobs/payment-jobs/tests/docker/nginx.conf
+++ b/jobs/payment-jobs/tests/docker/nginx.conf
@@ -24,7 +24,7 @@ http {
set $last_path_component example1;
rewrite ^/reports-api/api/v1/(.*) /$1 break;
proxy_set_header Prefer example=$last_path_component;
- proxy_set_header Accept "application/json"; #TODO
+ proxy_set_header Accept "application/json";
proxy_pass http://reports:4010/;
}
diff --git a/jobs/payment-jobs/tests/jobs/conftest.py b/jobs/payment-jobs/tests/jobs/conftest.py
index c496ae842..cb217630a 100644
--- a/jobs/payment-jobs/tests/jobs/conftest.py
+++ b/jobs/payment-jobs/tests/jobs/conftest.py
@@ -22,7 +22,7 @@
from flask_migrate import Migrate, upgrade
from pay_api.models import db as _db
from sqlalchemy import event, text
-from sqlalchemy.schema import DropConstraint, MetaData
+from sqlalchemy_utils import create_database, database_exists, drop_database
from invoke_jobs import create_app
from utils.logger import setup_logging
@@ -31,17 +31,13 @@
@pytest.fixture(scope='session')
def app():
"""Return a session-wide application configured in TEST mode."""
- _app = create_app('testing')
-
- return _app
+ return create_app('testing')
@pytest.fixture(scope='function')
def app_request():
"""Return a session-wide application configured in TEST mode."""
- _app = create_app('testing')
-
- return _app
+ return create_app('testing')
@pytest.fixture(scope='session')
@@ -57,107 +53,60 @@ def client_ctx(app):
yield _client
-@pytest.fixture(scope='session')
+@pytest.fixture(scope='session', autouse=True)
def db(app): # pylint: disable=redefined-outer-name, invalid-name
- """Return a session-wide initialised database.
-
- Drops all existing tables - Meta follows Postgres FKs
- """
+ """Return a session-wide initialised database."""
with app.app_context():
- # Clear out views
- view_sql = """SELECT table_name FROM information_schema.views
- WHERE table_schema='public'
- """
-
- sess = _db.session()
- for seq in [name for (name,) in sess.execute(text(view_sql))]:
- try:
- sess.execute(text('DROP VIEW public.%s ;' % seq))
- print('DROP VIEW public.%s ' % seq)
- except Exception as err: # NOQA pylint: disable=broad-except
- print(f'Error: {err}')
- sess.commit()
-
- # Clear out any existing tables
- metadata = MetaData(_db.engine)
- metadata.reflect()
- for table in metadata.tables.values():
- for fk in table.foreign_keys: # pylint: disable=invalid-name
- _db.engine.execute(DropConstraint(fk.constraint))
- metadata.drop_all()
- _db.drop_all()
-
- sequence_sql = """SELECT sequence_name FROM information_schema.sequences
- WHERE sequence_schema='public'
- """
-
- sess = _db.session()
- for seq in [name for (name,) in sess.execute(text(sequence_sql))]:
- try:
- sess.execute(text('DROP SEQUENCE public.%s ;' % seq))
- print('DROP SEQUENCE public.%s ' % seq)
- except Exception as err: # NOQA # pylint: disable=broad-except
- print(f'Error: {err}')
- sess.commit()
-
- # ############################################
- # There are 2 approaches, an empty database, or the same one that the app will use
- # create the tables
- # _db.create_all()
- # or
- # Use Alembic to load all of the DB revisions including supporting lookup data
- # This is the path we'll use in legal_api!!
-
# even though this isn't referenced directly, it sets up the internal configs that upgrade needs
migrations_path = [folder for folder in sys.path if 'pay-api/pay-api' in folder]
if len(migrations_path) > 0:
migrations_path = migrations_path[0].replace('/pay-api/src', '/pay-api/migrations')
- # Fix for windows.
- else:
- migrations_path = os.path.abspath('../../pay-api/migrations')
+ if database_exists(_db.engine.url):
+ drop_database(_db.engine.url)
+ create_database(_db.engine.url)
+ _db.session().execute(text('SET TIME ZONE "UTC";'))
Migrate(app, _db, directory=migrations_path)
upgrade()
-
# Restore the logging, alembic and sqlalchemy have their own logging from alembic.ini.
setup_logging(os.path.abspath('logging.conf'))
return _db
-@pytest.fixture(scope='function')
-def session(app, db): # pylint: disable=redefined-outer-name, invalid-name
+@pytest.fixture(scope='function', autouse=True)
+def session(db, app): # pylint: disable=redefined-outer-name, invalid-name
"""Return a function-scoped session."""
with app.app_context():
- conn = db.engine.connect()
- txn = conn.begin()
-
- options = dict(bind=conn, binds={})
- sess = db.create_scoped_session(options=options)
-
- # establish a SAVEPOINT just before beginning the test
- # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint)
- sess.begin_nested()
-
- @event.listens_for(sess(), 'after_transaction_end')
- def restart_savepoint(sess2, trans): # pylint: disable=unused-variable
- # Detecting whether this is indeed the nested transaction of the test
- if trans.nested and not trans._parent.nested: # pylint: disable=protected-access
- # Handle where test DOESN'T session.commit(),
- sess2.expire_all()
- sess.begin_nested()
+ with db.engine.connect() as conn:
+ transaction = conn.begin()
+ sess = db._make_scoped_session(dict(bind=conn)) # pylint: disable=protected-access
+ # Establish SAVEPOINT (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint)
+ nested = sess.begin_nested()
+ db.session = sess
+ db.session.commit = nested.commit
+ db.session.rollback = nested.rollback
+
+ @event.listens_for(sess, 'after_transaction_end')
+ def restart_savepoint(sess2, trans): # pylint: disable=unused-variable
+ nonlocal nested
+ if trans.nested:
+ # Handle where test DOESN'T session.commit()
+ sess2.expire_all()
+ nested = sess.begin_nested()
+ # When using a SAVEPOINT via the Session.begin_nested() or Connection.begin_nested() methods,
+ # the transaction object returned must be used to commit or rollback the SAVEPOINT.
+ # Calling the Session.commit() or Connection.commit() methods will always commit the
+ # outermost transaction; this is a SQLAlchemy 2.0 specific behavior that is
+ # reversed from the 1.x series
+ db.session = sess
+ db.session.commit = nested.commit
+ db.session.rollback = nested.rollback
- db.session = sess
-
- sql = text('select 1')
- sess.execute(sql)
-
- yield sess
-
- # Cleanup
- sess.remove()
- # This instruction rollsback any commit that were executed in the tests.
- txn.rollback()
- conn.close()
+ try:
+ yield db.session
+ finally:
+ db.session.remove()
+ transaction.rollback()
@pytest.fixture(scope='session', autouse=True)
@@ -171,7 +120,6 @@ def auto(docker_services, app):
docker_services.start('paybc')
docker_services.start('reports')
docker_services.start('proxy')
- docker_services.start('nats')
docker_services.start('sftp')
time.sleep(2)
@@ -179,7 +127,6 @@ def auto(docker_services, app):
@pytest.fixture(scope='session')
def docker_compose_files(pytestconfig):
"""Get the docker-compose.yml absolute path."""
- import os
return [
os.path.join(str(pytestconfig.rootdir), 'tests/docker', 'docker-compose.yml')
]
diff --git a/jobs/payment-jobs/tests/jobs/test_cfs_create_account_task.py b/jobs/payment-jobs/tests/jobs/test_cfs_create_account_task.py
index af72cf3ba..6c551f1ec 100644
--- a/jobs/payment-jobs/tests/jobs/test_cfs_create_account_task.py
+++ b/jobs/payment-jobs/tests/jobs/test_cfs_create_account_task.py
@@ -28,7 +28,7 @@
from tasks.cfs_create_account_task import CreateAccountTask
from utils import mailer
-from .factory import factory_create_online_banking_account, factory_create_pad_account, factory_create_eft_account
+from .factory import factory_create_eft_account, factory_create_online_banking_account, factory_create_pad_account
def test_create_account_setup(session):
@@ -50,7 +50,7 @@ def test_create_pad_account(session):
assert cfs_account.cfs_site
assert cfs_account.cfs_account
assert cfs_account.payment_instrument_number
-
+
def test_create_eft_account(session):
"""Test create account."""
diff --git a/jobs/payment-jobs/tests/jobs/test_eft_transfer_task.py b/jobs/payment-jobs/tests/jobs/test_eft_transfer_task.py
index 09a3d6254..0cd14d431 100644
--- a/jobs/payment-jobs/tests/jobs/test_eft_transfer_task.py
+++ b/jobs/payment-jobs/tests/jobs/test_eft_transfer_task.py
@@ -19,12 +19,9 @@
from datetime import datetime
from typing import List
-import pytest
-from flask import Flask
from pay_api.models import DistributionCode, EFTGLTransfer, EjvFile, EjvHeader, EjvInvoiceLink, FeeSchedule, Invoice, db
from pay_api.utils.enums import DisbursementStatus, EFTGlTransferType, EjvFileType, InvoiceStatus, PaymentMethod
-import config
from tasks.eft_transfer_task import EftTransferTask
from .factory import (
@@ -32,20 +29,7 @@
factory_payment_line_item)
-app = None
-
-
-@pytest.fixture
-def setup():
- """Initialize app with test env for testing."""
- global app
- app = Flask(__name__)
- app.env = 'testing'
- app.config.from_object(config.CONFIGURATION['testing'])
- app.config['EFT_HOLDING_GL'] = '1128888888888888888000000000000000'
-
-
-def test_eft_transfer(setup, session, monkeypatch):
+def test_eft_transfer(app, session, monkeypatch):
"""Test EFT Holdings GL Transfer for EFT invoices.
Steps:
@@ -77,6 +61,7 @@ def test_eft_transfer(setup, session, monkeypatch):
dist_code.service_fee_distribution_code_id = service_fee_dist_code.distribution_code_id
dist_code.save()
+ app.config['EFT_HOLDING_GL'] = '1128888888888888888000000000000000'
eft_holding_gl = app.config['EFT_HOLDING_GL']
distribution_gl = EftTransferTask.get_distribution_string(dist_code).strip()
service_fee_gl = EftTransferTask.get_distribution_string(service_fee_dist_code).strip()
@@ -100,8 +85,7 @@ def test_eft_transfer(setup, session, monkeypatch):
fee_dist_id=dist_code.distribution_code_id)
invoices.append(inv)
- with app.app_context():
- EftTransferTask.create_ejv_file()
+ EftTransferTask.create_ejv_file()
# Lookup invoice and assert disbursement status
for invoice in invoices:
diff --git a/jobs/payment-jobs/tests/jobs/test_generate_statements.py b/jobs/payment-jobs/tests/jobs/test_generate_statements.py
index df9faeaa5..5f31a016b 100644
--- a/jobs/payment-jobs/tests/jobs/test_generate_statements.py
+++ b/jobs/payment-jobs/tests/jobs/test_generate_statements.py
@@ -16,7 +16,7 @@
Test-Suite to ensure that the UpdateStalePayment is working as expected.
"""
-from datetime import datetime, timedelta, timezone
+from datetime import datetime, timedelta
import pytz
from freezegun import freeze_time
@@ -133,7 +133,6 @@ def test_bcol_weekly_to_eft_statement(session):
total=50)
assert weekly_invoice is not None
- assert weekly_invoice.created_on == invoice_create_date.astimezone(timezone.utc).replace(tzinfo=None)
statement_from_date = localize_date(datetime(2023, 10, 8, 12, 0))
statement_to_date = localize_date(datetime(2023, 10, 12, 12, 0))
@@ -179,7 +178,6 @@ def test_bcol_weekly_to_eft_statement(session):
total=50)
assert monthly_invoice is not None
- assert monthly_invoice.created_on == invoice_create_date.astimezone(timezone.utc).replace(tzinfo=None)
# Regenerate monthly statement using date override - it will clean up the previous empty monthly statement first
StatementTask.generate_statements((generate_date - timedelta(days=1)).strftime('%Y-%m-%d'))
@@ -212,7 +210,6 @@ def test_bcol_monthly_to_eft_statement(session):
total=50)
assert bcol_invoice is not None
- assert bcol_invoice.created_on == invoice_create_date.astimezone(timezone.utc).replace(tzinfo=None)
statement_from_date = localize_date(datetime(2023, 10, 1, 12, 0))
statement_to_date = localize_date(datetime(2023, 10, 30, 12, 0))
@@ -259,7 +256,6 @@ def test_bcol_monthly_to_eft_statement(session):
total=50)
assert monthly_invoice is not None
- assert monthly_invoice.created_on == invoice_create_date.astimezone(timezone.utc).replace(tzinfo=None)
# Regenerate monthly statement using date override - it will clean up the previous empty monthly statement first
StatementTask.generate_statements((generate_date - timedelta(days=1)).strftime('%Y-%m-%d'))
diff --git a/jobs/payment-jobs/tests/jobs/test_statement_due_task.py b/jobs/payment-jobs/tests/jobs/test_statement_due_task.py
index 48dcc1ffe..9b3f596d8 100644
--- a/jobs/payment-jobs/tests/jobs/test_statement_due_task.py
+++ b/jobs/payment-jobs/tests/jobs/test_statement_due_task.py
@@ -113,39 +113,37 @@ def test_send_unpaid_statement_notification(setup, session):
summary = Statement.get_summary(account.auth_account_id, statements[0][0].id)
total_amount_owing = summary['total_due']
- with app.app_context():
- # Assert notification was published to the mailer queue
- with patch('tasks.statement_due_task.publish_payment_notification') as mock_mailer:
- # Freeze time to due date - trigger due notification
- with freeze_time(last_day):
- StatementDueTask.process_unpaid_statements()
- mock_mailer.assert_called_with(StatementNotificationInfo(auth_account_id=account.auth_account_id,
- statement=statements[0][0],
- is_due=True,
- due_date=last_day.date(),
- emails=statement_recipient.email,
- total_amount_owing=total_amount_owing))
-
- # Freeze time to due date - trigger reminder notification
- with freeze_time(last_day - timedelta(days=7)):
- StatementDueTask.process_unpaid_statements()
- mock_mailer.assert_called_with(StatementNotificationInfo(auth_account_id=account.auth_account_id,
- statement=statements[0][0],
- is_due=False,
- due_date=last_day.date(),
- emails=statement_recipient.email,
- total_amount_owing=total_amount_owing))
+ # Assert notification was published to the mailer queue
+ with patch('tasks.statement_due_task.publish_payment_notification') as mock_mailer:
+ # Freeze time to due date - trigger due notification
+ with freeze_time(last_day):
+ StatementDueTask.process_unpaid_statements()
+ mock_mailer.assert_called_with(StatementNotificationInfo(auth_account_id=account.auth_account_id,
+ statement=statements[0][0],
+ is_due=True,
+ due_date=last_day.date(),
+ emails=statement_recipient.email,
+ total_amount_owing=total_amount_owing))
+
+ # Freeze time to due date - trigger reminder notification
+ with freeze_time(last_day - timedelta(days=7)):
+ StatementDueTask.process_unpaid_statements()
+ mock_mailer.assert_called_with(StatementNotificationInfo(auth_account_id=account.auth_account_id,
+ statement=statements[0][0],
+ is_due=False,
+ due_date=last_day.date(),
+ emails=statement_recipient.email,
+ total_amount_owing=total_amount_owing))
def test_unpaid_statement_notification_not_sent(setup, session):
"""Assert payment reminder event is not being sent."""
- with app.app_context():
- # Assert notification was published to the mailer queue
- with patch('tasks.statement_due_task.publish_payment_notification') as mock_mailer:
- # Freeze time to 10th of the month - should not trigger any notification
- with freeze_time(current_local_time().replace(day=10)):
- StatementDueTask.process_unpaid_statements()
- mock_mailer.assert_not_called()
+ # Assert notification was published to the mailer queue
+ with patch('tasks.statement_due_task.publish_payment_notification') as mock_mailer:
+ # Freeze time to 10th of the month - should not trigger any notification
+ with freeze_time(current_local_time().replace(day=10)):
+ StatementDueTask.process_unpaid_statements()
+ mock_mailer.assert_not_called()
def test_overdue_invoices_updated(setup, session):
@@ -172,9 +170,8 @@ def test_overdue_invoices_updated(setup, session):
assert invoice2.invoice_status_code == InvoiceStatus.CREATED.value
assert account.payment_method == PaymentMethod.EFT.value
- with app.app_context():
- # Freeze time to 1st of the month - should trigger overdue status update for previous month invoices
- with freeze_time(current_local_time().replace(day=1)):
- StatementDueTask.process_unpaid_statements()
- assert invoice.invoice_status_code == InvoiceStatus.OVERDUE.value
- assert invoice2.invoice_status_code == InvoiceStatus.CREATED.value
+ # Freeze time to 1st of the month - should trigger overdue status update for previous month invoices
+ with freeze_time(current_local_time().replace(day=1)):
+ StatementDueTask.process_unpaid_statements()
+ assert invoice.invoice_status_code == InvoiceStatus.OVERDUE.value
+ assert invoice2.invoice_status_code == InvoiceStatus.CREATED.value
diff --git a/jobs/payment-jobs/tests/jobs/test_statement_notification_task.py b/jobs/payment-jobs/tests/jobs/test_statement_notification_task.py
index 4281dafac..750444122 100644
--- a/jobs/payment-jobs/tests/jobs/test_statement_notification_task.py
+++ b/jobs/payment-jobs/tests/jobs/test_statement_notification_task.py
@@ -91,46 +91,45 @@ def test_send_notifications(session):
])
def test_send_monthly_notifications(setup, session, payment_method_code): # pylint: disable=unused-argument
"""Test send monthly statement notifications."""
- with app.app_context():
- # create statement, invoice, payment data for previous month
- last_month, last_year = get_previous_month_and_year()
- previous_month_year = datetime(last_year, last_month, 5)
-
- account, invoice, inv_ref, payment, \
- statement_recipient, statement_settings = create_test_data(payment_method_code,
- previous_month_year,
- StatementFrequency.MONTHLY.value)
-
- assert invoice.payment_method_code == payment_method_code
- assert account.payment_method == payment_method_code
-
- # Generate statement for previous month - freeze time to the 1st of the current month
- with freeze_time(datetime.now().replace(day=1)):
- StatementTask.generate_statements()
-
- # Assert statements and invoice was created
- statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
- limit=100)
- assert statements is not None
- assert len(statements) == 2 # items results and page total
- assert len(statements[0]) == 1 # items
- invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
- assert invoices is not None
- assert invoices[0].invoice_id == invoice.id
-
- # Assert notification send_email was invoked
- with patch.object(StatementNotificationTask, 'send_email', return_value=True) as mock_mailer:
- with patch('tasks.statement_notification_task.get_token') as mock_get_token:
- mock_get_token.return_value = 'mock_token'
- StatementNotificationTask.send_notifications()
- mock_get_token.assert_called_once()
- # Assert token and email recipient - mock any for HTML generated
- mock_mailer.assert_called_with(mock_get_token.return_value, statement_recipient.email, ANY)
-
- # Assert statement notification code indicates success
- statement: Statement = Statement.find_by_id(statements[0][0].id)
- assert statement is not None
- assert statement.notification_status_code == NotificationStatus.SUCCESS.value
+ # create statement, invoice, payment data for previous month
+ last_month, last_year = get_previous_month_and_year()
+ previous_month_year = datetime(last_year, last_month, 5)
+
+ account, invoice, inv_ref, payment, \
+ statement_recipient, statement_settings = create_test_data(payment_method_code,
+ previous_month_year,
+ StatementFrequency.MONTHLY.value)
+
+ assert invoice.payment_method_code == payment_method_code
+ assert account.payment_method == payment_method_code
+
+ # Generate statement for previous month - freeze time to the 1st of the current month
+ with freeze_time(datetime.now().replace(day=1)):
+ StatementTask.generate_statements()
+
+ # Assert statements and invoice was created
+ statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
+ limit=100)
+ assert statements is not None
+ assert len(statements) == 2 # items results and page total
+ assert len(statements[0]) == 1 # items
+ invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
+ assert invoices is not None
+ assert invoices[0].invoice_id == invoice.id
+
+ # Assert notification send_email was invoked
+ with patch.object(StatementNotificationTask, 'send_email', return_value=True) as mock_mailer:
+ with patch('tasks.statement_notification_task.get_token') as mock_get_token:
+ mock_get_token.return_value = 'mock_token'
+ StatementNotificationTask.send_notifications()
+ mock_get_token.assert_called_once()
+ # Assert token and email recipient - mock any for HTML generated
+ mock_mailer.assert_called_with(mock_get_token.return_value, statement_recipient.email, ANY)
+
+ # Assert statement notification code indicates success
+ statement: Statement = Statement.find_by_id(statements[0][0].id)
+ assert statement is not None
+ assert statement.notification_status_code == NotificationStatus.SUCCESS.value
@pytest.mark.parametrize('payment_method_code', [
@@ -144,174 +143,170 @@ def test_send_monthly_notifications(setup, session, payment_method_code): # pyl
])
def test_send_monthly_notifications_failed(setup, session, payment_method_code): # pylint: disable=unused-argument
"""Test send monthly statement notifications failure."""
- with app.app_context():
- # create statement, invoice, payment data for previous month
- last_month, last_year = get_previous_month_and_year()
- previous_month_year = datetime(last_year, last_month, 5)
-
- account, invoice, inv_ref, payment, \
- statement_recipient, statement_settings = create_test_data(payment_method_code,
- previous_month_year,
- StatementFrequency.MONTHLY.value)
-
- assert invoice.payment_method_code == payment_method_code
- assert account.payment_method == payment_method_code
-
- # Generate statement for previous month - freeze time to the 1st of the current month
- with freeze_time(datetime.now().replace(day=1)):
- StatementTask.generate_statements()
-
- # Assert statements and invoice was created
- statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
- limit=100)
- assert statements is not None
- assert len(statements) == 2 # items results and page total
- assert len(statements[0]) == 1 # items
- invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
- assert invoices is not None
- assert invoices[0].invoice_id == invoice.id
-
- # Assert notification send_email was invoked
- with patch.object(StatementNotificationTask, 'send_email', return_value=False) as mock_mailer:
- with patch('tasks.statement_notification_task.get_token') as mock_get_token:
- mock_get_token.return_value = 'mock_token'
- StatementNotificationTask.send_notifications()
- mock_get_token.assert_called_once()
- # Assert token and email recipient - mock any for HTML generated
- mock_mailer.assert_called_with(mock_get_token.return_value, statement_recipient.email, ANY)
-
- # Assert statement notification code indicates failed
- statement: Statement = Statement.find_by_id(statements[0][0].id)
- assert statement is not None
- assert statement.notification_status_code == NotificationStatus.FAILED.value
+ # create statement, invoice, payment data for previous month
+ last_month, last_year = get_previous_month_and_year()
+ previous_month_year = datetime(last_year, last_month, 5)
+
+ account, invoice, inv_ref, payment, \
+ statement_recipient, statement_settings = create_test_data(payment_method_code,
+ previous_month_year,
+ StatementFrequency.MONTHLY.value)
+
+ assert invoice.payment_method_code == payment_method_code
+ assert account.payment_method == payment_method_code
+
+ # Generate statement for previous month - freeze time to the 1st of the current month
+ with freeze_time(datetime.now().replace(day=1)):
+ StatementTask.generate_statements()
+
+ # Assert statements and invoice was created
+ statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
+ limit=100)
+ assert statements is not None
+ assert len(statements) == 2 # items results and page total
+ assert len(statements[0]) == 1 # items
+ invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
+ assert invoices is not None
+ assert invoices[0].invoice_id == invoice.id
+
+ # Assert notification send_email was invoked
+ with patch.object(StatementNotificationTask, 'send_email', return_value=False) as mock_mailer:
+ with patch('tasks.statement_notification_task.get_token') as mock_get_token:
+ mock_get_token.return_value = 'mock_token'
+ StatementNotificationTask.send_notifications()
+ mock_get_token.assert_called_once()
+ # Assert token and email recipient - mock any for HTML generated
+ mock_mailer.assert_called_with(mock_get_token.return_value, statement_recipient.email, ANY)
+
+ # Assert statement notification code indicates failed
+ statement: Statement = Statement.find_by_id(statements[0][0].id)
+ assert statement is not None
+ assert statement.notification_status_code == NotificationStatus.FAILED.value
def test_send_eft_notifications(setup, session): # pylint: disable=unused-argument
"""Test send monthly EFT statement notifications."""
- with app.app_context():
- # create statement, invoice, payment data for previous month
- last_month, last_year = get_previous_month_and_year()
- previous_month_year = datetime(last_year, last_month, 5)
- account, invoice, inv_ref, payment, \
- statement_recipient, statement_settings = create_test_data(PaymentMethod.EFT.value,
- previous_month_year,
- StatementFrequency.MONTHLY.value,
- 351.50)
-
- assert invoice.payment_method_code == PaymentMethod.EFT.value
- assert account.payment_method == PaymentMethod.EFT.value
-
- # Generate statement for previous month - freeze time to the 1st of the current month
- with freeze_time(datetime.now().replace(day=1)):
- StatementTask.generate_statements()
-
- # Assert statements and invoice was created
- statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
- limit=100)
- assert statements is not None
- assert len(statements) == 2 # items results and page total
- assert len(statements[0]) == 1 # items
- invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
- assert invoices is not None
- assert invoices[0].invoice_id == invoice.id
-
- # Assert notification was published to the mailer queue
- with patch('tasks.statement_notification_task.publish_statement_notification') as mock_mailer:
- with patch('tasks.statement_notification_task.get_token') as mock_get_token:
- mock_get_token.return_value = 'mock_token'
- StatementNotificationTask.send_notifications()
- mock_get_token.assert_called_once()
- mock_mailer.assert_called_once_with(account, statements[0][0], 351.5, statement_recipient.email)
-
- # Assert statement notification code indicates success
- statement: Statement = Statement.find_by_id(statements[0][0].id)
- assert statement is not None
- assert statement.notification_status_code == NotificationStatus.SUCCESS.value
+ # create statement, invoice, payment data for previous month
+ last_month, last_year = get_previous_month_and_year()
+ previous_month_year = datetime(last_year, last_month, 5)
+ account, invoice, inv_ref, payment, \
+ statement_recipient, statement_settings = create_test_data(PaymentMethod.EFT.value,
+ previous_month_year,
+ StatementFrequency.MONTHLY.value,
+ 351.50)
+
+ assert invoice.payment_method_code == PaymentMethod.EFT.value
+ assert account.payment_method == PaymentMethod.EFT.value
+
+ # Generate statement for previous month - freeze time to the 1st of the current month
+ with freeze_time(datetime.now().replace(day=1)):
+ StatementTask.generate_statements()
+
+ # Assert statements and invoice was created
+ statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
+ limit=100)
+ assert statements is not None
+ assert len(statements) == 2 # items results and page total
+ assert len(statements[0]) == 1 # items
+ invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
+ assert invoices is not None
+ assert invoices[0].invoice_id == invoice.id
+
+ # Assert notification was published to the mailer queue
+ with patch('tasks.statement_notification_task.publish_statement_notification') as mock_mailer:
+ with patch('tasks.statement_notification_task.get_token') as mock_get_token:
+ mock_get_token.return_value = 'mock_token'
+ StatementNotificationTask.send_notifications()
+ mock_get_token.assert_called_once()
+ mock_mailer.assert_called_once_with(account, statements[0][0], 351.5, statement_recipient.email)
+
+ # Assert statement notification code indicates success
+ statement: Statement = Statement.find_by_id(statements[0][0].id)
+ assert statement is not None
+ assert statement.notification_status_code == NotificationStatus.SUCCESS.value
def test_send_eft_notifications_failure(setup, session): # pylint: disable=unused-argument
"""Test send monthly EFT statement notifications failure."""
- with app.app_context():
- # create statement, invoice, payment data for previous month
- last_month, last_year = get_previous_month_and_year()
- previous_month_year = datetime(last_year, last_month, 5)
- account, invoice, inv_ref, payment, \
- statement_recipient, statement_settings = create_test_data(PaymentMethod.EFT.value,
- previous_month_year,
- StatementFrequency.MONTHLY.value,
- 351.50)
-
- assert invoice.payment_method_code == PaymentMethod.EFT.value
- assert account.payment_method == PaymentMethod.EFT.value
-
- # Generate statement for previous month - freeze time to the 1st of the current month
- with freeze_time(datetime.now().replace(day=1)):
- StatementTask.generate_statements()
-
- # Assert statements and invoice was created
- statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
- limit=100)
- assert statements is not None
- assert len(statements) == 2 # items results and page total
- assert len(statements[0]) == 1 # items
- invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
- assert invoices is not None
- assert invoices[0].invoice_id == invoice.id
-
- # Assert notification was published to the mailer queue
- with patch('tasks.statement_notification_task.publish_statement_notification') as mock_mailer:
- mock_mailer.side_effect = Exception('Mock Exception')
- with patch('tasks.statement_notification_task.get_token') as mock_get_token:
- mock_get_token.return_value = 'mock_token'
- StatementNotificationTask.send_notifications()
- mock_get_token.assert_called_once()
- mock_mailer.assert_called_once_with(account, statements[0][0], 351.5, statement_recipient.email)
-
- # Assert statement notification code indicates failed
- statement: Statement = Statement.find_by_id(statements[0][0].id)
- assert statement is not None
- assert statement.notification_status_code == NotificationStatus.FAILED.value
+ # create statement, invoice, payment data for previous month
+ last_month, last_year = get_previous_month_and_year()
+ previous_month_year = datetime(last_year, last_month, 5)
+ account, invoice, inv_ref, payment, \
+ statement_recipient, statement_settings = create_test_data(PaymentMethod.EFT.value,
+ previous_month_year,
+ StatementFrequency.MONTHLY.value,
+ 351.50)
+
+ assert invoice.payment_method_code == PaymentMethod.EFT.value
+ assert account.payment_method == PaymentMethod.EFT.value
+
+ # Generate statement for previous month - freeze time to the 1st of the current month
+ with freeze_time(datetime.now().replace(day=1)):
+ StatementTask.generate_statements()
+
+ # Assert statements and invoice was created
+ statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
+ limit=100)
+ assert statements is not None
+ assert len(statements) == 2 # items results and page total
+ assert len(statements[0]) == 1 # items
+ invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
+ assert invoices is not None
+ assert invoices[0].invoice_id == invoice.id
+
+ # Assert notification was published to the mailer queue
+ with patch('tasks.statement_notification_task.publish_statement_notification') as mock_mailer:
+ mock_mailer.side_effect = Exception('Mock Exception')
+ with patch('tasks.statement_notification_task.get_token') as mock_get_token:
+ mock_get_token.return_value = 'mock_token'
+ StatementNotificationTask.send_notifications()
+ mock_get_token.assert_called_once()
+ mock_mailer.assert_called_once_with(account, statements[0][0], 351.5, statement_recipient.email)
+
+ # Assert statement notification code indicates failed
+ statement: Statement = Statement.find_by_id(statements[0][0].id)
+ assert statement is not None
+ assert statement.notification_status_code == NotificationStatus.FAILED.value
def test_send_eft_notifications_ff_disabled(setup, session): # pylint: disable=unused-argument
"""Test send monthly EFT statement notifications failure."""
- with app.app_context():
- # create statement, invoice, payment data for previous month
- last_month, last_year = get_previous_month_and_year()
- previous_month_year = datetime(last_year, last_month, 5)
- account, invoice, inv_ref, payment, \
- statement_recipient, statement_settings = create_test_data(PaymentMethod.EFT.value,
- previous_month_year,
- StatementFrequency.MONTHLY.value,
- 351.50)
-
- assert invoice.payment_method_code == PaymentMethod.EFT.value
- assert account.payment_method == PaymentMethod.EFT.value
-
- # Generate statement for previous month - freeze time to the 1st of the current month
- with freeze_time(datetime.now().replace(day=1)):
- StatementTask.generate_statements()
-
- # Assert statements and invoice was created
- statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
- limit=100)
- assert statements is not None
- assert len(statements) == 2 # items results and page total
- assert len(statements[0]) == 1 # items
- invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
- assert invoices is not None
- assert invoices[0].invoice_id == invoice.id
-
- # Assert notification was published to the mailer queue
- with patch('tasks.statement_notification_task.publish_statement_notification') as mock_mailer:
- with patch('tasks.statement_notification_task.get_token') as mock_get_token:
- with patch('tasks.statement_notification_task.flags.is_on', return_value=False):
- mock_get_token.return_value = 'mock_token'
- StatementNotificationTask.send_notifications()
- mock_get_token.assert_called_once()
- mock_mailer.assert_not_called()
-
- # Assert statement notification code indicates skipped
- statement: Statement = Statement.find_by_id(statements[0][0].id)
- assert statement is not None
- assert statement.notification_status_code == NotificationStatus.SKIP.value
+ # create statement, invoice, payment data for previous month
+ last_month, last_year = get_previous_month_and_year()
+ previous_month_year = datetime(last_year, last_month, 5)
+ account, invoice, inv_ref, payment, \
+ statement_recipient, statement_settings = create_test_data(PaymentMethod.EFT.value,
+ previous_month_year,
+ StatementFrequency.MONTHLY.value,
+ 351.50)
+
+ assert invoice.payment_method_code == PaymentMethod.EFT.value
+ assert account.payment_method == PaymentMethod.EFT.value
+
+ # Generate statement for previous month - freeze time to the 1st of the current month
+ with freeze_time(datetime.now().replace(day=1)):
+ StatementTask.generate_statements()
+
+ # Assert statements and invoice was created
+ statements = Statement.find_all_statements_for_account(auth_account_id=account.auth_account_id, page=1,
+ limit=100)
+ assert statements is not None
+ assert len(statements) == 2 # items results and page total
+ assert len(statements[0]) == 1 # items
+ invoices = StatementInvoices.find_all_invoices_for_statement(statements[0][0].id)
+ assert invoices is not None
+ assert invoices[0].invoice_id == invoice.id
+
+ # Assert notification was published to the mailer queue
+ with patch('tasks.statement_notification_task.publish_statement_notification') as mock_mailer:
+ with patch('tasks.statement_notification_task.get_token') as mock_get_token:
+ with patch('tasks.statement_notification_task.flags.is_on', return_value=False):
+ mock_get_token.return_value = 'mock_token'
+ StatementNotificationTask.send_notifications()
+ mock_get_token.assert_called_once()
+ mock_mailer.assert_not_called()
+
+ # Assert statement notification code indicates skipped
+ statement: Statement = Statement.find_by_id(statements[0][0].id)
+ assert statement is not None
+ assert statement.notification_status_code == NotificationStatus.SKIP.value
diff --git a/jobs/payment-jobs/utils/logger.py b/jobs/payment-jobs/utils/logger.py
index 8b88ddcf2..8568f87dd 100755
--- a/jobs/payment-jobs/utils/logger.py
+++ b/jobs/payment-jobs/utils/logger.py
@@ -18,12 +18,7 @@
def setup_logging(conf):
- """Create the services logger.
-
- TODO should be reworked to load in the proper loggers and remove others
- """
- # log_file_path = path.join(path.abspath(path.dirname(__file__)), conf)
-
+ """Create the services logger."""
if conf and path.isfile(conf):
logging.config.fileConfig(conf)
print(f'Configure logging, from conf:{conf}', file=sys.stdout)
diff --git a/jobs/payment-jobs/utils/mailer.py b/jobs/payment-jobs/utils/mailer.py
index 439cfd740..1379db03c 100644
--- a/jobs/payment-jobs/utils/mailer.py
+++ b/jobs/payment-jobs/utils/mailer.py
@@ -20,7 +20,9 @@
from pay_api.models import FeeSchedule as FeeScheduleModel
from pay_api.models import PaymentAccount as PaymentAccountModel
from pay_api.models import Statement as StatementModel
-from pay_api.services.queue_publisher import publish_response
+from pay_api.services import gcp_queue_publisher
+from pay_api.services.gcp_queue_publisher import QueueMessage
+from pay_api.utils.enums import QueueSources, MessageType
from sentry_sdk import capture_message
@@ -37,30 +39,26 @@ class StatementNotificationInfo:
def publish_mailer_events(message_type: str, pay_account: PaymentAccountModel,
- additional_params: Dict = {}):
+ additional_params: Dict = {}):
"""Publish payment message to the mailer queue."""
# Publish message to the Queue, saying account has been activated. Using the event spec.
fee_schedule: FeeScheduleModel = FeeScheduleModel.find_by_filing_type_and_corp_type(corp_type_code='BCR',
filing_type_code='NSF')
-
payload = {
- 'specversion': '1.x-wip',
- 'type': f'bc.registry.payment.{message_type}',
- 'source': f'https://api.pay.bcregistry.gov.bc.ca/v1/accounts/{pay_account.auth_account_id}',
- 'id': f'{pay_account.auth_account_id}',
- 'time': f'{datetime.now()}',
- 'datacontenttype': 'application/json',
- 'data': {
- 'accountId': pay_account.auth_account_id,
- 'nsfFee': float(fee_schedule.fee.amount),
- **additional_params
- }
+ 'accountId': pay_account.auth_account_id,
+ 'nsfFee': float(fee_schedule.fee.amount),
+ **additional_params
}
try:
- publish_response(payload=payload,
- client_name=current_app.config.get('NATS_MAILER_CLIENT_NAME'),
- subject=current_app.config.get('NATS_MAILER_SUBJECT'))
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_JOBS.value,
+ message_type=message_type,
+ payload=payload,
+ topic=current_app.config.get('ACCOUNT_MAILER_TOPIC')
+ )
+ )
except Exception as e: # pylint: disable=broad-except
current_app.logger.error(e)
current_app.logger.warning('Notification to Queue failed for the Account Mailer %s - %s',
@@ -74,25 +72,22 @@ def publish_statement_notification(pay_account: PaymentAccountModel, statement:
total_amount_owing: float, emails: str) -> bool:
"""Publish payment statement notification message to the mailer queue."""
payload = {
- 'specversion': '1.x-wip',
- 'type': f'bc.registry.payment.statementNotification',
- 'source': f'https://api.pay.bcregistry.gov.bc.ca/v1/accounts/{pay_account.auth_account_id}',
- 'id': f'{pay_account.auth_account_id}',
- 'time': f'{datetime.now()}',
- 'datacontenttype': 'application/json',
- 'data': {
- 'emailAddresses': emails,
- 'accountId': pay_account.auth_account_id,
- 'fromDate': f'{statement.from_date}',
- 'toDate': f'{statement.to_date}',
- 'statementFrequency': statement.frequency,
- 'totalAmountOwing': total_amount_owing
- }
+ 'emailAddresses': emails,
+ 'accountId': pay_account.auth_account_id,
+ 'fromDate': f'{statement.from_date}',
+ 'toDate': f'{statement.to_date}',
+ 'statementFrequency': statement.frequency,
+ 'totalAmountOwing': total_amount_owing
}
try:
- publish_response(payload=payload,
- client_name=current_app.config.get('NATS_MAILER_CLIENT_NAME'),
- subject=current_app.config.get('NATS_MAILER_SUBJECT'))
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_JOBS.value,
+ message_type=MessageType.STATEMENT_NOTIFICATION.value,
+ payload=payload,
+ topic=current_app.config.get('ACCOUNT_MAILER_TOPIC')
+ )
+ )
except Exception as e: # pylint: disable=broad-except
current_app.logger.error(e)
current_app.logger.warning('Notification to Queue failed for the Account Mailer %s - %s',
@@ -108,28 +103,25 @@ def publish_statement_notification(pay_account: PaymentAccountModel, statement:
def publish_payment_notification(info: StatementNotificationInfo) -> bool:
"""Publish payment notification message to the mailer queue."""
- notification_type = 'bc.registry.payment.statementDueNotification' if info.is_due \
- else 'bc.registry.payment.statementReminderNotification'
+ notification_type = MessageType.STATEMENT_DUE_NOTIFICATION.value if info.is_due \
+ else MessageType.STATEMENT_REMINDER_NOTIFICATION.value
payload = {
- 'specversion': '1.x-wip',
- 'type': notification_type,
- 'source': f'https://api.pay.bcregistry.gov.bc.ca/v1/accounts/{info.auth_account_id}',
- 'id': info.auth_account_id,
- 'time': f'{datetime.now()}',
- 'datacontenttype': 'application/json',
- 'data': {
- 'emailAddresses': info.emails,
- 'accountId': info.auth_account_id,
- 'dueDate': f'{info.due_date}',
- 'statementFrequency': info.statement.frequency,
- 'totalAmountOwing': info.total_amount_owing
- }
+ 'emailAddresses': info.emails,
+ 'accountId': info.auth_account_id,
+ 'dueDate': f'{info.due_date}',
+ 'statementFrequency': info.statement.frequency,
+ 'totalAmountOwing': info.total_amount_owing
}
try:
- publish_response(payload=payload,
- client_name=current_app.config.get('NATS_MAILER_CLIENT_NAME'),
- subject=current_app.config.get('NATS_MAILER_SUBJECT'))
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_JOBS.value,
+ message_type=notification_type,
+ payload=payload,
+ topic=current_app.config.get('ACCOUNT_MAILER_TOPIC')
+ )
+ )
except Exception as e: # pylint: disable=broad-except
current_app.logger.error(e)
current_app.logger.warning('Notification to Queue failed for the Account Mailer %s - %s',
diff --git a/pay-admin/admin/config.py b/pay-admin/admin/config.py
index 707552ecf..679e8019c 100755
--- a/pay-admin/admin/config.py
+++ b/pay-admin/admin/config.py
@@ -58,7 +58,6 @@ def _get_config(config_key: str, **kwargs):
value = os.getenv(config_key, kwargs.get('default'))
else:
value = os.getenv(config_key)
- # assert value TODO Un-comment once we find a solution to run pre-hook without initializing app
return value
diff --git a/pay-admin/requirements.txt b/pay-admin/requirements.txt
index b3c14e933..a0d4f1ac5 100644
--- a/pay-admin/requirements.txt
+++ b/pay-admin/requirements.txt
@@ -28,7 +28,7 @@ requests-futures==1.0.1
requests==2.31.0
typing_extensions==4.10.0
urllib3==2.2.1
--e git+https://github.com/bcgov/sbc-pay.git@queue_python_upgrade#egg=pay-api&subdirectory=pay-api
+-e git+https://github.com/bcgov/sbc-pay.git@feature-queue-python-upgrade#egg=pay-api&subdirectory=pay-api
-e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
git+https://github.com/daxiom/simple-cloudevent.py.git
git+https://github.com/thorwolpert/flask-jwt-oidc.git
diff --git a/pay-admin/requirements/repo-libraries.txt b/pay-admin/requirements/repo-libraries.txt
index 8aaeaf504..3cffb3f2b 100644
--- a/pay-admin/requirements/repo-libraries.txt
+++ b/pay-admin/requirements/repo-libraries.txt
@@ -1,4 +1,4 @@
--e git+https://github.com/bcgov/sbc-pay.git@queue_python_upgrade#egg=pay-api&subdirectory=pay-api
+-e git+https://github.com/bcgov/sbc-pay.git@feature-queue-python-upgrade#egg=pay-api&subdirectory=pay-api
-e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
git+https://github.com/daxiom/simple-cloudevent.py.git
git+https://github.com/thorwolpert/flask-jwt-oidc.git
diff --git a/pay-admin/setup.cfg b/pay-admin/setup.cfg
index 50d2c7190..fd41bb9f4 100755
--- a/pay-admin/setup.cfg
+++ b/pay-admin/setup.cfg
@@ -17,7 +17,7 @@ keywords =
[options]
zip_safe = True
-python_requires = >=3.6
+python_requires = >=3.12
include_package_data = True
packages = find:
diff --git a/pay-api/migrations/env.py b/pay-api/migrations/env.py
index 911e5fab6..e6603a9b4 100644
--- a/pay-api/migrations/env.py
+++ b/pay-api/migrations/env.py
@@ -80,7 +80,6 @@ def process_revision_directives(context, revision, directives):
context.configure(
connection=connection,
target_metadata=target_metadata,
- compare_type=True,
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args
)
diff --git a/pay-api/requirements.txt b/pay-api/requirements.txt
index 62323aa6c..41d1229bb 100644
--- a/pay-api/requirements.txt
+++ b/pay-api/requirements.txt
@@ -1,6 +1,6 @@
Flask-Caching==2.1.0
Flask-Cors==4.0.0
-Flask-Migrate==2.7.0
+Flask-Migrate==4.0.7
Flask-Moment==1.0.5
Flask-SQLAlchemy==3.1.1
Flask-Script==2.0.6
@@ -9,7 +9,7 @@ Jinja2==3.1.3
Mako==1.3.2
MarkupSafe==2.1.5
SQLAlchemy-Utils==0.41.1
-SQLAlchemy==2.0.27
+SQLAlchemy==2.0.28
Werkzeug==3.0.1
alembic==1.13.1
attrs==23.2.0
@@ -26,15 +26,16 @@ cryptography==42.0.5
dpath==2.1.6
ecdsa==0.18.0
expiringdict==1.2.2
+flask-jwt-oidc==0.3.0
flask-marshmallow==1.2.0
google-api-core==2.17.1
google-auth==2.28.1
google-cloud-pubsub==2.20.0
-googleapis-common-protos==1.62.0
+googleapis-common-protos==1.63.0
greenlet==3.0.3
grpc-google-iam-v1==0.13.0
-grpcio-status==1.62.0
-grpcio==1.62.0
+grpcio-status==1.62.1
+grpcio==1.62.1
gunicorn==21.2.0
holidays==0.37
idna==3.6
@@ -42,11 +43,11 @@ itsdangerous==2.1.2
jaeger-client==4.8.0
jsonschema==4.17.3
launchdarkly-eventsource==1.1.1
-launchdarkly-server-sdk==9.2.1
+launchdarkly-server-sdk==9.2.2
marshmallow-sqlalchemy==1.0.0
-marshmallow==3.21.0
+marshmallow==3.21.1
opentracing==2.4.0
-packaging==23.2
+packaging==24.0
proto-plus==1.23.0
protobuf==4.25.3
psycopg2-binary==2.9.9
@@ -54,6 +55,7 @@ pyRFC3339==1.1
pyasn1-modules==0.3.0
pyasn1==0.5.1
pycparser==2.21
+pyhumps==3.8.0
pyrsistent==0.20.0
python-dateutil==2.9.0.post0
python-dotenv==1.0.1
@@ -62,13 +64,13 @@ pytz==2024.1
requests==2.31.0
rsa==4.9
semver==3.0.2
-sentry-sdk==1.40.6
+sentry-sdk==1.41.0
six==1.16.0
threadloop==1.0.2
thrift==0.16.0
tornado==6.4
typing_extensions==4.10.0
-urllib3==1.26.18
+urllib3==2.2.1
-e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
git+https://github.com/daxiom/simple-cloudevent.py.git
git+https://github.com/thorwolpert/flask-jwt-oidc.git
diff --git a/pay-api/requirements/prod.txt b/pay-api/requirements/prod.txt
index 3e5534840..9c4c234fc 100644
--- a/pay-api/requirements/prod.txt
+++ b/pay-api/requirements/prod.txt
@@ -2,7 +2,7 @@ gunicorn
Flask
Flask-Caching
Flask-Cors
-Flask-Migrate<3
+Flask-Migrate
Flask-Script
Flask-Moment
Flask-SQLAlchemy
@@ -28,3 +28,4 @@ launchdarkly-server-sdk
holidays==0.37
google-auth==2.28.1
google-cloud-pubsub==2.20.0
+pyhumps
diff --git a/pay-api/setup.cfg b/pay-api/setup.cfg
index 35e4ccdc0..4a29f38b9 100755
--- a/pay-api/setup.cfg
+++ b/pay-api/setup.cfg
@@ -17,7 +17,7 @@ keywords =
[options]
zip_safe = True
-python_requires = >=3.6
+python_requires = >=3.12
include_package_data = True
packages = find:
diff --git a/pay-api/src/pay_api/config.py b/pay-api/src/pay_api/config.py
index 68579db3e..0ecc06a19 100755
--- a/pay-api/src/pay_api/config.py
+++ b/pay-api/src/pay_api/config.py
@@ -124,7 +124,10 @@ class _Config(): # pylint: disable=too-few-public-methods
AUDIENCE = os.getenv('AUDIENCE', None)
GCP_AUTH_KEY = os.getenv('GCP_AUTH_KEY', None)
PUBLISHER_AUDIENCE = os.getenv('PUBLISHER_AUDIENCE', None)
- TOPIC_NAME = os.getenv('TOPIC_NAME', None)
+ ACCOUNT_MAILER_TOPIC = os.getenv('ACCOUNT_MAILER_TOPIC', None)
+ EVENT_LISTENER_TOPIC = os.getenv('EVENT_LISTENER_TOPIC', None)
+ NAMEX_PAY_TOPIC = os.getenv('NAMEX_PAY_TOPIC', None)
+ BUSINESS_PAY_TOPIC = os.getenv('BUSINESS_PAY_TOPIC', None)
# Auth API Endpoint
AUTH_API_ENDPOINT = f'{_get_config("AUTH_API_URL")}/'
diff --git a/pay-api/src/pay_api/services/base_payment_system.py b/pay-api/src/pay_api/services/base_payment_system.py
index d7355816b..61bcee943 100644
--- a/pay-api/src/pay_api/services/base_payment_system.py
+++ b/pay-api/src/pay_api/services/base_payment_system.py
@@ -36,10 +36,12 @@
from pay_api.services.invoice_reference import InvoiceReference
from pay_api.services.payment import Payment
from pay_api.services.payment_account import PaymentAccount
+from pay_api.services.gcp_queue_publisher import QueueMessage
from pay_api.utils.enums import (
- CorpType, InvoiceReferenceStatus, InvoiceStatus, PaymentMethod, PaymentStatus, TransactionStatus)
+ CorpType, InvoiceReferenceStatus, InvoiceStatus, MessageType, PaymentMethod, PaymentStatus, QueueSources,
+ TransactionStatus)
from pay_api.utils.user_context import UserContext
-from pay_api.utils.util import get_local_formatted_date_time
+from pay_api.utils.util import get_local_formatted_date_time, get_topic_for_corp_type
from .payment_line_item import PaymentLineItem
from .receipt import Receipt
@@ -150,7 +152,14 @@ def _release_payment(invoice: Invoice):
payload = PaymentTransaction.create_event_payload(invoice, TransactionStatus.COMPLETED.value)
try:
current_app.logger.info(f'Releasing record for invoice {invoice.id}')
- gcp_queue_publisher.publish_to_queue(payload)
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_API.value,
+ message_type=MessageType.PAYMENT.value,
+ payload=payload,
+ topic=get_topic_for_corp_type(invoice.corp_type_code)
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
current_app.logger.error(e)
current_app.logger.error('Notification to Queue failed for the Payment Event %s', payload)
@@ -203,7 +212,6 @@ def _publish_refund_to_mailer(invoice: InvoiceModel):
invoice_id=invoice.id, status_code=InvoiceReferenceStatus.COMPLETED.value)
payment_transaction: PaymentTransactionModel = PaymentTransactionModel.find_recent_completed_by_invoice_id(
invoice_id=invoice.id)
- message_type: str = f'bc.registry.payment.{invoice.payment_method_code.lower()}.refundRequest'
transaction_date_time = receipt.receipt_date if invoice.payment_method_code == PaymentMethod.DRAWDOWN.value \
else payment_transaction.transaction_end_time
filing_description = ''
@@ -211,33 +219,34 @@ def _publish_refund_to_mailer(invoice: InvoiceModel):
if filing_description:
filing_description += ','
filing_description += line_item.description
- q_payload = {
- 'specversion': '1.x-wip',
- 'type': message_type,
- 'source': f'https://api.pay.bcregistry.gov.bc.ca/v1/invoices/{invoice.id}',
- 'id': invoice.id,
- 'datacontenttype': 'application/json',
- 'data': {
- 'identifier': invoice.business_identifier,
- 'orderNumber': receipt.receipt_number,
- 'transactionDateTime': get_local_formatted_date_time(transaction_date_time),
- 'transactionAmount': receipt.receipt_amount,
- 'transactionId': invoice_ref.invoice_number,
- 'refundDate': get_local_formatted_date_time(datetime.now(), '%Y%m%d'),
- 'filingDescription': filing_description
- }
+
+ payload = {
+ 'identifier': invoice.business_identifier,
+ 'orderNumber': receipt.receipt_number,
+ 'transactionDateTime': get_local_formatted_date_time(transaction_date_time),
+ 'transactionAmount': receipt.receipt_amount,
+ 'transactionId': invoice_ref.invoice_number,
+ 'refundDate': get_local_formatted_date_time(datetime.now(), '%Y%m%d'),
+ 'filingDescription': filing_description
}
if invoice.payment_method_code == PaymentMethod.DRAWDOWN.value:
payment_account: PaymentAccountModel = PaymentAccountModel.find_by_id(invoice.payment_account_id)
filing_description += ','
filing_description += invoice_ref.invoice_number
- q_payload['data'].update({
+ payload.update({
'bcolAccount': invoice.bcol_account,
'bcolUser': payment_account.bcol_user_id,
'filingDescription': filing_description
})
- current_app.logger.debug(f'Publishing payment refund request to mailer for {invoice.id} : {q_payload}')
- gcp_queue_publisher.publish_to_queue(q_payload)
+ current_app.logger.debug(f'Publishing payment refund request to mailer for {invoice.id} : {payload}')
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_API.value,
+ message_type=f'{invoice.payment_method_code.lower()}.refundRequest',
+ payload=payload,
+ topic=current_app.config.get('ACCOUNT_MAILER_TOPIC')
+ )
+ )
def complete_payment(self, invoice, invoice_reference):
"""Create payment and related records as if the payment is complete."""
diff --git a/pay-api/src/pay_api/services/eft_service.py b/pay-api/src/pay_api/services/eft_service.py
index e3abab9be..57e8a83bb 100644
--- a/pay-api/src/pay_api/services/eft_service.py
+++ b/pay-api/src/pay_api/services/eft_service.py
@@ -13,7 +13,7 @@
# limitations under the License.
"""Service to manage CFS EFT Payments."""
from datetime import datetime
-from typing import Any, Dict
+from typing import Any, Dict, List
from flask import current_app
@@ -24,7 +24,6 @@
from pay_api.models import PaymentAccount as PaymentAccountModel
from pay_api.models import Receipt as ReceiptModel
from pay_api.utils.enums import CfsAccountStatus, InvoiceReferenceStatus, PaymentMethod, PaymentStatus
-
from .deposit_service import DepositService
from .invoice import Invoice
from .invoice_reference import InvoiceReference
@@ -48,15 +47,10 @@ def create_account(self, identifier: str, contact_info: Dict[str, Any], payment_
cfs_account.status = CfsAccountStatus.PENDING.value
return cfs_account
- def create_invoice(self, payment_account: PaymentAccount, line_items: [PaymentLineItem], invoice: Invoice,
+ def create_invoice(self, payment_account: PaymentAccount, line_items: List[PaymentLineItem], invoice: Invoice,
**kwargs) -> InvoiceReference:
- """Return a static invoice number for direct pay."""
- payment: PaymentModel = PaymentModel.find_payment_for_invoice(invoice.id)
- invoice_reference = self.create_invoice_reference(invoice=invoice, payment=payment)
-
- invoice_reference.save()
-
- return invoice_reference
+ """Do nothing here, we create invoice references on the create CFS_INVOICES job."""
+ return
def apply_credit(self,
invoice: Invoice,
diff --git a/pay-api/src/pay_api/services/fee_schedule.py b/pay-api/src/pay_api/services/fee_schedule.py
index 87ef380d3..5ba5a2b1a 100644
--- a/pay-api/src/pay_api/services/fee_schedule.py
+++ b/pay-api/src/pay_api/services/fee_schedule.py
@@ -353,7 +353,6 @@ def calculate_service_fees(fee_schedule_model: FeeScheduleModel, account_fee: Ac
service_fees: float = 0
# TODO for system accounts with role EXCLUDE_SERVICE_FEES, do not charge service fees for now.
- # Handle it properly later
if not user.is_staff() and \
not (user.is_system() and Role.EXCLUDE_SERVICE_FEES.value in user.roles) \
and fee_schedule_model.fee.amount > 0 and fee_schedule_model.service_fee:
diff --git a/pay-api/src/pay_api/services/gcp_queue_publisher.py b/pay-api/src/pay_api/services/gcp_queue_publisher.py
index 0a4a8659f..2e390d3ae 100644
--- a/pay-api/src/pay_api/services/gcp_queue_publisher.py
+++ b/pay-api/src/pay_api/services/gcp_queue_publisher.py
@@ -1,9 +1,11 @@
"""This module provides Queue type services."""
import base64
+from dataclasses import dataclass
import json
import uuid
from concurrent.futures import CancelledError
from concurrent.futures import TimeoutError # pylint: disable=W0622
+from datetime import datetime, timezone
from flask import current_app
from google.auth import jwt
@@ -11,26 +13,41 @@
from simple_cloudevent import SimpleCloudEvent, to_queue_message
-def publish_to_queue(payload: dict):
+@dataclass
+class QueueMessage:
+ """Queue message data class."""
+
+ source: str
+ message_type: str
+ payload: dict
+ topic: str
+
+
+def publish_to_queue(queue_message: QueueMessage):
"""Publish to GCP PubSub Queue."""
- ce = SimpleCloudEvent()
- ce.id = payload.get('paymentToken', {}).get('id', str(uuid.uuid4()))
- ce.source = 'sbc-pay'
- ce.subject = 'SUBJECT' # invoice.business_identifier
- ce.time = 'TIME' # invoice.payment_date
- ce.type = 'payment'
- ce.data = payload
+ if queue_message.topic is None:
+ current_app.logger.info('Skipping queue message, topic not set.')
+ return
+
+ queue_message_bytes = to_queue_message(SimpleCloudEvent(
+ id=str(uuid.uuid4()),
+ source=f'sbc-pay-{queue_message.source}',
+ # Intentionally blank, this field has been moved to topic.
+ subject=None,
+ time=datetime.now(tz=timezone.utc).isoformat(),
+ type=queue_message.message_type,
+ data=queue_message.payload
+ ))
- _send_to_queue(to_queue_message(ce))
+ _send_to_queue(queue_message.topic, queue_message_bytes)
-def _send_to_queue(payload: bytes):
+def _send_to_queue(topic_name: str, payload: bytes):
"""Send payload to the queue."""
if not ((gcp_auth_key := current_app.config.get('GCP_AUTH_KEY')) and
(audience := current_app.config.get('AUDIENCE')) and
- (topic_name := current_app.config.get('TOPIC_NAME')) and
(publisher_audience := current_app.config.get('PUBLISHER_AUDIENCE'))):
- raise Exception('missing setup arguments') # pylint: disable=W0719
+ raise Exception('Missing setup arguments') # pylint: disable=W0719
try:
service_account_info = json.loads(base64.b64decode(gcp_auth_key).decode('utf-8'))
diff --git a/pay-api/src/pay_api/services/payment_account.py b/pay-api/src/pay_api/services/payment_account.py
index c3e34a509..84fd20a8d 100644
--- a/pay-api/src/pay_api/services/payment_account.py
+++ b/pay-api/src/pay_api/services/payment_account.py
@@ -18,7 +18,6 @@
from decimal import Decimal
from typing import Any, Dict, List, Optional, Tuple
from cattr import Converter
-
from flask import current_app
from sentry_sdk import capture_message
from sqlalchemy import and_, desc, func, or_
@@ -36,16 +35,17 @@
from pay_api.models import StatementSettings as StatementSettingsModel
from pay_api.models import db
from pay_api.models.payment_account import PaymentAccountSearchModel
+from pay_api.services import gcp_queue_publisher
from pay_api.services.cfs_service import CFSService
from pay_api.services.distribution_code import DistributionCode
-from pay_api.services import gcp_queue_publisher
+from pay_api.services.gcp_queue_publisher import QueueMessage
from pay_api.services.oauth_service import OAuthService
from pay_api.services.receipt import Receipt as ReceiptService
from pay_api.services.statement import Statement
from pay_api.services.statement_settings import StatementSettings
from pay_api.utils.enums import (
AuthHeaderType, CfsAccountStatus, ContentType, InvoiceStatus, MessageType, PaymentMethod, PaymentSystem,
- StatementFrequency)
+ QueueSources, StatementFrequency)
from pay_api.utils.errors import Error
from pay_api.utils.user_context import UserContext, user_context
from pay_api.utils.util import (
@@ -805,12 +805,19 @@ def publish_account_mailer_event_on_creation(self):
"""Publish to account mailer message to send out confirmation email on creation."""
if self.payment_method == PaymentMethod.PAD.value:
payload = self.create_account_event_payload(MessageType.PAD_ACCOUNT_CREATE.value, include_pay_info=True)
- self._publish_queue_message(payload)
+ self._publish_queue_message(payload, MessageType.PAD_ACCOUNT_CREATE.value)
- def _publish_queue_message(self, payload):
+ def _publish_queue_message(self, payload: dict, message_type: str):
"""Publish to account mailer to send out confirmation email or notification email."""
try:
- gcp_queue_publisher.publish_to_queue(payload)
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_API.value,
+ message_type=message_type,
+ payload=payload,
+ topic=current_app.config.get('ACCOUNT_MAILER_TOPIC')
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
current_app.logger.error(e)
current_app.logger.error(
@@ -824,29 +831,21 @@ def create_account_event_payload(self, event_type: str, receipt_info: dict = Non
include_pay_info: bool = False):
"""Return event payload for account."""
payload: Dict[str, any] = {
- 'specversion': '1.x-wip',
- 'type': event_type,
- 'source': f'https://api.pay.bcregistry.gov.bc.ca/v1/accounts/{self.auth_account_id}',
- 'id': f'{self.auth_account_id}',
- 'time': f'{datetime.now()}',
- 'datacontenttype': 'application/json',
- 'data': {
- 'accountId': self.auth_account_id,
- 'accountName': self.name
- }
+ 'accountId': self.auth_account_id,
+ 'accountName': self.name
}
if event_type == MessageType.NSF_UNLOCK_ACCOUNT.value:
- payload['data'].update({
+ payload.update({
'invoiceNumber': receipt_info['invoiceNumber'],
'receiptNumber': receipt_info['receiptNumber'],
'paymentMethodDescription': receipt_info['paymentMethodDescription'],
'invoice': receipt_info['invoice']
})
if event_type == MessageType.PAD_ACCOUNT_CREATE.value:
- payload['data']['padTosAcceptedBy'] = self.pad_tos_accepted_by
+ payload['padTosAcceptedBy'] = self.pad_tos_accepted_by
if include_pay_info:
- payload['data']['paymentInfo'] = {
+ payload['paymentInfo'] = {
'bankInstitutionNumber': self.bank_number,
'bankTransitNumber': self.bank_branch_number,
'bankAccountNumber': mask(self.bank_account_number, current_app.config['MASK_LEN']),
@@ -873,7 +872,14 @@ def unlock_frozen_accounts(payment: Payment):
)
try:
- gcp_queue_publisher.publish_to_queue(payload=payload)
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_API.value,
+ message_type=MessageType.NSF_UNLOCK_ACCOUNT.value,
+ payload=payload,
+ topic=current_app.config.get('EVENT_LISTENER_TOPIC')
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
current_app.logger.error(e)
current_app.logger.error(
@@ -923,5 +929,5 @@ def enable_eft(cls, auth_account_id: str) -> PaymentAccount:
pa_service = cls.find_by_id(pay_account.id)
if not already_has_eft_enabled:
payload = pa_service.create_account_event_payload(MessageType.EFT_AVAILABLE_NOTIFICATION.value)
- pa_service._publish_queue_message(payload)
+ pa_service._publish_queue_message(payload, MessageType.EFT_AVAILABLE_NOTIFICATION.value)
return pa_service
diff --git a/pay-api/src/pay_api/services/payment_transaction.py b/pay-api/src/pay_api/services/payment_transaction.py
index 641bdd189..7c800c6f1 100644
--- a/pay-api/src/pay_api/services/payment_transaction.py
+++ b/pay-api/src/pay_api/services/payment_transaction.py
@@ -16,9 +16,11 @@
from __future__ import annotations
import uuid
+from dataclasses import asdict, dataclass
from datetime import datetime
-from typing import Dict, List
+from typing import Dict, List, Optional
+import humps
from flask import current_app
from sentry_sdk import capture_message
@@ -26,19 +28,31 @@
from pay_api.factory.payment_system_factory import PaymentSystemFactory
from pay_api.models import PaymentTransaction as PaymentTransactionModel
from pay_api.models import PaymentTransactionSchema
-from pay_api.services.base_payment_system import PaymentSystemService
from pay_api.services import gcp_queue_publisher
+from pay_api.services.base_payment_system import PaymentSystemService
+from pay_api.services.gcp_queue_publisher import QueueMessage
from pay_api.services.invoice import Invoice
from pay_api.services.invoice_reference import InvoiceReference
from pay_api.services.payment_account import PaymentAccount
from pay_api.services.receipt import Receipt
-from pay_api.utils.enums import InvoiceReferenceStatus, InvoiceStatus, PaymentMethod, PaymentStatus, TransactionStatus
+from pay_api.utils.enums import (
+ InvoiceReferenceStatus, InvoiceStatus, MessageType, PaymentMethod, PaymentStatus, QueueSources, TransactionStatus)
from pay_api.utils.errors import Error
-from pay_api.utils.util import is_valid_redirect_url
+from pay_api.utils.util import get_topic_for_corp_type, is_valid_redirect_url
from .payment import Payment
+@dataclass
+class PaymentToken:
+ """Payment Token payload common interface for LEAR and Names."""
+
+ id: Optional[str] = None
+ status_code: Optional[str] = None
+ filing_identifier: Optional[str] = None
+ corp_type_code: Optional[str] = None
+
+
class PaymentTransaction: # pylint: disable=too-many-instance-attributes, too-many-public-methods
"""Service to manage Payment transaction operations."""
@@ -493,11 +507,15 @@ def publish_status(transaction_dao: PaymentTransactionModel, invoice: Invoice):
else:
status_code = 'TRANSACTION_FAILED'
- payload = PaymentTransaction.create_event_payload(invoice, status_code)
-
try:
- gcp_queue_publisher.publish_to_queue(payload=payload)
-
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_API.value,
+ message_type=MessageType.PAYMENT.value,
+ payload=PaymentTransaction.create_event_payload(invoice, status_code),
+ topic=get_topic_for_corp_type(invoice.corp_type_code)
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
current_app.logger.error(e)
current_app.logger.warning(
@@ -509,12 +527,4 @@ def publish_status(transaction_dao: PaymentTransactionModel, invoice: Invoice):
@staticmethod
def create_event_payload(invoice, status_code):
"""Create event payload for payment events."""
- payload = {
- 'paymentToken': {
- 'id': invoice.id,
- 'statusCode': status_code,
- 'filingIdentifier': invoice.filing_id,
- 'corpTypeCode': invoice.corp_type_code
- }
- }
- return payload
+ return humps.camelize(asdict(PaymentToken(invoice.id, status_code, invoice.filing_id, invoice.corp_type_code)))
diff --git a/pay-api/src/pay_api/utils/constants.py b/pay-api/src/pay_api/utils/constants.py
index 0cab472ce..a01f3d7c2 100644
--- a/pay-api/src/pay_api/utils/constants.py
+++ b/pay-api/src/pay_api/utils/constants.py
@@ -22,7 +22,6 @@
RECEIPT_METHOD_PAD_DAILY = 'BCR-PAD Daily'
RECEIPT_METHOD_PAD_STOP = 'BCR-PAD Stop'
RECEIPT_METHOD_EFT_MONTHLY = 'BCR-EFT MONTHLY'
-RECEIPT_METHOD_EFT_STOP = 'BCR-PAD Stop'
CFS_BATCH_SOURCE = 'BC REG MANUAL_OTHER'
CFS_CM_BATCH_SOURCE = 'MANUAL-OTHER'
diff --git a/pay-api/src/pay_api/utils/enums.py b/pay-api/src/pay_api/utils/enums.py
index fba411797..c7bc97b71 100644
--- a/pay-api/src/pay_api/utils/enums.py
+++ b/pay-api/src/pay_api/utils/enums.py
@@ -339,13 +339,24 @@ class EFTShortnameState(Enum):
class MessageType(Enum):
- """Account Mailer Event Types."""
-
- # Ideally Should match account mailer project - FUTURE: move into sbc-common-components.
- EFT_AVAILABLE_NOTIFICATION = 'bc.registry.payment.eftAvailableNotification'
- PAD_ACCOUNT_CREATE = 'bc.registry.payment.padAccountCreate'
- NSF_LOCK_ACCOUNT = 'bc.registry.payment.lockAccount'
- NSF_UNLOCK_ACCOUNT = 'bc.registry.payment.unlockAccount'
+ """Queue Event Types."""
+
+ EFT_AVAILABLE_NOTIFICATION = 'eftAvailableNotification'
+ PAD_PAYMENT_SUCCESS = 'PAD.PaymentSuccess'
+ PAD_ACCOUNT_CREATE = 'padAccountCreate'
+ NSF_LOCK_ACCOUNT = 'lockAccount'
+ NSF_UNLOCK_ACCOUNT = 'unlockAccount'
+ STATEMENT_NOTIFICATION = 'statementNotification'
+ STATEMENT_DUE_NOTIFICATION = 'statementDueNotification'
+ STATEMENT_REMINDER_NOTIFICATION = 'statementReminderNotification'
+ PAYMENT = 'payment'
+ EJV_FAILED = 'ejvFailed'
+ CAS_UPLOADED = 'casSettlementUploaded'
+ INCORPORATION = 'incorporationApplication'
+ REGISTRATION = 'registration'
+ CGI_ACK_RECEIVED = 'ACKReceived'
+ CGI_FEEDBACK_RECEIVED = 'FEEDBACKReceived'
+ EFT_FILE_UPLOADED = 'eftFileUploaded'
class PaymentDetailsGlStatus(Enum):
@@ -355,3 +366,12 @@ class PaymentDetailsGlStatus(Enum):
INPRG = 'INPRG'
RJCT = 'RJCT' # Should have refundglerrormessage
CMPLT = 'CMPLT'
+
+
+class QueueSources(Enum):
+ """Queue sources for PAY."""
+
+ PAY_API = 'pay-api'
+ PAY_JOBS = 'pay-jobs'
+ PAY_QUEUE = 'pay-queue'
+ FTP_POLLER = 'ftp-poller'
diff --git a/pay-api/src/pay_api/utils/util.py b/pay-api/src/pay_api/utils/util.py
index 62aa51569..49343b370 100755
--- a/pay-api/src/pay_api/utils/util.py
+++ b/pay-api/src/pay_api/utils/util.py
@@ -29,7 +29,7 @@
from flask import current_app
from .constants import DT_SHORT_FORMAT
-from .enums import StatementFrequency
+from .enums import CorpType, StatementFrequency
def cors_preflight(methods: str = 'GET'):
@@ -253,3 +253,15 @@ def cents_to_decimal(amount: int):
return None
return amount / 100
+
+
+def get_topic_for_corp_type(corp_type: str):
+ """Return a topic to direct the queue message to."""
+ match corp_type:
+ case CorpType.NRO.value:
+ return current_app.config.get('NAMEX_PAY_TOPIC')
+ # Unused for now, intentionally don't send a queue message for these.
+ case CorpType.PPR.value | CorpType.VS.value | CorpType.CSO.value:
+ return None
+ case _:
+ return current_app.config.get('BUSINESS_PAY_TOPIC')
diff --git a/pay-api/tests/conftest.py b/pay-api/tests/conftest.py
index 4de3465b0..3eb46795b 100755
--- a/pay-api/tests/conftest.py
+++ b/pay-api/tests/conftest.py
@@ -14,9 +14,7 @@
"""Common setup and fixtures for the py-test suite used by this service."""
-import asyncio
import os
-import random
import pytest
from flask_migrate import Migrate, upgrade
@@ -39,6 +37,7 @@ def app():
@pytest.fixture(autouse=True)
def mock_queue_publish(monkeypatch):
"""Mock queue publish."""
+ # TODO: refactor so tests can patch it as: from gcp_queue_publisher import publish_to_queue
monkeypatch.setattr('pay_api.services.gcp_queue_publisher.publish_to_queue', lambda *args, **kwargs: None)
@@ -46,7 +45,6 @@ def mock_queue_publish(monkeypatch):
def app_request():
"""Return a session-wide application configured in TEST mode."""
_app = create_app('testing')
-
return _app
@@ -118,38 +116,6 @@ def restart_savepoint(sess2, trans): # pylint: disable=unused-variable
transaction.rollback()
-@pytest.fixture(scope='function')
-def client_id():
- """Return a unique client_id that can be used in tests."""
- _id = random.SystemRandom().getrandbits(0x58)
-
- return f'client-{_id}'
-
-
-@pytest.fixture(scope='function')
-def future(event_loop):
- """Return a future that is used for managing function tests."""
- _future = asyncio.Future(loop=event_loop)
- return _future
-
-
-@pytest.fixture
-def create_mock_coro(mocker, monkeypatch):
- """Return a mocked coroutine, and optionally patch-it in."""
- def _create_mock_patch_coro(to_patch=None):
- """Return a mocked coroutine, and optionally patch-it in."""
- mock = mocker.Mock()
-
- async def _coro(*args, **kwargs):
- return mock(*args, **kwargs)
-
- if to_patch: # <-- may not need/want to patch anything
- monkeypatch.setattr(to_patch, _coro)
- return mock, _coro
-
- return _create_mock_patch_coro
-
-
@pytest.fixture()
def auth_mock(monkeypatch):
"""Mock check_auth."""
diff --git a/pay-api/tests/unit/api/test_eft_short_names.py b/pay-api/tests/unit/api/test_eft_short_names.py
index 802bc14ba..2864ce225 100755
--- a/pay-api/tests/unit/api/test_eft_short_names.py
+++ b/pay-api/tests/unit/api/test_eft_short_names.py
@@ -19,6 +19,7 @@
import json
from datetime import datetime
+import pytest
from flask import current_app
@@ -28,8 +29,7 @@
from pay_api.models import EFTTransaction as EFTTransactionModel
from pay_api.models import Payment as PaymentModel
from pay_api.models import Receipt as ReceiptModel
-from pay_api.utils.enums import (
- EFTFileLineType, EFTProcessStatus, InvoiceReferenceStatus, InvoiceStatus, PaymentMethod, PaymentStatus, Role)
+from pay_api.utils.enums import EFTFileLineType, EFTProcessStatus, InvoiceStatus, PaymentMethod, PaymentStatus, Role
from tests.utilities.base_test import (
factory_eft_file, factory_eft_shortname, factory_invoice, factory_payment_account, get_claims, token_header)
@@ -431,6 +431,10 @@ def test_search_eft_short_names(session, client, jwt, app):
assert_short_name(result_dict['items'][0], short_name_2, s2_transaction1)
+@pytest.mark.skip(reason='This needs to be re-thought, the create cfs invoice job should be handling receipt creation '
+ 'and creating invoice references when payments are mapped, '
+ 'it should wait until 6 pm before marking invoices as PAID. '
+ 'Otherwise calls to CFS could potentially fail and the two systems would go out of sync.')
def test_apply_eft_short_name_credits(session, client, jwt, app):
"""Assert that credits are applied to invoices when short name is mapped to an account."""
token = jwt.create_jwt(get_claims(roles=[Role.STAFF.value, Role.MANAGE_EFT.value]), token_header)
@@ -503,12 +507,7 @@ def test_apply_eft_short_name_credits(session, client, jwt, app):
assert payment.invoice_amount == invoice_1_paid
assert payment.paid_amount == invoice_1_paid
- invoice_reference_1 = invoice_1.references[0]
- assert invoice_reference_1 is not None
- assert invoice_reference_1.invoice_id == invoice_1.id
- assert invoice_reference_1.invoice_number == payment.invoice_number
- assert invoice_reference_1.invoice_number == payment.invoice_number
- assert invoice_reference_1.status_code == InvoiceReferenceStatus.COMPLETED.value
+ assert not invoice_1.references
# Assert details of partially paid invoice
invoice_2_paid = 150
@@ -534,9 +533,4 @@ def test_apply_eft_short_name_credits(session, client, jwt, app):
assert payment.invoice_amount == 200
assert payment.paid_amount == invoice_2_paid
- invoice_reference_2 = invoice_2.references[0]
- assert invoice_reference_2 is not None
- assert invoice_reference_2.invoice_id == invoice_2.id
- assert invoice_reference_2.invoice_number == payment.invoice_number
- assert invoice_reference_2.invoice_number == payment.invoice_number
- assert invoice_reference_2.status_code == InvoiceReferenceStatus.ACTIVE.value
+ assert not invoice_2.references
diff --git a/queue_services/events-listener/.envrc b/pay-queue/.envrc
similarity index 100%
rename from queue_services/events-listener/.envrc
rename to pay-queue/.envrc
diff --git a/queue_services/payment-reconciliations/Dockerfile b/pay-queue/Dockerfile
similarity index 95%
rename from queue_services/payment-reconciliations/Dockerfile
rename to pay-queue/Dockerfile
index 0b09413fa..525967f9c 100644
--- a/queue_services/payment-reconciliations/Dockerfile
+++ b/pay-queue/Dockerfile
@@ -19,7 +19,7 @@ WORKDIR /opt/app-root
COPY ./requirements.txt .
#RUN pip install --upgrade pip
-RUN pip install pip==22.2.2
+RUN pip install pip==24.0
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
diff --git a/queue_services/events-listener/LICENSE b/pay-queue/LICENSE
similarity index 100%
rename from queue_services/events-listener/LICENSE
rename to pay-queue/LICENSE
diff --git a/queue_services/events-listener/MANIFEST.in b/pay-queue/MANIFEST.in
similarity index 100%
rename from queue_services/events-listener/MANIFEST.in
rename to pay-queue/MANIFEST.in
diff --git a/queue_services/payment-reconciliations/Makefile b/pay-queue/Makefile
similarity index 98%
rename from queue_services/payment-reconciliations/Makefile
rename to pay-queue/Makefile
index 5d0829f10..307e09ca3 100644
--- a/queue_services/payment-reconciliations/Makefile
+++ b/pay-queue/Makefile
@@ -6,8 +6,8 @@
MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST)))
CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH)))
-PROJECT_NAME:=reconciliations
-DOCKER_NAME:=payment-reconciliations
+PROJECT_NAME:=pay_queue
+DOCKER_NAME:=pay-queue
#################################################################################
# COMMANDS -- Setup #
diff --git a/pay-queue/README.md b/pay-queue/README.md
new file mode 100755
index 000000000..b98948f78
--- /dev/null
+++ b/pay-queue/README.md
@@ -0,0 +1,28 @@
+
+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE)
+
+
+# Application Name
+BC Registries Payment Reconciliation Queue
+
+## Technology Stack Used
+* Python, Flask
+* Postgres - SQLAlchemy, psycopg2-binary & alembic
+
+
+## License
+
+ Copyright 2024 Province of British Columbia
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
diff --git a/jobs/ftp-poller/utils/constants.py b/pay-queue/app.py
old mode 100644
new mode 100755
similarity index 65%
rename from jobs/ftp-poller/utils/constants.py
rename to pay-queue/app.py
index 040a16678..dd7bd16a7
--- a/jobs/ftp-poller/utils/constants.py
+++ b/pay-queue/app.py
@@ -1,3 +1,6 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the 'License');
@@ -11,9 +14,13 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Enum definitions."""
+"""Provides the WSGI entry point for running the application."""
+import os
+
+from pay_queue import create_app
+
+app = create_app()
-CAS_MESSAGE_TYPE = 'bc.registry.payment.casSettlementUploaded'
-CGI_ACK_MESSAGE_TYPE = 'bc.registry.payment.cgi.ACKReceived'
-CGI_FEEDBACK_MESSAGE_TYPE = 'bc.registry.payment.cgi.FEEDBACKReceived'
-EFT_MESSAGE_TYPE = 'bc.registry.payment.eft.fileUploaded'
+if __name__ == '__main__':
+ server_port = os.environ.get('PORT', '8080')
+ app.run(debug=False, port=server_port, host='0.0.0.0')
diff --git a/queue_services/payment-reconciliations/devops/vaults.json b/pay-queue/devops/vaults.json
similarity index 74%
rename from queue_services/payment-reconciliations/devops/vaults.json
rename to pay-queue/devops/vaults.json
index 0a37699ed..6b84697ac 100644
--- a/queue_services/payment-reconciliations/devops/vaults.json
+++ b/pay-queue/devops/vaults.json
@@ -12,16 +12,6 @@
"payment-reconciliations"
]
},
- {
- "vault": "nats",
- "application": [
- "base",
- "account-events-listener",
- "account-mailer",
- "payment",
- "payment-reconciliations"
- ]
- },
{
"vault": "payment-external-services",
"application": [
diff --git a/pay-queue/flags.json b/pay-queue/flags.json
new file mode 100644
index 000000000..870b306f4
--- /dev/null
+++ b/pay-queue/flags.json
@@ -0,0 +1,8 @@
+{
+ "flagValues": {
+ "string-flag": "a string value",
+ "bool-flag": true,
+ "integer-flag": 10,
+ "enable-eft-payment-method": true
+ }
+}
diff --git a/queue_services/payment-reconciliations/logging.conf b/pay-queue/logging.conf
similarity index 100%
rename from queue_services/payment-reconciliations/logging.conf
rename to pay-queue/logging.conf
diff --git a/queue_services/payment-reconciliations/openshift/templates/payment-reconciliations-build.json b/pay-queue/openshift/templates/payment-reconciliations-build.json
similarity index 100%
rename from queue_services/payment-reconciliations/openshift/templates/payment-reconciliations-build.json
rename to pay-queue/openshift/templates/payment-reconciliations-build.json
diff --git a/queue_services/payment-reconciliations/openshift/templates/payment-reconciliations-deploy.json b/pay-queue/openshift/templates/payment-reconciliations-deploy.json
similarity index 100%
rename from queue_services/payment-reconciliations/openshift/templates/payment-reconciliations-deploy.json
rename to pay-queue/openshift/templates/payment-reconciliations-deploy.json
diff --git a/pay-queue/requirements.txt b/pay-queue/requirements.txt
new file mode 100644
index 000000000..2d3d82364
--- /dev/null
+++ b/pay-queue/requirements.txt
@@ -0,0 +1,85 @@
+-e git+https://github.com/bcgov/sbc-common-components.git@5f99e135214ae949c9af951d4aa0b88b1067d853#egg=sbc_common_components&subdirectory=python
+-e git+https://github.com/seeker25/sbc-pay.git@ca0a69dce17ec602f2c3ceee164f15d0c6e7e804#egg=pay_api&subdirectory=pay-api
+-e git+https://github.com/thorwolpert/flask-jwt-oidc.git@40cc811ccf70e838c5f7522fe8d83b7e58853539#egg=flask_jwt_oidc
+CacheControl==0.14.0
+Flask-Caching==2.1.0
+Flask-Cors==4.0.0
+Flask-Migrate==4.0.7
+Flask-Moment==1.0.5
+Flask-OpenTracing==1.1.0
+Flask-SQLAlchemy==3.1.1
+Flask-Script==2.0.6
+Flask==3.0.2
+Jinja2==3.1.3
+Mako==1.3.2
+MarkupSafe==2.1.5
+SQLAlchemy-Utils==0.41.1
+SQLAlchemy==2.0.28
+Werkzeug==3.0.1
+alembic==1.13.1
+argon2-cffi-bindings==21.2.0
+argon2-cffi==23.1.0
+attrs==23.2.0
+blinker==1.7.0
+cachelib==0.9.0
+cachetools==5.3.3
+cattrs==23.2.3
+certifi==2024.2.2
+cffi==1.16.0
+charset-normalizer==3.3.2
+click==8.1.7
+croniter==2.0.2
+cryptography==42.0.5
+dpath==2.1.6
+ecdsa==0.18.0
+expiringdict==1.2.2
+flask-marshmallow==1.2.0
+google-api-core==2.17.1
+google-auth==2.28.1
+google-cloud-pubsub==2.20.0
+googleapis-common-protos==1.63.0
+greenlet==3.0.3
+grpc-google-iam-v1==0.13.0
+grpcio-status==1.62.1
+grpcio==1.62.1
+gunicorn==21.2.0
+holidays==0.37
+idna==3.6
+itsdangerous==2.1.2
+jaeger-client==4.8.0
+jsonschema==4.17.3
+launchdarkly-eventsource==1.1.1
+launchdarkly-server-sdk==9.2.2
+marshmallow-sqlalchemy==1.0.0
+marshmallow==3.21.1
+minio==7.2.5
+msgpack==1.0.8
+opentracing==2.4.0
+packaging==24.0
+proto-plus==1.23.0
+protobuf==4.25.3
+psycopg2-binary==2.9.9
+pyRFC3339==1.1
+pyasn1-modules==0.3.0
+pyasn1==0.5.1
+pycountry==23.12.11
+pycparser==2.21
+pycryptodome==3.20.0
+pyhumps==3.8.0
+pyrsistent==0.20.0
+python-dateutil==2.9.0.post0
+python-dotenv==1.0.1
+python-jose==3.3.0
+pytz==2024.1
+requests==2.31.0
+rsa==4.9
+semver==3.0.2
+sentry-sdk==1.42.0
+simple-cloudevent @ git+https://github.com/daxiom/simple-cloudevent.py.git@447cabb988202206ac69e71177d7cd11b6c0b002
+six==1.16.0
+strict-rfc3339==0.7
+threadloop==1.0.2
+thrift==0.16.0
+tornado==6.4
+typing_extensions==4.10.0
+urllib3==2.2.1
diff --git a/pay-queue/requirements/bcregistry-libraries.txt b/pay-queue/requirements/bcregistry-libraries.txt
new file mode 100644
index 000000000..d1067265b
--- /dev/null
+++ b/pay-queue/requirements/bcregistry-libraries.txt
@@ -0,0 +1,5 @@
+-e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
+-e git+https://github.com/seeker25/sbc-pay.git@18263#egg=pay-api&subdirectory=pay-api
+-e git+https://github.com/thorwolpert/flask-jwt-oidc.git#egg=flask-jwt-oidc
+# Note move out queue stuff into here.
+git+https://github.com/daxiom/simple-cloudevent.py.git
diff --git a/queue_services/events-listener/requirements/dev.txt b/pay-queue/requirements/dev.txt
similarity index 96%
rename from queue_services/events-listener/requirements/dev.txt
rename to pay-queue/requirements/dev.txt
index 04624060f..6c304627f 100755
--- a/queue_services/events-listener/requirements/dev.txt
+++ b/pay-queue/requirements/dev.txt
@@ -10,7 +10,7 @@ pytest-cov
FreezeGun
# Lint and code style
-flake8==5.0.4
+flake8
flake8-blind-except
flake8-debugger
flake8-docstrings
diff --git a/queue_services/events-listener/requirements/prod.txt b/pay-queue/requirements/prod.txt
similarity index 51%
rename from queue_services/events-listener/requirements/prod.txt
rename to pay-queue/requirements/prod.txt
index d8e80b8bb..2266a7ec5 100644
--- a/queue_services/events-listener/requirements/prod.txt
+++ b/pay-queue/requirements/prod.txt
@@ -2,13 +2,14 @@ Flask
jsonschema==4.17.3
python-dotenv
sentry-sdk[flask]
-asyncio-nats-client
-asyncio-nats-streaming
pycountry
-Werkzeug<2
+Werkzeug
+minio
jaeger-client
attrs
-itsdangerous==2.0.1
-Jinja2==3.0.3
-protobuf~=3.19.5
+sqlalchemy
+itsdangerous
+Jinja2
+protobuf
launchdarkly-server-sdk
+CacheControl
diff --git a/queue_services/events-listener/scripts/verify_license_headers.sh b/pay-queue/scripts/verify_license_headers.sh
similarity index 100%
rename from queue_services/events-listener/scripts/verify_license_headers.sh
rename to pay-queue/scripts/verify_license_headers.sh
diff --git a/queue_services/payment-reconciliations/setup.cfg b/pay-queue/setup.cfg
similarity index 93%
rename from queue_services/payment-reconciliations/setup.cfg
rename to pay-queue/setup.cfg
index eb4f0cd17..a09beb534 100644
--- a/queue_services/payment-reconciliations/setup.cfg
+++ b/pay-queue/setup.cfg
@@ -1,6 +1,6 @@
[metadata]
-name = account_reconciliations
-url = https://github.com/bcgov/sbc-pay/queue_services/payment-reconciliations
+name = pay-queue
+url = https://github.com/bcgov/sbc-pay/pay-queue
author = SBC Relationships team
author_email =
classifiers =
@@ -17,7 +17,7 @@ keywords =
[options]
zip_safe = True
-python_requires = >=3.6
+python_requires = >=3.12
include_package_data = True
packages = find:
diff --git a/queue_services/payment-reconciliations/setup.py b/pay-queue/setup.py
similarity index 97%
rename from queue_services/payment-reconciliations/setup.py
rename to pay-queue/setup.py
index 64d1ad17d..31939e558 100644
--- a/queue_services/payment-reconciliations/setup.py
+++ b/pay-queue/setup.py
@@ -23,7 +23,7 @@
_version_re = re.compile(r'__version__\s+=\s+(.*)') # pylint: disable=invalid-name
-with open('src/reconciliations/version.py', 'rb') as f:
+with open('src/pay_queue/version.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search( # pylint: disable=invalid-name
f.read().decode('utf-8')).group(1)))
diff --git a/pay-queue/src/pay_queue/__init__.py b/pay-queue/src/pay_queue/__init__.py
new file mode 100644
index 000000000..331be818d
--- /dev/null
+++ b/pay-queue/src/pay_queue/__init__.py
@@ -0,0 +1,57 @@
+# Copyright © 2024 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""The Reconciliations queue service.
+
+The service worker for applying payments, receipts and account balance to payment system.
+"""
+from __future__ import annotations
+
+import os
+
+import sentry_sdk
+from flask import Flask
+from pay_api.models import db
+from pay_api.services.flags import flags
+from pay_api.utils.run_version import get_run_version
+from sentry_sdk.integrations.flask import FlaskIntegration
+
+from pay_queue.config import CONFIGURATION
+from pay_queue.version import __version__
+
+from .resources import register_endpoints
+from .services import queue
+
+
+def create_app(run_mode=os.getenv('FLASK_ENV', 'production')) -> Flask:
+ """Return a configured Flask App using the Factory method."""
+ app = Flask(__name__)
+ app.env = run_mode
+ app.config.from_object(CONFIGURATION[run_mode])
+
+ # Configure Sentry
+ if dsn := app.config.get('SENTRY_DSN', None):
+ sentry_sdk.init(
+ dsn=dsn,
+ integrations=[FlaskIntegration()],
+ release=f'pay-queue@{get_run_version()}',
+ send_default_pii=False,
+ )
+
+ flags.init_app(app)
+ db.init_app(app)
+ queue.init_app(app)
+
+ register_endpoints(app)
+
+ return app
diff --git a/queue_services/payment-reconciliations/src/reconciliations/config.py b/pay-queue/src/pay_queue/config.py
similarity index 66%
rename from queue_services/payment-reconciliations/src/reconciliations/config.py
rename to pay-queue/src/pay_queue/config.py
index 3ce8d1bde..9e9f21106 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/config.py
+++ b/pay-queue/src/pay_queue/config.py
@@ -20,7 +20,6 @@
or by accessing this configuration directly.
"""
import os
-import random
from dotenv import find_dotenv, load_dotenv
@@ -29,10 +28,10 @@
load_dotenv(find_dotenv())
CONFIGURATION = {
- 'development': 'reconciliations.config.DevConfig',
- 'testing': 'reconciliations.config.TestConfig',
- 'production': 'reconciliations.config.ProdConfig',
- 'default': 'reconciliations.config.ProdConfig'
+ 'development': 'pay_queue.config.DevConfig',
+ 'testing': 'pay_queue.config.TestConfig',
+ 'production': 'pay_queue.config.ProdConfig',
+ 'default': 'pay_queue.config.ProdConfig'
}
@@ -74,23 +73,6 @@ class _Config(): # pylint: disable=too-few-public-methods
DB_HOST = os.getenv('DATABASE_HOST', '')
DB_PORT = os.getenv('DATABASE_PORT', '5432')
SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
- NATS_CONNECTION_OPTIONS = {
- 'servers': os.getenv('NATS_SERVERS', 'nats://127.0.0.1:4222').split(','),
- 'name': os.getenv('NATS_PAYMENT_RECONCILIATIONS_CLIENT_NAME', 'payment.reconciliations.worker')
-
- }
- STAN_CONNECTION_OPTIONS = {
- 'cluster_id': os.getenv('NATS_CLUSTER_ID', 'test-cluster'),
- 'client_id': str(random.SystemRandom().getrandbits(0x58)),
- 'ping_interval': 1,
- 'ping_max_out': 5,
- }
-
- SUBSCRIPTION_OPTIONS = {
- 'subject': os.getenv('NATS_PAYMENT_RECONCILIATIONS_SUBJECT', 'payment.reconciliations'),
- 'queue': os.getenv('NATS_PAYMENT_RECONCILIATIONS_QUEUE', 'payment-reconciliations-worker'),
- 'durable_name': os.getenv('NATS_PAYMENT_RECONCILIATIONS_QUEUE', 'payment-reconciliations-worker') + '_durable',
- }
# Minio configuration values
MINIO_ENDPOINT = os.getenv('MINIO_ENDPOINT')
@@ -98,16 +80,6 @@ class _Config(): # pylint: disable=too-few-public-methods
MINIO_ACCESS_SECRET = os.getenv('MINIO_ACCESS_SECRET')
MINIO_SECURE = os.getenv('MINIO_SECURE', 'True').lower() == 'true'
- # NATS Config
- NATS_SERVERS = os.getenv('NATS_SERVERS', 'nats://127.0.0.1:4222').split(',')
- NATS_CLUSTER_ID = os.getenv('NATS_CLUSTER_ID', 'test-cluster')
- NATS_PAYMENT_CLIENT_NAME = os.getenv('NATS_PAYMENT_CLIENT_NAME', 'entity.filing.worker')
- NATS_PAYMENT_SUBJECT = os.getenv('NATS_PAYMENT_SUBJECT', 'entity.{product}.payment')
- NATS_MAILER_CLIENT_NAME = os.getenv('NATS_MAILER_CLIENT_NAME', 'account.mailer.worker')
- NATS_MAILER_SUBJECT = os.getenv('NATS_MAILER_SUBJECT', 'account.mailer')
- NATS_ACCOUNT_CLIENT_NAME = os.getenv('NATS_ACCOUNT_CLIENT_NAME', 'account.events.worker')
- NATS_ACCOUNT_SUBJECT = os.getenv('NATS_ACCOUNT_SUBJECT', 'account.events')
-
# CFS API Settings
CFS_BASE_URL = os.getenv('CFS_BASE_URL')
CFS_CLIENT_ID = os.getenv('CFS_CLIENT_ID')
@@ -125,6 +97,12 @@ class _Config(): # pylint: disable=too-few-public-methods
# Disable PAD Success Email - Incase we need to reprocess records weeks/months later
DISABLE_PAD_SUCCESS_EMAIL = os.getenv('DISABLE_PAD_SUCCESS_EMAIL', 'false').lower() == 'true'
+ # GCP PubSub
+ AUDIENCE = os.getenv('AUDIENCE', None)
+ GCP_AUTH_KEY = os.getenv('GCP_AUTH_KEY', None)
+ PUBLISHER_AUDIENCE = os.getenv('PUBLISHER_AUDIENCE', None)
+ ACCOUNT_MAILER_TOPIC = os.getenv('ACCOUNT_MAILER_TOPIC', None)
+
class DevConfig(_Config): # pylint: disable=too-few-public-methods
"""Creates the Development Config object."""
@@ -152,7 +130,6 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods
default=f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
)
- TEST_NATS_DOCKER = os.getenv('TEST_NATS_DOCKER', None)
USE_DOCKER_MOCK = os.getenv('USE_DOCKER_MOCK', None)
# Minio variables
@@ -166,16 +143,6 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods
CFS_CLIENT_ID = 'TEST'
CFS_CLIENT_SECRET = 'TEST'
- # NATS Config
- NATS_SERVERS = os.getenv('NATS_SERVERS', 'nats://127.0.0.1:4222').split(',')
- NATS_CLUSTER_ID = os.getenv('NATS_CLUSTER_ID', 'test-cluster')
- NATS_PAYMENT_CLIENT_NAME = os.getenv('NATS_PAYMENT_CLIENT_NAME', 'entity.filing.worker')
- NATS_PAYMENT_SUBJECT = os.getenv('NATS_PAYMENT_SUBJECT', 'entity.{product}.payment')
- NATS_MAILER_CLIENT_NAME = os.getenv('NATS_MAILER_CLIENT_NAME', 'account.mailer.worker')
- NATS_MAILER_SUBJECT = os.getenv('NATS_MAILER_SUBJECT', 'account.mailer')
- NATS_ACCOUNT_CLIENT_NAME = os.getenv('NATS_ACCOUNT_CLIENT_NAME', 'account.events.worker')
- NATS_ACCOUNT_SUBJECT = os.getenv('NATS_ACCOUNT_SUBJECT', 'account.events')
-
# Secret key for encrypting bank account
ACCOUNT_SECRET_KEY = os.getenv('ACCOUNT_SECRET_KEY', 'test')
diff --git a/queue_services/payment-reconciliations/src/reconciliations/enums.py b/pay-queue/src/pay_queue/enums.py
similarity index 86%
rename from queue_services/payment-reconciliations/src/reconciliations/enums.py
rename to pay-queue/src/pay_queue/enums.py
index 92750f68e..cf16aabf2 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/enums.py
+++ b/pay-queue/src/pay_queue/enums.py
@@ -75,12 +75,3 @@ class TargetTransaction(Enum):
DEBIT_MEMO = 'DM'
CREDIT_MEMO = 'CM'
RECEIPT = 'RECEIPT'
-
-
-class MessageType(Enum):
- """Event message types."""
-
- CAS_UPLOADED = 'bc.registry.payment.casSettlementUploaded'
- CGI_ACK_RECEIVED = 'bc.registry.payment.cgi.ACKReceived'
- CGI_FEEDBACK_RECEIVED = 'bc.registry.payment.cgi.FEEDBACKReceived'
- EFT_FILE_UPLOADED = 'bc.registry.payment.eft.fileUploaded'
diff --git a/pay-queue/src/pay_queue/external/gcp_auth.py b/pay-queue/src/pay_queue/external/gcp_auth.py
new file mode 100644
index 000000000..39121ede2
--- /dev/null
+++ b/pay-queue/src/pay_queue/external/gcp_auth.py
@@ -0,0 +1,40 @@
+# pylint: skip-file
+# flake8: noqa
+# This will get moved to an external library, which is linted by black (different than our rules)
+"""Move this to external library."""
+import functools
+from http import HTTPStatus
+
+import google.oauth2.id_token as id_token
+from cachecontrol import CacheControl
+from flask import abort, current_app, request
+from google.auth.transport.requests import Request
+from requests.sessions import Session
+
+
+def verify_jwt(session):
+ """Verify token is valid."""
+ msg = ''
+ try:
+ # Get the Cloud Pub/Sub-generated JWT in the "Authorization" header.
+ id_token.verify_oauth2_token(
+ request.headers.get('Authorization').split()[1],
+ Request(session=session),
+ audience=current_app.config.get('PAY_SUB_AUDIENCE')
+ )
+ except Exception as e: # TODO fix
+ msg = f'Invalid token: {e}\n'
+ finally:
+ return msg
+
+
+def ensure_authorized_queue_user(f):
+ """Ensures the user is authorized to use the queue."""
+ @functools.wraps(f)
+ def decorated_function(*args, **kwargs):
+ # Use CacheControl to avoid re-fetching certificates for every request.
+ if message := verify_jwt(CacheControl(Session())):
+ print(message)
+ abort(HTTPStatus.UNAUTHORIZED)
+ return f(*args, **kwargs)
+ return decorated_function
diff --git a/pay-queue/src/pay_queue/external/pubsub.py b/pay-queue/src/pay_queue/external/pubsub.py
new file mode 100644
index 000000000..c116e1afd
--- /dev/null
+++ b/pay-queue/src/pay_queue/external/pubsub.py
@@ -0,0 +1,196 @@
+# pylint: skip-file
+# flake8: noqa
+# This will get moved to an external library, which is linted by black (different than our rules)
+# Copyright © 2023 Province of British Columbia
+#
+# Licensed under the BSD 3 Clause License, (the 'License');
+# you may not use this file except in compliance with the License.
+# The template for the license can be found here
+# https://opensource.org/license/bsd-3-clause/
+#
+# Redistribution and use in source and binary forms,
+# with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS”
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+"""This module provides Queue type services."""
+from __future__ import annotations
+
+import base64
+import json
+from concurrent.futures import TimeoutError # pylint: disable=W0622
+from concurrent.futures import CancelledError
+from contextlib import suppress
+from typing import Optional
+
+from flask import Flask
+from google.auth import jwt
+from google.cloud import pubsub_v1
+from simple_cloudevent import (
+ CloudEventVersionException,
+ InvalidCloudEventError,
+ SimpleCloudEvent,
+ from_queue_message,
+ to_queue_message,
+)
+from werkzeug.local import LocalProxy
+
+
+class GcpQueue:
+ """Provides Queue type services"""
+
+ def __init__(self, app: Flask = None):
+ """Initializes the GCP Queue class"""
+ self.audience = None
+ self.credentials_pub = None
+ self.gcp_auth_key = None
+ self.publisher_audience = None
+ self.service_account_info = None
+ self._publisher = None
+
+ if app:
+ self.init_app(app)
+
+ def init_app(self, app: Flask):
+ """Initializes the application"""
+
+ self.gcp_auth_key = app.config.get('GCP_AUTH_KEY')
+ if self.gcp_auth_key:
+ try:
+ audience = app.config.get(
+ 'AUDIENCE',
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Subscriber',
+ )
+ publisher_audience = app.config.get(
+ 'PUBLISHER_AUDIENCE',
+ 'https://pubsub.googleapis.com/google.pubsub.v1.Publisher',
+ )
+
+ self.service_account_info = json.loads(
+ base64.b64decode(self.gcp_auth_key).decode('utf-8'))
+ credentials = jwt.Credentials.from_service_account_info(
+ self.service_account_info, audience=audience)
+ self.credentials_pub = credentials.with_claims(
+ audience=publisher_audience)
+ except Exception as error: # noqa: B902
+ raise Exception('Unable to create a connection',
+ error) from error # pylint: disable=W0719
+
+ @property
+ def publisher(self):
+ """Returns the publisher"""
+
+ if not self._publisher and self.credentials_pub:
+ self._publisher = pubsub_v1.PublisherClient(
+ credentials=self.credentials_pub)
+ return self._publisher
+
+ @staticmethod
+ def is_valid_envelope(msg: dict):
+ """Checks if the envelope is valid"""
+
+ if (
+ msg.get('subscription')
+ and (message := msg.get('message'))
+ and isinstance(message, dict)
+ and message.get('data')
+ ):
+ return True
+ return False
+
+ @staticmethod
+ def get_envelope(request: LocalProxy) -> Optional[dict]:
+ """Returns the envelope"""
+
+ with suppress(Exception):
+ if (envelope := request.get_json()) and GcpQueue.is_valid_envelope(envelope):
+ return envelope
+ return None
+
+ @staticmethod
+ def get_simple_cloud_event(request: LocalProxy, return_raw: bool = False) -> type[SimpleCloudEvent | dict | None]:
+ """Return a SimpleCloudEvent if one is in session from the PubSub call.
+
+ Parameters
+ ------------
+ request: LocalProxy
+ An active Flask request object
+ return_raw: bool, Optional = False
+ Flag to return the raw data on error, if it exists
+ Return
+ -----------
+ ce_returned: boolean
+ if a ce is returned == True
+ SimpleCloudEvent |
+ dict |
+ None
+ the second value returned is either a:
+ SimpleCloudEvent -or-
+ None - if there is no SimpleCloudEvent
+
+ dict - if return_raw was set to true and it's not a SimpleCloudEvent -or-
+ """
+ if not (envelope := GcpQueue.get_envelope(request)):
+ return None
+
+ if (
+ (message := envelope.get('message'))
+ and (raw_data := message.get('data'))
+ and (str_data := base64.b64decode(raw_data))
+ ):
+ try:
+ return from_queue_message(str_data)
+ except (
+ CloudEventVersionException,
+ InvalidCloudEventError,
+ ValueError,
+ Exception,
+ ):
+ if return_raw and str_data:
+ return str_data
+ return None
+
+ def publish(self, topic: str, payload: bytes):
+ """Send payload to the queue."""
+ if not (publisher := self.publisher):
+ raise Exception('missing setup arguments') # pylint: disable=W0719
+
+ try:
+ future = publisher.publish(topic, payload)
+
+ return future.result()
+ except (CancelledError, TimeoutError) as error:
+ raise Exception('Unable to post to queue',
+ error) from error # pylint: disable=W0719
+
+ @staticmethod
+ def to_queue_message(ce: SimpleCloudEvent):
+ """Return a byte string of the CloudEvent in JSON format"""
+
+ return to_queue_message(ce)
+
+ @staticmethod
+ def from_queue_message(data: dict):
+ """Convert a queue message back to a simple CloudEvent"""
+ return from_queue_message(data)
diff --git a/pay-queue/src/pay_queue/external/readme.txt b/pay-queue/src/pay_queue/external/readme.txt
new file mode 100644
index 000000000..558309a53
--- /dev/null
+++ b/pay-queue/src/pay_queue/external/readme.txt
@@ -0,0 +1 @@
+These will be refactored in the future, just here now to get stuff going.
diff --git a/queue_services/payment-reconciliations/src/reconciliations/minio.py b/pay-queue/src/pay_queue/minio.py
similarity index 100%
rename from queue_services/payment-reconciliations/src/reconciliations/minio.py
rename to pay-queue/src/pay_queue/minio.py
diff --git a/queue_services/payment-reconciliations/scripts/verify_license_headers.sh b/pay-queue/src/pay_queue/resources/__init__.py
old mode 100755
new mode 100644
similarity index 50%
rename from queue_services/payment-reconciliations/scripts/verify_license_headers.sh
rename to pay-queue/src/pay_queue/resources/__init__.py
index 028b95c63..cca349370
--- a/queue_services/payment-reconciliations/scripts/verify_license_headers.sh
+++ b/pay-queue/src/pay_queue/resources/__init__.py
@@ -1,6 +1,4 @@
-#!/usr/bin/env bash
-
-# Copyright © 2019 Province of British Columbia
+# Copyright © 2024 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,18 +11,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+"""Resource package for the pay-queue service."""
+from flask import Flask
+from pay_api.resources.ops import bp as ops_bp
+
+from .worker import bp as worker_endpoint
-COPYRIGHT="Copyright © 2019 Province of British Columbia"
-RET=0
+def register_endpoints(app: Flask):
+ """Register endpoints with the flask application."""
+ # Allow base route to match with, and without a trailing slash
+ app.url_map.strict_slashes = False
-for file in $(find $@ -not \( -path */venv -prune \) -not \( -path */migrations -prune \) -not \( -path */tests -prune \) -not \( -path */.egg* -prune \) -name \*.py)
-do
- grep "${COPYRIGHT}" ${file} >/dev/null
- if [[ $? != 0 ]]
- then
- echo "${file} missing copyright header"
- RET=1
- fi
-done
-exit ${RET}
+ app.register_blueprint(
+ url_prefix='/',
+ blueprint=worker_endpoint,
+ )
+ app.register_blueprint(ops_bp)
diff --git a/pay-queue/src/pay_queue/resources/worker.py b/pay-queue/src/pay_queue/resources/worker.py
new file mode 100644
index 000000000..d429ec693
--- /dev/null
+++ b/pay-queue/src/pay_queue/resources/worker.py
@@ -0,0 +1,52 @@
+# Copyright © 2024 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Worker resource to handle incoming queue pushes from gcp."""
+from http import HTTPStatus
+
+from flask import Blueprint, request
+from pay_api.utils.enums import MessageType
+
+from pay_queue.external.gcp_auth import ensure_authorized_queue_user
+from pay_queue.services import queue, update_temporary_identifier
+from pay_queue.services.cgi_reconciliations import reconcile_distributions
+from pay_queue.services.eft.eft_reconciliation import reconcile_eft_payments
+from pay_queue.services.payment_reconciliations import reconcile_payments
+
+
+bp = Blueprint('worker', __name__)
+
+
+@bp.route('/', methods=('POST',))
+@ensure_authorized_queue_user
+def worker():
+ """Worker to handle incoming queue pushes."""
+ if not (ce := queue.get_simple_cloud_event(request)):
+ # Return a 200, so event is removed from the Queue
+ return {}, HTTPStatus.OK
+
+ match ce.type:
+ case MessageType.CAS_UPLOADED.value:
+ reconcile_payments(ce.data)
+ case MessageType.CGI_ACK_RECEIVED.value:
+ reconcile_distributions(ce.data)
+ case MessageType.CGI_FEEDBACK_RECEIVED.value:
+ reconcile_distributions(ce.data, is_feedback=True)
+ case MessageType.EFT_FILE_UPLOADED.value:
+ reconcile_eft_payments(ce.data)
+ case MessageType.INCORPORATION.value | MessageType.REGISTRATION.value:
+ update_temporary_identifier(ce.data)
+ case _:
+ raise Exception('Invalid queue message type') # pylint: disable=broad-exception-raised
+
+ return {}, HTTPStatus.OK
diff --git a/pay-queue/src/pay_queue/services/__init__.py b/pay-queue/src/pay_queue/services/__init__.py
new file mode 100644
index 000000000..63a1cf400
--- /dev/null
+++ b/pay-queue/src/pay_queue/services/__init__.py
@@ -0,0 +1,41 @@
+# Copyright © 2023 Province of British Columbia
+#
+# Licensed under the BSD 3 Clause License, (the "License");
+# you may not use this file except in compliance with the License.
+# The template for the license can be found here
+# https://opensource.org/license/bsd-3-clause/
+#
+# Redistribution and use in source and binary forms,
+# with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS”
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+"""This module provides Queue type services."""
+
+from pay_queue.external.pubsub import GcpQueue
+
+from .identifier_updater import update_temporary_identifier
+
+
+queue = GcpQueue()
diff --git a/queue_services/payment-reconciliations/src/reconciliations/cgi_reconciliations.py b/pay-queue/src/pay_queue/services/cgi_reconciliations.py
similarity index 80%
rename from queue_services/payment-reconciliations/src/reconciliations/cgi_reconciliations.py
rename to pay-queue/src/pay_queue/services/cgi_reconciliations.py
index 9e7e17f0a..d3040f9b9 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/cgi_reconciliations.py
+++ b/pay-queue/src/pay_queue/services/cgi_reconciliations.py
@@ -11,25 +11,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""CGI reconciliation file.
-
-The entry-point is the **cb_subscription_handler**
-
-The design and flow leverage a few constraints that are placed upon it
-by NATS Streaming and using AWAIT on the default loop.
-- NATS streaming queues require one message to be processed at a time.
-- AWAIT on the default loop effectively runs synchronously
-
-If these constraints change, the use of Flask-SQLAlchemy would need to change.
-Flask-SQLAlchemy currently allows the base model to be changed, or reworking
-the model to a standalone SQLAlchemy usage with an async engine would need
-to be pursued.
-"""
+"""CGI reconciliation file."""
import os
from datetime import datetime
from typing import Dict, List, Optional
-from entity_queue_common.service_utils import logger
+from flask import current_app
from pay_api.models import DistributionCode as DistributionCodeModel
from pay_api.models import EjvFile as EjvFileModel
from pay_api.models import EjvHeader as EjvHeaderModel
@@ -42,27 +29,28 @@
from pay_api.models import Refund as RefundModel
from pay_api.models import RoutingSlip as RoutingSlipModel
from pay_api.models import db
-from pay_api.services.queue_publisher import publish
+from pay_api.services import gcp_queue_publisher
+from pay_api.services.gcp_queue_publisher import QueueMessage
from pay_api.utils.enums import (
- DisbursementStatus, EjvFileType, InvoiceReferenceStatus, InvoiceStatus, PaymentMethod, PaymentStatus, PaymentSystem,
- RoutingSlipStatus)
+ DisbursementStatus, EjvFileType, InvoiceReferenceStatus, InvoiceStatus, MessageType, PaymentMethod, PaymentStatus,
+ PaymentSystem, QueueSources, RoutingSlipStatus)
from sentry_sdk import capture_message
-from reconciliations import config
-from reconciliations.minio import get_object
+from pay_queue import config
+from pay_queue.minio import get_object
APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production'))
-async def reconcile_distributions(msg: Dict[str, any], is_feedback: bool = False):
+def reconcile_distributions(msg: Dict[str, any], is_feedback: bool = False):
"""Read the file and update distribution details.
1: Lookup the invoice details based on the file content.
2: Update the statuses
"""
if is_feedback:
- await _update_feedback(msg)
+ _update_feedback(msg)
else:
_update_acknowledgement(msg)
@@ -72,11 +60,11 @@ def _update_acknowledgement(msg: Dict[str, any]):
# so query uploaded jv file records and mark it as acknowledged.
# Check to see that our ack file doesn't exist, if it exists, skip it.
- ack_file_name = msg.get('data').get('fileName')
+ ack_file_name = msg.get('fileName')
ack_exists: EjvFileModel = db.session.query(EjvFileModel).filter(
EjvFileModel.ack_file_ref == ack_file_name).first()
if ack_exists:
- logger.warning('Ack file: %s - already exists, possible duplicate, skipping ack.', ack_file_name)
+ current_app.logger.warning('Ack file: %s - already exists, possible duplicate, skipping ack.', ack_file_name)
return
ejv_file: EjvFileModel = db.session.query(EjvFileModel).filter(
@@ -98,24 +86,24 @@ def _update_acknowledgement(msg: Dict[str, any]):
db.session.commit()
-async def _update_feedback(msg: Dict[str, any]): # pylint:disable=too-many-locals, too-many-statements
+def _update_feedback(msg: Dict[str, any]): # pylint:disable=too-many-locals, too-many-statements
# Read the file and find records from the database, and update status.
- file_name: str = msg.get('data').get('fileName')
- minio_location: str = msg.get('data').get('location')
+ file_name: str = msg.get('fileName')
+ minio_location: str = msg.get('location')
file = get_object(minio_location, file_name)
content = file.data.decode('utf-8-sig')
group_batches: List[str] = _group_batches(content)
- has_errors, already_processed = await _process_ejv_feedback(group_batches['EJV'], file_name)
+ has_errors, already_processed = _process_ejv_feedback(group_batches['EJV'], file_name)
if not already_processed:
- has_errors = await _process_ap_feedback(group_batches['AP']) or has_errors
+ has_errors = _process_ap_feedback(group_batches['AP']) or has_errors
if has_errors and not APP_CONFIG.DISABLE_EJV_ERROR_EMAIL:
- await _publish_mailer_events(file_name, minio_location)
- logger.info('> update_feedback')
+ _publish_mailer_events(file_name, minio_location)
+ current_app.logger.info('> update_feedback')
-async def _process_ejv_feedback(group_batches, file_name) -> bool: # pylint:disable=too-many-locals
+def _process_ejv_feedback(group_batches, file_name) -> bool: # pylint:disable=too-many-locals
"""Process EJV Feedback contents."""
has_errors = False
already_processed = False
@@ -132,7 +120,7 @@ async def _process_ejv_feedback(group_batches, file_name) -> bool: # pylint:dis
batch_number = int(line[15:24])
ejv_file = EjvFileModel.find_by_id(batch_number)
if ejv_file.feedback_file_ref:
- logger.info(
+ current_app.logger.info(
'EJV file id %s with feedback file %s has already been processed, skipping.',
batch_number, file_name)
already_processed = True
@@ -159,22 +147,22 @@ async def _process_ejv_feedback(group_batches, file_name) -> bool: # pylint:dis
elif ejv_file.file_type == EjvFileType.PAYMENT.value:
amount = float(line[42:57])
receipt_number = line[0:42].strip()
- await _create_payment_record(amount, ejv_header, receipt_number)
+ _create_payment_record(amount, ejv_header, receipt_number)
elif is_jv_detail:
- has_errors = await _process_jv_details_feedback(ejv_file, has_errors, line, receipt_number)
+ has_errors = _process_jv_details_feedback(ejv_file, has_errors, line, receipt_number)
db.session.commit()
return has_errors, already_processed
-async def _process_jv_details_feedback(ejv_file, has_errors, line, receipt_number): # pylint:disable=too-many-locals
+def _process_jv_details_feedback(ejv_file, has_errors, line, receipt_number): # pylint:disable=too-many-locals
journal_name: str = line[7:17] # {ministry}{ejv_header_model.id:0>8}
ejv_header_model_id = int(journal_name[2:])
# Work around for CAS, they said fix the feedback files.
line = _fix_invoice_line(line)
invoice_id = int(line[205:315])
- logger.info('Invoice id - %s', invoice_id)
+ current_app.logger.info('Invoice id - %s', invoice_id)
invoice: InvoiceModel = InvoiceModel.find_by_id(invoice_id)
invoice_link: EjvInvoiceLinkModel = db.session.query(EjvInvoiceLinkModel).filter(
EjvInvoiceLinkModel.ejv_header_id == ejv_header_model_id).filter(
@@ -183,12 +171,12 @@ async def _process_jv_details_feedback(ejv_file, has_errors, line, receipt_numbe
invoice_return_message = line[319:469]
# If the JV process failed, then mark the GL code against the invoice to be stopped
# for further JV process for the credit GL.
- logger.info('Is Credit or Debit %s - %s', line[104:105], ejv_file.file_type)
+ current_app.logger.info('Is Credit or Debit %s - %s', line[104:105], ejv_file.file_type)
if line[104:105] == 'C' and ejv_file.file_type == EjvFileType.DISBURSEMENT.value:
disbursement_status = _get_disbursement_status(invoice_return_code)
invoice_link.disbursement_status_code = disbursement_status
invoice_link.message = invoice_return_message
- logger.info('disbursement_status %s', disbursement_status)
+ current_app.logger.info('disbursement_status %s', disbursement_status)
if disbursement_status == DisbursementStatus.ERRORED.value:
has_errors = True
invoice.disbursement_status_code = DisbursementStatus.ERRORED.value
@@ -202,17 +190,17 @@ async def _process_jv_details_feedback(ejv_file, has_errors, line, receipt_numbe
credit_distribution.stop_ejv = True
else:
effective_date = datetime.strptime(line[22:30], '%Y%m%d')
- await _update_invoice_disbursement_status(invoice, effective_date)
+ _update_invoice_disbursement_status(invoice, effective_date)
elif line[104:105] == 'D' and ejv_file.file_type == EjvFileType.PAYMENT.value:
# This is for gov account payment JV.
invoice_link.disbursement_status_code = _get_disbursement_status(invoice_return_code)
invoice_link.message = invoice_return_message
- logger.info('Invoice ID %s', invoice_id)
+ current_app.logger.info('Invoice ID %s', invoice_id)
inv_ref: InvoiceReferenceModel = InvoiceReferenceModel.find_by_invoice_id_and_status(
invoice_id, InvoiceReferenceStatus.ACTIVE.value)
- logger.info('invoice_link.disbursement_status_code %s', invoice_link.disbursement_status_code)
+ current_app.logger.info('invoice_link.disbursement_status_code %s', invoice_link.disbursement_status_code)
if invoice_link.disbursement_status_code == DisbursementStatus.ERRORED.value:
has_errors = True
# Cancel the invoice reference.
@@ -264,7 +252,7 @@ def _fix_invoice_line(line):
return line
-async def _update_invoice_disbursement_status(invoice, effective_date: datetime):
+def _update_invoice_disbursement_status(invoice, effective_date: datetime):
"""Update status to reversed if its a refund, else to completed."""
invoice.disbursement_date = effective_date
if invoice.invoice_status_code in (InvoiceStatus.REFUNDED.value, InvoiceStatus.REFUND_REQUESTED.value,
@@ -274,7 +262,7 @@ async def _update_invoice_disbursement_status(invoice, effective_date: datetime)
invoice.disbursement_status_code = DisbursementStatus.COMPLETED.value
-async def _create_payment_record(amount, ejv_header, receipt_number):
+def _create_payment_record(amount, ejv_header, receipt_number):
"""Create payment record."""
PaymentModel(
payment_system_code=PaymentSystem.CGI.value,
@@ -315,33 +303,27 @@ def _get_disbursement_status(return_code: str) -> str:
return DisbursementStatus.ERRORED.value
-async def _publish_mailer_events(file_name: str, minio_location: str):
+def _publish_mailer_events(file_name: str, minio_location: str):
"""Publish payment message to the mailer queue."""
- # Publish message to the Queue, saying account has been created. Using the event spec.
- queue_data = {
+ payload = {
'fileName': file_name,
'minioLocation': minio_location
}
- payload = {
- 'specversion': '1.x-wip',
- 'type': 'bc.registry.payment.ejvFailed',
- 'source': 'https://api.pay.bcregistry.gov.bc.ca/v1/accounts/',
- 'id': file_name,
- 'time': f'{datetime.now()}',
- 'datacontenttype': 'application/json',
- 'data': queue_data
- }
-
try:
- await publish(payload=payload,
- client_name=APP_CONFIG.NATS_MAILER_CLIENT_NAME,
- subject=APP_CONFIG.NATS_MAILER_SUBJECT)
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_QUEUE.value,
+ message_type=MessageType.EJV_FAILED.value,
+ payload=payload,
+ topic=current_app.config.get('ACCOUNT_MAILER_TOPIC')
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
- logger.error(e)
+ current_app.logger.error(e)
capture_message('EJV Failed message error', level='error')
-async def _process_ap_feedback(group_batches) -> bool: # pylint:disable=too-many-locals
+def _process_ap_feedback(group_batches) -> bool: # pylint:disable=too-many-locals
"""Process AP Feedback contents."""
has_errors = False
for group_batch in group_batches:
@@ -362,22 +344,22 @@ async def _process_ap_feedback(group_batches) -> bool: # pylint:disable=too-man
if ejv_file.disbursement_status_code == DisbursementStatus.ERRORED.value:
has_errors = True
elif is_ap_header:
- has_errors = await _process_ap_header(line, ejv_file) or has_errors
+ has_errors = _process_ap_header(line, ejv_file) or has_errors
db.session.commit()
return has_errors
-async def _process_ap_header(line, ejv_file: EjvFileModel) -> bool:
+def _process_ap_header(line, ejv_file: EjvFileModel) -> bool:
has_errors = False
if ejv_file.file_type == EjvFileType.REFUND.value:
- has_errors = await _process_ap_header_routing_slips(line)
+ has_errors = _process_ap_header_routing_slips(line)
else:
- has_errors = await _process_ap_header_non_gov_disbursement(line, ejv_file)
+ has_errors = _process_ap_header_non_gov_disbursement(line, ejv_file)
return has_errors
-async def _process_ap_header_routing_slips(line) -> bool:
+def _process_ap_header_routing_slips(line) -> bool:
has_errors = False
routing_slip_number = line[19:69].strip()
routing_slip: RoutingSlipModel = RoutingSlipModel.find_by_number(routing_slip_number)
@@ -396,7 +378,7 @@ async def _process_ap_header_routing_slips(line) -> bool:
return has_errors
-async def _process_ap_header_non_gov_disbursement(line, ejv_file: EjvFileModel) -> bool:
+def _process_ap_header_non_gov_disbursement(line, ejv_file: EjvFileModel) -> bool:
has_errors = False
invoice_id = line[19:69].strip()
invoice: InvoiceModel = InvoiceModel.find_by_id(invoice_id)
@@ -417,7 +399,7 @@ async def _process_ap_header_non_gov_disbursement(line, ejv_file: EjvFileModel)
level='error')
else:
# TODO - Fix this on BC Assessment launch, so the effective date reads from the feedback.
- await _update_invoice_disbursement_status(invoice, effective_date=datetime.now())
+ _update_invoice_disbursement_status(invoice, effective_date=datetime.now())
if invoice.invoice_status_code != InvoiceStatus.PAID.value:
refund = RefundModel.find_by_invoice_id(invoice.id)
refund.gl_posted = datetime.now()
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/__init__.py b/pay-queue/src/pay_queue/services/eft/__init__.py
similarity index 100%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/__init__.py
rename to pay-queue/src/pay_queue/services/eft/__init__.py
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_base.py b/pay-queue/src/pay_queue/services/eft/eft_base.py
similarity index 95%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/eft_base.py
rename to pay-queue/src/pay_queue/services/eft/eft_base.py
index 2a9982b01..13f396db7 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_base.py
+++ b/pay-queue/src/pay_queue/services/eft/eft_base.py
@@ -58,10 +58,11 @@
"""This manages the EFT base class."""
import decimal
from datetime import datetime
+from typing import List
-from reconciliations.eft.eft_enums import EFTConstants
-from reconciliations.eft.eft_errors import EFTError
-from reconciliations.eft.eft_parse_error import EFTParseError
+from pay_queue.services.eft.eft_enums import EFTConstants
+from pay_queue.services.eft.eft_errors import EFTError
+from pay_queue.services.eft.eft_parse_error import EFTParseError
class EFTBase:
@@ -71,7 +72,7 @@ class EFTBase:
record_type: str # Always 1 for header, 2 for transaction, 7 for trailer
content: str
index: int
- errors: [EFTParseError]
+ errors: List[EFTParseError]
def __init__(self, content: str, index: int):
"""Return an EFT Base record."""
@@ -148,6 +149,6 @@ def has_errors(self) -> bool:
"""Return true if the error array has elements."""
return len(self.errors) > 0
- def get_error_messages(self) -> [str]:
+ def get_error_messages(self) -> List[str]:
"""Return a string array of the error messages."""
return [error.message for error in self.errors]
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_enums.py b/pay-queue/src/pay_queue/services/eft/eft_enums.py
similarity index 100%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/eft_enums.py
rename to pay-queue/src/pay_queue/services/eft/eft_enums.py
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_errors.py b/pay-queue/src/pay_queue/services/eft/eft_errors.py
similarity index 100%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/eft_errors.py
rename to pay-queue/src/pay_queue/services/eft/eft_errors.py
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_header.py b/pay-queue/src/pay_queue/services/eft/eft_header.py
similarity index 89%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/eft_header.py
rename to pay-queue/src/pay_queue/services/eft/eft_header.py
index a096ae06e..9244eafa3 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_header.py
+++ b/pay-queue/src/pay_queue/services/eft/eft_header.py
@@ -14,10 +14,10 @@
"""This manages the EFT Header record."""
from datetime import datetime
-from reconciliations.eft.eft_base import EFTBase
-from reconciliations.eft.eft_enums import EFTConstants
-from reconciliations.eft.eft_errors import EFTError
-from reconciliations.eft.eft_parse_error import EFTParseError
+from pay_queue.services.eft.eft_base import EFTBase
+from pay_queue.services.eft.eft_enums import EFTConstants
+from pay_queue.services.eft.eft_errors import EFTError
+from pay_queue.services.eft.eft_parse_error import EFTParseError
class EFTHeader(EFTBase):
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_parse_error.py b/pay-queue/src/pay_queue/services/eft/eft_parse_error.py
similarity index 94%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/eft_parse_error.py
rename to pay-queue/src/pay_queue/services/eft/eft_parse_error.py
index b7348d89c..6ef7ad81c 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_parse_error.py
+++ b/pay-queue/src/pay_queue/services/eft/eft_parse_error.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the structure of EFT Errors."""
-from reconciliations.eft.eft_errors import EFTError
+from pay_queue.services.eft.eft_errors import EFTError
class EFTParseError: # pylint: disable=too-few-public-methods
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_reconciliation.py b/pay-queue/src/pay_queue/services/eft/eft_reconciliation.py
similarity index 89%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/eft_reconciliation.py
rename to pay-queue/src/pay_queue/services/eft/eft_reconciliation.py
index d22c316bc..6cdc36122 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_reconciliation.py
+++ b/pay-queue/src/pay_queue/services/eft/eft_reconciliation.py
@@ -11,26 +11,13 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""EFT reconciliation file.
-
-The entry-point is the **cb_subscription_handler**
-
-The design and flow leverage a few constraints that are placed upon it
-by NATS Streaming and using AWAIT on the default loop.
-- NATS streaming queues require one message to be processed at a time.
-- AWAIT on the default loop effectively runs synchronously
-
-If these constraints change, the use of Flask-SQLAlchemy would need to change.
-Flask-SQLAlchemy currently allows the base model to be changed, or reworking
-the model to a standalone SQLAlchemy usage with an async engine would need
-to be pursued.
-"""
+"""EFT reconciliation file."""
from datetime import datetime
from operator import and_
from typing import Dict, List
from _decimal import Decimal
-from entity_queue_common.service_utils import logger
+from flask import current_app
from pay_api import db
from pay_api.factory.payment_system_factory import PaymentSystemFactory
from pay_api.models import EFTCredit as EFTCreditModel
@@ -44,11 +31,11 @@
from pay_api.utils.enums import EFTFileLineType, EFTProcessStatus, InvoiceStatus, PaymentMethod
from sentry_sdk import capture_message
-from reconciliations.eft import EFTHeader, EFTRecord, EFTTrailer
-from reconciliations.minio import get_object
+from pay_queue.minio import get_object
+from pay_queue.services.eft import EFTHeader, EFTRecord, EFTTrailer
-async def reconcile_eft_payments(msg: Dict[str, any]): # pylint: disable=too-many-locals
+def reconcile_eft_payments(msg: Dict[str, any]): # pylint: disable=too-many-locals
"""Read the TDI17 file, create processing records and update payment details.
1: Check to see if file has been previously processed.
@@ -65,8 +52,8 @@ async def reconcile_eft_payments(msg: Dict[str, any]): # pylint: disable=too-ma
9: Finalize and complete
"""
# Fetch EFT File
- file_name: str = msg.get('data').get('fileName')
- minio_location: str = msg.get('data').get('location')
+ file_name: str = msg.get('fileName')
+ minio_location: str = msg.get('location')
file = get_object(minio_location, file_name)
file_content = file.data.decode('utf-8-sig')
@@ -78,7 +65,7 @@ async def reconcile_eft_payments(msg: Dict[str, any]): # pylint: disable=too-ma
EFTFileModel.file_ref == file_name).one_or_none()
if eft_file_model and eft_file_model.status_code == EFTProcessStatus.COMPLETED.value:
- logger.info('File: %s already completed processing on %s.', file_name, eft_file_model.completed_on)
+ current_app.logger.info('File: %s already completed processing on %s.', file_name, eft_file_model.completed_on)
return
# There is no existing EFT File record - instantiate one
@@ -93,7 +80,7 @@ async def reconcile_eft_payments(msg: Dict[str, any]): # pylint: disable=too-ma
# EFT File parsed data holders
eft_header: EFTHeader = None
eft_trailer: EFTTrailer = None
- eft_transactions: [EFTRecord] = []
+ eft_transactions: List[EFTRecord] = []
# Read and parse EFT file header, trailer, transactions
for index, line in enumerate(lines):
@@ -109,7 +96,7 @@ async def reconcile_eft_payments(msg: Dict[str, any]): # pylint: disable=too-ma
# If header and/or trailer has errors do not proceed
if not (eft_header_valid and eft_trailer_valid):
- logger.error('Failed to process file %s with an invalid header or trailer.', file_name)
+ current_app.logger.error('Failed to process file %s with an invalid header or trailer.', file_name)
eft_file_model.status_code = EFTProcessStatus.FAILED.value
eft_file_model.save()
return
@@ -128,7 +115,7 @@ async def reconcile_eft_payments(msg: Dict[str, any]): # pylint: disable=too-ma
# EFT Transactions have parsing errors - stop and FAIL transactions
# We want a full file to be parseable as we want to get a full accurate balance before applying them to invoices
if has_eft_transaction_errors:
- logger.error('Failed to process file %s has transaction parsing errors.', file_name)
+ current_app.logger.error('Failed to process file %s has transaction parsing errors.', file_name)
_update_transactions_to_fail(eft_file_model)
return
@@ -146,7 +133,7 @@ async def reconcile_eft_payments(msg: Dict[str, any]): # pylint: disable=too-ma
if has_eft_transaction_errors or has_eft_credits_error:
db.session.rollback()
_update_transactions_to_fail(eft_file_model)
- logger.error('Failed to process file %s due to transaction errors.', file_name)
+ current_app.logger.error('Failed to process file %s due to transaction errors.', file_name)
return
_finalize_process_state(eft_file_model)
@@ -165,7 +152,7 @@ def _finalize_process_state(eft_file_model: EFTFileModel):
def _process_eft_header(eft_header: EFTHeader, eft_file_model: EFTFileModel) -> bool:
"""Process the EFT Header."""
if eft_header is None:
- logger.error('Failed to process file %s with an invalid header.', eft_file_model.file_ref)
+ current_app.logger.error('Failed to process file %s with an invalid header.', eft_file_model.file_ref)
return False
# Populate header and trailer data on EFT File record - values will return None if parsing failed
@@ -182,7 +169,7 @@ def _process_eft_header(eft_header: EFTHeader, eft_file_model: EFTFileModel) ->
def _process_eft_trailer(eft_trailer: EFTTrailer, eft_file_model: EFTFileModel) -> bool:
"""Process the EFT Trailer."""
if eft_trailer is None:
- logger.error('Failed to process file %s with an invalid trailer.', eft_file_model.file_ref)
+ current_app.logger.error('Failed to process file %s with an invalid trailer.', eft_file_model.file_ref)
return False
# Populate header and trailer data on EFT File record - values will return None if parsing failed
@@ -237,7 +224,7 @@ def _process_eft_credits(shortname_balance, eft_file_id):
db.session.add(eft_credit_model)
except Exception as e: # NOQA pylint: disable=broad-exception-caught
has_credit_errors = True
- logger.error(e)
+ current_app.logger.error(e)
capture_message('EFT Failed to set EFT balance.', level='error')
return has_credit_errors
@@ -252,7 +239,8 @@ def _process_eft_payments(shortname_balance: Dict, eft_file: EFTFileModel) -> bo
# No balance to apply - move to next shortname
if shortname_balance[shortname]['balance'] <= 0:
- logger.warning('UNEXPECTED BALANCE: %s had zero or less balance on file: %s', shortname, eft_file.file_ref)
+ current_app.logger.warning('UNEXPECTED BALANCE: %s had zero or less balance on file: %s',
+ shortname, eft_file.file_ref)
continue
# check if short name is mapped to an auth account
@@ -262,14 +250,12 @@ def _process_eft_payments(shortname_balance: Dict, eft_file: EFTFileModel) -> bo
auth_account_id = eft_shortname_model.auth_account_id
# Find invoices to be paid
invoices: List[InvoiceModel] = EFTShortnames.get_invoices_owing(auth_account_id)
- if invoices is not None:
- for invoice in invoices:
- _pay_invoice(invoice=invoice,
- shortname_balance=shortname_balance[shortname])
+ for invoice in invoices:
+ _pay_invoice(invoice=invoice, shortname_balance=shortname_balance[shortname])
except Exception as e: # NOQA pylint: disable=broad-exception-caught
has_eft_transaction_errors = True
- logger.error(e)
+ current_app.logger.error(e)
capture_message('EFT Failed to apply balance to invoice.', level='error')
return has_eft_transaction_errors
@@ -433,6 +419,7 @@ def _shortname_balance_as_dict(eft_transactions: List[EFTRecord]) -> Dict:
return shortname_balance
+# TODO: This needs to change to work on the CFS job instead.
def _pay_invoice(invoice: InvoiceModel, shortname_balance: Dict):
"""Pay for an invoice and update invoice state."""
payment_date = shortname_balance.get('transaction_date') or datetime.now()
@@ -447,12 +434,11 @@ def _pay_invoice(invoice: InvoiceModel, shortname_balance: Dict):
# Create the payment record
eft_payment_service: EFTService = PaymentSystemFactory.create_from_payment_method(PaymentMethod.EFT.value)
- payment, invoice_reference, receipt = eft_payment_service.apply_credit(invoice=invoice,
- payment_date=payment_date,
- auto_save=True)
+ payment, receipt = eft_payment_service.apply_credit(invoice=invoice,
+ payment_date=payment_date,
+ auto_save=True)
db.session.add(payment)
- db.session.add(invoice_reference)
db.session.add(receipt)
# Paid - update the shortname balance
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_record.py b/pay-queue/src/pay_queue/services/eft/eft_record.py
similarity index 94%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/eft_record.py
rename to pay-queue/src/pay_queue/services/eft/eft_record.py
index 19acedc30..4c002754e 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_record.py
+++ b/pay-queue/src/pay_queue/services/eft/eft_record.py
@@ -15,10 +15,10 @@
import decimal
from datetime import datetime
-from reconciliations.eft.eft_base import EFTBase
-from reconciliations.eft.eft_enums import EFTConstants
-from reconciliations.eft.eft_errors import EFTError
-from reconciliations.eft.eft_parse_error import EFTParseError
+from pay_queue.services.eft.eft_base import EFTBase
+from pay_queue.services.eft.eft_enums import EFTConstants
+from pay_queue.services.eft.eft_errors import EFTError
+from pay_queue.services.eft.eft_parse_error import EFTParseError
class EFTRecord(EFTBase):
diff --git a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_trailer.py b/pay-queue/src/pay_queue/services/eft/eft_trailer.py
similarity index 88%
rename from queue_services/payment-reconciliations/src/reconciliations/eft/eft_trailer.py
rename to pay-queue/src/pay_queue/services/eft/eft_trailer.py
index 8ee4fd999..3ead1dd01 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/eft/eft_trailer.py
+++ b/pay-queue/src/pay_queue/services/eft/eft_trailer.py
@@ -14,10 +14,10 @@
"""This manages the EFT Trailer record."""
import decimal
-from reconciliations.eft.eft_base import EFTBase
-from reconciliations.eft.eft_enums import EFTConstants
-from reconciliations.eft.eft_errors import EFTError
-from reconciliations.eft.eft_parse_error import EFTParseError
+from pay_queue.services.eft.eft_base import EFTBase
+from pay_queue.services.eft.eft_enums import EFTConstants
+from pay_queue.services.eft.eft_errors import EFTError
+from pay_queue.services.eft.eft_parse_error import EFTParseError
class EFTTrailer(EFTBase):
diff --git a/pay-queue/src/pay_queue/services/identifier_updater.py b/pay-queue/src/pay_queue/services/identifier_updater.py
new file mode 100644
index 000000000..dba0322e4
--- /dev/null
+++ b/pay-queue/src/pay_queue/services/identifier_updater.py
@@ -0,0 +1,34 @@
+# Copyright © 2024 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Updates the temporary identifier to a permanent identifier in the invoice table."""
+from flask import current_app
+from pay_api.models import db
+from pay_api.models.invoice import Invoice
+
+
+def update_temporary_identifier(event_message):
+ """Update a temporary identifier to a permanent identifier."""
+ if 'tempidentifier' not in event_message or event_message.get('tempidentifier', None) is None:
+ return
+
+ old_identifier = event_message.get('tempidentifier')
+ new_identifier = event_message.get('identifier')
+ current_app.logger.debug('Received message to update %s to %s', old_identifier, new_identifier)
+
+ invoices = Invoice.find_by_business_identifier(old_identifier)
+ for inv in invoices:
+ inv.business_identifier = new_identifier
+ inv.flush()
+
+ db.session.commit()
diff --git a/queue_services/payment-reconciliations/src/reconciliations/payment_reconciliations.py b/pay-queue/src/pay_queue/services/payment_reconciliations.py
similarity index 80%
rename from queue_services/payment-reconciliations/src/reconciliations/payment_reconciliations.py
rename to pay-queue/src/pay_queue/services/payment_reconciliations.py
index 055c190dc..de5cefeb7 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/payment_reconciliations.py
+++ b/pay-queue/src/pay_queue/services/payment_reconciliations.py
@@ -11,27 +11,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Payment reconciliation file.
-
-The entry-point is the **cb_subscription_handler**
-
-The design and flow leverage a few constraints that are placed upon it
-by NATS Streaming and using AWAIT on the default loop.
-- NATS streaming queues require one message to be processed at a time.
-- AWAIT on the default loop effectively runs synchronously
-
-If these constraints change, the use of Flask-SQLAlchemy would need to change.
-Flask-SQLAlchemy currently allows the base model to be changed, or reworking
-the model to a standalone SQLAlchemy usage with an async engine would need
-to be pursued.
-"""
+"""Payment reconciliation file."""
import csv
import os
from datetime import datetime
from decimal import Decimal
from typing import Dict, List, Tuple
-from entity_queue_common.service_utils import logger
+from flask import current_app
from pay_api.models import CasSettlement as CasSettlementModel
from pay_api.models import CfsAccount as CfsAccountModel
from pay_api.models import Credit as CreditModel
@@ -44,25 +31,27 @@
from pay_api.models import PaymentLineItem as PaymentLineItemModel
from pay_api.models import Receipt as ReceiptModel
from pay_api.models import db
+from pay_api.services import gcp_queue_publisher
from pay_api.services.cfs_service import CFSService
+from pay_api.services.gcp_queue_publisher import QueueMessage
from pay_api.services.non_sufficient_funds import NonSufficientFundsService
from pay_api.services.payment_transaction import PaymentTransaction as PaymentTransactionService
-from pay_api.services.queue_publisher import publish
from pay_api.utils.enums import (
- CfsAccountStatus, InvoiceReferenceStatus, InvoiceStatus, LineItemStatus, PaymentMethod, PaymentStatus)
-from pay_api.utils.util import get_pay_subject_name
+ CfsAccountStatus, InvoiceReferenceStatus, InvoiceStatus, LineItemStatus, MessageType, PaymentMethod, PaymentStatus,
+ QueueSources)
+from pay_api.utils.util import get_topic_for_corp_type
from sentry_sdk import capture_message
-from reconciliations import config
-from reconciliations.minio import get_object
+from pay_queue import config
+from pay_queue.minio import get_object
-from .enums import Column, RecordType, SourceTransaction, Status, TargetTransaction
+from ..enums import Column, RecordType, SourceTransaction, Status, TargetTransaction
APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production'))
-async def _create_payment_records(csv_content: str):
+def _create_payment_records(csv_content: str):
"""Create payment records by grouping the lines with target transaction number."""
# Iterate the rows and create a dict with key as the source transaction number.
source_txns: Dict[str, List[Dict[str, str]]] = {}
@@ -116,8 +105,7 @@ async def _create_payment_records(csv_content: str):
_save_payment(payment_date, inv_number, invoice_amount, paid_amount, row, PaymentStatus.COMPLETED.value,
PaymentMethod.ONLINE_BANKING.value, source_txn_number)
- # publish email event.
- await _publish_online_banking_mailer_events(payment_lines, paid_amount)
+ _publish_online_banking_mailer_events(payment_lines, paid_amount)
elif settlement_type == RecordType.EFTP.value:
# Find the payment using receipt_number and mark it as COMPLETED
@@ -186,7 +174,7 @@ def _get_payment_by_inv_number_and_status(inv_number: str, status: str) -> Payme
return payment
-async def reconcile_payments(msg: Dict[str, any]):
+def reconcile_payments(msg: Dict[str, any]):
"""Read the file and update payment details.
1: Check to see if file has been processed already.
@@ -198,18 +186,19 @@ async def reconcile_payments(msg: Dict[str, any]):
3.3 : If transaction status is PARTIAL, update payment and invoice status, publish to account mailer.
4: If the transaction is On Account for Credit, apply the credit to the account.
"""
- file_name: str = msg.get('data').get('fileName')
- minio_location: str = msg.get('data').get('location')
+ file_name: str = msg.get('fileName')
+ minio_location: str = msg.get('location')
cas_settlement: CasSettlementModel = db.session.query(CasSettlementModel) \
.filter(CasSettlementModel.file_name == file_name).one_or_none()
if cas_settlement and not cas_settlement.processed_on:
- logger.info('File: %s has attempted to be processed before.', file_name)
+ current_app.logger.info('File: %s has attempted to be processed before.', file_name)
elif cas_settlement and cas_settlement.processed_on:
- logger.info('File: %s already processed on: %s. Skipping file.', file_name, cas_settlement.processed_on)
+ current_app.logger.info('File: %s already processed on: %s. Skipping file.',
+ file_name, cas_settlement.processed_on)
return
else:
- logger.info('Creating cas_settlement record for file: %s', file_name)
+ current_app.logger.info('Creating cas_settlement record for file: %s', file_name)
cas_settlement = _create_cas_settlement(file_name)
file = get_object(minio_location, file_name)
@@ -218,7 +207,7 @@ async def reconcile_payments(msg: Dict[str, any]):
for row in csv.DictReader(content.splitlines()):
# Convert lower case keys to avoid any key mismatch
row = dict((k.lower(), v) for k, v in row.items())
- logger.debug('Processing %s', row)
+ current_app.logger.debug('Processing %s', row)
# IF not PAD and application amount is zero, continue
record_type = _get_row_value(row, Column.RECORD_TYPE)
@@ -234,17 +223,17 @@ async def reconcile_payments(msg: Dict[str, any]):
# PS : Duplicating some code to make the code more readable.
if record_type in pad_record_types:
# Handle invoices
- await _process_consolidated_invoices(row)
+ _process_consolidated_invoices(row)
elif record_type in (RecordType.BOLP.value, RecordType.EFTP.value):
# EFT, WIRE and Online Banking are one-to-one invoice. So handle them in same way.
- await _process_unconsolidated_invoices(row)
+ _process_unconsolidated_invoices(row)
elif record_type in (RecordType.ONAC.value, RecordType.CMAP.value, RecordType.DRWP.value):
- await _process_credit_on_invoices(row)
+ _process_credit_on_invoices(row)
elif record_type == RecordType.ADJS.value:
- logger.info('Adjustment received for %s.', msg)
+ current_app.logger.info('Adjustment received for %s.', msg)
else:
# For any other transactions like DM log error and continue.
- logger.error('Record Type is received as %s, and cannot process %s.', record_type, msg)
+ current_app.logger.error('Record Type is received as %s, and cannot process %s.', record_type, msg)
capture_message(f'Record Type is received as {record_type}, and cannot process {msg}.', level='error')
# Continue processing
@@ -252,7 +241,7 @@ async def reconcile_payments(msg: Dict[str, any]):
db.session.commit()
# Create payment records for lines other than PAD
- await _create_payment_records(content)
+ _create_payment_records(content)
# Create Credit Records.
_create_credit_records(content)
@@ -263,41 +252,43 @@ async def reconcile_payments(msg: Dict[str, any]):
cas_settlement.save()
-async def _process_consolidated_invoices(row):
+def _process_consolidated_invoices(row):
target_txn_status = _get_row_value(row, Column.TARGET_TXN_STATUS)
if (target_txn := _get_row_value(row, Column.TARGET_TXN)) == TargetTransaction.INV.value:
inv_number = _get_row_value(row, Column.TARGET_TXN_NO)
record_type = _get_row_value(row, Column.RECORD_TYPE)
- logger.debug('Processing invoice : %s', inv_number)
+ current_app.logger.debug('Processing invoice : %s', inv_number)
inv_references = _find_invoice_reference_by_number_and_status(inv_number, InvoiceReferenceStatus.ACTIVE.value)
payment_account: PaymentAccountModel = _get_payment_account(row)
if target_txn_status.lower() == Status.PAID.value.lower():
- logger.debug('Fully PAID payment.')
+ current_app.logger.debug('Fully PAID payment.')
# if no inv reference is found, and if there are no COMPLETED inv ref, raise alert
completed_inv_references = _find_invoice_reference_by_number_and_status(
inv_number, InvoiceReferenceStatus.COMPLETED.value
)
if not inv_references and not completed_inv_references:
- logger.error('No invoice found for %s in the system, and cannot process %s.', inv_number, row)
+ current_app.logger.error('No invoice found for %s in the system, and cannot process %s.',
+ inv_number, row)
capture_message(f'No invoice found for {inv_number} in the system, and cannot process {row}.',
level='error')
return
- await _process_paid_invoices(inv_references, row)
+ _process_paid_invoices(inv_references, row)
if not APP_CONFIG.DISABLE_PAD_SUCCESS_EMAIL:
- await _publish_mailer_events('PAD.PaymentSuccess', payment_account, row)
+ _publish_mailer_events(MessageType.PAD_PAYMENT_SUCCESS.value, payment_account, row)
elif target_txn_status.lower() == Status.NOT_PAID.value.lower() \
or record_type in (RecordType.PADR.value, RecordType.PAYR.value):
- logger.info('NOT PAID. NSF identified.')
+ current_app.logger.info('NOT PAID. NSF identified.')
# NSF Condition. Publish to account events for NSF.
if _process_failed_payments(row):
# Send mailer and account events to update status and send email notification
- await _publish_account_events('lockAccount', payment_account, row)
+ _publish_account_events(MessageType.NSF_LOCK_ACCOUNT.value, payment_account, row)
else:
- logger.error('Target Transaction Type is received as %s for PAD, and cannot process %s.', target_txn, row)
+ current_app.logger.error('Target Transaction Type is received as %s for PAD, and cannot process %s.',
+ target_txn, row)
capture_message(
f'Target Transaction Type is received as {target_txn} for PAD, and cannot process.', level='error')
@@ -310,7 +301,7 @@ def _find_invoice_reference_by_number_and_status(inv_number: str, status: str):
return inv_references
-async def _process_unconsolidated_invoices(row):
+def _process_unconsolidated_invoices(row):
target_txn_status = _get_row_value(row, Column.TARGET_TXN_STATUS)
record_type = _get_row_value(row, Column.RECORD_TYPE)
if (target_txn := _get_row_value(row, Column.TARGET_TXN)) == TargetTransaction.INV.value:
@@ -328,37 +319,37 @@ async def _process_unconsolidated_invoices(row):
filter(InvoiceReferenceModel.status_code == InvoiceReferenceStatus.COMPLETED.value). \
filter(InvoiceReferenceModel.invoice_number == inv_number). \
all()
- logger.info('Found %s completed invoice references for invoice number %s', len(completed_inv_references),
- inv_number)
+ current_app.logger.info('Found %s completed invoice references for invoice number %s',
+ len(completed_inv_references), inv_number)
if len(completed_inv_references) != 1:
- logger.error('More than one or none invoice reference received for invoice number %s for %s',
- inv_number, record_type)
+ current_app.logger.error('More than one or none invoice reference received '
+ 'for invoice number %s for %s', inv_number, record_type)
capture_message(
f'More than one or none invoice reference received for invoice number {inv_number} for '
f'{record_type}', level='error')
else:
# Handle fully PAID and Partially Paid scenarios.
if target_txn_status.lower() == Status.PAID.value.lower():
- logger.debug('Fully PAID payment.')
- await _process_paid_invoices(inv_references, row)
+ current_app.logger.debug('Fully PAID payment.')
+ _process_paid_invoices(inv_references, row)
elif target_txn_status.lower() == Status.PARTIAL.value.lower():
- logger.info('Partially PAID.')
+ current_app.logger.info('Partially PAID.')
# As per validation above, get first and only inv ref
_process_partial_paid_invoices(inv_references[0], row)
else:
- logger.error('Target Transaction Type is received as %s for %s, and cannot process.',
- target_txn, record_type)
+ current_app.logger.error('Target Transaction Type is received as %s for %s, and cannot process.',
+ target_txn, record_type)
capture_message(
f'Target Transaction Type is received as {target_txn} for {record_type}, and cannot process.',
level='error')
-async def _process_credit_on_invoices(row):
+def _process_credit_on_invoices(row):
# Credit memo can happen for any type of accounts.
target_txn_status = _get_row_value(row, Column.TARGET_TXN_STATUS)
if _get_row_value(row, Column.TARGET_TXN) == TargetTransaction.INV.value:
inv_number = _get_row_value(row, Column.TARGET_TXN_NO)
- logger.debug('Processing invoice : %s', inv_number)
+ current_app.logger.debug('Processing invoice : %s', inv_number)
inv_references: List[InvoiceReferenceModel] = db.session.query(InvoiceReferenceModel). \
filter(InvoiceReferenceModel.status_code == InvoiceReferenceStatus.ACTIVE.value). \
@@ -366,18 +357,20 @@ async def _process_credit_on_invoices(row):
all()
if target_txn_status.lower() == Status.PAID.value.lower():
- logger.debug('Fully PAID payment.')
- await _process_paid_invoices(inv_references, row)
+ current_app.logger.debug('Fully PAID payment.')
+ _process_paid_invoices(inv_references, row)
elif target_txn_status.lower() == Status.PARTIAL.value.lower():
- logger.info('Partially PAID using credit memo. Ignoring as the credit memo payment is already captured.')
+ current_app.logger.info('Partially PAID using credit memo. '
+ 'Ignoring as the credit memo payment is already captured.')
else:
- logger.error('Target Transaction status is received as %s for CMAP, and cannot process.', target_txn_status)
+ current_app.logger.error('Target Transaction status is received as %s for CMAP, and cannot process.',
+ target_txn_status)
capture_message(
f'Target Transaction status is received as {target_txn_status} for CMAP, and cannot process.',
level='error')
-async def _process_paid_invoices(inv_references, row):
+def _process_paid_invoices(inv_references, row):
"""Process PAID invoices.
Update invoices as PAID
@@ -388,7 +381,7 @@ async def _process_paid_invoices(inv_references, row):
for inv_ref in inv_references:
invoice: InvoiceModel = InvoiceModel.find_by_id(inv_ref.invoice_id)
if invoice.payment_method_code == PaymentMethod.CC.value:
- logger.info('Cannot mark CC invoices as PAID.')
+ current_app.logger.info('Cannot mark CC invoices as PAID.')
return
receipt_date: datetime = datetime.strptime(_get_row_value(row, Column.APP_DATE), '%d-%b-%y')
@@ -398,7 +391,8 @@ async def _process_paid_invoices(inv_references, row):
# Find invoice, update status
inv: InvoiceModel = InvoiceModel.find_by_id(inv_ref.invoice_id)
_validate_account(inv, row)
- logger.debug('PAID Invoice. Invoice Reference ID : %s, invoice ID : %s', inv_ref.id, inv_ref.invoice_id)
+ current_app.logger.debug('PAID Invoice. Invoice Reference ID : %s, invoice ID : %s',
+ inv_ref.id, inv_ref.invoice_id)
inv.invoice_status_code = InvoiceStatus.PAID.value
inv.payment_date = receipt_date
@@ -412,8 +406,8 @@ async def _process_paid_invoices(inv_references, row):
db.session.add(receipt)
# Publish to the queue if it's an Online Banking payment
if inv.payment_method_code == PaymentMethod.ONLINE_BANKING.value:
- logger.debug('Publishing payment event for OB. Invoice : %s', inv.id)
- await _publish_payment_event(inv)
+ current_app.logger.debug('Publishing payment event for OB. Invoice : %s', inv.id)
+ _publish_payment_event(inv)
def _process_partial_paid_invoices(inv_ref: InvoiceReferenceModel, row):
@@ -428,7 +422,8 @@ def _process_partial_paid_invoices(inv_ref: InvoiceReferenceModel, row):
inv: InvoiceModel = InvoiceModel.find_by_id(inv_ref.invoice_id)
_validate_account(inv, row)
- logger.debug('Partial Invoice. Invoice Reference ID : %s, invoice ID : %s', inv_ref.id, inv_ref.invoice_id)
+ current_app.logger.debug('Partial Invoice. Invoice Reference ID : %s, invoice ID : %s',
+ inv_ref.id, inv_ref.invoice_id)
inv.invoice_status_code = InvoiceStatus.PARTIAL.value
inv.paid = inv.total - Decimal(_get_row_value(row, Column.TARGET_TXN_OUTSTANDING))
# Create Receipt records
@@ -457,22 +452,22 @@ def _process_failed_payments(row):
inv_number, PaymentStatus.FAILED.value
)
if payment:
- logger.info('Ignoring duplicate NSF message for invoice : %s ', inv_number)
+ current_app.logger.info('Ignoring duplicate NSF message for invoice : %s ', inv_number)
return False
# If there is an NSF row, it means it's a duplicate NSF event. Ignore it.
if NonSufficientFundsService.exists_for_invoice_number(inv_number):
- logger.info('Ignoring duplicate NSF event for account: %s ', payment_account.auth_account_id)
+ current_app.logger.info('Ignoring duplicate NSF event for account: %s ', payment_account.auth_account_id)
return False
# Set CFS Account Status.
cfs_account: CfsAccountModel = CfsAccountModel.find_effective_by_account_id(payment_account.id)
is_already_frozen = cfs_account.status == CfsAccountStatus.FREEZE.value
- logger.info('setting payment account id : %s status as FREEZE', payment_account.id)
+ current_app.logger.info('setting payment account id : %s status as FREEZE', payment_account.id)
cfs_account.status = CfsAccountStatus.FREEZE.value
# Call CFS to stop any further PAD transactions on this account.
CFSService.suspend_cfs_account(cfs_account)
if is_already_frozen:
- logger.info('Ignoring NSF message for invoice : %s as the account is already FREEZE', inv_number)
+ current_app.logger.info('Ignoring NSF message for invoice : %s as the account is already FREEZE', inv_number)
return False
# Find the invoice_reference for this invoice and mark it as ACTIVE.
inv_references: List[InvoiceReferenceModel] = db.session.query(InvoiceReferenceModel). \
@@ -529,7 +524,7 @@ def _sync_credit_records():
# 3. If it's credit memo, call credit memo endpoint and calculate balance.
# 4. Roll up the credits to credit field in payment_account.
active_credits: List[CreditModel] = db.session.query(CreditModel).filter(CreditModel.remaining_amount > 0).all()
- logger.info('Found %s credit records', len(active_credits))
+ current_app.logger.info('Found %s credit records', len(active_credits))
account_ids: List[int] = []
for credit in active_credits:
cfs_account: CfsAccountModel = CfsAccountModel.find_effective_by_account_id(credit.account_id)
@@ -582,45 +577,56 @@ def _validate_account(inv: InvoiceModel, row: Dict[str, str]):
# This should never happen, just in case
cfs_account: CfsAccountModel = CfsAccountModel.find_by_id(inv.cfs_account_id)
if (account_number := _get_row_value(row, Column.CUSTOMER_ACC)) != cfs_account.cfs_account:
- logger.error('Customer Account received as %s, but expected %s.', account_number, cfs_account.cfs_account)
+ current_app.logger.error('Customer Account received as %s, but expected %s.',
+ account_number, cfs_account.cfs_account)
capture_message(f'Customer Account received as {account_number}, but expected {cfs_account.cfs_account}.',
level='error')
raise Exception('Invalid Account Number') # pylint: disable=broad-exception-raised
-async def _publish_payment_event(inv: InvoiceModel):
+def _publish_payment_event(inv: InvoiceModel):
"""Publish payment message to the queue."""
- payment_event_payload = PaymentTransactionService.create_event_payload(invoice=inv,
- status_code=PaymentStatus.COMPLETED.value)
+ payload = PaymentTransactionService.create_event_payload(invoice=inv,
+ status_code=PaymentStatus.COMPLETED.value)
try:
-
- await publish(payload=payment_event_payload, client_name=APP_CONFIG.NATS_PAYMENT_CLIENT_NAME,
- subject=get_pay_subject_name(inv.corp_type_code, subject_format=APP_CONFIG.NATS_PAYMENT_SUBJECT))
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_QUEUE.value,
+ message_type=MessageType.PAYMENT.value,
+ payload=payload,
+ topic=get_topic_for_corp_type(inv.corp_type_code)
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
- logger.error(e)
- logger.warning('Notification to Queue failed for the Payment Event - %s', payment_event_payload)
- capture_message(f'Notification to Queue failed for the Payment Event {payment_event_payload}.',
+ current_app.logger.error(e)
+ current_app.logger.warning('Notification to Queue failed for the Payment Event - %s', payload)
+ capture_message(f'Notification to Queue failed for the Payment Event {payload}.',
level='error')
-async def _publish_mailer_events(message_type: str, pay_account: PaymentAccountModel, row: Dict[str, str]):
+def _publish_mailer_events(message_type: str, pay_account: PaymentAccountModel, row: Dict[str, str]):
"""Publish payment message to the mailer queue."""
# Publish message to the Queue, saying account has been created. Using the event spec.
- payload = _create_event_payload(message_type, pay_account, row)
+ payload = _create_event_payload(pay_account, row)
try:
- await publish(payload=payload,
- client_name=APP_CONFIG.NATS_MAILER_CLIENT_NAME,
- subject=APP_CONFIG.NATS_MAILER_SUBJECT)
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_QUEUE.value,
+ message_type=message_type,
+ payload=payload,
+ topic=current_app.config.get('ACCOUNT_MAILER_TOPIC')
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
- logger.error(e)
- logger.warning('Notification to Queue failed for the Account Mailer %s - %s', pay_account.auth_account_id,
- payload)
+ current_app.logger.error(e)
+ current_app.logger.warning('Notification to Queue failed for the Account Mailer %s - %s',
+ pay_account.auth_account_id, payload)
capture_message('Notification to Queue failed for the Account Mailer {auth_account_id}, {msg}.'.format(
auth_account_id=pay_account.auth_account_id, msg=payload), level='error')
-async def _publish_online_banking_mailer_events(rows: List[Dict[str, str]], paid_amount: float):
+def _publish_online_banking_mailer_events(rows: List[Dict[str, str]], paid_amount: float):
"""Publish payment message to the mailer queue."""
# Publish message to the Queue, saying account has been created. Using the event spec.
pay_account = _get_payment_account(rows[0]) # All rows are for same account.
@@ -640,70 +646,60 @@ async def _publish_online_banking_mailer_events(rows: List[Dict[str, str]], paid
else:
message_type = 'bc.registry.payment.Payment'
- queue_data = {
+ payload = {
'accountId': pay_account.auth_account_id,
'paymentMethod': PaymentMethod.ONLINE_BANKING.value,
'amount': '{:.2f}'.format(paid_amount), # pylint: disable = consider-using-f-string
'creditAmount': '{:.2f}'.format(credit_amount) # pylint: disable = consider-using-f-string
}
- payload = {
- 'specversion': '1.x-wip',
- 'type': message_type,
- 'source': f'https://api.pay.bcregistry.gov.bc.ca/v1/accounts/{pay_account.auth_account_id}',
- 'id': f'{pay_account.auth_account_id}',
- 'time': f'{datetime.now()}',
- 'datacontenttype': 'application/json',
- 'data': queue_data
- }
-
try:
- await publish(payload=payload,
- client_name=APP_CONFIG.NATS_MAILER_CLIENT_NAME,
- subject=APP_CONFIG.NATS_MAILER_SUBJECT)
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_QUEUE.value,
+ message_type=message_type,
+ payload=payload,
+ topic=current_app.config.get('ACCOUNT_MAILER_TOPIC')
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
- logger.error(e)
- logger.warning('Notification to Queue failed for the Account Mailer %s - %s', pay_account.auth_account_id,
- payload)
- capture_message('Notification to Queue failed for the Account Mailer {auth_account_id}, {msg}.'.format(
- auth_account_id=pay_account.auth_account_id, msg=payload), level='error')
+ current_app.logger.error(e)
+ current_app.logger.warning('Notification to Queue failed for the Account Mailer %s - %s',
+ pay_account.auth_account_id, payload)
+ capture_message('Notification to Queue failed for the Account Mailer '
+ '{auth_account_id}, {msg}.'.format(auth_account_id=pay_account.auth_account_id, msg=payload),
+ level='error')
-async def _publish_account_events(message_type: str, pay_account: PaymentAccountModel, row: Dict[str, str]):
+def _publish_account_events(message_type: str, pay_account: PaymentAccountModel, row: Dict[str, str]):
"""Publish payment message to the mailer queue."""
# Publish message to the Queue, saying account has been created. Using the event spec.
- payload = _create_event_payload(message_type, pay_account, row)
-
+ payload = _create_event_payload(pay_account, row)
try:
- await publish(payload=payload,
- client_name=APP_CONFIG.NATS_ACCOUNT_CLIENT_NAME,
- subject=APP_CONFIG.NATS_ACCOUNT_SUBJECT)
+ gcp_queue_publisher.publish_to_queue(
+ QueueMessage(
+ source=QueueSources.PAY_QUEUE.value,
+ message_type=message_type,
+ payload=payload,
+ topic=current_app.config.get('AUTH_QUEUE_TOPIC')
+ )
+ )
except Exception as e: # NOQA pylint: disable=broad-except
- logger.error(e)
- logger.warning('Notification to Queue failed for the Account %s - %s', pay_account.auth_account_id,
- pay_account.name)
+ current_app.logger.error(e)
+ current_app.logger.warning('Notification to Queue failed for the Account %s - %s', pay_account.auth_account_id,
+ pay_account.name)
capture_message('Notification to Queue failed for the Account {auth_account_id}, {msg}.'.format(
auth_account_id=pay_account.auth_account_id, msg=payload), level='error')
-def _create_event_payload(message_type, pay_account, row):
- queue_data = {
+def _create_event_payload(pay_account, row):
+ return {
'accountId': pay_account.auth_account_id,
'paymentMethod': _convert_payment_method(_get_row_value(row, Column.SOURCE_TXN)),
'outstandingAmount': _get_row_value(row, Column.TARGET_TXN_OUTSTANDING),
'originalAmount': _get_row_value(row, Column.TARGET_TXN_ORIGINAL),
'amount': _get_row_value(row, Column.APP_AMOUNT)
}
- payload = {
- 'specversion': '1.x-wip',
- 'type': f'bc.registry.payment.{message_type}',
- 'source': f'https://api.pay.bcregistry.gov.bc.ca/v1/accounts/{pay_account.auth_account_id}',
- 'id': f'{pay_account.auth_account_id}',
- 'time': f'{datetime.now()}',
- 'datacontenttype': 'application/json',
- 'data': queue_data
- }
- return payload
def _convert_payment_method(cfs_method: str) -> str:
@@ -763,7 +759,7 @@ def _create_nsf_invoice(cfs_account: CfsAccountModel, inv_number: str,
future_effective_fees=0,
line_item_status_code=LineItemStatus.ACTIVE.value,
service_fees=0,
- fee_distribution_id=distribution.distribution_code_id if distribution else 1) # TODO
+ fee_distribution_id=distribution.distribution_code_id if distribution else 1)
line_item.save()
inv_ref: InvoiceReferenceModel = InvoiceReferenceModel(
@@ -782,7 +778,6 @@ def _get_settlement_type(payment_lines) -> str:
"""Exclude ONAC, ADJS, PAYR, ONAP and return the record type."""
settlement_type: str = None
for row in payment_lines:
- # TODO Add BC Online Drawdown record type.
if _get_row_value(row, Column.RECORD_TYPE) in \
(RecordType.BOLP.value, RecordType.EFTP.value, RecordType.PAD.value, RecordType.PADR.value,
RecordType.PAYR.value):
diff --git a/queue_services/payment-reconciliations/src/reconciliations/version.py b/pay-queue/src/pay_queue/version.py
similarity index 93%
rename from queue_services/payment-reconciliations/src/reconciliations/version.py
rename to pay-queue/src/pay_queue/version.py
index 071e98e87..19ea931f0 100644
--- a/queue_services/payment-reconciliations/src/reconciliations/version.py
+++ b/pay-queue/src/pay_queue/version.py
@@ -22,4 +22,4 @@
Development release segment: .devN
"""
-__version__ = '1.1.3' # pylint: disable=invalid-name
+__version__ = '2.0.0' # pylint: disable=invalid-name
diff --git a/queue_services/events-listener/tests/__init__.py b/pay-queue/tests/__init__.py
similarity index 100%
rename from queue_services/events-listener/tests/__init__.py
rename to pay-queue/tests/__init__.py
diff --git a/pay-queue/tests/conftest.py b/pay-queue/tests/conftest.py
new file mode 100644
index 000000000..56771964a
--- /dev/null
+++ b/pay-queue/tests/conftest.py
@@ -0,0 +1,115 @@
+# Copyright © 2019 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Common setup and fixtures for the pytest suite used by this service."""
+import sys
+
+import pytest
+from flask_migrate import Migrate, upgrade
+from pay_api import db as _db
+from sqlalchemy import event, text
+from sqlalchemy_utils import create_database, database_exists, drop_database
+
+from pay_queue import create_app
+
+
+@pytest.fixture(scope='session', autouse=True)
+def app():
+ """Return a session-wide application configured in TEST mode."""
+ _app = create_app('testing')
+ return _app
+
+
+@pytest.fixture(scope='session', autouse=True)
+def db(app): # pylint: disable=redefined-outer-name, invalid-name
+ """Return a session-wide initialised database."""
+ with app.app_context():
+ if database_exists(_db.engine.url):
+ drop_database(_db.engine.url)
+ create_database(_db.engine.url)
+ _db.session().execute(text('SET TIME ZONE "UTC";'))
+ migrations_path = [folder for folder in sys.path if 'pay-api/pay-api' in folder]
+ if len(migrations_path) > 0:
+ migrations_path = migrations_path[0].replace('/pay-api/src', '/pay-api/migrations')
+ Migrate(app, _db, directory=migrations_path)
+ upgrade()
+ return _db
+
+
+@pytest.fixture
+def config(app):
+ """Return the application config."""
+ return app.config
+
+
+@pytest.fixture(scope='session')
+def client(app): # pylint: disable=redefined-outer-name
+ """Return a session-wide Flask test client."""
+ return app.test_client()
+
+
+@pytest.fixture(scope='function', autouse=True)
+def session(db, app): # pylint: disable=redefined-outer-name, invalid-name
+ """Return a function-scoped session."""
+ with app.app_context():
+ with db.engine.connect() as conn:
+ transaction = conn.begin()
+ sess = db._make_scoped_session(dict(bind=conn)) # pylint: disable=protected-access
+ # Establish SAVEPOINT (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint)
+ nested = sess.begin_nested()
+ db.session = sess
+ db.session.commit = nested.commit
+ db.session.rollback = nested.rollback
+
+ @event.listens_for(sess, 'after_transaction_end')
+ def restart_savepoint(sess2, trans): # pylint: disable=unused-variable
+ nonlocal nested
+ if trans.nested:
+ # Handle where test DOESN'T session.commit()
+ sess2.expire_all()
+ nested = sess.begin_nested()
+ # When using a SAVEPOINT via the Session.begin_nested() or Connection.begin_nested() methods,
+ # the transaction object returned must be used to commit or rollback the SAVEPOINT.
+ # Calling the Session.commit() or Connection.commit() methods will always commit the
+ # outermost transaction; this is a SQLAlchemy 2.0 specific behavior that is
+ # reversed from the 1.x series
+ db.session = sess
+ db.session.commit = nested.commit
+ db.session.rollback = nested.rollback
+
+ try:
+ yield db.session
+ finally:
+ db.session.remove()
+ transaction.rollback()
+
+
+@pytest.fixture(scope='session', autouse=True)
+def auto(docker_services, app):
+ """Spin up docker containers."""
+ if app.config['USE_DOCKER_MOCK']:
+ docker_services.start('minio')
+ docker_services.start('proxy')
+ docker_services.start('paybc')
+
+
+@pytest.fixture()
+def mock_publish(monkeypatch):
+ """Mock check_auth."""
+ monkeypatch.setattr('pay_api.services.gcp_queue_publisher.publish_to_queue', lambda *args, **kwargs: None)
+
+
+@pytest.fixture(autouse=True)
+def mock_queue_auth(mocker):
+ """Mock queue authorization."""
+ mocker.patch('pay_queue.external.gcp_auth.verify_jwt', return_value='')
diff --git a/queue_services/payment-reconciliations/tests/docker-compose.yml b/pay-queue/tests/docker-compose.yml
similarity index 71%
rename from queue_services/payment-reconciliations/tests/docker-compose.yml
rename to pay-queue/tests/docker-compose.yml
index 80a5de1b1..5d5bca67a 100644
--- a/queue_services/payment-reconciliations/tests/docker-compose.yml
+++ b/pay-queue/tests/docker-compose.yml
@@ -1,18 +1,5 @@
version: '2.1'
services:
- nats:
- image: nats-streaming
- restart: always
- mem_limit: 512m
- expose:
- - 4222
- - 8222
- labels:
- - entity.services=nats
- ports:
- - 4222:4222
- - 8222:8222
- tty: true
minio:
image: 'bitnami/minio:2022.4.26'
ports:
diff --git a/queue_services/payment-reconciliations/tests/integration/__init__.py b/pay-queue/tests/integration/__init__.py
similarity index 97%
rename from queue_services/payment-reconciliations/tests/integration/__init__.py
rename to pay-queue/tests/integration/__init__.py
index 16d8c866a..44812fe3c 100644
--- a/queue_services/payment-reconciliations/tests/integration/__init__.py
+++ b/pay-queue/tests/integration/__init__.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Test suite for the integrations to NATS Queue."""
+"""Test suite for payment reconciliation integration."""
from datetime import datetime
@@ -53,7 +53,6 @@ def factory_payment(
payment_system_code=payment_system_code,
payment_method_code=payment_method_code,
payment_status_code=payment_status_code,
- created_on=created_on,
invoice_number=invoice_number
).save()
diff --git a/queue_services/payment-reconciliations/tests/integration/factory.py b/pay-queue/tests/integration/factory.py
similarity index 100%
rename from queue_services/payment-reconciliations/tests/integration/factory.py
rename to pay-queue/tests/integration/factory.py
diff --git a/queue_services/payment-reconciliations/tests/integration/test_cgi_reconciliations.py b/pay-queue/tests/integration/test_cgi_reconciliations.py
similarity index 89%
rename from queue_services/payment-reconciliations/tests/integration/test_cgi_reconciliations.py
rename to pay-queue/tests/integration/test_cgi_reconciliations.py
index b26621fe0..1e7ccda87 100644
--- a/queue_services/payment-reconciliations/tests/integration/test_cgi_reconciliations.py
+++ b/pay-queue/tests/integration/test_cgi_reconciliations.py
@@ -19,9 +19,6 @@
from datetime import datetime
-import pytest
-from entity_queue_common.service_utils import subscribe_to_queue
-from flask import current_app
from pay_api.models import DistributionCode as DistributionCodeModel
from pay_api.models import EjvFile as EjvFileModel
from pay_api.models import EjvHeader as EjvHeaderModel
@@ -37,30 +34,19 @@
from pay_api.models import RoutingSlip as RoutingSlipModel
from pay_api.models import db
from pay_api.utils.enums import (
- CfsAccountStatus, DisbursementStatus, EjvFileType, InvoiceReferenceStatus, InvoiceStatus, PaymentMethod,
- PaymentStatus, RoutingSlipStatus)
+ CfsAccountStatus, DisbursementStatus, EjvFileType, InvoiceReferenceStatus, InvoiceStatus, MessageType,
+ PaymentMethod, PaymentStatus, RoutingSlipStatus)
+
+from tests.integration.utils import helper_add_file_event_to_queue
from .factory import (
factory_create_ejv_account, factory_create_pad_account, factory_distribution, factory_invoice,
factory_invoice_reference, factory_payment_line_item, factory_refund, factory_routing_slip_account)
-from .utils import helper_add_ejv_event_to_queue, upload_to_minio
+from .utils import upload_to_minio
-@pytest.mark.asyncio
-async def test_successful_partner_ejv_reconciliations(session, app, stan_server, event_loop, client_id, events_stan,
- future, mock_publish):
+def test_successful_partner_ejv_reconciliations(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -121,7 +107,7 @@ async def test_successful_partner_ejv_reconciliations(session, app, stan_server,
# Now upload the ACK file to minio and publish message.
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -162,9 +148,8 @@ async def test_successful_partner_ejv_reconciliations(session, app, stan_server,
# Now upload the ACK file to minio and publish message.
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- # upload_to_minio(file_name=feedback_file_name, value_as_bytes=feedback_content.encode())
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, feedback_file_name, MessageType.CGI_FEEDBACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -173,21 +158,8 @@ async def test_successful_partner_ejv_reconciliations(session, app, stan_server,
assert invoice.disbursement_status_code == DisbursementStatus.COMPLETED.value
-@pytest.mark.asyncio
-async def test_failed_partner_ejv_reconciliations(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_failed_partner_ejv_reconciliations(client, mock_publish):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -249,7 +221,7 @@ async def test_failed_partner_ejv_reconciliations(session, app, stan_server, eve
# Now upload the ACK file to minio and publish message.
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -290,9 +262,8 @@ async def test_failed_partner_ejv_reconciliations(session, app, stan_server, eve
# Now upload the ACK file to minio and publish message.
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- # upload_to_minio(file_name=feedback_file_name, value_as_bytes=feedback_content.encode())
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, feedback_file_name, MessageType.CGI_FEEDBACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -303,21 +274,8 @@ async def test_failed_partner_ejv_reconciliations(session, app, stan_server, eve
assert disbursement_distribution_code.stop_ejv
-@pytest.mark.asyncio
-async def test_successful_partner_reversal_ejv_reconciliations(session, app, stan_server, event_loop, client_id,
- events_stan, future, mock_publish):
+def test_successful_partner_reversal_ejv_reconciliations(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -381,7 +339,7 @@ async def test_successful_partner_reversal_ejv_reconciliations(session, app, sta
# Now upload the ACK file to minio and publish message.
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -422,9 +380,8 @@ async def test_successful_partner_reversal_ejv_reconciliations(session, app, sta
# Now upload the ACK file to minio and publish message.
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- # upload_to_minio(file_name=feedback_file_name, value_as_bytes=feedback_content.encode())
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, feedback_file_name, MessageType.CGI_FEEDBACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -434,21 +391,8 @@ async def test_successful_partner_reversal_ejv_reconciliations(session, app, sta
assert invoice.disbursement_date == datetime(2023, 5, 29)
-@pytest.mark.asyncio
-async def test_succesful_payment_ejv_reconciliations(session, app, stan_server, event_loop, client_id, events_stan,
- future, mock_publish):
+def test_succesful_payment_ejv_reconciliations(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create EJV payment accounts
# 2. Create invoice and related records
# 3. Create a feedback file and assert status
@@ -563,7 +507,7 @@ async def test_succesful_payment_ejv_reconciliations(session, app, stan_server,
# Now upload the ACK file to minio and publish message.
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -579,7 +523,7 @@ async def test_succesful_payment_ejv_reconciliations(session, app, stan_server,
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, feedback_file_name, MessageType.CGI_FEEDBACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -607,21 +551,8 @@ async def test_succesful_payment_ejv_reconciliations(session, app, stan_server,
assert payment[0][0].paid_amount == inv_total_amount
-@pytest.mark.asyncio
-async def test_succesful_payment_reversal_ejv_reconciliations(session, app, stan_server, event_loop, client_id,
- events_stan, future, mock_publish):
+def test_succesful_payment_reversal_ejv_reconciliations(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create EJV payment accounts
# 2. Create invoice and related records
# 3. Create a feedback file and assert status
@@ -733,7 +664,7 @@ async def test_succesful_payment_reversal_ejv_reconciliations(session, app, stan
# Now upload the ACK file to minio and publish message.
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -749,7 +680,7 @@ async def test_succesful_payment_reversal_ejv_reconciliations(session, app, stan
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, feedback_file_name, MessageType.CGI_FEEDBACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -775,21 +706,8 @@ async def test_succesful_payment_reversal_ejv_reconciliations(session, app, stan
assert payment[0][0].paid_amount == inv_total_amount
-@pytest.mark.asyncio
-async def test_successful_refund_reconciliations(
- session, app, stan_server, event_loop, client_id, events_stan, future, mock_publish
-):
+def test_successful_refund_reconciliations(client, mock_publish):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create a routing slip.
# 2. Mark the routing slip for refund.
# 3. Create a AP reconciliation file.
@@ -836,7 +754,7 @@ async def test_successful_refund_reconciliations(
# Now upload the ACK file to minio and publish message.
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -911,7 +829,7 @@ async def test_successful_refund_reconciliations(
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, feedback_file_name, MessageType.CGI_FEEDBACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -921,21 +839,8 @@ async def test_successful_refund_reconciliations(
assert routing_slip.status == RoutingSlipStatus.REFUND_COMPLETED.value
-@pytest.mark.asyncio
-async def test_failed_refund_reconciliations(
- session, app, stan_server, event_loop, client_id, events_stan, future, mock_publish
-):
+def test_failed_refund_reconciliations(client, mock_publish):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create a routing slip.
# 2. Mark the routing slip for refund.
# 3. Create a AP reconciliation file.
@@ -982,7 +887,7 @@ async def test_failed_refund_reconciliations(
# Now upload the ACK file to minio and publish message.
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -1058,7 +963,7 @@ async def test_failed_refund_reconciliations(
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, feedback_file_name, MessageType.CGI_FEEDBACK_RECEIVED.value)
# Query EJV File and assert the status is changed
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
@@ -1070,21 +975,8 @@ async def test_failed_refund_reconciliations(
assert routing_slip_2.status == RoutingSlipStatus.REFUND_REJECTED.value
-@pytest.mark.asyncio
-async def test_prevent_duplicate_ack(
- session, app, stan_server, event_loop, client_id, events_stan, future, mock_publish
-):
+def test_prevent_duplicate_ack(client, mock_publish):
"""Assert processing completes when existing ack."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
file_ref = f'INBOX.{datetime.now()}'
# Upload an acknowledgement file
ack_file_name = f'ACK.{file_ref}'
@@ -1097,30 +989,19 @@ async def test_prevent_duplicate_ack(
jv_file.write('')
jv_file.close()
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
assert ejv.ack_file_ref == ack_file_name
assert ejv.disbursement_status_code == DisbursementStatus.ACKNOWLEDGED.value
# Nothing should change, because it's already processed this ACK.
ejv.disbursement_status_code = DisbursementStatus.UPLOADED.value
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
assert ejv.ack_file_ref == ack_file_name
assert ejv.disbursement_status_code == DisbursementStatus.UPLOADED.value
-@pytest.mark.asyncio
-async def test_successful_ap_disbursement(
- session, app, stan_server, event_loop, client_id, events_stan, future, mock_publish
-):
+def test_successful_ap_disbursement(client, mock_publish):
"""Test Reconciliations worker for ap disbursement."""
- from reconciliations.worker import cb_subscription_handler
-
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create invoice.
# 2. Create a AP reconciliation file.
# 3. Assert the status.
@@ -1178,7 +1059,7 @@ async def test_successful_ap_disbursement(
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
assert ejv_file.disbursement_status_code == DisbursementStatus.ACKNOWLEDGED.value
@@ -1251,7 +1132,8 @@ async def test_successful_ap_disbursement(
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, file_name=feedback_file_name,
+ message_type=MessageType.CGI_FEEDBACK_RECEIVED.value)
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
assert ejv_file.disbursement_status_code == DisbursementStatus.COMPLETED.value
@@ -1267,19 +1149,8 @@ async def test_successful_ap_disbursement(
assert refund.gl_posted is not None
-@pytest.mark.asyncio
-async def test_failure_ap_disbursement(
- session, app, stan_server, event_loop, client_id, events_stan, future, mock_publish
-):
+def test_failure_ap_disbursement(client, mock_publish):
"""Test Reconciliations worker for ap disbursement."""
- from reconciliations.worker import cb_subscription_handler
-
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create invoice.
# 2. Create a AP reconciliation file.
# 3. Assert the status.
@@ -1335,7 +1206,7 @@ async def test_failure_ap_disbursement(
upload_to_minio(file_name=ack_file_name, value_as_bytes=str.encode(''))
- await helper_add_ejv_event_to_queue(events_stan, file_name=ack_file_name)
+ helper_add_file_event_to_queue(client, file_name=ack_file_name, message_type=MessageType.CGI_ACK_RECEIVED.value)
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
assert ejv_file.disbursement_status_code == DisbursementStatus.ACKNOWLEDGED.value
@@ -1411,7 +1282,7 @@ async def test_failure_ap_disbursement(
with open(feedback_file_name, 'rb') as f:
upload_to_minio(f.read(), feedback_file_name)
- await helper_add_ejv_event_to_queue(events_stan, file_name=feedback_file_name, message_type='FEEDBACKReceived')
+ helper_add_file_event_to_queue(client, feedback_file_name, MessageType.CGI_FEEDBACK_RECEIVED.value)
ejv_file = EjvFileModel.find_by_id(ejv_file_id)
assert ejv_file.disbursement_status_code == DisbursementStatus.COMPLETED.value
diff --git a/queue_services/payment-reconciliations/tests/integration/test_eft_reconciliation.py b/pay-queue/tests/integration/test_eft_reconciliation.py
similarity index 70%
rename from queue_services/payment-reconciliations/tests/integration/test_eft_reconciliation.py
rename to pay-queue/tests/integration/test_eft_reconciliation.py
index 5ae62ac3e..3c44cf62d 100644
--- a/queue_services/payment-reconciliations/tests/integration/test_eft_reconciliation.py
+++ b/pay-queue/tests/integration/test_eft_reconciliation.py
@@ -19,9 +19,6 @@
from datetime import datetime
from typing import List
-import pytest
-from entity_queue_common.service_utils import subscribe_to_queue
-from flask import current_app
from pay_api import db
from pay_api.models import EFTCredit as EFTCreditModel
from pay_api.models import EFTCreditInvoiceLink as EFTCreditInvoiceLinkModel
@@ -29,32 +26,17 @@
from pay_api.models import EFTShortnames as EFTShortnameModel
from pay_api.models import EFTTransaction as EFTTransactionModel
from pay_api.models import Invoice as InvoiceModel
-from pay_api.models import InvoiceReference as InvoiceReferenceModel
-from pay_api.models import Payment as PaymentModel
from pay_api.models import PaymentAccount as PaymentAccountModel
-from pay_api.models import Receipt as ReceiptModel
-from pay_api.utils.enums import (
- EFTFileLineType, EFTProcessStatus, InvoiceReferenceStatus, InvoiceStatus, PaymentMethod, PaymentStatus)
+from pay_api.utils.enums import EFTFileLineType, EFTProcessStatus, MessageType, PaymentMethod
-from reconciliations.eft.eft_enums import EFTConstants
+from pay_queue.services.eft.eft_enums import EFTConstants
from tests.integration.factory import factory_create_eft_account, factory_invoice
-from tests.integration.utils import create_and_upload_eft_file, helper_add_eft_event_to_queue
+from tests.integration.utils import create_and_upload_eft_file, helper_add_file_event_to_queue
from tests.utilities.factory_utils import factory_eft_header, factory_eft_record, factory_eft_trailer
-@pytest.mark.asyncio
-async def test_eft_tdi17_fail_header(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_eft_tdi17_fail_header(client, mock_publish):
"""Test EFT Reconciliations properly fails for a bad EFT header."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# Generate file with invalid header
file_name: str = 'test_eft_tdi17.txt'
header = factory_eft_header(record_type=EFTConstants.HEADER_RECORD_TYPE.value, file_creation_date='20230814',
@@ -62,7 +44,7 @@ async def test_eft_tdi17_fail_header(session, app, stan_server, event_loop, clie
create_and_upload_eft_file(file_name, [header])
- await helper_add_eft_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name, MessageType.EFT_FILE_UPLOADED.value)
# Assert EFT File record was created
eft_file_model: EFTFileModel = db.session.query(EFTFileModel).filter(
@@ -105,19 +87,8 @@ async def test_eft_tdi17_fail_header(session, app, stan_server, event_loop, clie
assert not bool(eft_transactions)
-@pytest.mark.asyncio
-async def test_eft_tdi17_fail_trailer(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_eft_tdi17_fail_trailer(client, mock_publish):
"""Test EFT Reconciliations properly fails for a bad EFT trailer."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# Generate file with invalid trailer
file_name: str = 'test_eft_tdi17.txt'
header = factory_eft_header(record_type=EFTConstants.HEADER_RECORD_TYPE.value, file_creation_date='20230814',
@@ -127,7 +98,7 @@ async def test_eft_tdi17_fail_trailer(session, app, stan_server, event_loop, cli
create_and_upload_eft_file(file_name, [header, trailer])
- await helper_add_eft_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.EFT_FILE_UPLOADED.value)
# Assert EFT File record was created
eft_file_model: EFTFileModel = db.session.query(EFTFileModel).filter(
@@ -170,19 +141,8 @@ async def test_eft_tdi17_fail_trailer(session, app, stan_server, event_loop, cli
assert not bool(eft_transactions)
-@pytest.mark.asyncio
-async def test_eft_tdi17_fail_transactions(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_eft_tdi17_fail_transactions(client, mock_publish):
"""Test EFT Reconciliations properly fails for a bad EFT trailer."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# Generate file with invalid trailer
file_name: str = 'test_eft_tdi17.txt'
header = factory_eft_header(record_type=EFTConstants.HEADER_RECORD_TYPE.value, file_creation_date='20230814',
@@ -200,7 +160,7 @@ async def test_eft_tdi17_fail_transactions(session, app, stan_server, event_loop
create_and_upload_eft_file(file_name, [header, transaction_1, trailer])
- await helper_add_eft_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.EFT_FILE_UPLOADED.value)
# Assert EFT File record was created
eft_file_model: EFTFileModel = db.session.query(EFTFileModel).filter(
@@ -238,24 +198,13 @@ async def test_eft_tdi17_fail_transactions(session, app, stan_server, event_loop
assert eft_transactions[0].error_messages[0] == 'Invalid transaction deposit amount CAD.'
-@pytest.mark.asyncio
-async def test_eft_tdi17_basic_process(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_eft_tdi17_basic_process(client, mock_publish):
"""Test EFT Reconciliations worker is able to create basic EFT processing records."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# Generate happy path file
file_name: str = 'test_eft_tdi17.txt'
generate_basic_tdi17_file(file_name)
- await helper_add_eft_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.EFT_FILE_UPLOADED.value)
# Assert EFT File record was created
eft_file_model: EFTFileModel = db.session.query(EFTFileModel).filter(
@@ -327,30 +276,14 @@ async def test_eft_tdi17_basic_process(session, app, stan_server, event_loop, cl
assert not eft_credit_invoice_links
-@pytest.mark.asyncio
-async def test_eft_tdi17_process(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_eft_tdi17_process(client, mock_publish):
"""Test EFT Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
payment_account, eft_shortname, invoice = create_test_data()
-
- assert payment_account is not None
- assert eft_shortname is not None
- assert invoice is not None
-
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# Generate happy path file
file_name: str = 'test_eft_tdi17.txt'
generate_tdi17_file(file_name)
- await helper_add_eft_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.EFT_FILE_UPLOADED.value)
# Assert EFT File record was created
eft_file_model: EFTFileModel = db.session.query(EFTFileModel).filter(
@@ -400,88 +333,79 @@ async def test_eft_tdi17_process(session, app, stan_server, event_loop, client_i
assert eft_shortnames[1].auth_account_id is None
assert eft_shortnames[1].short_name == 'ABC123'
- today = datetime.now().date()
-
- # Assert Invoice is paid
- invoice: InvoiceModel = InvoiceModel.find_by_id(invoice.id)
- expected_amount = 100
- assert invoice is not None
- assert invoice.payment_method_code == PaymentMethod.EFT.value
- assert invoice.invoice_status_code == InvoiceStatus.PAID.value
- assert invoice.payment_date is not None
- assert invoice.payment_date.date() == today
- assert invoice.paid == expected_amount
- assert invoice.total == expected_amount
-
- receipt: ReceiptModel = ReceiptModel.find_by_invoice_id_and_receipt_number(invoice.id, invoice.id)
- assert receipt is not None
- assert receipt.receipt_number == str(invoice.id)
- assert receipt.receipt_amount == expected_amount
-
- expected_invoice_number = f'{current_app.config["EFT_INVOICE_PREFIX"]}{invoice.id}'
- payment: PaymentModel = PaymentModel.find_payment_for_invoice(invoice.id)
- assert payment is not None
- assert payment.payment_date.date() == today
- assert payment.invoice_number == expected_invoice_number
- assert payment.payment_account_id == payment_account.id
- assert payment.payment_status_code == PaymentStatus.COMPLETED.value
- assert payment.payment_method_code == PaymentMethod.EFT.value
- assert payment.invoice_amount == expected_amount
- assert payment.paid_amount == expected_amount
-
- invoice_reference: InvoiceReferenceModel = InvoiceReferenceModel\
- .find_by_invoice_id_and_status(invoice.id, InvoiceReferenceStatus.ACTIVE.value)
-
- assert invoice_reference is not None
- assert invoice_reference.invoice_id == invoice.id
- assert invoice_reference.invoice_number == payment.invoice_number
- assert invoice_reference.invoice_number == expected_invoice_number
- assert invoice_reference.status_code == InvoiceReferenceStatus.ACTIVE.value
-
- eft_credits: List[EFTCreditModel] = db.session.query(EFTCreditModel).order_by(EFTCreditModel.created_on.asc()).all()
- assert eft_credits is not None
- assert len(eft_credits) == 3
- assert eft_credits[0].payment_account_id == payment_account.id
- assert eft_credits[0].short_name_id == eft_shortnames[0].id
- assert eft_credits[0].eft_file_id == eft_file_model.id
- assert eft_credits[0].amount == 100.00
- assert eft_credits[0].remaining_amount == 0
- assert eft_credits[0].eft_transaction_id == eft_transactions[0].id
- assert eft_credits[1].payment_account_id == payment_account.id
- assert eft_credits[1].short_name_id == eft_shortnames[0].id
- assert eft_credits[1].eft_file_id == eft_file_model.id
- assert eft_credits[1].amount == 50.5
- assert eft_credits[1].remaining_amount == 50.5
- assert eft_credits[1].eft_transaction_id == eft_transactions[1].id
- assert eft_credits[2].payment_account_id is None
- assert eft_credits[2].short_name_id == eft_shortnames[1].id
- assert eft_credits[2].eft_file_id == eft_file_model.id
- assert eft_credits[2].amount == 351.5
- assert eft_credits[2].remaining_amount == 351.5
- assert eft_credits[2].eft_transaction_id == eft_transactions[2].id
-
- eft_credit_invoice_links: List[EFTCreditInvoiceLinkModel] = db.session.query(EFTCreditInvoiceLinkModel).all()
- assert eft_credit_invoice_links is not None
- assert len(eft_credit_invoice_links) == 1
- assert eft_credit_invoice_links[0].eft_credit_id == eft_credits[0].id
- assert eft_credit_invoice_links[0].invoice_id == invoice.id
-
-
-@pytest.mark.asyncio
-async def test_eft_tdi17_rerun(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+    # NOTE: These assertions need to be rewritten inside the job.
+ # today = datetime.now().date()
+
+ # # Assert Invoice is paid
+ # invoice: InvoiceModel = InvoiceModel.find_by_id(invoice.id)
+ # expected_amount = 100
+ # assert invoice is not None
+ # assert invoice.payment_method_code == PaymentMethod.EFT.value
+ # assert invoice.invoice_status_code == InvoiceStatus.PAID.value
+ # assert invoice.payment_date is not None
+ # assert invoice.payment_date.date() == today
+ # assert invoice.paid == expected_amount
+ # assert invoice.total == expected_amount
+
+ # receipt: ReceiptModel = ReceiptModel.find_by_invoice_id_and_receipt_number(invoice.id, invoice.id)
+ # assert receipt is not None
+ # assert receipt.receipt_number == str(invoice.id)
+ # assert receipt.receipt_amount == expected_amount
+
+ # expected_invoice_number = f'{current_app.config["EFT_INVOICE_PREFIX"]}{invoice.id}'
+ # payment: PaymentModel = PaymentModel.find_payment_for_invoice(invoice.id)
+ # assert payment is not None
+ # assert payment.payment_date.date() == today
+ # assert payment.invoice_number == expected_invoice_number
+ # assert payment.payment_account_id == payment_account.id
+ # assert payment.payment_status_code == PaymentStatus.COMPLETED.value
+ # assert payment.payment_method_code == PaymentMethod.EFT.value
+ # assert payment.invoice_amount == expected_amount
+ # assert payment.paid_amount == expected_amount
+
+ # invoice_reference: InvoiceReferenceModel = InvoiceReferenceModel\
+ # .find_by_invoice_id_and_status(invoice.id, InvoiceReferenceStatus.ACTIVE.value)
+
+ # assert invoice_reference is not None
+ # assert invoice_reference.invoice_id == invoice.id
+ # assert invoice_reference.invoice_number == payment.invoice_number
+ # assert invoice_reference.invoice_number == expected_invoice_number
+ # assert invoice_reference.status_code == InvoiceReferenceStatus.ACTIVE.value
+
+ # eft_credits: List[EFTCreditModel] = db.session.query(EFTCreditModel) \
+ # .order_by(EFTCreditModel.created_on.asc()).all()
+ # assert eft_credits is not None
+ # assert len(eft_credits) == 3
+ # assert eft_credits[0].payment_account_id == payment_account.id
+ # assert eft_credits[0].short_name_id == eft_shortnames[0].id
+ # assert eft_credits[0].eft_file_id == eft_file_model.id
+ # assert eft_credits[0].amount == 100.00
+ # assert eft_credits[0].remaining_amount == 0
+ # assert eft_credits[0].eft_transaction_id == eft_transactions[0].id
+ # assert eft_credits[1].payment_account_id == payment_account.id
+ # assert eft_credits[1].short_name_id == eft_shortnames[0].id
+ # assert eft_credits[1].eft_file_id == eft_file_model.id
+ # assert eft_credits[1].amount == 50.5
+ # assert eft_credits[1].remaining_amount == 50.5
+ # assert eft_credits[1].eft_transaction_id == eft_transactions[1].id
+ # assert eft_credits[2].payment_account_id is None
+ # assert eft_credits[2].short_name_id == eft_shortnames[1].id
+ # assert eft_credits[2].eft_file_id == eft_file_model.id
+ # assert eft_credits[2].amount == 351.5
+ # assert eft_credits[2].remaining_amount == 351.5
+ # assert eft_credits[2].eft_transaction_id == eft_transactions[2].id
+
+ # eft_credit_invoice_links: List[EFTCreditInvoiceLinkModel] = db.session.query(EFTCreditInvoiceLinkModel).all()
+ # assert eft_credit_invoice_links is not None
+ # assert len(eft_credit_invoice_links) == 1
+ # assert eft_credit_invoice_links[0].eft_credit_id == eft_credits[0].id
+ # assert eft_credit_invoice_links[0].invoice_id == invoice.id
+
+
+def test_eft_tdi17_rerun(client, mock_publish):
"""Test EFT Reconciliations can be re-executed with a corrected file."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
payment_account, eft_shortname, invoice = create_test_data()
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# Generate file with invalid trailer
file_name: str = 'test_eft_tdi17.txt'
header = factory_eft_header(record_type=EFTConstants.HEADER_RECORD_TYPE.value, file_creation_date='20230814',
@@ -499,7 +423,7 @@ async def test_eft_tdi17_rerun(session, app, stan_server, event_loop, client_id,
create_and_upload_eft_file(file_name, [header, transaction_1, trailer])
- await helper_add_eft_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.EFT_FILE_UPLOADED.value)
# Assert EFT File record was created
eft_file_model: EFTFileModel = db.session.query(EFTFileModel).filter(
@@ -538,7 +462,7 @@ async def test_eft_tdi17_rerun(session, app, stan_server, event_loop, client_id,
jv_number='002425669', transaction_date='')
create_and_upload_eft_file(file_name, [header, transaction_1, trailer])
- await helper_add_eft_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.EFT_FILE_UPLOADED.value)
# Check file is completed after correction
eft_file_model: EFTFileModel = db.session.query(EFTFileModel).filter(
@@ -569,51 +493,53 @@ async def test_eft_tdi17_rerun(session, app, stan_server, event_loop, client_id,
assert eft_transactions[0].status_code == EFTProcessStatus.COMPLETED.value
assert eft_transactions[0].deposit_amount_cents == 13500
- today = datetime.now().date()
- # Assert Invoice is paid
- invoice: InvoiceModel = InvoiceModel.find_by_id(invoice.id)
- expected_amount = 100
- assert invoice is not None
- assert invoice.payment_method_code == PaymentMethod.EFT.value
- assert invoice.invoice_status_code == InvoiceStatus.PAID.value
- assert invoice.payment_date is not None
- assert invoice.payment_date.date() == today
- assert invoice.paid == expected_amount
- assert invoice.total == expected_amount
-
- receipt: ReceiptModel = ReceiptModel.find_by_invoice_id_and_receipt_number(invoice.id, invoice.id)
- assert receipt is not None
- assert receipt.receipt_number == str(invoice.id)
- assert receipt.receipt_amount == expected_amount
-
- expected_invoice_number = f'{current_app.config["EFT_INVOICE_PREFIX"]}{invoice.id}'
- payment: PaymentModel = PaymentModel.find_payment_for_invoice(invoice.id)
- assert payment is not None
- assert payment.payment_date.date() == today
- assert payment.invoice_number == expected_invoice_number
- assert payment.payment_account_id == payment_account.id
- assert payment.payment_status_code == PaymentStatus.COMPLETED.value
- assert payment.payment_method_code == PaymentMethod.EFT.value
- assert payment.invoice_amount == expected_amount
-
- invoice_reference: InvoiceReferenceModel = InvoiceReferenceModel \
- .find_by_invoice_id_and_status(invoice.id, InvoiceReferenceStatus.ACTIVE.value)
-
- assert invoice_reference is not None
- assert invoice_reference.invoice_id == invoice.id
- assert invoice_reference.invoice_number == payment.invoice_number
- assert invoice_reference.invoice_number == expected_invoice_number
- assert invoice_reference.status_code == InvoiceReferenceStatus.ACTIVE.value
-
- eft_credits: List[EFTCreditModel] = db.session.query(EFTCreditModel).order_by(EFTCreditModel.created_on.asc()).all()
- assert eft_credits is not None
- assert len(eft_credits) == 1
- assert eft_credits[0].payment_account_id == payment_account.id
- assert eft_credits[0].short_name_id == eft_shortname.id
- assert eft_credits[0].eft_file_id == eft_file_model.id
- assert eft_credits[0].amount == 135
- assert eft_credits[0].remaining_amount == 35
- assert eft_credits[0].eft_transaction_id == eft_transactions[0].id
+    # NOTE: These assertions need to be rewritten inside the job.
+ # today = datetime.now().date()
+ # # Assert Invoice is paid
+ # invoice: InvoiceModel = InvoiceModel.find_by_id(invoice.id)
+ # expected_amount = 100
+ # assert invoice is not None
+ # assert invoice.payment_method_code == PaymentMethod.EFT.value
+ # assert invoice.invoice_status_code == InvoiceStatus.PAID.value
+ # assert invoice.payment_date is not None
+ # assert invoice.payment_date.date() == today
+ # assert invoice.paid == expected_amount
+ # assert invoice.total == expected_amount
+
+ # receipt: ReceiptModel = ReceiptModel.find_by_invoice_id_and_receipt_number(invoice.id, invoice.id)
+ # assert receipt is not None
+ # assert receipt.receipt_number == str(invoice.id)
+ # assert receipt.receipt_amount == expected_amount
+
+ # expected_invoice_number = f'{current_app.config["EFT_INVOICE_PREFIX"]}{invoice.id}'
+ # payment: PaymentModel = PaymentModel.find_payment_for_invoice(invoice.id)
+ # assert payment is not None
+ # assert payment.payment_date.date() == today
+ # assert payment.invoice_number == expected_invoice_number
+ # assert payment.payment_account_id == payment_account.id
+ # assert payment.payment_status_code == PaymentStatus.COMPLETED.value
+ # assert payment.payment_method_code == PaymentMethod.EFT.value
+ # assert payment.invoice_amount == expected_amount
+
+ # invoice_reference: InvoiceReferenceModel = InvoiceReferenceModel \
+ # .find_by_invoice_id_and_status(invoice.id, InvoiceReferenceStatus.ACTIVE.value)
+
+ # assert invoice_reference is not None
+ # assert invoice_reference.invoice_id == invoice.id
+ # assert invoice_reference.invoice_number == payment.invoice_number
+ # assert invoice_reference.invoice_number == expected_invoice_number
+ # assert invoice_reference.status_code == InvoiceReferenceStatus.ACTIVE.value
+
+ # eft_credits: List[EFTCreditModel] = db.session.query(EFTCreditModel) \
+ # .order_by(EFTCreditModel.created_on.asc()).all()
+ # assert eft_credits is not None
+ # assert len(eft_credits) == 1
+ # assert eft_credits[0].payment_account_id == payment_account.id
+ # assert eft_credits[0].short_name_id == eft_shortname.id
+ # assert eft_credits[0].eft_file_id == eft_file_model.id
+ # assert eft_credits[0].amount == 135
+ # assert eft_credits[0].remaining_amount == 35
+ # assert eft_credits[0].eft_transaction_id == eft_transactions[0].id
def create_test_data():
diff --git a/queue_services/payment-reconciliations/tests/integration/test_payment_reconciliations.py b/pay-queue/tests/integration/test_payment_reconciliations.py
similarity index 77%
rename from queue_services/payment-reconciliations/tests/integration/test_payment_reconciliations.py
rename to pay-queue/tests/integration/test_payment_reconciliations.py
index 10dff791d..fe3eb034a 100644
--- a/queue_services/payment-reconciliations/tests/integration/test_payment_reconciliations.py
+++ b/pay-queue/tests/integration/test_payment_reconciliations.py
@@ -20,39 +20,25 @@
from datetime import datetime
import pytest
-from entity_queue_common.service_utils import subscribe_to_queue
-from flask import current_app
from pay_api.models import CfsAccount as CfsAccountModel
from pay_api.models import Credit as CreditModel
from pay_api.models import Invoice as InvoiceModel
from pay_api.models import Payment as PaymentModel
from pay_api.models import PaymentAccount as PaymentAccountModel
from pay_api.models import Receipt as ReceiptModel
-from pay_api.utils.enums import CfsAccountStatus, InvoiceReferenceStatus, InvoiceStatus, PaymentMethod, PaymentStatus
+from pay_api.utils.enums import (
+ CfsAccountStatus, InvoiceReferenceStatus, InvoiceStatus, MessageType, PaymentMethod, PaymentStatus)
-from reconciliations.enums import RecordType, SourceTransaction, Status, TargetTransaction
+from pay_queue.enums import RecordType, SourceTransaction, Status, TargetTransaction
from .factory import (
factory_create_online_banking_account, factory_create_pad_account, factory_invoice, factory_invoice_reference,
factory_payment, factory_payment_line_item, factory_receipt)
-from .utils import create_and_upload_settlement_file, helper_add_event_to_queue
+from .utils import create_and_upload_settlement_file, helper_add_file_event_to_queue
-@pytest.mark.asyncio
-async def test_online_banking_reconciliations(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_online_banking_reconciliations(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -81,7 +67,7 @@ async def test_online_banking_reconciliations(session, app, stan_server, event_l
TargetTransaction.INV.value, invoice_number,
total, 0, Status.PAID.value]
create_and_upload_settlement_file(file_name, [row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in PAID status and Payment should be completed
updated_invoice = InvoiceModel.find_by_id(invoice_id)
@@ -95,22 +81,8 @@ async def test_online_banking_reconciliations(session, app, stan_server, event_l
assert payment.invoice_number == invoice_number
-@pytest.mark.asyncio
-async def test_online_banking_reconciliations_over_payment(session, app, stan_server, event_loop, client_id,
- events_stan, future,
- mock_publish):
+def test_online_banking_reconciliations_over_payment(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -141,7 +113,7 @@ async def test_online_banking_reconciliations_over_payment(session, app, stan_se
over_payment_amount, cfs_account_number, TargetTransaction.INV.value, invoice_number,
over_payment_amount, 0, Status.ON_ACC.value]
create_and_upload_settlement_file(file_name, [inv_row, credit_row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in PAID status and Payment should be completed
updated_invoice = InvoiceModel.find_by_id(invoice_id)
@@ -155,22 +127,8 @@ async def test_online_banking_reconciliations_over_payment(session, app, stan_se
assert payment.invoice_number is None # No invoice_number if payment is not for 1 invoice
-@pytest.mark.asyncio
-async def test_online_banking_reconciliations_with_credit(session, app, stan_server, event_loop, client_id, events_stan,
- future,
- mock_publish):
+def test_online_banking_reconciliations_with_credit(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -201,7 +159,7 @@ async def test_online_banking_reconciliations_with_credit(session, app, stan_ser
credit_row = [RecordType.ONAC.value, SourceTransaction.EFT_WIRE.value, '555566677', 100001, date, credit_amount,
cfs_account_number, TargetTransaction.INV.value, invoice_number, total, 0, Status.PAID.value]
create_and_upload_settlement_file(file_name, [inv_row, credit_row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in PAID status and Payment should be completed
updated_invoice = InvoiceModel.find_by_id(invoice_id)
@@ -215,22 +173,8 @@ async def test_online_banking_reconciliations_with_credit(session, app, stan_ser
assert payment.invoice_number == invoice_number
-@pytest.mark.asyncio
-async def test_online_banking_reconciliations_overflows_credit(session, app, stan_server, event_loop, client_id,
- events_stan, future,
- mock_publish):
+def test_online_banking_reconciliations_overflows_credit(client, mock_publish):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -266,7 +210,7 @@ async def test_online_banking_reconciliations_overflows_credit(session, app, sta
Status.ON_ACC.value]
create_and_upload_settlement_file(file_name, [inv_row, credit_row, onac_row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in PAID status and Payment should be completed
updated_invoice = InvoiceModel.find_by_id(invoice_id)
@@ -280,21 +224,8 @@ async def test_online_banking_reconciliations_overflows_credit(session, app, sta
assert payment.invoice_number is None
-@pytest.mark.asyncio
-async def test_online_banking_under_payment(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_online_banking_under_payment(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -325,7 +256,7 @@ async def test_online_banking_under_payment(session, app, stan_server, event_loo
TargetTransaction.INV.value, invoice_number,
total, total - paid_amount, Status.PARTIAL.value]
create_and_upload_settlement_file(file_name, [row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in PAID status and Payment should be completed
updated_invoice: InvoiceModel = InvoiceModel.find_by_id(invoice_id)
@@ -340,20 +271,8 @@ async def test_online_banking_under_payment(session, app, stan_server, event_loo
assert payment.invoice_number == invoice_number
-@pytest.mark.asyncio
-async def test_pad_reconciliations(session, app, stan_server, event_loop, client_id, events_stan, future, mock_publish):
+def test_pad_reconciliations(client, mock_publish):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoices and related records
# 3. Create CFS Invoice records
@@ -394,7 +313,7 @@ async def test_pad_reconciliations(session, app, stan_server, event_loop, client
'INV', invoice_number,
total, 0, Status.PAID.value]
create_and_upload_settlement_file(file_name, [row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in PAID status and Payment should be completed
updated_invoice1 = InvoiceModel.find_by_id(invoice1_id)
@@ -416,21 +335,8 @@ async def test_pad_reconciliations(session, app, stan_server, event_loop, client
assert rcpt1.receipt_date == rcpt2.receipt_date
-@pytest.mark.asyncio
-async def test_pad_reconciliations_with_credit_memo(session, app, stan_server, event_loop,
- client_id, events_stan, future, mock_publish):
+def test_pad_reconciliations_with_credit_memo(client):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoices and related records
# 3. Create CFS Invoice records
@@ -475,7 +381,7 @@ async def test_pad_reconciliations_with_credit_memo(session, app, stan_server, e
pad_row = [RecordType.PAD.value, SourceTransaction.PAD.value, receipt_number, 100001, date, total - credit_amount,
cfs_account_number, 'INV', invoice_number, total, 0, Status.PAID.value]
create_and_upload_settlement_file(file_name, [credit_row, pad_row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in PAID status and Payment should be completed
updated_invoice1 = InvoiceModel.find_by_id(invoice1_id)
@@ -497,20 +403,8 @@ async def test_pad_reconciliations_with_credit_memo(session, app, stan_server, e
assert rcpt1.receipt_date == rcpt2.receipt_date
-@pytest.mark.asyncio
-async def test_pad_nsf_reconciliations(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_pad_nsf_reconciliations(client, mock_publish):
"""Test Reconciliations worker for NSF."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoices and related records
# 3. Create CFS Invoice records
@@ -552,7 +446,7 @@ async def test_pad_nsf_reconciliations(session, app, stan_server, event_loop, cl
'INV', invoice_number,
total, total, Status.NOT_PAID.value]
create_and_upload_settlement_file(file_name, [row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in SETTLEMENT_SCHEDULED status and Payment should be FAILED
updated_invoice1 = InvoiceModel.find_by_id(invoice1_id)
@@ -571,20 +465,8 @@ async def test_pad_nsf_reconciliations(session, app, stan_server, event_loop, cl
assert cfs_account.status == CfsAccountStatus.FREEZE.value
-@pytest.mark.asyncio
-async def test_pad_reversal_reconciliations(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+def test_pad_reversal_reconciliations(client, mock_publish):
"""Test Reconciliations worker for NSF."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoices and related records for a completed payment
# 3. Create CFS Invoice records
@@ -635,7 +517,7 @@ async def test_pad_reversal_reconciliations(session, app, stan_server, event_loo
'INV', invoice_number,
total, total, Status.NOT_PAID.value]
create_and_upload_settlement_file(file_name, [row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in SETTLEMENT_SCHEDULED status and Payment should be FAILED
updated_invoice1 = InvoiceModel.find_by_id(invoice1_id)
@@ -659,20 +541,8 @@ async def test_pad_reversal_reconciliations(session, app, stan_server, event_loo
@pytest.mark.asyncio
-async def test_eft_wire_reconciliations(session, app, stan_server, event_loop, client_id, events_stan, future,
- mock_publish):
+async def test_eft_wire_reconciliations(client, mock_publish):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account
# 2. Create invoice and related records
# 3. Create CFS Invoice records
@@ -712,7 +582,7 @@ async def test_eft_wire_reconciliations(session, app, stan_server, event_loop, c
row = [RecordType.EFTP.value, SourceTransaction.EFT_WIRE.value, eft_wire_receipt, 100001, date, total,
cfs_account_number, TargetTransaction.INV.value, invoice_number, total, 0, Status.PAID.value]
create_and_upload_settlement_file(file_name, [row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# The invoice should be in PAID status and Payment should be completed
updated_invoice = InvoiceModel.find_by_id(invoice_id)
@@ -725,20 +595,8 @@ async def test_eft_wire_reconciliations(session, app, stan_server, event_loop, c
@pytest.mark.asyncio
-async def test_credits(session, app, stan_server, event_loop, client_id, events_stan, future, mock_publish,
- monkeypatch):
+async def test_credits(client, monkeypatch):
"""Test Reconciliations worker."""
- # Call back for the subscription
- from reconciliations.worker import cb_subscription_handler
-
- # Create a Credit Card Payment
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('queue'),
- current_app.config.get('SUBSCRIPTION_OPTIONS').get('durable_name'),
- cb_subscription_handler)
-
# 1. Create payment account.
# 2. Create EFT/WIRE payment db record.
# 3. Create a credit memo db record.
@@ -799,7 +657,7 @@ def mock_cms(cfs_account: CfsAccountModel,
cfs_account_number, TargetTransaction.RECEIPT.value, eft_wire_receipt, onac_amount, 0, Status.ON_ACC.value]
create_and_upload_settlement_file(file_name, [row])
- await helper_add_event_to_queue(events_stan, file_name=file_name)
+ helper_add_file_event_to_queue(client, file_name=file_name, message_type=MessageType.CAS_UPLOADED.value)
# Look up credit file and make sure the credits are recorded.
pay_account = PaymentAccountModel.find_by_id(pay_account_id)
diff --git a/pay-queue/tests/integration/test_worker_queue.py b/pay-queue/tests/integration/test_worker_queue.py
new file mode 100644
index 000000000..932aa48bd
--- /dev/null
+++ b/pay-queue/tests/integration/test_worker_queue.py
@@ -0,0 +1,48 @@
+# Copyright © 2019 Province of British Columbia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test Suite to ensure the worker routines are working as expected."""
+
+from pay_api.models import Invoice
+from pay_api.utils.enums import PaymentMethod, PaymentSystem
+
+from tests.integration import factory_invoice, factory_invoice_reference, factory_payment, factory_payment_account
+
+from .utils import helper_add_identifier_event_to_queue
+
+
+def test_update_payment(client):
+ """Assert that the update internal payment records works."""
+ # vars
+ old_identifier = 'T000000000'
+ new_identifier = 'BC12345678'
+
+ # Create an Internal Payment
+ payment_account = factory_payment_account(payment_system_code=PaymentSystem.BCOL.value).save()
+
+ invoice: Invoice = factory_invoice(payment_account=payment_account,
+ business_identifier=old_identifier,
+ payment_method_code=PaymentMethod.INTERNAL.value).save()
+
+ inv_ref = factory_invoice_reference(invoice_id=invoice.id)
+ factory_payment(invoice_number=inv_ref.invoice_number)
+
+ invoice_id = invoice.id
+
+ helper_add_identifier_event_to_queue(client, old_identifier=old_identifier,
+ new_identifier=new_identifier)
+
+ # Get the internal account and invoice and assert that the identifier is new identifier
+ invoice = Invoice.find_by_id(invoice_id)
+
+ assert invoice.business_identifier == new_identifier
diff --git a/queue_services/payment-reconciliations/tests/integration/utils.py b/pay-queue/tests/integration/utils.py
similarity index 51%
rename from queue_services/payment-reconciliations/tests/integration/utils.py
rename to pay-queue/tests/integration/utils.py
index 098228b25..65c54f8b8 100644
--- a/queue_services/payment-reconciliations/tests/integration/utils.py
+++ b/pay-queue/tests/integration/utils.py
@@ -12,77 +12,44 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities used by the integration tests."""
+import base64
import csv
import io
import json
import os
+import uuid
+from datetime import datetime, timezone
from typing import List
-import stan
from flask import current_app
from minio import Minio
-
-from reconciliations.enums import MessageType
-
-
-async def helper_add_event_to_queue(stan_client: stan.aio.client.Client,
- file_name: str):
- """Add event to the Queue."""
- payload = {
- 'specversion': '1.x-wip',
- 'type': 'bc.registry.payment.casSettlementUploaded',
- 'source': 'https://api.business.bcregistry.gov.bc.ca/v1/accounts/1/',
- 'id': 'C234-1234-1234',
- 'time': '2020-08-28T17:37:34.651294+00:00',
- 'datacontenttype': 'application/json',
- 'data': {
- 'fileName': file_name,
- 'location': current_app.config['MINIO_BUCKET_NAME']
- }
+from pay_api.utils.enums import MessageType
+from simple_cloudevent import SimpleCloudEvent, to_queue_message
+
+
+def build_request_for_queue_push(message_type, payload):
+ """Build request for queue message."""
+ queue_message_bytes = to_queue_message(SimpleCloudEvent(
+ id=str(uuid.uuid4()),
+ source='pay-queue',
+ subject=None,
+ time=datetime.now(tz=timezone.utc).isoformat(),
+ type=message_type,
+ data=payload
+ ))
+
+ return {
+ 'message': {
+ 'data': base64.b64encode(queue_message_bytes).decode('utf-8')
+ },
+ 'subscription': 'foobar'
}
- await stan_client.publish(subject=current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- payload=json.dumps(payload).encode('utf-8'))
-
-
-async def helper_add_eft_event_to_queue(stan_client: stan.aio.client.Client, file_name: str,
- message_type: str = MessageType.EFT_FILE_UPLOADED.value):
- """Add eft event to the Queue."""
- payload = {
- 'specversion': '1.x-wip',
- 'type': message_type,
- 'source': 'https://api.business.bcregistry.gov.bc.ca/v1/accounts/1/',
- 'id': 'C234-1234-1234',
- 'time': '2020-08-28T17:37:34.651294+00:00',
- 'datacontenttype': 'text/plain',
- 'data': {
- 'fileName': file_name,
- 'location': current_app.config['MINIO_BUCKET_NAME']
- }
- }
-
- await stan_client.publish(subject=current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- payload=json.dumps(payload).encode('utf-8'))
-
-
-async def helper_add_ejv_event_to_queue(stan_client: stan.aio.client.Client, file_name: str,
- message_type: str = 'ACKReceived'):
- """Add event to the Queue."""
- payload = {
- 'specversion': '1.x-wip',
- 'type': f'bc.registry.payment.cgi.{message_type}',
- 'source': 'https://api.business.bcregistry.gov.bc.ca/v1/accounts/1/',
- 'id': 'C234-1234-1234',
- 'time': '2020-08-28T17:37:34.651294+00:00',
- 'datacontenttype': 'application/json',
- 'data': {
- 'fileName': file_name,
- 'location': current_app.config['MINIO_BUCKET_NAME']
- }
- }
- await stan_client.publish(subject=current_app.config.get('SUBSCRIPTION_OPTIONS').get('subject'),
- payload=json.dumps(payload).encode('utf-8'))
+def post_to_queue(client, request_payload):
+ """Post request to queue."""
+ response = client.post('/', data=json.dumps(request_payload), headers={'Content-Type': 'application/json'})
+ assert response.status_code == 200
def create_and_upload_settlement_file(file_name: str, rows: List[List]):
@@ -93,7 +60,7 @@ def create_and_upload_settlement_file(file_name: str, rows: List[List]):
'Target transaction Number', 'Target Transaction Original amount',
'Target Transaction Outstanding Amount',
'Target transaction status', 'Reversal Reason code', 'Reversal reason description']
- with open(file_name, mode='w') as cas_file:
+ with open(file_name, mode='w', encoding='utf-8') as cas_file:
cas_writer = csv.writer(cas_file, quoting=csv.QUOTE_ALL)
cas_writer.writerow(headers)
for row in rows:
@@ -105,7 +72,7 @@ def create_and_upload_settlement_file(file_name: str, rows: List[List]):
def create_and_upload_eft_file(file_name: str, rows: List[List]):
"""Create eft file, upload to minio and send event."""
- with open(file_name, mode='w') as eft_file:
+ with open(file_name, mode='w', encoding='utf-8') as eft_file:
for row in rows:
print(row, file=eft_file)
@@ -124,3 +91,29 @@ def upload_to_minio(value_as_bytes, file_name: str):
value_as_stream = io.BytesIO(value_as_bytes)
minio_client.put_object(current_app.config['MINIO_BUCKET_NAME'], file_name, value_as_stream,
os.stat(file_name).st_size)
+
+
+def helper_add_file_event_to_queue(client, file_name: str, message_type: str):
+ """Add event to the Queue."""
+ queue_payload = {
+ 'fileName': file_name,
+ 'location': current_app.config['MINIO_BUCKET_NAME']
+ }
+ request_payload = build_request_for_queue_push(message_type, queue_payload)
+ post_to_queue(client, request_payload)
+
+
+def helper_add_identifier_event_to_queue(client, old_identifier: str = 'T1234567890',
+ new_identifier: str = 'BC1234567890'):
+ """Add event to the Queue."""
+ message_type = MessageType.INCORPORATION.value
+ queue_payload = {
+ 'filing': {
+ 'header': {'filingId': '12345678'},
+ 'business': {'identifier': 'BC1234567'}
+ },
+ 'identifier': new_identifier,
+ 'tempidentifier': old_identifier,
+ }
+ request_payload = build_request_for_queue_push(message_type, queue_payload)
+ post_to_queue(client, request_payload)
diff --git a/queue_services/payment-reconciliations/tests/nginx.conf b/pay-queue/tests/nginx.conf
similarity index 100%
rename from queue_services/payment-reconciliations/tests/nginx.conf
rename to pay-queue/tests/nginx.conf
diff --git a/queue_services/payment-reconciliations/tests/unit/__init__.py b/pay-queue/tests/unit/__init__.py
similarity index 100%
rename from queue_services/payment-reconciliations/tests/unit/__init__.py
rename to pay-queue/tests/unit/__init__.py
diff --git a/queue_services/payment-reconciliations/tests/unit/test_data/tdi17_sample.txt b/pay-queue/tests/unit/test_data/tdi17_sample.txt
similarity index 100%
rename from queue_services/payment-reconciliations/tests/unit/test_data/tdi17_sample.txt
rename to pay-queue/tests/unit/test_data/tdi17_sample.txt
diff --git a/queue_services/payment-reconciliations/tests/unit/test_eft_file_parser.py b/pay-queue/tests/unit/test_eft_file_parser.py
similarity index 99%
rename from queue_services/payment-reconciliations/tests/unit/test_eft_file_parser.py
rename to pay-queue/tests/unit/test_eft_file_parser.py
index f6feb96f5..fd667674f 100644
--- a/queue_services/payment-reconciliations/tests/unit/test_eft_file_parser.py
+++ b/pay-queue/tests/unit/test_eft_file_parser.py
@@ -18,9 +18,9 @@
"""
from datetime import datetime
-from reconciliations.eft import EFTHeader, EFTRecord, EFTTrailer
-from reconciliations.eft.eft_enums import EFTConstants
-from reconciliations.eft.eft_errors import EFTError
+from pay_queue.services.eft import EFTHeader, EFTRecord, EFTTrailer
+from pay_queue.services.eft.eft_enums import EFTConstants
+from pay_queue.services.eft.eft_errors import EFTError
from tests.utilities.factory_utils import factory_eft_header, factory_eft_record, factory_eft_trailer
diff --git a/queue_services/payment-reconciliations/tests/utilities/__init__.py b/pay-queue/tests/utilities/__init__.py
similarity index 100%
rename from queue_services/payment-reconciliations/tests/utilities/__init__.py
rename to pay-queue/tests/utilities/__init__.py
diff --git a/queue_services/payment-reconciliations/tests/utilities/factory_utils.py b/pay-queue/tests/utilities/factory_utils.py
similarity index 98%
rename from queue_services/payment-reconciliations/tests/utilities/factory_utils.py
rename to pay-queue/tests/utilities/factory_utils.py
index 714085d05..d8047880a 100644
--- a/queue_services/payment-reconciliations/tests/utilities/factory_utils.py
+++ b/pay-queue/tests/utilities/factory_utils.py
@@ -15,7 +15,7 @@
Test Factory Utility for creating test data.
"""
-from reconciliations.eft.eft_enums import EFTConstants
+from pay_queue.services.eft.eft_enums import EFTConstants
def factory_eft_header(record_type: str, file_creation_date: str, file_creation_time: str,
diff --git a/queue_services/events-listener/Dockerfile b/queue_services/events-listener/Dockerfile
deleted file mode 100644
index 3747d9aa5..000000000
--- a/queue_services/events-listener/Dockerfile
+++ /dev/null
@@ -1,35 +0,0 @@
-FROM python:3.12.2-bullseye
-
-ARG VCS_REF="missing"
-ARG BUILD_DATE="missing"
-
-ENV VCS_REF=${VCS_REF}
-ENV BUILD_DATE=${BUILD_DATE}
-
-LABEL org.label-schema.vcs-ref=${VCS_REF} \
- org.label-schema.build-date=${BUILD_DATE}
-
-USER root
-
-# Create working directory
-RUN mkdir /opt/app-root && chmod 755 /opt/app-root
-WORKDIR /opt/app-root
-
-# Install the requirements
-COPY ./requirements.txt .
-
-RUN pip install --upgrade pip
-RUN pip install --no-cache-dir -r requirements.txt
-
-COPY . .
-
-RUN pip install .
-
-USER 1001
-
-# Set Python path
-ENV PYTHONPATH=/opt/app-root/src
-
-#EXPOSE 8080
-
-CMD [ "python", "/opt/app-root/app.py" ]
diff --git a/queue_services/events-listener/Makefile b/queue_services/events-listener/Makefile
deleted file mode 100644
index 822b1ae88..000000000
--- a/queue_services/events-listener/Makefile
+++ /dev/null
@@ -1,144 +0,0 @@
-.PHONY: license
-.PHONY: setup
-.PHONY: ci cd
-.PHONY: run
-
-MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST)))
-CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH)))
-
-PROJECT_NAME:=events_listener
-DOCKER_NAME:=events-listener
-
-#################################################################################
-# COMMANDS -- Setup #
-#################################################################################
-setup: install install-dev ## Setup the project
-
-clean: clean-build clean-pyc clean-test ## Clean the project
- rm -rf venv/
-
-clean-build: ## Clean build files
- rm -fr build/
- rm -fr dist/
- rm -fr .eggs/
- find . -name '*.egg-info' -exec rm -fr {} +
- find . -name '*.egg' -exec rm -fr {} +
-
-clean-pyc: ## Clean cache files
- find . -name '*.pyc' -exec rm -f {} +
- find . -name '*.pyo' -exec rm -f {} +
- find . -name '*~' -exec rm -f {} +
- find . -name '__pycache__' -exec rm -fr {} +
-
-clean-test: ## clean test files
- find . -name '.pytest_cache' -exec rm -fr {} +
- rm -fr .tox/
- rm -f .coverage
- rm -fr htmlcov/
-
-build-req: clean ## Upgrade requirements
- test -f venv/bin/activate || python3 -m venv $(CURRENT_ABS_DIR)/venv ;\
- . venv/bin/activate ;\
- pip install --upgrade pip ;\
- pip install -Ur requirements/prod.txt ;\
- pip freeze | sort > requirements.txt ;\
- cat requirements/repo-libraries.txt >> requirements.txt ;\
- pip install -Ur requirements/repo-libraries.txt
-
-install: clean ## Install python virtrual environment
- test -f venv/bin/activate || python3 -m venv $(CURRENT_ABS_DIR)/venv ;\
- . venv/bin/activate ;\
- pip install --upgrade pip ;\
- pip install -Ur requirements.txt
-
-install-dev: ## Install local application
- . venv/bin/activate ; \
- pip install -Ur requirements/dev.txt; \
- pip install -e .
-
-#################################################################################
-# COMMANDS - CI #
-#################################################################################
-ci: lint flake8 test ## CI flow
-
-pylint: ## Linting with pylint
- . venv/bin/activate && pylint --rcfile=setup.cfg src/$(PROJECT_NAME)
-
-flake8: ## Linting with flake8
- . venv/bin/activate && flake8 src/$(PROJECT_NAME) tests
-
-lint: pylint flake8 ## run all lint type scripts
-
-test: ## Unit testing
- . venv/bin/activate && pytest
-
-mac-cov: test ## Run the coverage report and display in a browser window (mac)
- @open -a "Google Chrome" htmlcov/index.html
-
-#################################################################################
-# COMMANDS - CD
-# expects the terminal to be openshift login
-# expects export OPENSHIFT_DOCKER_REGISTRY=""
-# expects export OPENSHIFT_SA_NAME="$(oc whoami)"
-# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)"
-# expects export OPENSHIFT_REPOSITORY=""
-# expects export TAG_NAME="dev/test/prod"
-# expects export OPS_REPOSITORY="" #
-#################################################################################
-cd: ## CD flow
-ifeq ($(TAG_NAME), test)
-cd: update-env
- oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME)
-else ifeq ($(TAG_NAME), prod)
-cd: update-env
- oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F)
- oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME)
-else
-TAG_NAME=dev
-cd: build update-env tag
-endif
-
-build: ## Build the docker container
- docker build . -t $(DOCKER_NAME) \
- --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \
- --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \
-
-build-nc: ## Build the docker container without caching
- docker build --no-cache -t $(DOCKER_NAME) .
-
-REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME)
-push: #build ## Push the docker container to the registry & tag latest
- @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\
- docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\
- docker push $(REGISTRY_IMAGE):latest
-
-VAULTS=`cat devops/vaults.json`
-update-env: ## Update env from 1pass
- oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \
- -m "secret" \
- -e "$(TAG_NAME)" \
- -a "$(DOCKER_NAME)-$(TAG_NAME)" \
- -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \
- -v "$(VAULTS)" \
- -r "true" \
- -f "false"
-
-tag: push ## tag image
- oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME)
-
-#################################################################################
-# COMMANDS - Local #
-#################################################################################
-
-run: ## Run the project in local
- . venv/bin/activate && python app.py
-
-#################################################################################
-# Self Documenting Commands #
-#################################################################################
-.PHONY: help
-
-.DEFAULT_GOAL := help
-
-help:
- @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/queue_services/events-listener/README.md b/queue_services/events-listener/README.md
deleted file mode 100755
index 2379003c9..000000000
--- a/queue_services/events-listener/README.md
+++ /dev/null
@@ -1,75 +0,0 @@
-
-[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE)
-
-
-# Application Name
-
-BC Registries Names Examination, research and approval system API
-
-## Technology Stack Used
-* Python, Flask
-* Postgres - SQLAlchemy, psycopg2-binary & alembic
-
-## Third-Party Products/Libraries used and the the License they are covert by
-
-## Project Status
-As of 2018-02-22 in **ALPHA**
-
-## Documnentation
-
-GitHub Pages (https://guides.github.com/features/pages/) are a neat way to document you application/project.
-
-## Security
-
-Future - BCGov Keycloak
-
-Current - JWT hack
-
-## Files in this repository
-
-```
-docs/ - Project Documentation
-└── images
-└── icons
-
-openshift/ - OpenShift-specific files
-├── scripts - helper scripts
-└── templates - application templates
-```
-
-## Deployment (Local Development)
-
-* Developer Workstation Requirements/Setup
-* Application Specific Setup
-
-## Deployment (OpenShift)
-
-See (openshift/Readme.md)
-
-## Getting Help or Reporting an Issue
-
-To report bugs/issues/feature requests, please file an [issue](../../issues).
-
-## How to Contribute
-
-If you would like to contribute, please see our [CONTRIBUTING](./CONTRIBUTING.md) guidelines.
-
-Please note that this project is released with a [Contributor Code of Conduct](./CODE_OF_CONDUCT.md).
-By participating in this project you agree to abide by its terms.
-
-## License
-
- Copyright 2018 Province of British Columbia
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
diff --git a/queue_services/events-listener/app.py b/queue_services/events-listener/app.py
deleted file mode 100755
index 5ea127a92..000000000
--- a/queue_services/events-listener/app.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""s2i based launch script to run the service."""
-import asyncio
-import os
-
-from events_listener.worker import APP_CONFIG, cb_subscription_handler, qsm
-
-
-if __name__ == '__main__':
-
- # my_config = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production'))
-
- event_loop = asyncio.get_event_loop()
- event_loop.run_until_complete(qsm.run(loop=event_loop,
- config=APP_CONFIG,
- callback=cb_subscription_handler))
- try:
- event_loop.run_forever()
- finally:
- event_loop.close()
diff --git a/queue_services/events-listener/coverage.xml b/queue_services/events-listener/coverage.xml
deleted file mode 100644
index 45f65708f..000000000
--- a/queue_services/events-listener/coverage.xml
+++ /dev/null
@@ -1,133 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/queue_services/events-listener/devops/vaults.json b/queue_services/events-listener/devops/vaults.json
deleted file mode 100644
index b043cf203..000000000
--- a/queue_services/events-listener/devops/vaults.json
+++ /dev/null
@@ -1,28 +0,0 @@
-[
- {
- "vault": "nats",
- "application": [
- "base",
- "entity-events-listener"
- ]
- },
- {
- "vault": "relationship",
- "application": [
- "postgres-pay",
- "jwt"
- ]
- },
- {
- "vault": "sentry",
- "application": [
- "relationship-api"
- ]
- },
- {
- "vault": "launchdarkly",
- "application": [
- "pay"
- ]
- }
-]
diff --git a/queue_services/events-listener/logging.conf b/queue_services/events-listener/logging.conf
deleted file mode 100644
index ffc1a01e3..000000000
--- a/queue_services/events-listener/logging.conf
+++ /dev/null
@@ -1,28 +0,0 @@
-[loggers]
-keys=root,api
-
-[handlers]
-keys=console
-
-[formatters]
-keys=simple
-
-[logger_root]
-level=DEBUG
-handlers=console
-
-[logger_api]
-level=DEBUG
-handlers=console
-qualname=api
-propagate=0
-
-[handler_console]
-class=StreamHandler
-level=DEBUG
-formatter=simple
-args=(sys.stdout,)
-
-[formatter_simple]
-format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s
-datefmt=
\ No newline at end of file
diff --git a/queue_services/events-listener/openshift/templates/events-listener-build.json b/queue_services/events-listener/openshift/templates/events-listener-build.json
deleted file mode 100755
index ce0bb88e3..000000000
--- a/queue_services/events-listener/openshift/templates/events-listener-build.json
+++ /dev/null
@@ -1,111 +0,0 @@
-{
- "kind": "Template",
- "apiVersion": "v1",
- "metadata": {
- "annotations": {
- "description": "Build template for a events listener.",
- "tags": "flask",
- "iconClass": "icon-python"
- },
- "name": "${NAME}-build"
- },
- "objects": [
- {
- "kind": "ImageStream",
- "apiVersion": "v1",
- "metadata": {
- "name": "${NAME}"
- }
- },
- {
- "kind": "BuildConfig",
- "apiVersion": "v1",
- "metadata": {
- "name": "${NAME}",
- "labels": {
- "app": "${NAME}",
- "app-group": "${APP_GROUP}",
- "template": "${NAME}-build"
- }
- },
- "spec": {
- "source": {
- "type": "Git",
- "git": {
- "uri": "${GIT_REPO_URL}",
- "ref": "${GIT_REF}"
- },
- "contextDir": "${SOURCE_CONTEXT_DIR}"
- },
- "strategy": {
- "type": "Docker",
- "dockerStrategy": {
- "dockerfilePath": "${DOCKER_FILE_PATH}"
- }
- },
- "output": {
- "to": {
- "kind": "ImageStreamTag",
- "name": "${NAME}:${OUTPUT_IMAGE_TAG}"
- }
- },
- "triggers": [
- {
- "type": "ConfigChange"
- }
- ]
- }
- }
- ],
- "parameters": [
- {
- "name": "NAME",
- "displayName": "Name",
- "description": "The name assigned to all of the objects defined in this template. You should keep this as default unless your know what your doing.",
- "required": true,
- "value": "events-listener"
- },
- {
- "name": "APP_GROUP",
- "displayName": "App Group",
- "description": "The name assigned to all of the deployments in this project.",
- "required": true,
- "value": "sbc-pay"
- },
- {
- "name": "GIT_REPO_URL",
- "displayName": "Git Repo URL",
- "description": "The URL to your GIT repo, don't use the this default unless your just experimenting.",
- "required": true,
- "value": "https://github.com/bcgov/sbc-pay.git"
- },
- {
- "name": "GIT_REF",
- "displayName": "Git Reference",
- "description": "The git reference or branch.",
- "required": true,
- "value": "development"
- },
- {
- "name": "SOURCE_CONTEXT_DIR",
- "displayName": "Source Context Directory",
- "description": "The source context directory.",
- "required": true,
- "value": "queue_services/events-listener"
- },
- {
- "name": "OUTPUT_IMAGE_TAG",
- "displayName": "Output Image Tag",
- "description": "The tag given to the built image.",
- "required": true,
- "value": "latest"
- },
- {
- "name": "DOCKER_FILE_PATH",
- "displayName": "Docker File Path",
- "description": "The path to the docker file defining the build.",
- "required": false,
- "value": "Dockerfile"
- }
- ]
-}
\ No newline at end of file
diff --git a/queue_services/events-listener/openshift/templates/events-listener-deploy.json b/queue_services/events-listener/openshift/templates/events-listener-deploy.json
deleted file mode 100755
index b30d289f5..000000000
--- a/queue_services/events-listener/openshift/templates/events-listener-deploy.json
+++ /dev/null
@@ -1,200 +0,0 @@
-{
- "kind": "Template",
- "apiVersion": "v1",
- "metadata": {
- "annotations": {
- "description": "Deployment template for events listener service.",
- "tags": "${NAME}-${TAG_NAME}"
- },
- "name": "${NAME}-${TAG_NAME}-deploy"
- },
- "objects": [
- {
- "kind": "DeploymentConfig",
- "apiVersion": "v1",
- "metadata": {
- "name": "${NAME}-${TAG_NAME}",
- "labels": {
- "app": "${NAME}-${TAG_NAME}",
- "app-group": "${APP_GROUP}",
- "template": "${NAME}-deploy"
- }
- },
- "spec": {
- "strategy": {
- "type": "Rolling",
- "rollingParams": {
- "updatePeriodSeconds": 1,
- "intervalSeconds": 1,
- "timeoutSeconds": 600,
- "maxUnavailable": "25%",
- "maxSurge": "25%"
- }
- },
- "triggers": [
- {
- "type": "ImageChange",
- "imageChangeParams": {
- "automatic": true,
- "containerNames": [
- "${NAME}-${TAG_NAME}"
- ],
- "from": {
- "kind": "ImageStreamTag",
- "namespace": "${IMAGE_NAMESPACE}",
- "name": "${NAME}:${TAG_NAME}"
- }
- }
- },
- {
- "type": "ConfigChange"
- }
- ],
- "replicas": "${REPLICAS}",
- "test": false,
- "selector": {
- "app": "${NAME}-${TAG_NAME}",
- "deploymentconfig": "${NAME}-${TAG_NAME}"
- },
- "template": {
- "metadata": {
- "labels": {
- "app": "${NAME}-${TAG_NAME}",
- "app-group": "${APP_GROUP}",
- "deploymentconfig": "${NAME}-${TAG_NAME}",
- "template": "${NAME}-deploy"
- }
- },
- "spec": {
- "containers": [
- {
- "name": "${NAME}-${TAG_NAME}",
- "image": "docker-registry.default.svc:5000/${IMAGE_NAMESPACE}/${NAME}:${TAG_NAME}",
- "ports": [
- {
- "containerPort": 8080,
- "protocol": "TCP"
- }
- ],
- "env": [
- ],
- "resources": {
- "requests": {
- "cpu": "${CPU_REQUEST}",
- "memory": "${MEMORY_REQUEST}"
- },
- "limits": {
- "cpu": "${CPU_LIMIT}",
- "memory": "${MEMORY_LIMIT}"
- }
- },
- "livenessProbe": {
- "httpGet": {
- "path": "/healthz",
- "port": 7070,
- "scheme": "HTTP"
- },
- "timeoutSeconds": 1,
- "periodSeconds": 10,
- "successThreshold": 1,
- "failureThreshold": 3
- },
- "readinessProbe": {
- "httpGet": {
- "path": "/readyz",
- "port": 7070,
- "scheme": "HTTP"
- },
- "timeoutSeconds": 1,
- "periodSeconds": 10,
- "successThreshold": 1,
- "failureThreshold": 3
- },
- "terminationMessagePath": "/dev/termination-log",
- "terminationMessagePolicy": "File",
- "imagePullPolicy": "Always"
- }
- ],
- "restartPolicy": "Always",
- "terminationGracePeriodSeconds": 30,
- "dnsPolicy": "ClusterFirst",
- "securityContext": {},
- "schedulerName": "default-scheduler"
- }
- }
- }
- }
- ],
- "parameters": [
- {
- "name": "NAME",
- "displayName": "Name",
- "description": "The name assigned to all of the OpenShift resources associated to the server instance.",
- "required": true,
- "value": "events-listener"
- },
- {
- "name": "APP_GROUP",
- "displayName": "App Group",
- "description": "The name assigned to all of the deployments in this project.",
- "required": true,
- "value": "sbc-pay"
- },
- {
- "name": "IMAGE_NAMESPACE",
- "displayName": "Image Namespace",
- "required": true,
- "description": "The namespace of the OpenShift project containing the imagestream for the application.",
- "value": "l4ygcl-tools"
- },
- {
- "name": "TAG_NAME",
- "displayName": "Environment TAG name",
- "description": "The TAG name for this environment, e.g., dev, test, prod",
- "required": true,
- "value": "dev"
- },
- {
- "name": "DATABASE_NAME",
- "displayName": "Database App Name",
- "description": "A valid database app name used by the service.",
- "required": true,
- "value": "postgresql"
- },
- {
- "name": "CPU_REQUEST",
- "displayName": "Resources CPU Request",
- "description": "The resources CPU request (in cores) for this build.",
- "required": true,
- "value": "100m"
- },
- {
- "name": "CPU_LIMIT",
- "displayName": "Resources CPU Limit",
- "description": "The resources CPU limit (in cores) for this build.",
- "required": true,
- "value": "750m"
- },
- {
- "name": "MEMORY_REQUEST",
- "displayName": "Resources Memory Request",
- "description": "The resources Memory request (in Mi, Gi, etc) for this build.",
- "required": true,
- "value": "100Mi"
- },
- {
- "name": "MEMORY_LIMIT",
- "displayName": "Resources Memory Limit",
- "description": "The resources Memory limit (in Mi, Gi, etc) for this build.",
- "required": true,
- "value": "2Gi"
- },
- {
- "name": "REPLICAS",
- "displayName": "The number of replicas to run",
- "description": "The number of replicas to run in this environment.",
- "required": true,
- "value": "1"
- }
- ]
-}
diff --git a/queue_services/events-listener/q_cli.py b/queue_services/events-listener/q_cli.py
deleted file mode 100755
index a14856165..000000000
--- a/queue_services/events-listener/q_cli.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Service for listening and handling Queue Messages.
-
-This service registers interest in listening to a Queue and processing received messages.
-"""
-import asyncio
-import functools
-import getopt
-import json
-import os
-import random
-import signal
-import sys
-
-from entity_queue_common.service_utils import error_cb, logger, signal_handler
-from nats.aio.client import Client as NATS # noqa N814; by convention the name is NATS
-from stan.aio.client import Client as STAN # noqa N814; by convention the name is STAN
-
-
-async def run(loop, old_identifier, new_identifier): # pylint: disable=too-many-locals
- """Run the main application loop for the service.
-
- This runs the main top level service functions for working with the Queue.
- """
- # NATS client connections
- nc = NATS()
- sc = STAN()
-
- async def close():
- """Close the stream and nats connections."""
- await sc.close()
- await nc.close()
-
- # Connection and Queue configuration.
- def nats_connection_options():
- return {
- 'servers': os.getenv('NATS_SERVERS', 'nats://127.0.0.1:4222').split(','),
- 'io_loop': loop,
- 'error_cb': error_cb,
- 'name': os.getenv('NATS_ENTITY_EVENTS_CLIENT_NAME', 'entity.events.worker')
- }
-
- def stan_connection_options():
- return {
- 'cluster_id': os.getenv('NATS_CLUSTER_ID', 'test-cluster'),
- 'client_id': str(random.SystemRandom().getrandbits(0x58)),
- 'nats': nc
- }
-
- def subscription_options():
- return {
- 'subject': os.getenv('NATS_ENTITY_EVENTS_SUBJECT', 'entity.events'),
- 'queue': os.getenv('NATS_ENTITY_EVENTS_QUEUE', 'events-worker'),
- 'durable_name': os.getenv('NATS_ENTITY_EVENTS_QUEUE', 'events-worker') + '_durable'
- }
-
- try:
- # Connect to the NATS server, and then use that for the streaming connection.
- await nc.connect(**nats_connection_options())
- await sc.connect(**stan_connection_options())
-
- # register the signal handler
- for sig in ('SIGINT', 'SIGTERM'):
- loop.add_signal_handler(getattr(signal, sig),
- functools.partial(signal_handler, sig_loop=loop, sig_nc=nc, task=close)
- )
-
- payload = {
- 'specversion': '1.x-wip',
- 'type': 'bc.registry.business.incorporationApplication',
- 'source': 'https://api.business.bcregistry.gov.bc.ca/v1/business/BC1234567/filing/12345678',
- 'id': 'C234-1234-1234',
- 'time': '2020-08-28T17:37:34.651294+00:00',
- 'datacontenttype': 'application/json',
- 'identifier': new_identifier,
- 'tempidentifier': old_identifier,
- 'data': {
- 'filing': {
- 'header': {'filingId': '12345678'},
- 'business': {'identifier': 'BC1234567'}
- }
- }
- }
-
- print('payload-->', payload)
-
- await sc.publish(subject=subscription_options().get('subject'),
- payload=json.dumps(payload).encode('utf-8'))
-
- except Exception as e: # pylint: disable=broad-except
- # TODO tighten this error and decide when to bail on the infinite reconnect
- logger.error(e)
-
-
-if __name__ == '__main__':
- try:
- opts, args = getopt.getopt(sys.argv[1:], "ho:n:", ["oldid=", "newid="])
- except getopt.GetoptError:
- print('q_cli.py -o -n ')
- sys.exit(2)
-
- for opt, arg in opts:
- if opt == '-h':
- print('q_cli.py -o -n ')
- sys.exit()
- elif opt in ("-o", "--oldid"):
- old_id = arg
- elif opt in ("-n", "--newid"):
- new_id = arg
-
- print('publish:', old_id, new_id)
- event_loop = asyncio.get_event_loop()
- event_loop.run_until_complete(run(event_loop, old_id, new_id))
diff --git a/queue_services/events-listener/requirements.txt b/queue_services/events-listener/requirements.txt
deleted file mode 100644
index c9ce347ce..000000000
--- a/queue_services/events-listener/requirements.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-Flask==1.1.2
-Jinja2==3.0.3
-MarkupSafe==2.1.3
-Werkzeug==1.0.1
-asyncio-nats-client==0.11.5
-asyncio-nats-streaming==0.4.0
-attrs==23.1.0
-blinker==1.6.2
-certifi==2023.7.22
-click==8.1.3
-expiringdict==1.2.2
-importlib-resources==5.12.0
-itsdangerous==2.0.1
-jaeger-client==4.8.0
-jsonschema==4.17.3
-launchdarkly-server-sdk==8.1.4
-opentracing==2.4.0
-pkgutil_resolve_name==1.3.10
-protobuf==3.19.6
-pyRFC3339==1.1
-pycountry==22.3.5
-pyrsistent==0.19.3
-python-dotenv==1.0.0
-pytz==2023.3
-semver==2.13.0
-sentry-sdk==1.25.1
-six==1.16.0
-threadloop==1.0.2
-thrift==0.16.0
-tornado==6.3.3
-urllib3==1.26.17
-zipp==3.15.0
--e git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common
--e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
--e git+https://github.com/bcgov/sbc-pay.git#egg=pay-api&subdirectory=pay-api
-git+https://github.com/daxiom/simple-cloudevent.py.git
diff --git a/queue_services/events-listener/requirements/repo-libraries.txt b/queue_services/events-listener/requirements/repo-libraries.txt
deleted file mode 100644
index 77f773f6e..000000000
--- a/queue_services/events-listener/requirements/repo-libraries.txt
+++ /dev/null
@@ -1,4 +0,0 @@
--e git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common
--e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
--e git+https://github.com/bcgov/sbc-pay.git#egg=pay-api&subdirectory=pay-api
-git+https://github.com/daxiom/simple-cloudevent.py.git
diff --git a/queue_services/events-listener/setup.cfg b/queue_services/events-listener/setup.cfg
deleted file mode 100644
index 113ffe725..000000000
--- a/queue_services/events-listener/setup.cfg
+++ /dev/null
@@ -1,119 +0,0 @@
-[metadata]
-name = events_listener
-url = https://github.com/bcgov/sbc-pay/queue_services/events-listener
-author = SBC Relationships team
-author_email =
-classifiers =
- Development Status :: Beta
- Intended Audience :: Developers / QA
- Topic :: Payments
- License :: OSI Approved :: Apache Software License
- Natural Language :: English
- Programming Language :: Python :: 3.8
-license = Apache Software License Version 2.0
-description = A short description of the project
-long_description = file: README.md
-keywords =
-
-[options]
-zip_safe = True
-python_requires = >=3.6
-include_package_data = True
-packages = find:
-
-[options.package_data]
-events_listener =
-
-[wheel]
-universal = 1
-
-[bdist_wheel]
-universal = 1
-
-[aliases]
-test = pytest
-
-[flake8]
-exclude = .git,*migrations*
-max-line-length = 120
-docstring-min-length=10
-per-file-ignores =
- */__init__.py:F401
-
-[pycodestyle]
-max_line_length = 120
-ignore = E501
-docstring-min-length=10
-notes=FIXME,XXX # TODO is ignored
-match_dir = src/events_listener
-ignored-modules=flask_sqlalchemy
- sqlalchemy
-per-file-ignores =
- */__init__.py:F401
-good-names=
- b,
- d,
- i,
- e,
- f,
- k,
- u,
- v,
- ar,
- cb, #common shorthand for callback
- nc,
- rv,
- sc,
- event_loop,
- logger,
- loop,
-
-[pylint]
-ignore=migrations,test
-notes=FIXME,XXX,TODO
-ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session
-ignored-classes=scoped_session
-disable=C0301,W0511,R0801,R0902
-good-names=
- b,
- d,
- i,
- e,
- f,
- k,
- u,
- v,
- ar,
- cb, #common shorthand for callback
- nc,
- rv,
- sc,
- event_loop,
- logger,
- loop,
-
-[isort]
-line_length = 120
-indent = 4
-multi_line_output = 4
-lines_after_imports = 2
-
-[tool:pytest]
-addopts = --cov=src --cov-report html:htmlcov --cov-report xml:coverage.xml
-testpaths = tests
-filterwarnings =
- ignore::UserWarning
-
-
-[report:run]
-exclude_lines =
- pragma: no cover
- from
- import
- def __repr__
- if self.debug:
- if settings.DEBUG
- raise AssertionError
- raise NotImplementedError
- if 0:
- if __name__ == .__main__.:
diff --git a/queue_services/events-listener/setup.py b/queue_services/events-listener/setup.py
deleted file mode 100644
index 1477e6874..000000000
--- a/queue_services/events-listener/setup.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright © 2019 Province of British Columbia.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Installer and setup for this module
-"""
-import ast
-import re
-from glob import glob
-from os.path import basename, splitext
-
-from setuptools import find_packages, setup
-
-
-_version_re = re.compile(r'__version__\s+=\s+(.*)') # pylint: disable=invalid-name
-
-with open('src/events_listener/version.py', 'rb') as f:
- version = str(ast.literal_eval(_version_re.search( # pylint: disable=invalid-name
- f.read().decode('utf-8')).group(1)))
-
-
-def read_requirements(filename):
- """
- Get application requirements from
- the requirements.txt file.
- :return: Python requirements
- """
- with open(filename, 'r') as req:
- requirements = req.readlines()
- install_requires = [r.strip() for r in requirements if (r.find('git+') != 0 and r.find('-e git+') != 0)]
- return install_requires
-
-
-def read(filepath):
- """
- Read the contents from a file.
- :param str filepath: path to the file to be read
- :return: file contents
- """
- with open(filepath, 'r') as file_handle:
- content = file_handle.read()
- return content
-
-
-REQUIREMENTS = read_requirements('requirements.txt')
-
-setup(
- name="events_listener",
- version=version,
- author_email='',
- packages=find_packages('src'),
- package_dir={'': 'src'},
- py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
- include_package_data=True,
- license=read('LICENSE'),
- long_description=read('README.md'),
- zip_safe=False,
- install_requires=REQUIREMENTS,
- setup_requires=["pytest-runner", ],
- tests_require=["pytest", ],
-)
diff --git a/queue_services/events-listener/src/events_listener/__init__.py b/queue_services/events-listener/src/events_listener/__init__.py
deleted file mode 100644
index 7fcd624dc..000000000
--- a/queue_services/events-listener/src/events_listener/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Events Listener service.
-
-This module is the service worker for applying filings to the Business Database structure.
-"""
diff --git a/queue_services/events-listener/src/events_listener/config.py b/queue_services/events-listener/src/events_listener/config.py
deleted file mode 100644
index e5e12c062..000000000
--- a/queue_services/events-listener/src/events_listener/config.py
+++ /dev/null
@@ -1,131 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""All of the configuration for the service is captured here.
-
-All items are loaded, or have Constants defined here that
-are loaded into the Flask configuration.
-All modules and lookups get their configuration from the
-Flask config, rather than reading environment variables directly
-or by accessing this configuration directly.
-"""
-import os
-import random
-
-from dotenv import find_dotenv, load_dotenv
-
-
-# this will load all the envars from a .env file located in the project root (api)
-load_dotenv(find_dotenv())
-
-CONFIGURATION = {
- 'development': 'events_listener.config.DevConfig',
- 'testing': 'events_listener.config.TestConfig',
- 'production': 'events_listener.config.ProdConfig',
- 'default': 'events_listener.config.ProdConfig'
-}
-
-
-def get_named_config(config_name: str = 'production'):
- """Return the configuration object based on the name.
-
- :raise: KeyError: if an unknown configuration is requested
- """
- if config_name in ['production', 'staging', 'default']:
- app_config = ProdConfig()
- elif config_name == 'testing':
- app_config = TestConfig()
- elif config_name == 'development':
- app_config = DevConfig()
- else:
- raise KeyError(f'Unknown configuration: {config_name}')
- return app_config
-
-
-class _Config(): # pylint: disable=too-few-public-methods
- """Base class configuration that should set reasonable defaults.
-
- Used as the base for all the other configurations.
- """
-
- LEGISLATIVE_TIMEZONE = os.getenv('LEGISLATIVE_TIMEZONE', 'America/Vancouver')
- PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
- PAY_LD_SDK_KEY = os.getenv('PAY_LD_SDK_KEY', None)
-
- SENTRY_ENABLE = os.getenv('SENTRY_ENABLE', 'False')
- SENTRY_DSN = os.getenv('SENTRY_DSN', None)
-
- SQLALCHEMY_TRACK_MODIFICATIONS = False
-
- # POSTGRESQL
- DB_USER = os.getenv('DATABASE_USERNAME', '')
- DB_PASSWORD = os.getenv('DATABASE_PASSWORD', '')
- DB_NAME = os.getenv('DATABASE_NAME', '')
- DB_HOST = os.getenv('DATABASE_HOST', '')
- DB_PORT = os.getenv('DATABASE_PORT', '5432')
- SQLALCHEMY_DATABASE_URI = f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
-
- NATS_CONNECTION_OPTIONS = {
- 'servers': os.getenv('NATS_SERVERS', 'nats://127.0.0.1:4222').split(','),
- 'name': os.getenv('NATS_ENTITY_EVENTS_CLIENT_NAME', 'entity.events.worker')
-
- }
- STAN_CONNECTION_OPTIONS = {
- 'cluster_id': os.getenv('NATS_CLUSTER_ID', 'test-cluster'),
- 'client_id': str(random.SystemRandom().getrandbits(0x58)),
- 'ping_interval': 1,
- 'ping_max_out': 5,
- }
-
- SUBSCRIPTION_OPTIONS = {
- 'subject': os.getenv('NATS_ENTITY_EVENTS_SUBJECT', 'entity.events'),
- 'queue': os.getenv('NATS_ENTITY_EVENTS_QUEUE', 'events-worker'),
- 'durable_name': os.getenv('NATS_ENTITY_EVENTS_QUEUE', 'events-worker') + '_durable',
- }
-
-
-class DevConfig(_Config): # pylint: disable=too-few-public-methods
- """Creates the Development Config object."""
-
- TESTING = False
- DEBUG = True
-
-
-class TestConfig(_Config): # pylint: disable=too-few-public-methods
- """In support of testing only.
-
- Used by the py.test suite
- """
-
- DEBUG = True
- TESTING = True
- # POSTGRESQL
- DB_USER = os.getenv('DATABASE_TEST_USERNAME', '')
- DB_PASSWORD = os.getenv('DATABASE_TEST_PASSWORD', '')
- DB_NAME = os.getenv('DATABASE_TEST_NAME', '')
- DB_HOST = os.getenv('DATABASE_TEST_HOST', '')
- DB_PORT = os.getenv('DATABASE_TEST_PORT', '5432')
- SQLALCHEMY_DATABASE_URI = os.getenv(
- 'DATABASE_TEST_URL',
- default=f'postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{int(DB_PORT)}/{DB_NAME}'
- )
-
- STAN_CLUSTER_NAME = 'test-cluster'
- TEST_NATS_DOCKER = os.getenv('TEST_NATS_DOCKER', None)
-
-
-class ProdConfig(_Config): # pylint: disable=too-few-public-methods
- """Production environment configuration."""
-
- TESTING = False
- DEBUG = False
diff --git a/queue_services/events-listener/src/events_listener/utils.py b/queue_services/events-listener/src/events_listener/utils.py
deleted file mode 100644
index a5c34c3fb..000000000
--- a/queue_services/events-listener/src/events_listener/utils.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Supply version and commit hash info.
-
-When deployed in OKD, it adds the last commit hash onto the version info.
-"""
-import os
-
-from events_listener.version import __version__
-
-
-def _get_build_openshift_commit_hash():
- return os.getenv('OPENSHIFT_BUILD_COMMIT', None)
-
-
-def get_run_version():
- """Return a formatted version string for this service."""
- commit_hash = _get_build_openshift_commit_hash()
- if commit_hash:
- return f'{__version__}-{commit_hash}'
- return __version__
diff --git a/queue_services/events-listener/src/events_listener/version.py b/queue_services/events-listener/src/events_listener/version.py
deleted file mode 100644
index 313a6e40e..000000000
--- a/queue_services/events-listener/src/events_listener/version.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Version of this service in PEP440.
-
-[N!]N(.N)*[{a|b|rc}N][.postN][.devN]
-Epoch segment: N!
-Release segment: N(.N)*
-Pre-release segment: {a|b|rc}N
-Post-release segment: .postN
-Development release segment: .devN
-"""
-
-__version__ = '2.15.2' # pylint: disable=invalid-name
diff --git a/queue_services/events-listener/src/events_listener/worker.py b/queue_services/events-listener/src/events_listener/worker.py
deleted file mode 100644
index 831ccb18b..000000000
--- a/queue_services/events-listener/src/events_listener/worker.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The unique worker functionality for this service is contained here.
-
-The entry-point is the **cb_subscription_handler**
-
-The design and flow leverage a few constraints that are placed upon it
-by NATS Streaming and using AWAIT on the default loop.
-- NATS streaming queues require one message to be processed at a time.
-- AWAIT on the default loop effectively runs synchronously
-
-If these constraints change, the use of Flask-SQLAlchemy would need to change.
-Flask-SQLAlchemy currently allows the base model to be changed, or reworking
-the model to a standalone SQLAlchemy usage with an async engine would need
-to be pursued.
-"""
-import json
-import os
-
-import nats
-from entity_queue_common.service import QueueServiceManager
-from entity_queue_common.service_utils import QueueException, logger
-from flask import Flask # pylint: disable=wrong-import-order
-from pay_api.models import Invoice, db
-from pay_api.services import Flags
-
-from events_listener import config
-
-
-qsm = QueueServiceManager() # pylint: disable=invalid-name
-APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production'))
-FLASK_APP = Flask(__name__)
-FLASK_APP.config.from_object(APP_CONFIG)
-db.init_app(FLASK_APP)
-flag_service = Flags(FLASK_APP)
-
-INCORPORATION_TYPE = 'bc.registry.business.incorporationApplication'
-REGISTRATION = 'bc.registry.business.registration'
-
-
-async def process_event(event_message, flask_app):
- """Render the payment status."""
- if not flask_app:
- raise QueueException('Flask App not available.')
-
- with flask_app.app_context():
- if event_message.get('type', None) in [INCORPORATION_TYPE, REGISTRATION] \
- and 'tempidentifier' in event_message \
- and event_message.get('tempidentifier', None) is not None:
-
- old_identifier = event_message.get('tempidentifier')
- new_identifier = event_message.get('identifier')
- logger.debug('Received message to update %s to %s', old_identifier, new_identifier)
-
- # Find all invoice records which have the old corp number
- invoices = Invoice.find_by_business_identifier(old_identifier)
- for inv in invoices:
- inv.business_identifier = new_identifier
- inv.flush()
-
- db.session.commit()
-
-
-async def cb_subscription_handler(msg: nats.aio.client.Msg):
- """Use Callback to process Queue Msg objects."""
- try:
- logger.info('Received raw message seq:%s, data= %s', msg.sequence, msg.data.decode())
- event_message = json.loads(msg.data.decode('utf-8'))
- logger.debug('Event Message Received: %s', event_message)
- await process_event(event_message, FLASK_APP)
- except Exception: # noqa pylint: disable=broad-except
- # Catch Exception so that any error is still caught and the message is removed from the queue
- logger.error('Queue Error: %s', json.dumps(event_message), exc_info=True)
diff --git a/queue_services/events-listener/tests/conftest.py b/queue_services/events-listener/tests/conftest.py
deleted file mode 100644
index 2bc5e4a7e..000000000
--- a/queue_services/events-listener/tests/conftest.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Common setup and fixtures for the pytest suite used by this service."""
-import asyncio
-import os
-import random
-import time
-from contextlib import contextmanager
-
-import pytest
-from flask import Flask
-from flask_migrate import Migrate, upgrade
-from nats.aio.client import Client as Nats
-from pay_api import db as _db
-from sqlalchemy import event, text
-from sqlalchemy.schema import DropConstraint, MetaData
-from stan.aio.client import Client as Stan
-
-from events_listener.config import get_named_config
-
-
-@contextmanager
-def not_raises(exception):
- """Corallary to the pytest raises builtin.
-
- Assures that an exception is NOT thrown.
- """
- try:
- yield
- except exception:
- raise pytest.fail(f'DID RAISE {exception}')
-
-
-@pytest.fixture(scope='session')
-def app():
- """Return a session-wide application configured in TEST mode."""
- # _app = create_app('testing')
- _app = Flask(__name__)
- _app.config.from_object(get_named_config('testing'))
- _db.init_app(_app)
-
- return _app
-
-
-@pytest.fixture(scope='session')
-def db(app): # pylint: disable=redefined-outer-name, invalid-name
- """Return a session-wide initialised database.
-
- Drops all existing tables - Meta follows Postgres FKs
- """
- with app.app_context():
- # Clear out any existing tables
- metadata = MetaData(_db.engine)
- metadata.reflect()
- for table in metadata.tables.values():
- for fk in table.foreign_keys: # pylint: disable=invalid-name
- _db.engine.execute(DropConstraint(fk.constraint))
- metadata.drop_all()
- _db.drop_all()
-
- sequence_sql = """SELECT sequence_name FROM information_schema.sequences
- WHERE sequence_schema='public'
- """
-
- sess = _db.session()
- for seq in [name for (name,) in sess.execute(text(sequence_sql))]:
- try:
- sess.execute(text('DROP SEQUENCE public.%s ;' % seq))
- print('DROP SEQUENCE public.%s ' % seq)
- except Exception as err: # noqa pylint: disable=broad-except
- print(f'Error: {err}')
- sess.commit()
-
- # ############################################
- # There are 2 approaches, an empty database, or the same one that the app will use
- # create the tables
- # _db.create_all()
- # or
- # Use Alembic to load all of the DB revisions including supporting lookup data
- # This is the path we'll use in legal_api!!
-
- # even though this isn't referenced directly, it sets up the internal configs that upgrade
- import sys
- pay_api_folder = [folder for folder in sys.path if 'pay-api' in folder][0]
- migration_path = pay_api_folder.replace('/pay-api/src', '/pay-api/migrations')
-
- Migrate(app, _db, directory=migration_path)
- upgrade()
-
- return _db
-
-
-@pytest.fixture
-def config(app):
- """Return the application config."""
- return app.config
-
-
-@pytest.fixture(scope='session')
-def client(app): # pylint: disable=redefined-outer-name
- """Return a session-wide Flask test client."""
- return app.test_client()
-
-
-@pytest.fixture(scope='session')
-def client_ctx(app): # pylint: disable=redefined-outer-name
- """Return session-wide Flask test client."""
- with app.test_client() as _client:
- yield _client
-
-
-@pytest.fixture(scope='function')
-def client_id():
- """Return a unique client_id that can be used in tests."""
- _id = random.SystemRandom().getrandbits(0x58)
- # _id = (base64.urlsafe_b64encode(uuid.uuid4().bytes)).replace('=', '')
-
- return f'client-{_id}'
-
-
-@pytest.fixture(scope='function')
-def session(app, db): # pylint: disable=redefined-outer-name, invalid-name
- """Return a function-scoped session."""
- with app.app_context():
- conn = db.engine.connect()
- txn = conn.begin()
-
- options = dict(bind=conn, binds={})
- sess = db.create_scoped_session(options=options)
-
- # establish a SAVEPOINT just before beginning the test
- # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint)
- sess.begin_nested()
-
- @event.listens_for(sess(), 'after_transaction_end')
- def restart_savepoint(sess2, trans): # pylint: disable=unused-variable
- # Detecting whether this is indeed the nested transaction of the test
- if trans.nested and not trans._parent.nested: # pylint: disable=protected-access
- # Handle where test DOESN'T session.commit(),
- sess2.expire_all()
- sess.begin_nested()
-
- db.session = sess
-
- sql = text('select 1')
- sess.execute(sql)
-
- yield sess
-
- # Cleanup
- sess.remove()
- # This instruction rollsback any commit that were executed in the tests.
- txn.rollback()
- conn.close()
-
-
-@pytest.fixture(scope='session')
-def stan_server(docker_services):
- """Create the nats / stan services that the integration tests will use."""
- if os.getenv('TEST_NATS_DOCKER'):
- docker_services.start('nats')
- time.sleep(2)
- # TODO get the wait part working, as opposed to sleeping for 2s
- # public_port = docker_services.wait_for_service("nats", 4222)
- # dsn = "{docker_services.docker_ip}:{public_port}".format(**locals())
- # return dsn
-
-
-@pytest.fixture(scope='function')
-@pytest.mark.asyncio
-async def stan(event_loop, client_id):
- """Create a stan connection for each function, to be used in the tests."""
- nc = Nats()
- sc = Stan()
- cluster_name = 'test-cluster'
-
- await nc.connect(io_loop=event_loop, name='entity.filing.tester')
-
- await sc.connect(cluster_name, client_id, nats=nc)
-
- yield sc
-
- await sc.close()
- await nc.close()
-
-
-@pytest.fixture(scope='function')
-@pytest.mark.asyncio
-async def events_stan(app, event_loop, client_id):
- """Create a stan connection for each function.
-
- Uses environment variables for the cluster name.
- """
- nc = Nats()
- sc = Stan()
-
- await nc.connect(io_loop=event_loop)
-
- cluster_name = os.getenv('STAN_CLUSTER_NAME')
-
- if not cluster_name:
- raise ValueError('Missing env variable: STAN_CLUSTER_NAME')
-
- await sc.connect(cluster_name, client_id, nats=nc)
-
- yield sc
-
- await sc.close()
- await nc.close()
-
-
-@pytest.fixture(scope='function')
-def future(event_loop):
- """Return a future that is used for managing function tests."""
- _future = asyncio.Future(loop=event_loop)
- return _future
-
-
-@pytest.fixture
-def create_mock_coro(mocker, monkeypatch):
- """Return a mocked coroutine, and optionally patch-it in."""
-
- def _create_mock_patch_coro(to_patch=None):
- mock = mocker.Mock()
-
- async def _coro(*args, **kwargs):
- return mock(*args, **kwargs)
-
- if to_patch: # <-- may not need/want to patch anything
- monkeypatch.setattr(to_patch, _coro)
- return mock, _coro
-
- return _create_mock_patch_coro
diff --git a/queue_services/events-listener/tests/docker-compose.yml b/queue_services/events-listener/tests/docker-compose.yml
deleted file mode 100644
index db7bd6eaa..000000000
--- a/queue_services/events-listener/tests/docker-compose.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-version: '2.1'
-services:
- nats:
- image: nats-streaming
- restart: always
- mem_limit: 512m
- expose:
- - 4222
- - 8222
- labels:
- - entity.services=nats
- ports:
- - 4222:4222
- - 8222:8222
- tty: true
\ No newline at end of file
diff --git a/queue_services/events-listener/tests/integration/__init__.py b/queue_services/events-listener/tests/integration/__init__.py
deleted file mode 100644
index 63792ce1a..000000000
--- a/queue_services/events-listener/tests/integration/__init__.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test suite for the integrations to NATS Queue."""
-
-from datetime import datetime
-
-from pay_api.models import CfsAccount, Invoice, InvoiceReference, Payment, PaymentAccount
-from pay_api.utils.enums import InvoiceReferenceStatus, InvoiceStatus, PaymentMethod, PaymentStatus, PaymentSystem
-
-
-def factory_payment_account(payment_system_code: str = 'PAYBC', payment_method_code: str = 'CC', account_number='4101',
- bcol_user_id='test',
- auth_account_id: str = '1234'):
- """Return Factory."""
- # Create a payment account
- account = PaymentAccount(
- auth_account_id=auth_account_id,
- bcol_user_id=bcol_user_id,
- bcol_account='TEST'
- ).save()
-
- CfsAccount(cfs_party='11111',
- cfs_account=account_number,
- cfs_site='29921', payment_account=account).save()
-
- if payment_system_code == PaymentSystem.BCOL.value:
- account.payment_method = PaymentMethod.DRAWDOWN.value
- elif payment_system_code == PaymentSystem.PAYBC.value:
- account.payment_method = payment_method_code
-
- return account
-
-
-def factory_payment(
- payment_system_code: str = 'PAYBC', payment_method_code: str = 'CC',
- payment_status_code: str = PaymentStatus.CREATED.value,
- created_on: datetime = datetime.now(),
- invoice_number: str = None
-):
- """Return Factory."""
- return Payment(
- payment_system_code=payment_system_code,
- payment_method_code=payment_method_code,
- payment_status_code=payment_status_code,
- # created_on=created_on,
- invoice_number=invoice_number
- ).save()
-
-
-def factory_invoice(payment_account, status_code: str = InvoiceStatus.CREATED.value,
- corp_type_code='CP',
- business_identifier: str = 'CP0001234',
- service_fees: float = 0.0, total=0,
- payment_method_code: str = PaymentMethod.DIRECT_PAY.value,
- created_on: datetime = datetime.now()):
- """Return Factory."""
- return Invoice(
- invoice_status_code=status_code,
- payment_account_id=payment_account.id,
- total=total,
- created_by='test',
- # created_on=created_on,
- business_identifier=business_identifier,
- corp_type_code=corp_type_code,
- folio_number='1234567890',
- service_fees=service_fees,
- bcol_account=payment_account.bcol_account,
- payment_method_code=payment_method_code
- ).save()
-
-
-def factory_invoice_reference(invoice_id: int, invoice_number: str = '10021'):
- """Return Factory."""
- return InvoiceReference(invoice_id=invoice_id,
- status_code=InvoiceReferenceStatus.ACTIVE.value,
- invoice_number=invoice_number).save()
diff --git a/queue_services/events-listener/tests/integration/test_worker_queue.py b/queue_services/events-listener/tests/integration/test_worker_queue.py
deleted file mode 100644
index 5c4792291..000000000
--- a/queue_services/events-listener/tests/integration/test_worker_queue.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Test Suite to ensure the worker routines are working as expected."""
-
-import pytest
-from entity_queue_common.service_utils import subscribe_to_queue
-from pay_api.models import Invoice
-from pay_api.utils.enums import PaymentMethod, PaymentSystem
-
-from tests.integration import factory_invoice, factory_invoice_reference, factory_payment, factory_payment_account
-
-from .utils import helper_add_event_to_queue
-
-
-@pytest.mark.asyncio
-async def test_events_listener_queue(app, session, stan_server, event_loop, client_id, events_stan, future):
- """Assert that events can be retrieved and decoded from the Queue."""
- # Call back for the subscription
- from events_listener.worker import cb_subscription_handler
-
- # vars
- old_identifier = 'T000000000'
- new_identifier = 'BC12345678'
-
- events_subject = 'test_subject'
- events_queue = 'test_queue'
- events_durable_name = 'test_durable'
-
- # Create a Credit Card Payment
-
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- events_subject,
- events_queue,
- events_durable_name,
- cb_subscription_handler)
-
- # add an event to queue
- await helper_add_event_to_queue(events_stan, events_subject, old_identifier=old_identifier,
- new_identifier=new_identifier)
-
- assert True
-
-
-@pytest.mark.asyncio
-async def test_update_internal_payment(app, session, stan_server, event_loop, client_id, events_stan, future):
- """Assert that the update internal payment records works."""
- # Call back for the subscription
- from events_listener.worker import cb_subscription_handler
-
- # vars
- old_identifier = 'T000000000'
- new_identifier = 'BC12345678'
-
- events_subject = 'test_subject'
- events_queue = 'test_queue'
- events_durable_name = 'test_durable'
-
- # Create an Internal Payment
- payment_account = factory_payment_account(payment_system_code=PaymentSystem.BCOL.value).save()
-
- invoice: Invoice = factory_invoice(payment_account=payment_account,
- business_identifier=old_identifier,
- payment_method_code=PaymentMethod.INTERNAL.value).save()
-
- inv_ref = factory_invoice_reference(invoice_id=invoice.id)
- factory_payment(invoice_number=inv_ref.invoice_number)
-
- invoice_id = invoice.id
-
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- events_subject,
- events_queue,
- events_durable_name,
- cb_subscription_handler)
-
- # add an event to queue
- await helper_add_event_to_queue(events_stan, events_subject, old_identifier=old_identifier,
- new_identifier=new_identifier)
-
- # Get the internal account and invoice and assert that the identifier is new identifier
- invoice = Invoice.find_by_id(invoice_id)
-
- assert invoice.business_identifier == new_identifier
-
-
-@pytest.mark.asyncio
-async def test_update_credit_payment(app, session, stan_server, event_loop, client_id, events_stan, future):
- """Assert that the update credit payment records works."""
- # Call back for the subscription
- from events_listener.worker import cb_subscription_handler
-
- # vars
- old_identifier = 'T000000000'
- new_identifier = 'BC12345678'
-
- events_subject = 'test_subject'
- events_queue = 'test_queue'
- events_durable_name = 'test_durable'
-
- # Create an Internal Payment
-
- payment_account = factory_payment_account(payment_system_code=PaymentSystem.PAYBC.value,
- payment_method_code=PaymentMethod.DIRECT_PAY.value).save()
-
- invoice: Invoice = factory_invoice(payment_account=payment_account,
- business_identifier=old_identifier,
- payment_method_code=PaymentMethod.DIRECT_PAY.value).save()
-
- inv_ref = factory_invoice_reference(invoice_id=invoice.id)
- factory_payment(invoice_number=inv_ref.invoice_number)
-
- invoice_id = invoice.id
-
- # register the handler to test it
- await subscribe_to_queue(events_stan,
- events_subject,
- events_queue,
- events_durable_name,
- cb_subscription_handler)
-
- # add an event to queue
- await helper_add_event_to_queue(events_stan, events_subject, old_identifier=old_identifier,
- new_identifier=new_identifier)
-
- # Get the internal account and invoice and assert that the identifier is new identifier
- invoice = Invoice.find_by_id(invoice_id)
-
- assert invoice.business_identifier == new_identifier
diff --git a/queue_services/events-listener/tests/integration/utils.py b/queue_services/events-listener/tests/integration/utils.py
deleted file mode 100644
index 718c05b7a..000000000
--- a/queue_services/events-listener/tests/integration/utils.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Utilities used by the integration tests."""
-import json
-
-import stan
-
-
-async def helper_add_event_to_queue(stan_client: stan.aio.client.Client,
- subject: str,
- old_identifier: str = 'T1234567890',
- new_identifier: str = 'BC1234567890'):
- """Add event to the Queue."""
- payload = {
- 'specversion': '1.x-wip',
- 'type': 'bc.registry.business.incorporationApplication',
- 'source': 'https://api.business.bcregistry.gov.bc.ca/v1/business/BC1234567/filing/12345678',
- 'id': 'C234-1234-1234',
- 'time': '2020-08-28T17:37:34.651294+00:00',
- 'datacontenttype': 'application/json',
- 'identifier': new_identifier,
- 'tempidentifier': old_identifier,
- 'data': {
- 'filing': {
- 'header': {'filingId': '12345678'},
- 'business': {'identifier': 'BC1234567'}
- }
- }
- }
-
- await stan_client.publish(subject=subject,
- payload=json.dumps(payload).encode('utf-8'))
diff --git a/queue_services/payment-reconciliations/.envrc b/queue_services/payment-reconciliations/.envrc
deleted file mode 100644
index 64fff9a69..000000000
--- a/queue_services/payment-reconciliations/.envrc
+++ /dev/null
@@ -1,6 +0,0 @@
-while read -r line; do
- echo $line
- [[ "$line" =~ ^#.*$ ]] && continue
- export $line
-done < .env
-source venv/bin/activate
diff --git a/queue_services/payment-reconciliations/LICENSE b/queue_services/payment-reconciliations/LICENSE
deleted file mode 100644
index 18b5abc34..000000000
--- a/queue_services/payment-reconciliations/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright © 2018 Province of British Columbia
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/queue_services/payment-reconciliations/MANIFEST.in b/queue_services/payment-reconciliations/MANIFEST.in
deleted file mode 100644
index 1a342bdeb..000000000
--- a/queue_services/payment-reconciliations/MANIFEST.in
+++ /dev/null
@@ -1,5 +0,0 @@
-include requirements.txt
-include config.py
-include logging.conf
-include LICENSE
-include README.md
\ No newline at end of file
diff --git a/queue_services/payment-reconciliations/README.md b/queue_services/payment-reconciliations/README.md
deleted file mode 100755
index 2ae0e90f6..000000000
--- a/queue_services/payment-reconciliations/README.md
+++ /dev/null
@@ -1,75 +0,0 @@
-
-[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE)
-
-
-# Application Name
-
-BC Registries Payments system API
-
-## Technology Stack Used
-* Python, Flask
-* Postgres - SQLAlchemy, psycopg2-binary & alembic
-
-## Third-Party Products/Libraries used and the the License they are covert by
-
-## Project Status
-As of 2018-02-22 in **ALPHA**
-
-## Documnentation
-
-GitHub Pages (https://guides.github.com/features/pages/) are a neat way to document you application/project.
-
-## Security
-
-Future - BCGov Keycloak
-
-Current - JWT hack
-
-## Files in this repository
-
-```
-docs/ - Project Documentation
-└── images
-└── icons
-
-openshift/ - OpenShift-specific files
-├── scripts - helper scripts
-└── templates - application templates
-```
-
-## Deployment (Local Development)
-
-* Developer Workstation Requirements/Setup
-* Application Specific Setup
-
-## Deployment (OpenShift)
-
-See (openshift/Readme.md)
-
-## Getting Help or Reporting an Issue
-
-To report bugs/issues/feature requests, please file an [issue](../../issues).
-
-## How to Contribute
-
-If you would like to contribute, please see our [CONTRIBUTING](./CONTRIBUTING.md) guidelines.
-
-Please note that this project is released with a [Contributor Code of Conduct](./CODE_OF_CONDUCT.md).
-By participating in this project you agree to abide by its terms.
-
-## License
-
- Copyright 2018 Province of British Columbia
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
diff --git a/queue_services/payment-reconciliations/app.py b/queue_services/payment-reconciliations/app.py
deleted file mode 100755
index 52754b9c5..000000000
--- a/queue_services/payment-reconciliations/app.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""s2i based launch script to run the service."""
-import asyncio
-import os
-
-from reconciliations.worker import APP_CONFIG, cb_subscription_handler, qsm
-
-
-if __name__ == '__main__':
-
- # my_config = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production'))
-
- event_loop = asyncio.get_event_loop()
- event_loop.run_until_complete(qsm.run(loop=event_loop,
- config=APP_CONFIG,
- callback=cb_subscription_handler))
- try:
- event_loop.run_forever()
- finally:
- event_loop.close()
diff --git a/queue_services/payment-reconciliations/q_cli.py b/queue_services/payment-reconciliations/q_cli.py
deleted file mode 100755
index d05c10f2d..000000000
--- a/queue_services/payment-reconciliations/q_cli.py
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Service for listening and handling Queue Messages.
-
-This service registers interest in listening to a Queue and processing received messages.
-"""
-import asyncio
-import functools
-import getopt
-import json
-import os
-import random
-import signal
-import sys
-
-from nats.aio.client import Client as NATS # noqa N814; by convention the name is NATS
-from stan.aio.client import Client as STAN # noqa N814; by convention the name is STAN
-
-
-async def run(loop, file_name: str, location: str, message_type: str): # pylint: disable=too-many-locals
- """Run the main application loop for the service.
-
- This runs the main top level service functions for working with the Queue.
- """
- from entity_queue_common.service_utils import error_cb, logger, signal_handler
-
- # NATS client connections
- nc = NATS()
- sc = STAN()
-
- async def close():
- """Close the stream and nats connections."""
- await sc.close()
- await nc.close()
-
- # Connection and Queue configuration.
- def nats_connection_options():
- return {
- 'servers': os.getenv('NATS_SERVERS', 'nats://127.0.0.1:4222').split(','),
- 'io_loop': loop,
- 'error_cb': error_cb,
- 'name': os.getenv('NATS_PAYMENT_RECONCILIATIONS_CLIENT_NAME', 'account.reconciliations.worker')
- }
-
- def stan_connection_options():
- return {
- 'cluster_id': os.getenv('NATS_CLUSTER_ID', 'test-cluster'),
- 'client_id': str(random.SystemRandom().getrandbits(0x58)),
- 'nats': nc
- }
-
- def subscription_options():
- return {
- 'subject': os.getenv('NATS_PAYMENT_RECONCILIATIONS_SUBJECT', 'payment.reconciliations'),
- 'queue': os.getenv('NATS_PAYMENT_RECONCILIATIONS_QUEUE', 'payment-reconciliations-worker'),
- 'durable_name': os.getenv('NATS_PAYMENT_RECONCILIATIONS_QUEUE',
- 'payment-reconciliations-worker') + '_durable'
- }
-
- try:
- # Connect to the NATS server, and then use that for the streaming connection.
- await nc.connect(**nats_connection_options())
- await sc.connect(**stan_connection_options())
-
- # register the signal handler
- for sig in ('SIGINT', 'SIGTERM'):
- loop.add_signal_handler(getattr(signal, sig),
- functools.partial(signal_handler, sig_loop=loop, sig_nc=nc, task=close)
- )
-
- payload = {
- 'specversion': '1.x-wip',
- 'type': f'{message_type}',
- 'source': 'https://api.business.bcregistry.gov.bc.ca/v1/business/BC1234567/filing/12345678',
- 'id': 'C234-1234-1234',
- 'time': '2020-08-28T17:37:34.651294+00:00',
- 'datacontenttype': 'application/json',
- 'data': {
- 'fileName': file_name,
- 'source': 'MINIO',
- 'location': location
-
- }
- }
-
- print('payload-->', payload)
- print(subscription_options())
-
- await sc.publish(subject=subscription_options().get('subject'),
- payload=json.dumps(payload).encode('utf-8'))
-
- except Exception as e: # pylint: disable=broad-except
- # TODO tighten this error and decide when to bail on the infinite reconnect
- logger.error(e)
-
-
-if __name__ == '__main__':
- try:
- opts, args = getopt.getopt(sys.argv[1:], "hf:l:m:", ["file=", "location=", "message="])
- except getopt.GetoptError:
- sys.exit(2)
-
- for opt, arg in opts:
- if opt == '-h':
- sys.exit()
- elif opt in ("-f", "--file"):
- file = arg
- elif opt in ("-l", "--location"):
- location = arg
- if not location:
- location = 'payment-sftp'
- elif opt in ("-m", "--message"):
- message = arg
- if not message:
- message = 'bc.registry.payment.casSettlementUploaded'
-
- event_loop = asyncio.get_event_loop()
- event_loop.run_until_complete(run(event_loop, file, location, message))
diff --git a/queue_services/payment-reconciliations/requirements.txt b/queue_services/payment-reconciliations/requirements.txt
deleted file mode 100644
index 5d330d3fb..000000000
--- a/queue_services/payment-reconciliations/requirements.txt
+++ /dev/null
@@ -1,93 +0,0 @@
--e git+https://github.com/bcgov/lear.git@30dba30463c99aaedfdcfd463213e71ba0d35b51#egg=entity_queue_common&subdirectory=queue_services/common
--e git+https://github.com/bcgov/sbc-common-components.git@b93585ea3ac273b9e51c4dd5ddbc8190fd95da6a#egg=sbc_common_components&subdirectory=python
--e git+https://github.com/bcgov/sbc-pay.git@6d4a94c8d78f11322487f93eb65ec6e9d5238b35#egg=pay_api&subdirectory=pay-api
-Flask-Caching==2.0.2
-Flask-Migrate==2.7.0
-Flask-Moment==1.0.5
-Flask-OpenTracing==1.1.0
-Flask-SQLAlchemy==2.5.1
-Flask-Script==2.0.6
-Flask==1.1.2
-Jinja2==3.0.3
-Mako==1.2.4
-MarkupSafe==2.1.3
-PyMeeus==0.5.12
-SQLAlchemy-Continuum==1.3.15
-SQLAlchemy-Utils==0.41.1
-SQLAlchemy==1.3.24
-Werkzeug==1.0.1
-aiohttp==3.9.2
-aiosignal==1.3.1
-alembic==1.11.1
-aniso8601==9.0.1
-async-timeout==4.0.2
-asyncio-nats-client==0.11.5
-asyncio-nats-streaming==0.4.0
-attrs==23.1.0
-blinker==1.6.2
-cachelib==0.9.0
-cachetools==5.3.1
-cattrs==23.1.2
-certifi==2023.7.22
-cffi==1.15.1
-charset-normalizer==3.1.0
-click==8.1.3
-convertdate==2.4.0
-croniter==1.4.1
-cryptography==42.0.2
-dpath==2.1.6
-ecdsa==0.18.0
-exceptiongroup==1.1.1
-expiringdict==1.2.2
-flask-jwt-oidc==0.3.0
-flask-marshmallow==0.11.0
-flask-restx==1.1.0
-frozenlist==1.3.3
-google-api-core==2.11.1
-google-auth==2.18.1
-google-cloud-pubsub==2.17.0
-googleapis-common-protos==1.59.1
-grpc-google-iam-v1==0.12.6
-grpcio-status==1.48.2
-grpcio==1.54.3
-gunicorn==20.1.0
-hijri-converter==2.3.1
-holidays==0.37
-idna==3.4
-itsdangerous==2.0.1
-jaeger-client==4.8.0
-jsonschema==4.17.3
-korean-lunar-calendar==0.3.1
-launchdarkly-server-sdk==8.1.4
-marshmallow-sqlalchemy==0.25.0
-marshmallow==3.19.0
-minio==7.1.15
-multidict==6.0.4
-opentracing==2.4.0
-packaging==23.1
-proto-plus==1.22.2
-protobuf==3.19.6
-psycopg2-binary==2.9.6
-pyRFC3339==1.1
-pyasn1-modules==0.3.0
-pyasn1==0.5.0
-pycountry==22.3.5
-pycparser==2.21
-pyrsistent==0.19.3
-python-dateutil==2.8.2
-python-dotenv==1.0.0
-python-jose==3.3.0
-pytz==2023.3
-requests==2.31.0
-rsa==4.9
-semver==2.13.0
-sentry-sdk==1.26.0
-simple-cloudevent @ git+https://github.com/daxiom/simple-cloudevent.py.git@447cabb988202206ac69e71177d7cd11b6c0b002
-six==1.16.0
-strict-rfc3339==0.7
-threadloop==1.0.2
-thrift==0.16.0
-tornado==6.3.3
-typing_extensions==4.6.3
-urllib3==1.26.17
-yarl==1.9.2
diff --git a/queue_services/payment-reconciliations/requirements/bcregistry-libraries.txt b/queue_services/payment-reconciliations/requirements/bcregistry-libraries.txt
deleted file mode 100644
index 623fffcbd..000000000
--- a/queue_services/payment-reconciliations/requirements/bcregistry-libraries.txt
+++ /dev/null
@@ -1,5 +0,0 @@
--e git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common
--e git+https://github.com/bcgov/sbc-common-components.git#egg=sbc-common-components&subdirectory=python
-# -e git+https://github.com/bcgov/sbc-pay.git@refunds#egg=pay-api&subdirectory=pay-api
--e git+https://github.com/bcgov/sbc-pay.git@main#egg=pay-api&subdirectory=pay-api
-git+https://github.com/daxiom/simple-cloudevent.py.git
diff --git a/queue_services/payment-reconciliations/requirements/dev.txt b/queue_services/payment-reconciliations/requirements/dev.txt
deleted file mode 100755
index 04624060f..000000000
--- a/queue_services/payment-reconciliations/requirements/dev.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-# Everything the developer needs in addition to the production requirements
--r prod.txt
-
-# Testing
-pytest
-pytest-mock
-requests
-pyhamcrest
-pytest-cov
-FreezeGun
-
-# Lint and code style
-flake8==5.0.4
-flake8-blind-except
-flake8-debugger
-flake8-docstrings
-flake8-isort
-flake8-quotes
-pep8-naming
-autopep8
-coverage
-pylint
-pylint-flask
-pydocstyle
-isort
-
-
-# docker
-lovely-pytest-docker
-pytest-asyncio==0.18.3
-
diff --git a/queue_services/payment-reconciliations/requirements/prod.txt b/queue_services/payment-reconciliations/requirements/prod.txt
deleted file mode 100644
index c4f830f82..000000000
--- a/queue_services/payment-reconciliations/requirements/prod.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-Flask
-jsonschema==4.17.3
-python-dotenv
-sentry-sdk[flask]
-asyncio-nats-client
-asyncio-nats-streaming
-pycountry
-Werkzeug<2
-minio
-jaeger-client
-attrs
-sqlalchemy<1.4
-itsdangerous==2.0.1
-Jinja2==3.0.3
-protobuf~=3.19.5
-launchdarkly-server-sdk
diff --git a/queue_services/payment-reconciliations/src/reconciliations/__init__.py b/queue_services/payment-reconciliations/src/reconciliations/__init__.py
deleted file mode 100644
index aef4cc171..000000000
--- a/queue_services/payment-reconciliations/src/reconciliations/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Reconciliations queue service.
-
-This module is the service worker for applying payments, receipts and account balance to payment system.
-"""
diff --git a/queue_services/payment-reconciliations/src/reconciliations/utils.py b/queue_services/payment-reconciliations/src/reconciliations/utils.py
deleted file mode 100644
index ca782b559..000000000
--- a/queue_services/payment-reconciliations/src/reconciliations/utils.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Supply version and commit hash info.
-
-When deployed in OKD, it adds the last commit hash onto the version info.
-"""
-import os
-
-from reconciliations.version import __version__
-
-
-def _get_build_openshift_commit_hash():
- return os.getenv('OPENSHIFT_BUILD_COMMIT', None)
-
-
-def get_run_version():
- """Return a formatted version string for this service."""
- commit_hash = _get_build_openshift_commit_hash()
- if commit_hash:
- return f'{__version__}-{commit_hash}'
- return __version__
diff --git a/queue_services/payment-reconciliations/src/reconciliations/worker.py b/queue_services/payment-reconciliations/src/reconciliations/worker.py
deleted file mode 100644
index bc47ad577..000000000
--- a/queue_services/payment-reconciliations/src/reconciliations/worker.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The unique worker functionality for this service is contained here.
-
-The entry-point is the **cb_subscription_handler**
-
-The design and flow leverage a few constraints that are placed upon it
-by NATS Streaming and using AWAIT on the default loop.
-- NATS streaming queues require one message to be processed at a time.
-- AWAIT on the default loop effectively runs synchronously
-
-If these constraints change, the use of Flask-SQLAlchemy would need to change.
-Flask-SQLAlchemy currently allows the base model to be changed, or reworking
-the model to a standalone SQLAlchemy usage with an async engine would need
-to be pursued.
-"""
-import json
-import os
-
-import nats
-from entity_queue_common.service import QueueServiceManager
-from entity_queue_common.service_utils import QueueException, logger
-from flask import Flask
-from pay_api.models import db
-from pay_api.services import Flags
-
-from reconciliations import config
-from reconciliations.cgi_reconciliations import reconcile_distributions
-from reconciliations.eft.eft_reconciliation import reconcile_eft_payments
-from reconciliations.enums import MessageType
-from reconciliations.payment_reconciliations import reconcile_payments
-
-
-qsm = QueueServiceManager() # pylint: disable=invalid-name
-APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production'))
-FLASK_APP = Flask(__name__)
-FLASK_APP.config.from_object(APP_CONFIG)
-db.init_app(FLASK_APP)
-flag_service = Flags(FLASK_APP)
-
-
-async def process_event(event_message, flask_app):
- """Render the payment status."""
- if not flask_app:
- raise QueueException('Flask App not available.')
-
- with flask_app.app_context():
- if (message_type := event_message.get('type', None)) == MessageType.CAS_UPLOADED.value:
- await reconcile_payments(event_message)
- elif message_type == MessageType.CGI_ACK_RECEIVED.value:
- await reconcile_distributions(event_message)
- elif message_type == MessageType.CGI_FEEDBACK_RECEIVED.value:
- await reconcile_distributions(event_message, is_feedback=True)
- elif message_type == MessageType.EFT_FILE_UPLOADED.value:
- await reconcile_eft_payments(event_message)
- else:
- raise Exception('Invalid type') # pylint: disable=broad-exception-raised
-
-
-async def cb_subscription_handler(msg: nats.aio.client.Msg):
- """Use Callback to process Queue Msg objects."""
- try:
- logger.info('Received raw message seq:%s, data= %s', msg.sequence, msg.data.decode())
- event_message = json.loads(msg.data.decode('utf-8'))
- logger.debug('Event Message Received: %s', event_message)
- await process_event(event_message, FLASK_APP)
- except Exception as e: # NOQA pylint: disable=broad-except
- # Catch Exception so that any error is still caught and the message is removed from the queue
- logger.error('Queue Error: %s', json.dumps(event_message), exc_info=True)
- logger.error(e)
diff --git a/queue_services/payment-reconciliations/tests/__init__.py b/queue_services/payment-reconciliations/tests/__init__.py
deleted file mode 100644
index 3e44a42f5..000000000
--- a/queue_services/payment-reconciliations/tests/__init__.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""The Test Suites to ensure that the service is built and operating correctly."""
-import datetime
-
-
-EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0)
-FROZEN_DATETIME = datetime.datetime(2001, 8, 5, 7, 7, 58, 272362)
-
-
-def add_years(d, years):
- """Return a date that's `years` years after the date (or datetime).
-
- Return the same calendar date (month and day) in the destination year,
- if it exists, otherwise use the following day
- (thus changing February 29 to February 28).
- """
- try:
- return d.replace(year=d.year + years)
- except ValueError:
- return d + (datetime.date(d.year + years, 3, 1) - datetime.date(d.year, 3, 1))
diff --git a/queue_services/payment-reconciliations/tests/conftest.py b/queue_services/payment-reconciliations/tests/conftest.py
deleted file mode 100644
index 24c346ae3..000000000
--- a/queue_services/payment-reconciliations/tests/conftest.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# Copyright © 2019 Province of British Columbia
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Common setup and fixtures for the pytest suite used by this service."""
-import asyncio
-import os
-import random
-import time
-from contextlib import contextmanager
-
-import pytest
-from flask import Flask
-from flask_migrate import Migrate, upgrade
-from nats.aio.client import Client as Nats
-from pay_api import db as _db
-from sqlalchemy import event, text
-from sqlalchemy.schema import DropConstraint, MetaData
-from stan.aio.client import Client as Stan
-
-from reconciliations.config import get_named_config
-
-
-@contextmanager
-def not_raises(exception):
- """Corallary to the pytest raises builtin.
-
- Assures that an exception is NOT thrown.
- """
- try:
- yield
- except exception: # NOQA
- raise pytest.fail(f'DID RAISE {exception}')
-
-
-@pytest.fixture(scope='session')
-def app():
- """Return a session-wide application configured in TEST mode."""
- # _app = create_app('testing')
- _app = Flask(__name__)
- _app.config.from_object(get_named_config('testing'))
- _db.init_app(_app)
-
- return _app
-
-
-@pytest.fixture(scope='session')
-def db(app): # pylint: disable=redefined-outer-name, invalid-name
- """Return a session-wide initialised database.
-
- Drops all existing tables - Meta follows Postgres FKs
- """
- with app.app_context():
- # Clear out views
- view_sql = """SELECT table_name FROM information_schema.views
- WHERE table_schema='public'
- """
-
- sess = _db.session()
- for seq in [name for (name,) in sess.execute(text(view_sql))]:
- try:
- sess.execute(text('DROP VIEW public.%s ;' % seq))
- print('DROP VIEW public.%s ' % seq)
- except Exception as err: # NOQA pylint: disable=broad-except
- print(f'Error: {err}')
- sess.commit()
-
- # Clear out any existing tables
- metadata = MetaData(_db.engine)
- metadata.reflect()
- for table in metadata.tables.values():
- for fk in table.foreign_keys: # pylint: disable=invalid-name
- _db.engine.execute(DropConstraint(fk.constraint))
- metadata.drop_all()
- _db.drop_all()
-
- sequence_sql = """SELECT sequence_name FROM information_schema.sequences
- WHERE sequence_schema='public'
- """
-
- sess = _db.session()
- for seq in [name for (name,) in sess.execute(text(sequence_sql))]:
- try:
- sess.execute(text('DROP SEQUENCE public.%s ;' % seq))
- print('DROP SEQUENCE public.%s ' % seq)
- except Exception as err: # NOQA pylint: disable=broad-except
- print(f'Error: {err}')
- sess.commit()
-
- # ############################################
- # There are 2 approaches, an empty database, or the same one that the app will use
- # create the tables
- # _db.create_all()
- # or
- # Use Alembic to load all of the DB revisions including supporting lookup data
- # This is the path we'll use in legal_api!!
-
- # even though this isn't referenced directly, it sets up the internal configs that upgrade
- import sys
- migrations_path = [folder for folder in sys.path if 'pay-api/pay-api' in folder]
- if len(migrations_path) > 0:
- migrations_path = migrations_path[0].replace('/pay-api/src', '/pay-api/migrations')
- # Fix for windows.
- else:
- migrations_path = os.path.abspath('../../pay-api/migrations')
-
- Migrate(app, _db, directory=migrations_path)
- upgrade()
-
- return _db
-
-
-@pytest.fixture
-def config(app):
- """Return the application config."""
- return app.config
-
-
-@pytest.fixture(scope='session')
-def client(app): # pylint: disable=redefined-outer-name
- """Return a session-wide Flask test client."""
- return app.test_client()
-
-
-@pytest.fixture(scope='session')
-def client_ctx(app): # pylint: disable=redefined-outer-name
- """Return session-wide Flask test client."""
- with app.test_client() as _client:
- yield _client
-
-
-@pytest.fixture(scope='function')
-def client_id():
- """Return a unique client_id that can be used in tests."""
- _id = random.SystemRandom().getrandbits(0x58)
- # _id = (base64.urlsafe_b64encode(uuid.uuid4().bytes)).replace('=', '')
-
- return f'client-{_id}'
-
-
-@pytest.fixture(scope='function')
-def session(app, db): # pylint: disable=redefined-outer-name, invalid-name
- """Return a function-scoped session."""
- with app.app_context():
- conn = db.engine.connect()
- txn = conn.begin()
-
- options = dict(bind=conn, binds={})
- sess = db.create_scoped_session(options=options)
-
- # establish a SAVEPOINT just before beginning the test
- # (http://docs.sqlalchemy.org/en/latest/orm/session_transaction.html#using-savepoint)
- sess.begin_nested()
-
- @event.listens_for(sess(), 'after_transaction_end')
- def restart_savepoint(sess2, trans): # pylint: disable=unused-variable
- # Detecting whether this is indeed the nested transaction of the test
- if trans.nested and not trans._parent.nested: # pylint: disable=protected-access
- # Handle where test DOESN'T session.commit(),
- # sess2.expire_all()
- sess.begin_nested()
-
- db.session = sess
-
- sql = text('select 1')
- sess.execute(sql)
-
- yield sess
-
- # Cleanup
- sess.remove()
- # This instruction rollsback any commit that were executed in the tests.
- txn.rollback()
- conn.close()
-
-
-@pytest.fixture(scope='session')
-def stan_server(docker_services):
- """Create the nats / stan services that the integration tests will use."""
- if os.getenv('TEST_NATS_DOCKER'):
- docker_services.start('nats')
- time.sleep(2)
- # TODO get the wait part working, as opposed to sleeping for 2s
- # public_port = docker_services.wait_for_service("nats", 4222)
- # dsn = "{docker_services.docker_ip}:{public_port}".format(**locals())
- # return dsn
-
-
-@pytest.fixture(scope='session', autouse=True)
-def auto(docker_services, app):
- """Spin up docker containers."""
- if app.config['USE_DOCKER_MOCK']:
- docker_services.start('minio')
- docker_services.start('proxy')
- docker_services.start('paybc')
- docker_services.start('nats')
-
-
-@pytest.fixture(scope='function')
-@pytest.mark.asyncio
-async def stan(event_loop, client_id):
- """Create a stan connection for each function, to be used in the tests."""
- nc = Nats()
- sc = Stan()
- cluster_name = 'test-cluster'
-
- await nc.connect(io_loop=event_loop, name='entity.filing.tester')
-
- await sc.connect(cluster_name, client_id, nats=nc)
-
- yield sc
-
- await sc.close()
- await nc.close()
-
-
-@pytest.fixture(scope='function')
-@pytest.mark.asyncio
-async def events_stan(app, event_loop, client_id):
- """Create a stan connection for each function.
-
- Uses environment variables for the cluster name.
- """
- nc = Nats()
- sc = Stan()
-
- await nc.connect(io_loop=event_loop)
-
- cluster_name = os.getenv('STAN_CLUSTER_NAME', 'test-cluster')
-
- if not cluster_name:
- raise ValueError('Missing env variable: STAN_CLUSTER_NAME')
-
- await sc.connect(cluster_name, client_id, nats=nc)
-
- yield sc
-
- await sc.close()
- await nc.close()
-
-
-@pytest.fixture(scope='function')
-def future(event_loop):
- """Return a future that is used for managing function tests."""
- _future = asyncio.Future(loop=event_loop)
- return _future
-
-
-@pytest.fixture
-def create_mock_coro(mocker, monkeypatch):
- """Return a mocked coroutine, and optionally patch-it in."""
-
- def _create_mock_patch_coro(to_patch=None):
- mock = mocker.Mock()
-
- async def _coro(*args, **kwargs):
- return mock(*args, **kwargs)
-
- if to_patch: # <-- may not need/want to patch anything
- monkeypatch.setattr(to_patch, _coro)
- return mock, _coro
-
- return _create_mock_patch_coro
-
-
-@pytest.fixture()
-def mock_publish(monkeypatch):
- """Mock check_auth."""
- monkeypatch.setattr('pay_api.services.queue_publisher.publish', lambda *args, **kwargs: None)
diff --git a/report-api/src/api/utils/logging.py b/report-api/src/api/utils/logging.py
index 8b88ddcf2..8568f87dd 100755
--- a/report-api/src/api/utils/logging.py
+++ b/report-api/src/api/utils/logging.py
@@ -18,12 +18,7 @@
def setup_logging(conf):
- """Create the services logger.
-
- TODO should be reworked to load in the proper loggers and remove others
- """
- # log_file_path = path.join(path.abspath(path.dirname(__file__)), conf)
-
+ """Create the services logger."""
if conf and path.isfile(conf):
logging.config.fileConfig(conf)
print(f'Configure logging, from conf:{conf}', file=sys.stdout)