diff --git a/README.md b/README.md
index eb2c07c2f..ff9a01f81 100644
--- a/README.md
+++ b/README.md
@@ -180,9 +180,7 @@ For the "Debug Solution" to run properly on Windows 10/11 using WSL, the followi
API Variables
-- COUNTS_DB_TYPE: Set to either "MongoDB" or "BigQuery" depending on where the message counts are stored.
- COUNTS_MSG_TYPES: Set to a list of message types to include in counts query. Sample format is described in the sample.env.
-- COUNTS_DB_NAME: The BigQuery table or MongoDB collection name where the RSU message counts are located.
- BSM_DB_NAME: The database name for BSM visualization data.
- SSM_DB_NAME: The database name for SSM visualization data.
- SRM_DB_NAME: The database name for SRM visualization data.
diff --git a/docker-compose.yml b/docker-compose.yml
index 2f07756cc..1d0b682cc 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -22,8 +22,6 @@ services:
MONGO_DB_NAME: ${MONGO_DB_NAME}
COUNTS_MSG_TYPES: ${COUNTS_MSG_TYPES}
- COUNTS_DB_TYPE: ${COUNTS_DB_TYPE}
- COUNTS_DB_NAME: ${COUNTS_DB_NAME}
GOOGLE_APPLICATION_CREDENTIALS: '/google/gcp_credentials.json'
BSM_DB_NAME: ${BSM_DB_NAME}
diff --git a/resources/kubernetes/cv-manager-api.yaml b/resources/kubernetes/cv-manager-api.yaml
index 922666dfb..66654781e 100644
--- a/resources/kubernetes/cv-manager-api.yaml
+++ b/resources/kubernetes/cv-manager-api.yaml
@@ -103,10 +103,6 @@ spec:
# Fill out the ENV vars with your own values
- name: CORS_DOMAIN
value: ''
- - name: GOOGLE_APPLICATION_CREDENTIALS
- value: ''
- - name: GOOGLE_CLIENT_ID
- value: ""
- name: KEYCLOAK_ENDPOINT
value: ""
- name: KEYCLOAK_REALM
@@ -132,11 +128,11 @@ spec:
secretKeyRef:
name: some-postgres-secret-password
key: some-postgres-secret-key
- - name: COUNTS_DB_TYPE
+ - name: MONGO_DB_URI
value: ""
- - name: COUNTS_MSG_TYPES
+ - name: MONGO_DB_NAME
value: ""
- - name: COUNTS_DB_NAME
+ - name: COUNTS_MSG_TYPES
value: ""
- name: BSM_DB_NAME
value: ""
diff --git a/sample.env b/sample.env
index 3cbfd8772..1118f1d3c 100644
--- a/sample.env
+++ b/sample.env
@@ -46,9 +46,6 @@ KEYCLOAK_DOMAIN=cvmanager.auth.com
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=
-# Set to either "BIGQUERY" or "MONGODB" depending on message count location.
-COUNTS_DB_TYPE="BIGQUERY"
-
# If "BIGQUERY", set the location of the GCP service account key attached as a volume
GOOGLE_APPLICATION_CREDENTIALS='./resources/google/sample_gcp_service_account.json'
@@ -57,9 +54,7 @@ MONGO_DB_URI=
MONGO_DB_NAME="ODE"
# Set these variables if using either "MONGODB" or "BIGQUERY"
-# COUNTS_DB_NAME: Used for V2X message counts
# COUNTS_MSG_TYPES: Comma seperated list of message types.
-COUNTS_DB_NAME=
# COUNTS_MSG_TYPES must be set for the counts menu to correctly populate when building an image for deployment
COUNTS_MSG_TYPES='BSM,SSM,SPAT,SRM,MAP'
BSM_DB_NAME=
diff --git a/services/api/README.md b/services/api/README.md
index 7dc191c1f..2bdce3486 100644
--- a/services/api/README.md
+++ b/services/api/README.md
@@ -297,9 +297,7 @@ HTTP URL Arguments:
- PG_DB_PORT: The database port.
- PG_PG_DB_USER: The database user that will be used to authenticate the cloud function when it queries the database.
- PG_PG_DB_PASS: The database user's password that will be used to authenticate the cloud function.
-- COUNTS_DB_TYPE: Set to either "MongoDB" or "BigQuery" depending on where the message counts are stored.
- COUNTS_MSG_TYPES: Set to a list of message types to include in counts query. Sample format is described in the sample.env.
-- COUNTS_DB_NAME: The BigQuery table or MongoDB collection name where the RSU message counts are located.
- BSM_DB_NAME: The database name for BSM visualization data.
- SSM_DB_NAME: The database name for SSM visualization data.
- SRM_DB_NAME: The database name for SRM visualization data.
diff --git a/services/api/sample.env b/services/api/sample.env
index 83153b615..f1bee8cca 100644
--- a/services/api/sample.env
+++ b/services/api/sample.env
@@ -25,9 +25,6 @@ KEYCLOAK_API_CLIENT_SECRET_KEY=
# Firmware Manager connectivity in the format 'http://endpoint:port'
FIRMWARE_MANAGER_ENDPOINT=http://:8089
-# Set to either "BIGQUERY" or "MONGODB" depending on message count location.
-COUNTS_DB_TYPE=
-
# If "BIGQUERY", set the location of the GCP service account key
GOOGLE_APPLICATION_CREDENTIALS='./resources/google/sample_gcp_service_account.json'
@@ -36,9 +33,7 @@ MONGO_DB_URI=
MONGO_DB_NAME="ODE"
# Set these variables if using either "MONGODB" or "BIGQUERY"
-# COUNTS_DB_NAME: Used for V2X message counts
# COUNTS_MSG_TYPES: Comma seperated list of message types
-COUNTS_DB_NAME=
COUNTS_MSG_TYPES='BSM,SSM,SPAT,SRM,MAP'
BSM_DB_NAME=
SSM_DB_NAME=
diff --git a/services/api/src/rsu_bsmdata.py b/services/api/src/rsu_bsmdata.py
index 51bd4180a..d20ea84f8 100644
--- a/services/api/src/rsu_bsmdata.py
+++ b/services/api/src/rsu_bsmdata.py
@@ -1,4 +1,3 @@
-from google.cloud import bigquery
import common.util as util
import os
import logging
@@ -78,65 +77,6 @@ def query_bsm_data_mongo(pointList, start, end):
return [], 500
-def query_bsm_data_bq(pointList, start, end):
- start_date = util.format_date_utc(start)
- end_date = util.format_date_utc(end)
- client = bigquery.Client()
- tablename = os.environ["BSM_DB_NAME"]
- geogString = "POLYGON(("
- for elem in pointList:
- long = str(elem.pop(0))
- lat = str(elem.pop(0))
- geogString += long + " " + lat + ","
-
- geogString = geogString[:-1] + "))"
-
- query = (
- "SELECT DISTINCT bsm.metadata.originIp as Ip, "
- f"bsm.payload.data.coreData.position.longitude as long, "
- f"bsm.payload.data.coreData.position.latitude as lat, "
- f"bsm.metadata.odeReceivedAt as time "
- f"FROM `{tablename}` "
- f'WHERE TIMESTAMP(bsm.metadata.odeReceivedAt) >= TIMESTAMP("{start_date}") '
- f'AND TIMESTAMP(bsm.metadata.odeReceivedAt) <= TIMESTAMP("{end_date}") '
- f"AND ST_CONTAINS(ST_GEOGFROM('{geogString}'), "
- f"ST_GEOGPOINT(bsm.payload.data.coreData.position.longitude, bsm.payload.data.coreData.position.latitude))"
- )
-
- logging.info(f"Running query on table {tablename}")
-
- query_job = client.query(query)
- hashmap = {}
- count = 0
- total_count = 0
-
- for row in query_job:
- message_hash = bsm_hash(
- row["Ip"],
- int(datetime.timestamp(util.format_date_utc(row["time"], "DATETIME"))),
- row["long"],
- row["lat"],
- )
-
- if message_hash not in hashmap:
- doc = {
- "type": "Feature",
- "geometry": {"type": "Point", "coordinates": [row["long"], row["lat"]]},
- "properties": {
- "id": row["Ip"],
- "time": util.format_date_utc(row["time"]) + "z",
- },
- }
- hashmap[message_hash] = doc
- count += 1
- total_count += 1
- else:
- total_count += 1
-
- logging.info(f"Query successful. Record returned: {count}")
- return list(hashmap.values()), 200
-
-
# REST endpoint resource class and schema
from flask import request
from flask_restful import Resource
@@ -181,16 +121,7 @@ def post(self):
400,
self.headers,
)
- db_type = os.getenv("COUNTS_DB_TYPE", "BIGQUERY").upper()
- data = []
- code = None
-
- if db_type == "MONGODB":
- logging.debug("RsuBsmData Mongodb query")
- data, code = query_bsm_data_mongo(pointList, start, end)
- # If the db_type is set to anything other than MONGODB then default to bigquery
- else:
- logging.debug("RsuBsmData BigQuery query")
- data, code = query_bsm_data_bq(pointList, start, end)
+
+ data, code = query_bsm_data_mongo(pointList, start, end)
return (data, code, self.headers)
diff --git a/services/api/src/rsu_querycounts.py b/services/api/src/rsu_querycounts.py
index 4a31ef480..ab7ea43d4 100644
--- a/services/api/src/rsu_querycounts.py
+++ b/services/api/src/rsu_querycounts.py
@@ -1,82 +1,45 @@
-from google.cloud import bigquery
from datetime import datetime, timedelta
import common.pgquery as pgquery
import common.util as util
import os
import logging
-import json
from pymongo import MongoClient
-def query_rsu_counts_mongo(allowed_ips, message_type, start, end):
- start_date = util.format_date_utc(start, "DATETIME")
- end_date = util.format_date_utc(end, "DATETIME")
+def query_rsu_counts_mongo(allowed_ips_dict, message_type, start, end):
+ start_dt = util.format_date_utc(start, "DATETIME")
+ end_dt = util.format_date_utc(end, "DATETIME")
try:
client = MongoClient(os.getenv("MONGO_DB_URI"), serverSelectionTimeoutMS=5000)
- db = client[os.getenv("MONGO_DB_NAME")]
- collection = db[os.getenv("COUNTS_DB_NAME")]
+ mongo_db = client[os.getenv("MONGO_DB_NAME")]
+ collection = mongo_db[f"Ode{message_type.title()}Json"]
except Exception as e:
logging.error(
f"Failed to connect to Mongo counts collection with error message: {e}"
)
return {}, 503
- filter = {
- "timestamp": {"$gte": start_date, "$lt": end_date},
- "message_type": message_type.upper(),
- }
-
result = {}
- count = 0
- try:
- logging.debug(f"Running filter: {filter}, on collection: {collection.name}")
- for doc in collection.find(filter=filter):
- if doc["ip"] in allowed_ips:
- count += 1
- item = {"road": doc["road"], "count": doc["count"]}
- result[doc["ip"]] = item
-
- logging.info(f"Filter successful. Length of data: {count}")
- return result, 200
- except Exception as e:
- logging.error(f"Filter failed: {e}")
- return {}, 500
-
-
-def query_rsu_counts_bq(allowed_ips, message_type, start, end):
- start_date = util.format_date_utc(start)
- end_date = util.format_date_utc(end)
- try:
- client = bigquery.Client()
- tablename = os.environ["COUNTS_DB_NAME"]
-
- query = (
- "SELECT RSU, Road, SUM(Count) as Count "
- f"FROM `{tablename}` "
- f'WHERE Date >= DATETIME("{start_date}") '
- f'AND Date < DATETIME("{end_date}") '
- f'AND Type = "{message_type.upper()}" '
- f"GROUP BY RSU, Road "
- )
-
- logging.info(f"Running query on table {tablename}")
-
- query_job = client.query(query)
-
- result = {}
- count = 0
- for row in query_job:
- if row["RSU"] in allowed_ips:
- count += 1
- item = {"road": row["Road"], "count": row["Count"]}
- result[row["RSU"]] = item
-
- logging.info(f"Query successful. Length of data: {count}")
- return result, 200
- except Exception as e:
- logging.error(f"Query failed: {e}")
- return {}, 500
+ for rsu_ip in allowed_ips_dict:
+ query = {
+ "metadata.originIp": rsu_ip,
+ "recordGeneratedAt": {
+ "$gte": start_dt,
+ "$lt": end_dt,
+ },
+ }
+
+ try:
+ logging.debug(f"Running query: {query}, on collection: {collection.name}")
+ count = collection.count_documents(query)
+ item = {"road": allowed_ips_dict[rsu_ip], "count": count}
+ result[rsu_ip] = item
+ except Exception as e:
+ logging.error(f"Filter failed: {e}")
+ return {}, 500
+
+ return result, 200
def get_organization_rsus(organization):
@@ -84,18 +47,24 @@ def get_organization_rsus(organization):
# Execute the query and fetch all results
query = (
- "SELECT jsonb_build_object('ip', rd.ipv4_address) "
- "FROM public.rsus AS rd "
+ "SELECT to_jsonb(row) "
+ "FROM ("
+ "SELECT rd.ipv4_address, rd.primary_route "
+ "FROM public.rsus rd "
"JOIN public.rsu_organization_name AS ron_v ON ron_v.rsu_id = rd.rsu_id "
f"WHERE ron_v.name = '{organization}' "
- "ORDER BY rd.ipv4_address"
+ "ORDER BY primary_route ASC, milepost ASC"
+ ") as row"
)
logging.debug(f'Executing query: "{query};"')
data = pgquery.query_db(query)
- logging.debug(str(data))
- ips = [rsu[0]["ip"] for rsu in data]
- return ips
+
+ rsu_dict = {}
+ for row in data:
+ row = dict(row[0])
+ rsu_dict[row["ipv4_address"]] = row["primary_route"]
+ return rsu_dict
# REST endpoint resource class and schema
@@ -147,22 +116,17 @@ def get(self):
# Validate request with supported message types
logging.debug(f"COUNTS_MSG_TYPES: {os.getenv('COUNTS_MSG_TYPES','NOT_SET')}")
msgList = os.getenv("COUNTS_MSG_TYPES", "BSM,SSM,SPAT,SRM,MAP")
- msgList = [msgtype.strip() for msgtype in msgList.split(",")]
- if message.upper() not in msgList:
+ msgList = [msgtype.strip().title() for msgtype in msgList.split(",")]
+ if message.title() not in msgList:
return (
"Invalid Message Type.\nValid message types: " + ", ".join(msgList),
400,
self.headers,
)
- db_type = os.getenv("COUNTS_DB_TYPE", "BIGQUERY").upper()
data = 0
code = 204
- rsus = get_organization_rsus(request.environ["organization"])
- if db_type == "MONGODB":
- data, code = query_rsu_counts_mongo(rsus, message.upper(), start, end)
- # If the db_type is set to anything other than MONGODB then default to bigquery
- else:
- data, code = query_rsu_counts_bq(rsus, message.upper(), start, end)
+ rsu_dict = get_organization_rsus(request.environ["organization"])
+ data, code = query_rsu_counts_mongo(rsu_dict, message, start, end)
return (data, code, self.headers)
diff --git a/services/api/src/rsu_ssm_srm.py b/services/api/src/rsu_ssm_srm.py
index 8192d7635..ad4b57d4a 100644
--- a/services/api/src/rsu_ssm_srm.py
+++ b/services/api/src/rsu_ssm_srm.py
@@ -1,87 +1,121 @@
-from google.cloud import bigquery
import common.util as util
import os
import logging
from datetime import datetime, timedelta
+from pymongo import MongoClient
-def query_ssm_data(result):
+def query_ssm_data_mongo(result):
end_date = datetime.now()
end_utc = util.format_date_utc(end_date.isoformat())
start_date = end_date - timedelta(days=1)
start_utc = util.format_date_utc(start_date.isoformat())
- client = bigquery.Client()
- tablename = os.environ["SSM_DB_NAME"]
-
- query = (
- "SELECT rtdh_timestamp as time, ssm.metadata.originIp as ip, "
- f"ssm.payload.data.status.signalStatus[ordinal(1)].sigStatus.signalStatusPackage[ordinal(1)].requester.request, "
- f"ssm.payload.data.status.signalStatus[ordinal(1)].sigStatus.signalStatusPackage[ordinal(1)].requester.typeData.role, "
- f"ssm.payload.data.status.signalStatus[ordinal(1)].sigStatus.signalStatusPackage[ordinal(1)].status, "
- f"ssm.metadata.recordType as type "
- f'FROM `{tablename}` WHERE TIMESTAMP(rtdh_timestamp) >= "{start_utc}" '
- f'AND TIMESTAMP(rtdh_timestamp) <= "{end_utc}" '
- f"ORDER BY rtdh_timestamp ASC"
- )
-
- logging.info(f"Running query on table {tablename}")
-
- query_job = client.query(query)
-
- for row in query_job:
- result.append(
- {
- "time": util.format_date_denver(row["time"].isoformat()),
- "ip": row["ip"],
- "requestId": row["request"],
- "role": row["role"],
- "status": row["status"],
- "type": row["type"],
- }
- )
-
- return 200, result
+ try:
+ client = MongoClient(os.getenv("MONGO_DB_URI"), serverSelectionTimeoutMS=5000)
+ db = client[os.getenv("MONGO_DB_NAME")]
+ collection = db[os.getenv("SSM_DB_NAME")]
+ except Exception as e:
+ logging.error(
+ f"Failed to connect to Mongo counts collection with error message: {e}"
+ )
+ return [], 503
+
+ filter = {"recordGeneratedAt": {"$gte": start_utc, "$lte": end_utc}}
+ project = {
+ "recordGeneratedAt": 1,
+ "metadata.originIp": 1,
+ "metadata.recordType": 1,
+ "payload.data.status.signalStatus.sigStatus.signalStatusPackage.requester.request": 1,
+ "payload.data.status.signalStatus.sigStatus.signalStatusPackage.requester.role": 1,
+ "payload.data.status.signalStatus.sigStatus.signalStatusPackage.status": 1,
+ "_id": 0,
+ }
-def query_srm_data(result):
+ logging.debug(f"Running filter on SSM mongoDB collection")
+
+    # The data schema for the mongoDB collection is the same as the OdeSsmJson schema
+ # This can be viewed here: https://github.com/usdot-jpo-ode/jpo-ode/blob/develop/jpo-ode-core/src/main/resources/schemas/schema-ssm.json
+ try:
+ for doc in collection.find(filter, project):
+ result.append(
+ {
+ "time": util.format_date_denver(doc["recordGeneratedAt"]),
+ "ip": doc["metadata"]["originIp"],
+ "requestId": doc["payload"]["data"]["status"]["signalStatus"][0][
+ "sigStatus"
+ ]["signalStatusPackage"][0]["requester"]["request"],
+ "role": doc["payload"]["data"]["status"]["signalStatus"][0][
+ "sigStatus"
+ ]["signalStatusPackage"][0]["requester"]["role"],
+ "status": doc["payload"]["data"]["status"]["signalStatus"][0][
+ "sigStatus"
+ ]["signalStatusPackage"][0]["status"],
+ "type": doc["metadata"]["recordType"],
+ }
+ )
+ return 200, result
+ except Exception as e:
+ logging.error(f"SSM filter failed: {e}")
+ return 500, result
+
+
+def query_srm_data_mongo(result):
end_date = datetime.now()
end_utc = util.format_date_utc(end_date.isoformat())
start_date = end_date - timedelta(days=1)
start_utc = util.format_date_utc(start_date.isoformat())
- client = bigquery.Client()
- tablename = os.environ["SRM_DB_NAME"]
-
- query = (
- "SELECT rtdh_timestamp as time, srm.metadata.originIp as ip, "
- f"srm.payload.data.requests.signalRequestPackage[ordinal(1)].request.requestID as request, "
- f"srm.payload.data.requestor.type.role, "
- f"srm.payload.data.requestor.position.position.latitude as lat, "
- f"srm.payload.data.requestor.position.position.longitude as long, "
- f"srm.metadata.recordType as type "
- f'FROM `{tablename}` WHERE TIMESTAMP(rtdh_timestamp) >= "{start_utc}" '
- f'AND TIMESTAMP(rtdh_timestamp) <= "{end_utc}" '
- f"ORDER BY rtdh_timestamp ASC"
- )
-
- logging.info(f"Running query on table {tablename}")
-
- query_job = client.query(query)
-
- for row in query_job:
- result.append(
- {
- "time": util.format_date_denver(row["time"].isoformat()),
- "ip": row["ip"],
- "requestId": row["request"],
- "role": row["role"],
- "lat": row["lat"],
- "long": row["long"],
- "type": row["type"],
- "status": "N/A",
- }
+
+ try:
+ client = MongoClient(os.getenv("MONGO_DB_URI"), serverSelectionTimeoutMS=5000)
+ db = client[os.getenv("MONGO_DB_NAME")]
+ collection = db[os.getenv("SRM_DB_NAME")]
+ except Exception as e:
+ logging.error(
+ f"Failed to connect to Mongo counts collection with error message: {e}"
)
+ return [], 503
+
+ filter = {"recordGeneratedAt": {"$gte": start_utc, "$lte": end_utc}}
+ project = {
+ "recordGeneratedAt": 1,
+ "metadata.originIp": 1,
+ "metadata.recordType": 1,
+ "payload.data.requests.signalRequestPackage.request.requestID": 1,
+ "payload.data.requestor.type.role": 1,
+ "payload.data.requestor.position.position.latitude": 1,
+ "payload.data.requestor.position.position.longitude": 1,
+ "_id": 0,
+ }
- return 200, result
+ logging.debug(f"Running filter on SRM mongoDB collection")
+
+    # The data schema for the mongoDB collection is the same as the OdeSrmJson schema
+ # This can be viewed here: https://github.com/usdot-jpo-ode/jpo-ode/blob/develop/jpo-ode-core/src/main/resources/schemas/schema-srm.json
+ try:
+ for doc in collection.find(filter, project):
+ result.append(
+ {
+ "time": util.format_date_denver(doc["recordGeneratedAt"]),
+ "ip": doc["metadata"]["originIp"],
+ "requestId": doc["payload"]["data"]["requests"][
+ "signalRequestPackage"
+ ][0]["request"]["requestID"],
+ "role": doc["payload"]["data"]["requestor"]["type"]["role"],
+ "lat": doc["payload"]["data"]["requestor"]["position"]["position"][
+ "latitude"
+ ],
+ "long": doc["payload"]["data"]["requestor"]["position"]["position"][
+ "longitude"
+ ],
+ "type": doc["metadata"]["recordType"],
+ "status": "N/A",
+ }
+ )
+ return 200, result
+ except Exception as e:
+ logging.error(f"SRM filter failed: {e}")
+ return 500, result
# REST endpoint resource class and schema
@@ -109,7 +143,7 @@ def options(self):
def get(self):
logging.debug("RsuSsmSrmData GET requested")
data = []
- code, ssmRes = query_ssm_data(data)
- code, finalRes = query_srm_data(ssmRes)
+ code, ssmRes = query_ssm_data_mongo(data)
+ code, finalRes = query_srm_data_mongo(ssmRes)
finalRes.sort(key=lambda x: x["time"])
return (finalRes, code, self.headers)
diff --git a/services/api/tests/data/rsu_ssm_srm_data.py b/services/api/tests/data/rsu_ssm_srm_data.py
index b6d90b25c..e0d4af468 100644
--- a/services/api/tests/data/rsu_ssm_srm_data.py
+++ b/services/api/tests/data/rsu_ssm_srm_data.py
@@ -16,90 +16,124 @@
"%Y-%m-%dT%H:%M:%S",
)
-ssm_expected_query = (
- f"SELECT rtdh_timestamp as time, "
- f"ssm.metadata.originIp as ip, ssm.payload.data.status.signalStatus[ordinal(1)].sigStatus.signalStatusPackage[ordinal(1)].requester.request, "
- f"ssm.payload.data.status.signalStatus[ordinal(1)].sigStatus.signalStatusPackage[ordinal(1)].requester.typeData.role, "
- f"ssm.payload.data.status.signalStatus[ordinal(1)].sigStatus.signalStatusPackage[ordinal(1)].status, "
- f'ssm.metadata.recordType as type FROM `Fake_table` WHERE TIMESTAMP(rtdh_timestamp) >= "{start_date}" '
- f'AND TIMESTAMP(rtdh_timestamp) <= "{end_date}" ORDER BY rtdh_timestamp ASC'
-)
+ssm_record_one = {
+ "recordGeneratedAt": "2022-12-13T07:00:00.000+00:00",
+ "metadata": {"originIp": "127.0.0.1", "recordType": "ssmTx"},
+ "payload": {
+ "data": {
+ "status": {
+ "signalStatus": [
+ {
+ "sigStatus": {
+ "signalStatusPackage": [
+ {
+ "requester": {
+ "request": 13,
+ "role": "publicTrasport",
+ },
+ "status": "granted",
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+ },
+}
-srm_expected_query = (
- f"SELECT rtdh_timestamp as time, srm.metadata.originIp as ip, "
- f"srm.payload.data.requests.signalRequestPackage[ordinal(1)].request.requestID as request, "
- f"srm.payload.data.requestor.type.role, srm.payload.data.requestor.position.position.latitude as lat, "
- f"srm.payload.data.requestor.position.position.longitude as long, srm.metadata.recordType as type "
- f'FROM `Fake_table` WHERE TIMESTAMP(rtdh_timestamp) >= "{start_date}" AND '
- f'TIMESTAMP(rtdh_timestamp) <= "{end_date}" ORDER BY rtdh_timestamp ASC'
-)
+ssm_record_two = {
+ "recordGeneratedAt": "2022-12-14T07:00:00.000+00:00",
+ "metadata": {"originIp": "127.0.0.1", "recordType": "ssmTx"},
+ "payload": {
+ "data": {
+ "status": {
+ "signalStatus": [
+ {
+ "sigStatus": {
+ "signalStatusPackage": [
+ {
+ "requester": {
+ "request": 10,
+ "role": "publicTrasport",
+ },
+ "status": "granted",
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+ },
+}
-ssm_record_one = multidict.MultiDict(
- [
- ("time", datetime.strptime("2022/12/13 00:00:00", "%Y/%m/%d %H:%M:%S")),
- ("ip", "127.0.0.1"),
- ("request", 13),
- ("role", "publicTrasport"),
- ("status", "granted"),
- ("type", "ssmTx"),
- ]
-)
-ssm_record_two = multidict.MultiDict(
- [
- ("time", datetime.strptime("2022/12/14 00:00:00", "%Y/%m/%d %H:%M:%S")),
- ("ip", "127.0.0.1"),
- ("request", 10),
- ("role", "publicTrasport"),
- ("status", "granted"),
- ("type", "ssmTx"),
- ]
-)
-ssm_record_three = multidict.MultiDict(
- [
- ("time", datetime.strptime("2022/12/12 00:00:00", "%Y/%m/%d %H:%M:%S")),
- ("ip", "127.0.0.1"),
- ("request", 17),
- ("role", "publicTrasport"),
- ("status", "granted"),
- ("type", "ssmTx"),
- ]
-)
-srm_record_one = multidict.MultiDict(
- [
- ("time", datetime.strptime("2022/12/13 00:00:00", "%Y/%m/%d %H:%M:%S")),
- ("ip", "127.0.0.1"),
- ("request", 9),
- ("role", "publicTrasport"),
- ("lat", "100.00"),
- ("long", "50.00"),
- ("status", "N/A"),
- ("type", "srmTx"),
- ]
-)
-srm_record_two = multidict.MultiDict(
- [
- ("time", datetime.strptime("2022/12/12 00:00:00", "%Y/%m/%d %H:%M:%S")),
- ("ip", "127.0.0.1"),
- ("request", 13),
- ("role", "publicTrasport"),
- ("lat", "101.00"),
- ("long", "49.00"),
- ("status", "N/A"),
- ("type", "srmTx"),
- ]
-)
-srm_record_three = multidict.MultiDict(
- [
- ("time", datetime.strptime("2022/12/14 00:00:00", "%Y/%m/%d %H:%M:%S")),
- ("ip", "127.0.0.1"),
- ("request", 17),
- ("role", "publicTrasport"),
- ("lat", "102.00"),
- ("long", "53.00"),
- ("status", "N/A"),
- ("type", "srmTx"),
- ]
-)
+ssm_record_three = {
+ "recordGeneratedAt": "2022-12-12T07:00:00.000+00:00",
+ "metadata": {"originIp": "127.0.0.1", "recordType": "ssmTx"},
+ "payload": {
+ "data": {
+ "status": {
+ "signalStatus": [
+ {
+ "sigStatus": {
+ "signalStatusPackage": [
+ {
+ "requester": {
+ "request": 17,
+ "role": "publicTrasport",
+ },
+ "status": "granted",
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+ },
+}
+
+
+srm_record_one = {
+ "recordGeneratedAt": "2022-12-13T07:00:00.000+00:00",
+ "metadata": {"originIp": "127.0.0.1", "recordType": "srmTx"},
+ "payload": {
+ "data": {
+ "requests": {"signalRequestPackage": [{"request": {"requestID": 9}}]},
+ "requestor": {
+ "type": {"role": "publicTrasport"},
+ "position": {"position": {"latitude": "100.00", "longitude": "50.00"}},
+ },
+ }
+ },
+}
+srm_record_two = {
+ "recordGeneratedAt": "2022-12-12T07:00:00.000+00:00",
+ "metadata": {"originIp": "127.0.0.1", "recordType": "srmTx"},
+ "payload": {
+ "data": {
+ "requests": {"signalRequestPackage": [{"request": {"requestID": 13}}]},
+ "requestor": {
+ "type": {"role": "publicTrasport"},
+ "position": {"position": {"latitude": "101.00", "longitude": "49.00"}},
+ },
+ }
+ },
+}
+srm_record_three = {
+ "recordGeneratedAt": "2022-12-14T07:00:00.000+00:00",
+ "metadata": {"originIp": "127.0.0.1", "recordType": "srmTx"},
+ "payload": {
+ "data": {
+ "requests": {"signalRequestPackage": [{"request": {"requestID": 17}}]},
+ "requestor": {
+ "type": {"role": "publicTrasport"},
+ "position": {"position": {"latitude": "102.00", "longitude": "53.00"}},
+ },
+ }
+ },
+}
ssm_single_result_expected = [
{
@@ -224,3 +258,52 @@
"status": "N/A",
},
]
+
+
+srm_processed_one = {
+ "time": datetime.strftime(
+ datetime.strptime("12/13/2022 12:00:00 AM", "%m/%d/%Y %I:%M:%S %p").astimezone(
+ timezone("America/Denver")
+ ),
+ "%m/%d/%Y %I:%M:%S %p",
+ ),
+ "ip": "127.0.0.1",
+ "requestId": 9,
+ "role": "publicTrasport",
+ "lat": "100.00",
+ "long": "50.00",
+ "type": "srmTx",
+ "status": "N/A",
+}
+
+srm_processed_two = {
+ "time": datetime.strftime(
+ datetime.strptime("12/12/2022 12:00:00 AM", "%m/%d/%Y %I:%M:%S %p").astimezone(
+ timezone("America/Denver")
+ ),
+ "%m/%d/%Y %I:%M:%S %p",
+ ),
+ "ip": "127.0.0.1",
+ "requestId": 13,
+ "role": "publicTrasport",
+ "lat": "101.00",
+ "long": "49.00",
+ "type": "srmTx",
+ "status": "N/A",
+}
+
+srm_processed_three = {
+ "time": datetime.strftime(
+ datetime.strptime("12/14/2022 12:00:00 AM", "%m/%d/%Y %I:%M:%S %p").astimezone(
+ timezone("America/Denver")
+ ),
+ "%m/%d/%Y %I:%M:%S %p",
+ ),
+ "ip": "127.0.0.1",
+ "requestId": 17,
+ "role": "publicTrasport",
+ "lat": "102.00",
+ "long": "53.00",
+ "type": "srmTx",
+ "status": "N/A",
+}
diff --git a/services/api/tests/src/test_rsu_bsmdata.py b/services/api/tests/src/test_rsu_bsmdata.py
index 153a26661..8057be486 100644
--- a/services/api/tests/src/test_rsu_bsmdata.py
+++ b/services/api/tests/src/test_rsu_bsmdata.py
@@ -1,6 +1,6 @@
from unittest.mock import patch, MagicMock
import os
-from api.src.rsu_bsmdata import query_bsm_data_mongo, bsm_hash, query_bsm_data_bq
+from api.src.rsu_bsmdata import query_bsm_data_mongo, bsm_hash
import api.tests.data.rsu_bsmdata_data as rsu_bsmdata_data
@@ -72,27 +72,3 @@ def test_query_bsm_data_mongo_failed_to_connect(mock_mongo):
mock_mongo.assert_called()
assert code == 503
assert response == expected_response
-
-
-@patch.dict(os.environ, {"BSM_DB_NAME": "col"})
-@patch("api.src.rsu_bsmdata.bigquery")
-def test_query_bsm_data_bq(mock_bq):
- mock_bq_client = MagicMock()
- mock_bq.Client.return_value = mock_bq_client
-
- mock_job = MagicMock()
- mock_bq_client.query.return_value = mock_job
- mock_job.__iter__.return_value = rsu_bsmdata_data.bq_bsm_data_response
-
- point_list = [[1, 2], [3, 4]]
- start = "2023-07-01T00:00:00Z"
- end = "2023-07-02T00:00:00Z"
-
- response, code = query_bsm_data_bq(point_list, start, end)
- expected_response = rsu_bsmdata_data.processed_bsm_message_data
-
- assert response[0]["properties"]["id"] == expected_response[0]["properties"]["id"]
- assert (
- response[0]["properties"]["time"] == expected_response[0]["properties"]["time"]
- )
- assert code == 200 # Expect a success status code
diff --git a/services/api/tests/src/test_rsu_querycounts.py b/services/api/tests/src/test_rsu_querycounts.py
index d4f7fe4c6..5f3a2cd41 100644
--- a/services/api/tests/src/test_rsu_querycounts.py
+++ b/services/api/tests/src/test_rsu_querycounts.py
@@ -17,28 +17,9 @@ def test_options_request():
assert headers["Access-Control-Allow-Methods"] == "GET"
-@patch.dict(os.environ, {"COUNTS_DB_TYPE": "BIGQUERY"})
-@patch("api.src.rsu_querycounts.get_organization_rsus")
-@patch("api.src.rsu_querycounts.query_rsu_counts_bq")
-def test_get_request_bq(mock_query, mock_rsus):
- req = MagicMock()
- req.args = querycounts_data.request_args_good
- req.environ = querycounts_data.request_params_good
- counts = rsu_querycounts.RsuQueryCounts()
- mock_rsus.return_value = ["10.0.0.1", "10.0.0.2", "10.0.0.3"]
- mock_query.return_value = {"Some Data"}, 200
- with patch("api.src.rsu_querycounts.request", req):
- (data, code, headers) = counts.get()
- assert code == 200
- assert headers["Access-Control-Allow-Origin"] == "test.com"
- assert headers["Content-Type"] == "application/json"
- assert data == {"Some Data"}
-
-
-@patch.dict(os.environ, {"COUNTS_DB_TYPE": "MONGODB"})
@patch("api.src.rsu_querycounts.get_organization_rsus")
@patch("api.src.rsu_querycounts.query_rsu_counts_mongo")
-def test_get_request_mongo(mock_query, mock_rsus):
+def test_get_request(mock_query, mock_rsus):
req = MagicMock()
req.args = querycounts_data.request_args_good
req.environ = querycounts_data.request_params_good
@@ -65,7 +46,7 @@ def test_get_request_invalid_message():
(data, code, headers) = counts.get()
assert code == 400
assert headers["Access-Control-Allow-Origin"] == "test.com"
- assert data == "Invalid Message Type.\nValid message types: test, anothErtest"
+ assert data == "Invalid Message Type.\nValid message types: Test, Anothertest"
@patch.dict(os.environ, {}, clear=True)
@@ -79,7 +60,7 @@ def test_get_request_invalid_message_no_env():
assert headers["Access-Control-Allow-Origin"] == "test.com"
assert (
data
- == "Invalid Message Type.\nValid message types: BSM, SSM, SPAT, SRM, MAP"
+ == "Invalid Message Type.\nValid message types: Bsm, Ssm, Spat, Srm, Map"
)
@@ -98,47 +79,57 @@ def test_schema_validate_bad_data():
@patch("api.src.rsu_querycounts.pgquery")
def test_rsu_counts_get_organization_rsus(mock_pgquery):
mock_pgquery.query_db.return_value = [
- ({"ip": "10.11.81.12"},),
- ({"ip": "10.11.81.13"},),
- ({"ip": "10.11.81.14"},),
+ ({"ipv4_address": "10.11.81.12", "primary_route": "Route 1"},),
+ ({"ipv4_address": "10.11.81.13", "primary_route": "Route 1"},),
+ ({"ipv4_address": "10.11.81.14", "primary_route": "Route 1"},),
]
expected_query = (
- "SELECT jsonb_build_object('ip', rd.ipv4_address) "
- "FROM public.rsus AS rd "
+ "SELECT to_jsonb(row) "
+ "FROM ("
+ "SELECT rd.ipv4_address, rd.primary_route "
+ "FROM public.rsus rd "
"JOIN public.rsu_organization_name AS ron_v ON ron_v.rsu_id = rd.rsu_id "
- f"WHERE ron_v.name = 'Test' "
- "ORDER BY rd.ipv4_address"
+ "WHERE ron_v.name = 'Test' "
+ "ORDER BY primary_route ASC, milepost ASC"
+ ") as row"
)
+
actual_result = rsu_querycounts.get_organization_rsus("Test")
- mock_pgquery.query_db.assert_called_with(expected_query)
- assert actual_result == ["10.11.81.12", "10.11.81.13", "10.11.81.14"]
+ mock_pgquery.query_db.assert_called_with(expected_query)
+ assert actual_result == {
+ "10.11.81.12": "Route 1",
+ "10.11.81.13": "Route 1",
+ "10.11.81.14": "Route 1",
+ }
@patch("api.src.rsu_querycounts.pgquery")
def test_rsu_counts_get_organization_rsus_empty(mock_pgquery):
mock_pgquery.query_db.return_value = []
expected_query = (
- "SELECT jsonb_build_object('ip', rd.ipv4_address) "
- "FROM public.rsus AS rd "
+ "SELECT to_jsonb(row) "
+ "FROM ("
+ "SELECT rd.ipv4_address, rd.primary_route "
+ "FROM public.rsus rd "
"JOIN public.rsu_organization_name AS ron_v ON ron_v.rsu_id = rd.rsu_id "
- f"WHERE ron_v.name = 'Test' "
- "ORDER BY rd.ipv4_address"
+ "WHERE ron_v.name = 'Test' "
+ "ORDER BY primary_route ASC, milepost ASC"
+ ") as row"
)
actual_result = rsu_querycounts.get_organization_rsus("Test")
mock_pgquery.query_db.assert_called_with(expected_query)
- assert actual_result == []
+ assert actual_result == {}
##################################### Test query_rsu_counts ###########################################
@patch.dict(
os.environ,
- {"MONGO_DB_URI": "uri", "MONGO_DB_NAME": "name", "COUNTS_DB_NAME": "col"},
+ {"MONGO_DB_URI": "uri", "MONGO_DB_NAME": "name"},
)
@patch("api.src.rsu_querycounts.MongoClient")
-@patch("api.src.rsu_querycounts.logging")
-def test_query_rsu_counts_mongo_success(mock_logging, mock_mongo):
+def test_query_rsu_counts_mongo_success(mock_mongo):
mock_db = MagicMock()
mock_collection = MagicMock()
mock_mongo.return_value.__getitem__.return_value = mock_db
@@ -146,29 +137,27 @@ def test_query_rsu_counts_mongo_success(mock_logging, mock_mongo):
mock_db.validate_collection.return_value = "valid"
# Mock data that would be returned from MongoDB
- mock_collection.find.return_value = [
- {"ip": "192.168.0.1", "road": "A1", "count": 5},
- {"ip": "192.168.0.2", "road": "A2", "count": 10},
- ]
+ mock_collection.count_documents.return_value = 5
- allowed_ips = ["192.168.0.1", "192.168.0.2"]
+ allowed_ips = {"192.168.0.1": "A1", "192.168.0.2": "A2"}
message_type = "TYPE_A"
start = "2022-01-01T00:00:00"
end = "2023-01-01T00:00:00"
expected_result = {
"192.168.0.1": {"road": "A1", "count": 5},
- "192.168.0.2": {"road": "A2", "count": 10},
+ "192.168.0.2": {"road": "A2", "count": 5},
}
result, status_code = query_rsu_counts_mongo(allowed_ips, message_type, start, end)
+
assert result == expected_result
assert status_code == 200
@patch.dict(
os.environ,
- {"MONGO_DB_URI": "uri", "MONGO_DB_NAME": "name", "COUNTS_DB_NAME": "col"},
+ {"MONGO_DB_URI": "uri", "MONGO_DB_NAME": "name"},
)
@patch("api.src.rsu_querycounts.MongoClient")
@patch("api.src.rsu_querycounts.logging")
@@ -184,47 +173,3 @@ def test_query_rsu_counts_mongo_failure(mock_logging, mock_mongo):
result, status_code = query_rsu_counts_mongo(allowed_ips, message_type, start, end)
assert result == {}
assert status_code == 503
-
-
-@patch("api.src.rsu_querycounts.bigquery")
-def test_rsu_counts_multiple_result(mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = [
- querycounts_data.rsu_one,
- querycounts_data.rsu_two,
- querycounts_data.rsu_three,
- ]
- expected_rsu_data = querycounts_data.rsu_counts_expected_multiple
- with patch.dict(
- "api.src.rsu_querycounts.os.environ",
- {"COUNTS_DB_NAME": "Fake_table", "COUNTS_DB_TYPE": "BIGQUERY"},
- ):
- (data, code) = rsu_querycounts.query_rsu_counts_bq(
- ["10.11.81.24", "172.16.28.23", "172.16.28.136"],
- "BSM",
- "2022-05-23T12:00:00",
- "2022-05-24T12:00:00",
- )
- assert data == expected_rsu_data
- assert code == 200
-
-
-@patch("api.src.rsu_querycounts.bigquery")
-def test_rsu_counts_limited_rsus(mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = [
- querycounts_data.rsu_one,
- querycounts_data.rsu_two,
- querycounts_data.rsu_three,
- ]
- expected_rsu_data = querycounts_data.rsu_counts_expected_limited_rsus
- with patch.dict(
- "api.src.rsu_querycounts.os.environ",
- {"COUNTS_DB_NAME": "Fake_table", "COUNTS_DB_TYPE": "BIGQUERY"},
- ):
- (data, code) = rsu_querycounts.query_rsu_counts_bq(
- ["172.16.28.23", "172.16.28.136"],
- "BSM",
- "2022-05-23T12:00:00",
- "2022-05-24T12:00:00",
- )
- assert data == expected_rsu_data
- assert code == 200
diff --git a/services/api/tests/src/test_rsu_ssm_srm.py b/services/api/tests/src/test_rsu_ssm_srm.py
index e3881a539..27fbd8359 100644
--- a/services/api/tests/src/test_rsu_ssm_srm.py
+++ b/services/api/tests/src/test_rsu_ssm_srm.py
@@ -1,3 +1,4 @@
+import os
from unittest.mock import patch, MagicMock
import api.src.rsu_ssm_srm as rsu_ssm_srm
import api.tests.data.rsu_ssm_srm_data as ssm_srm_data
@@ -16,17 +17,17 @@ def test_options_request():
assert headers["Access-Control-Allow-Methods"] == "GET"
-@patch("api.src.rsu_ssm_srm.query_ssm_data")
-@patch("api.src.rsu_ssm_srm.query_srm_data")
+@patch("api.src.rsu_ssm_srm.query_ssm_data_mongo")
+@patch("api.src.rsu_ssm_srm.query_srm_data_mongo")
def test_get_request(mock_srm, mock_ssm):
req = MagicMock()
ssm_srm = rsu_ssm_srm.RsuSsmSrmData()
mock_ssm.return_value = 200, []
mock_srm.return_value = 200, [
- ssm_srm_data.ssm_record_one,
- ssm_srm_data.ssm_record_two,
- ssm_srm_data.srm_record_one,
- ssm_srm_data.srm_record_three,
+ ssm_srm_data.srm_processed_one,
+ ssm_srm_data.srm_processed_two,
+ ssm_srm_data.srm_processed_one,
+ ssm_srm_data.srm_processed_three,
]
with patch("api.src.rsu_ssm_srm.request", req):
(data, code, headers) = ssm_srm.get()
@@ -34,57 +35,82 @@ def test_get_request(mock_srm, mock_ssm):
assert headers["Access-Control-Allow-Origin"] == "test.com"
assert headers["Content-Type"] == "application/json"
assert data == [
- ssm_srm_data.ssm_record_one,
- ssm_srm_data.srm_record_one,
- ssm_srm_data.ssm_record_two,
- ssm_srm_data.srm_record_three,
+ ssm_srm_data.srm_processed_two,
+ ssm_srm_data.srm_processed_one,
+ ssm_srm_data.srm_processed_one,
+ ssm_srm_data.srm_processed_three,
]
#################################### Test query_ssm_data ########################################
-@patch("api.src.rsu_ssm_srm.bigquery")
+@patch.dict(
+ os.environ,
+ {"MONGO_DB_NAME": "name", "SSM_DB_NAME": "ssm_collection"},
+)
+@patch("api.src.rsu_ssm_srm.MongoClient")
@patch("api.src.rsu_ssm_srm.datetime")
-def test_query_ssm_data_query(mock_date, mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = []
+def test_query_ssm_data_query(mock_date, mock_mongo):
+ mock_db = MagicMock()
+ mock_collection = MagicMock()
+ mock_mongo.return_value.__getitem__.return_value = mock_db
+ mock_db.__getitem__.return_value = mock_collection
+
+ mock_collection.find.return_value = []
+
mock_date.now.return_value = datetime.strptime(
"2022/12/14 00:00:00", "%Y/%m/%d %H:%M:%S"
).astimezone(UTC)
- with patch.dict("api.src.rsu_ssm_srm.os.environ", {"SSM_DB_NAME": "Fake_table"}):
- rsu_ssm_srm.query_ssm_data([])
- mock_bigquery.Client.return_value.query.assert_called_with(
- ssm_srm_data.ssm_expected_query
- )
+ rsu_ssm_srm.query_ssm_data_mongo([])
+
+ mock_mongo.assert_called()
+ mock_collection.find.assert_called()
-@patch("api.src.rsu_ssm_srm.bigquery")
-def test_query_ssm_data_no_data(mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = []
+
+@patch("api.src.rsu_ssm_srm.MongoClient")
+def test_query_ssm_data_no_data(mock_mongo):
+ mock_db = MagicMock()
+ mock_collection = MagicMock()
+ mock_mongo.return_value.__getitem__.return_value = mock_db
+ mock_db.__getitem__.return_value = mock_collection
+
+ mock_collection.find.return_value = []
with patch.dict("api.src.rsu_ssm_srm.os.environ", {"SSM_DB_NAME": "Fake_table"}):
- (code, data) = rsu_ssm_srm.query_ssm_data([])
+ (code, data) = rsu_ssm_srm.query_ssm_data_mongo([])
assert data == []
assert code == 200
-@patch("api.src.rsu_ssm_srm.bigquery")
-def test_query_ssm_data_single_result(mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = [ssm_srm_data.ssm_record_one]
+@patch("api.src.rsu_ssm_srm.MongoClient")
+def test_query_ssm_data_single_result(mock_mongo):
+ mock_db = MagicMock()
+ mock_collection = MagicMock()
+ mock_mongo.return_value.__getitem__.return_value = mock_db
+ mock_db.__getitem__.return_value = mock_collection
+
+ mock_collection.find.return_value = [ssm_srm_data.ssm_record_one]
with patch.dict("api.src.rsu_ssm_srm.os.environ", {"SSM_DB_NAME": "Fake_table"}):
- (code, data) = rsu_ssm_srm.query_ssm_data([])
+ (code, data) = rsu_ssm_srm.query_ssm_data_mongo([])
assert data == ssm_srm_data.ssm_single_result_expected
assert code == 200
-@patch("api.src.rsu_ssm_srm.bigquery")
-def test_query_ssm_data_multiple_result(mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = [
+@patch("api.src.rsu_ssm_srm.MongoClient")
+def test_query_ssm_data_multiple_result(mock_mongo):
+ mock_db = MagicMock()
+ mock_collection = MagicMock()
+ mock_mongo.return_value.__getitem__.return_value = mock_db
+ mock_db.__getitem__.return_value = mock_collection
+
+ mock_collection.find.return_value = [
ssm_srm_data.ssm_record_one,
ssm_srm_data.ssm_record_two,
ssm_srm_data.ssm_record_three,
]
with patch.dict("api.src.rsu_ssm_srm.os.environ", {"SSM_DB_NAME": "Fake_table"}):
- (code, data) = rsu_ssm_srm.query_ssm_data([])
+ (code, data) = rsu_ssm_srm.query_ssm_data_mongo([])
assert data == ssm_srm_data.ssm_multiple_result_expected
assert code == 200
@@ -92,46 +118,69 @@ def test_query_ssm_data_multiple_result(mock_bigquery):
##################################### Test query_srm_data ###########################################
-@patch("api.src.rsu_ssm_srm.bigquery")
+@patch.dict(
+ os.environ,
+ {"MONGO_DB_NAME": "name", "SRM_DB_NAME": "srm_collection"},
+)
+@patch("api.src.rsu_ssm_srm.MongoClient")
@patch("api.src.rsu_ssm_srm.datetime")
-def test_query_srm_data_query(mock_date, mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = []
+def test_query_srm_data_query(mock_date, mock_mongo):
+ mock_db = MagicMock()
+ mock_collection = MagicMock()
+ mock_mongo.return_value.__getitem__.return_value = mock_db
+ mock_db.__getitem__.return_value = mock_collection
+
+ mock_collection.find.return_value = []
mock_date.now.return_value = datetime.strptime(
"2022/12/14 00:00:00", "%Y/%m/%d %H:%M:%S"
).astimezone(UTC)
with patch.dict("api.src.rsu_ssm_srm.os.environ", {"SRM_DB_NAME": "Fake_table"}):
- rsu_ssm_srm.query_srm_data([])
- mock_bigquery.Client.return_value.query.assert_called_with(
- ssm_srm_data.srm_expected_query
- )
+ rsu_ssm_srm.query_srm_data_mongo([])
+ mock_mongo.assert_called()
+ mock_collection.find.assert_called()
+
+@patch("api.src.rsu_ssm_srm.MongoClient")
+def test_query_srm_data_no_data(mock_mongo):
+ mock_db = MagicMock()
+ mock_collection = MagicMock()
+ mock_mongo.return_value.__getitem__.return_value = mock_db
+ mock_db.__getitem__.return_value = mock_collection
-@patch("api.src.rsu_ssm_srm.bigquery")
-def test_query_srm_data_no_data(mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = []
+ mock_collection.find.return_value = []
with patch.dict("api.src.rsu_ssm_srm.os.environ", {"SRM_DB_NAME": "Fake_table"}):
- (code, data) = rsu_ssm_srm.query_srm_data([])
+ (code, data) = rsu_ssm_srm.query_srm_data_mongo([])
assert data == []
assert code == 200
-@patch("api.src.rsu_ssm_srm.bigquery")
-def test_query_srm_data_single_result(mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = [ssm_srm_data.srm_record_one]
+@patch("api.src.rsu_ssm_srm.MongoClient")
+def test_query_srm_data_single_result(mock_mongo):
+ mock_db = MagicMock()
+ mock_collection = MagicMock()
+ mock_mongo.return_value.__getitem__.return_value = mock_db
+ mock_db.__getitem__.return_value = mock_collection
+
+ mock_collection.find.return_value = [ssm_srm_data.srm_record_one]
with patch.dict("api.src.rsu_ssm_srm.os.environ", {"SRM_DB_NAME": "Fake_table"}):
- (code, data) = rsu_ssm_srm.query_srm_data([])
+ (code, data) = rsu_ssm_srm.query_srm_data_mongo([])
assert data == ssm_srm_data.srm_single_result_expected
assert code == 200
-@patch("api.src.rsu_ssm_srm.bigquery")
-def test_query_srm_data_multiple_result(mock_bigquery):
- mock_bigquery.Client.return_value.query.return_value = [
+@patch("api.src.rsu_ssm_srm.MongoClient")
+def test_query_srm_data_multiple_result(mock_mongo):
+ mock_db = MagicMock()
+ mock_collection = MagicMock()
+ mock_mongo.return_value.__getitem__.return_value = mock_db
+ mock_db.__getitem__.return_value = mock_collection
+
+ mock_collection.find.return_value = [
ssm_srm_data.srm_record_one,
ssm_srm_data.srm_record_two,
ssm_srm_data.srm_record_three,
]
with patch.dict("api.src.rsu_ssm_srm.os.environ", {"SRM_DB_NAME": "Fake_table"}):
- (code, data) = rsu_ssm_srm.query_srm_data([])
+ (code, data) = rsu_ssm_srm.query_srm_data_mongo([])
assert data == ssm_srm_data.srm_multiple_result_expected
assert code == 200