4 changes: 4 additions & 0 deletions nhsn/delphi_nhsn/constants.py
@@ -1,5 +1,7 @@
"""Registry for signal names."""

from datetime import timedelta

GEOS = ["state", "nation", "hhs"]

MAIN_DATASET_ID = "ua7e-t2fy"
@@ -62,3 +64,5 @@
f"{NUM_HOSP_REPORTING_FLU}_prelim": float,
f"{NUM_HOSP_REPORTING_RSV}_prelim": float,
}

RECENTLY_UPDATED_DIFF = timedelta(days=1)
37 changes: 30 additions & 7 deletions nhsn/delphi_nhsn/pull.py
@@ -4,16 +4,25 @@
import logging
import random
import time
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Optional
from urllib.error import HTTPError

import pandas as pd
from delphi_epidata import Epidata
from delphi_utils import create_backup_csv
from sodapy import Socrata

from .constants import MAIN_DATASET_ID, PRELIM_DATASET_ID, PRELIM_SIGNALS_MAP, PRELIM_TYPE_DICT, SIGNALS_MAP, TYPE_DICT
from .constants import (
MAIN_DATASET_ID,
PRELIM_DATASET_ID,
PRELIM_SIGNALS_MAP,
PRELIM_TYPE_DICT,
RECENTLY_UPDATED_DIFF,
SIGNALS_MAP,
TYPE_DICT,
)


def check_last_updated(socrata_token, dataset_id, logger):
@@ -38,17 +47,31 @@ def check_last_updated(socrata_token, dataset_id, logger):
client = Socrata("data.cdc.gov", socrata_token)
response = client.get_metadata(dataset_id)

updated_timestamp = datetime.utcfromtimestamp(int(response["rowsUpdatedAt"]))
now = datetime.utcnow()
recently_updated_source = (now - updated_timestamp) < timedelta(days=1)
updated_timestamp = datetime.fromtimestamp(int(response["rowsUpdatedAt"]), tz=timezone.utc)

# pulling last updated from the api
meta_df = pd.DataFrame(Epidata.covidcast_meta()["epidata"])
signal_suffix = "prelim" if dataset_id == PRELIM_DATASET_ID else "ew"
nhsn_meta_df = meta_df[(meta_df["data_source"] == "nhsn") & (meta_df["signal"].str.endswith(signal_suffix))]
est = timezone(timedelta(hours=-5))
last_updated = datetime.fromtimestamp(nhsn_meta_df["last_update"].min(), tz=est)
Comment on lines +56 to +57
Contributor
This is gonna have issues because of DST changes; however, this timestamp should be for UTC already.

It shouldn't make too much of a difference because the probability of it biting us should be rare, but I think you'll also want a max instead of a min (in case we change signal names or discontinue signals, among other things).

Suggested change
- est = timezone(timedelta(hours=-5))
- last_updated = datetime.fromtimestamp(nhsn_meta_df["last_update"].min(), tz=est)
+ last_updated = datetime.fromtimestamp(nhsn_meta_df["last_update"].max(), tz=timezone.utc)
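A minimal sketch (epoch values invented for illustration, not taken from the PR) of the reviewer's two points: aware datetimes built from the same epoch denote the same instant no matter which offset labels them, and min() over per-signal last_update values can stay pinned to a discontinued signal long after the active signals have refreshed.

from datetime import datetime, timedelta, timezone

# Invented last_update epochs for three nhsn signals; the first belongs to a
# signal that was discontinued and stopped refreshing months before the others.
last_updates = [1717200000, 1733980800, 1733980800]

# min() stays pinned to the stale, discontinued signal ...
stale = datetime.fromtimestamp(min(last_updates), tz=timezone.utc)
# ... while max() tracks the most recent refresh among the active signals.
fresh = datetime.fromtimestamp(max(last_updates), tz=timezone.utc)

# A fixed -5 offset labels the instant as EST year-round, which is off by an
# hour during daylight saving time; the instant itself is the same either way,
# so labeling it as UTC is simpler and always correct.
est_fixed = timezone(timedelta(hours=-5))
assert datetime.fromtimestamp(max(last_updates), tz=est_fixed) == fresh

print(stale.isoformat())  # 2024-06-01T00:00:00+00:00
print(fresh.isoformat())  # 2024-12-12T05:20:00+00:00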


# currently set to run twice a week, RECENTLY_UPDATED_DIFF may need adjusting based on the cadence
recently_updated_source = (updated_timestamp - last_updated) > RECENTLY_UPDATED_DIFF
Contributor
I don't think this math is quite right... why wouldn't we want to proceed any time Socrata has a newer timestamp than we do? The form you have here has the potential to delay processing if updates are frequent enough or if RECENTLY_UPDATED_DIFF is too large.

Suggested change
- recently_updated_source = (updated_timestamp - last_updated) > RECENTLY_UPDATED_DIFF
+ socrata_ts = updated_timestamp
+ delphi_ts = last_updated
+ recently_updated_source = socrata_ts > delphi_ts
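To make the potential delay concrete, here is a toy comparison (timestamps invented for illustration) of the threshold check against the plain strictly-newer check suggested above:

from datetime import datetime, timedelta, timezone

RECENTLY_UPDATED_DIFF = timedelta(days=1)

# Hypothetical run: Socrata refreshed six hours after our last ingestion.
delphi_ts = datetime(2024, 12, 12, 6, 0, tzinfo=timezone.utc)    # last_updated
socrata_ts = datetime(2024, 12, 12, 12, 0, tzinfo=timezone.utc)  # updated_timestamp

# Threshold form: the genuinely newer data is treated as stale and skipped
# until the gap exceeds RECENTLY_UPDATED_DIFF.
print((socrata_ts - delphi_ts) > RECENTLY_UPDATED_DIFF)  # False -> skip

# Plain comparison: any strictly newer Socrata timestamp triggers a pull.
print(socrata_ts > delphi_ts)  # True -> pull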


prelim_prefix = "Preliminary " if dataset_id == PRELIM_DATASET_ID else ""
if recently_updated_source:
logger.info(
f"{prelim_prefix}NHSN data was recently updated; Pulling data", updated_timestamp=updated_timestamp
f"{prelim_prefix}NHSN data was recently updated; Pulling data",
updated_timestamp=updated_timestamp,
metadata_timestamp=last_updated,
)
else:
logger.info(f"{prelim_prefix}NHSN data is stale; Skipping", updated_timestamp=updated_timestamp)
logger.info(
f"{prelim_prefix}NHSN data is stale; Skipping",
updated_timestamp=updated_timestamp,
metadata_timestamp=last_updated,
)
# pylint: disable=W0703
except Exception as e:
logger.info("error while processing socrata metadata; treating data as stale", error=str(e))
9 changes: 8 additions & 1 deletion nhsn/tests/conftest.py
@@ -23,6 +23,11 @@
with open(f"{TEST_DIR}/test_data/prelim_page.json", "r") as f:
PRELIM_TEST_DATA = json.load(f)

# filtered metadata (just includes nhsn meta)
with open(f"{TEST_DIR}/test_data/covidcast_meta.json", "r") as f:
COVID_META_DATA = json.load(f)


@pytest.fixture(scope="session")
def params():
params = {
@@ -62,7 +67,8 @@ def params_w_patch(params):
@pytest.fixture(scope="function")
def run_as_module(params):
with patch('sodapy.Socrata.get') as mock_get, \
patch('sodapy.Socrata.get_metadata') as mock_get_metadata:
patch('sodapy.Socrata.get_metadata') as mock_get_metadata, \
patch('delphi_nhsn.pull.Epidata.covidcast_meta') as mock_covidcast_meta:
def side_effect(*args, **kwargs):
if kwargs['offset'] == 0:
if "ua7e-t2fy" in args[0]:
@@ -73,5 +79,6 @@ def side_effect(*args, **kwargs):
return []
mock_get.side_effect = side_effect
mock_get_metadata.return_value = {"rowsUpdatedAt": time.time()}
mock_covidcast_meta.return_value = COVID_META_DATA
run_module(params)
