From 16cbc564402b185a7a055a1780010b528684c55c Mon Sep 17 00:00:00 2001
From: Zack Ganger
Date: Thu, 27 Jun 2024 18:24:49 -0400
Subject: [PATCH] fix lines too long

---
 backend/database/core.py               |  6 ++---
 backend/importer/loop.py               | 33 ++++++++++++++++----------
 frontend/babel.config.js               |  2 +-
 frontend/helpers/api/mocks/browser.ts  |  2 +-
 frontend/tests/helpers/api.e2e.test.ts |  2 +-
 5 files changed, 27 insertions(+), 18 deletions(-)

diff --git a/backend/database/core.py b/backend/database/core.py
index 7304c1dfe..acbebee8f 100644
--- a/backend/database/core.py
+++ b/backend/database/core.py
@@ -1,7 +1,7 @@
 """This file defines the database connection, plus some terminal commands for
 setting up and tearing down the database.
 
-Do not importer anything directly from `backend.database._core`. Instead, importer
+Do not import anything directly from `backend.database._core`. Instead, import
 from `backend.database`.
 """
 import os
@@ -61,9 +61,9 @@ def execute_query(filename: str) -> Optional[pd.DataFrame]:
     """Run SQL from a file. It will return a Pandas DataFrame if it selected
     anything; otherwise it will return None.
 
-    I do not recommend you use this function too often. In general we should be
+    I do not recommend you use this function too often. In general, we should be
     using the SQLAlchemy ORM. That said, it's a nice convenience, and there are
-    times where this function is genuinely something you want to run.
+    times when this function is genuinely something you want to run.
     """
     with open(os.path.join(QUERIES_DIR, secure_filename(filename))) as f:
         query = f.read()
diff --git a/backend/importer/loop.py b/backend/importer/loop.py
index f5f73b2da..ffe14ad6f 100644
--- a/backend/importer/loop.py
+++ b/backend/importer/loop.py
@@ -12,29 +12,34 @@ def __init__(self, content: bytes):
         self.content = content
 
     def load(self):
-        raise Exception("unimplemented; extend this class to write a load migration.")
+        raise Exception("unimplemented; extend class to write a load migration")
 
 
 class Importer(Thread):
     def __init__(self, queue_name: str, region: str = "us-east-1"):
-        super().__init__(daemon=True)  # TODO: ideally we would have a function on the app to catch shutdown events and close gracefully, but until then daemon it is.
+        # TODO: ideally we would have a function on the app to catch shutdown
+        # events and close gracefully, but until then daemon it is.
+        super().__init__(daemon=True)
        self.queue_name = queue_name
         self.session = boto3.Session(region_name=region)
         self.sqs_client = self.session.client("sqs")
         self.s3_client = self.session.client("s3")
-        self.sqs_queue_url = self.sqs_client.get_queue_url(QueueName=self.queue_name)
+        self.sqs_queue_url = self.sqs_client.get_queue_url(
+            QueueName=self.queue_name)
         self.logger = getLogger(self.__class__.__name__)
-        self.loader_map: dict[str, Loader] = {
-            # this should be a mapping of s3 key prefix : loader class for that file type
-        }
+        self.loader_map: dict[str, Loader] = {}
 
     def run(self):
         while True:
             resp = self.sqs_client.receive_message(
                 QueueUrl=self.sqs_queue_url,
-                MaxNumberOfMessages=1,  # retrieve one message at a time - we could up this and parallelize but no point until way more files.
-                VisibilityTimeout=600,  # 10 minutes to process message before it becomes visible for another consumer.
+                # retrieve one message at a time - we could up this
+                # and parallelize but no point until way more files.
+                MaxNumberOfMessages=1,
+                # 10 minutes to process message before it becomes
+                # visible for another consumer.
+                VisibilityTimeout=600,
             )
             # if no messages found, wait 5m for next poll
             if len(resp["Messages"]) == 0:
@@ -43,16 +48,20 @@ def run(self):
 
             for message in resp["Messages"]:
                 sqs_body = ujson.loads(message["Body"])
-                for record in sqs_body["Records"]:  # this comes through as a list, but we expect one object
+                # this comes through as a list, but we expect one object
+                for record in sqs_body["Records"]:
                     bucket_name = record["s3"]["bucket"]["name"]
                     key = record["s3"]["object"]["key"]
 
                     with BytesIO() as fileobj:
-                        self.s3_client.download_fileobj(bucket_name, key, fileobj)
+                        self.s3_client.download_fileobj(
+                            bucket_name, key, fileobj)
                         fileobj.seek(0)
                         content = fileobj.read()
+                        _ = content  # for linting.
 
-                    # TODO: we now have an in-memory copy of the s3 file content. This is where we would run the importer.
-                    # we want a standardized importer class; we would call something like below:
+                    # TODO: we now have an in-memory copy of s3 file content
+                    # This is where we would run the importer.
+                    # we want a standardized importer class; use like:
                     # loader = self.get_loader_for_content_type(key)
                     # loader(content).load()
diff --git a/frontend/babel.config.js b/frontend/babel.config.js
index 772bf8265..d3a6c6010 100644
--- a/frontend/babel.config.js
+++ b/frontend/babel.config.js
@@ -3,7 +3,7 @@ module.exports = {
   plugins: ["inline-react-svg"],
   env: {
     test: {
-      plugins: ["transform-dynamic-importer"]
+      plugins: ["transform-dynamic-import"]
     }
   }
 }
diff --git a/frontend/helpers/api/mocks/browser.ts b/frontend/helpers/api/mocks/browser.ts
index ca6ace3e7..5e73c9342 100644
--- a/frontend/helpers/api/mocks/browser.ts
+++ b/frontend/helpers/api/mocks/browser.ts
@@ -3,7 +3,7 @@ import { handlers, rejectUnhandledApiRequests } from "./handlers"
 
 export const worker = setupWorker(...handlers)
 
-/** Starts worker, convenience for conditional importer */
+/** Starts worker, convenience for conditional import */
 export const startWorker = () => {
   worker.start({
     onUnhandledRequest: rejectUnhandledApiRequests
diff --git a/frontend/tests/helpers/api.e2e.test.ts b/frontend/tests/helpers/api.e2e.test.ts
index 8c38238f3..61ee78d40 100644
--- a/frontend/tests/helpers/api.e2e.test.ts
+++ b/frontend/tests/helpers/api.e2e.test.ts
@@ -3,5 +3,5 @@ import { server } from "../test-utils"
 
 /** Turn off API mocking for the test so we use the real API */
 beforeAll(() => server.close())
-/** Re-importer the main test file to pick up the tests */
+/** Re-import the main test file to pick up the tests */
 require("./api.test")
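
The TODO in `Importer.run` points at the intended design: one standardized `Loader` subclass per file type, chosen by s3 key prefix through `loader_map` and `get_loader_for_content_type`. Below is a minimal sketch of how that dispatch could look; the `CsvLoader` class, the `csv/` prefix convention, and the module-level helper are illustrative assumptions, not part of this patch.

    from io import BytesIO

    import pandas as pd


    class Loader:
        """Base class mirroring backend/importer/loop.py."""

        def __init__(self, content: bytes):
            self.content = content

        def load(self):
            raise Exception("unimplemented; extend class to write a load migration")


    class CsvLoader(Loader):
        """Hypothetical loader for objects stored under a 'csv/' key prefix."""

        def load(self):
            # parse the in-memory s3 content; persisting the frame (e.g. via
            # the SQLAlchemy ORM) would be the real migration's job
            return pd.read_csv(BytesIO(self.content))


    # the map stores classes rather than instances, hence type[Loader]
    LOADER_MAP: dict[str, type[Loader]] = {
        "csv/": CsvLoader,
    }


    def get_loader_for_content_type(key: str) -> type[Loader]:
        """Resolve a Loader subclass from an s3 object key by prefix."""
        for prefix, loader_cls in LOADER_MAP.items():
            if key.startswith(prefix):
                return loader_cls
        raise KeyError(f"no loader registered for s3 key {key!r}")


    # usage, mirroring the commented-out lines in Importer.run:
    #     loader = get_loader_for_content_type(key)
    #     loader(content).load()

One caveat for whoever picks up that TODO: boto3's `receive_message` omits the `Messages` key entirely when the queue returns nothing, so the `len(resp["Messages"]) == 0` check in the surrounding context would raise a KeyError on an empty queue; `resp.get("Messages", [])` is the safer read.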