
Commit

cleaning up the settings
caparker committed Jun 11, 2024
1 parent a82b13e commit a121166
Showing 4 changed files with 9 additions and 10 deletions.
ingest/lcs.py (4 changes: 2 additions & 2 deletions)
@@ -29,7 +29,7 @@
 app = typer.Typer()
 dir_path = os.path.dirname(os.path.realpath(__file__))

-FETCH_BUCKET = settings.ETL_BUCKET
+FETCH_BUCKET = settings.FETCH_BUCKET

 logger = logging.getLogger(__name__)

@@ -395,7 +395,7 @@ def select_object(key):
     try:
         content = ""
         resp = s3c.select_object_content(
-            Bucket=settings.ETL_BUCKET,
+            Bucket=settings.FETCH_BUCKET,
             Key=key,
             ExpressionType="SQL",
             Expression="""
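
For reference, s3c.select_object_content is the boto3 S3 Select API; the hunk above is cut off after Expression=""" and never shows the query or serialization arguments, so the sketch below fills them in with assumed values (a placeholder query over gzipped CSV, plus an assumed ingest.settings import) purely to show where the renamed FETCH_BUCKET setting ends up.

# Minimal sketch, not the repository's code: the query, serialization settings
# and import path are assumptions; only the Bucket/Key/ExpressionType arguments
# mirror the hunk above.
import boto3

from ingest.settings import settings  # assumed import path

s3c = boto3.client("s3")


def select_object(key):
    resp = s3c.select_object_content(
        Bucket=settings.FETCH_BUCKET,  # was settings.ETL_BUCKET before this commit
        Key=key,
        ExpressionType="SQL",
        Expression="SELECT * FROM s3object",  # placeholder query
        InputSerialization={
            "CSV": {"FieldDelimiter": ","},  # assumed: gzipped CSV objects
            "CompressionType": "GZIP",
        },
        OutputSerialization={"CSV": {}},
    )
    content = ""
    for event in resp["Payload"]:  # S3 Select returns an event stream
        if "Records" in event:
            content += event["Records"]["Payload"].decode("utf-8")
    return content
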
ingest/lcsV2.py (2 changes: 1 addition & 1 deletion)
@@ -32,7 +32,7 @@
 app = typer.Typer()
 dir_path = os.path.dirname(os.path.realpath(__file__))

-FETCH_BUCKET = settings.ETL_BUCKET
+FETCH_BUCKET = settings.FETCH_BUCKET

 logger = logging.getLogger(__name__)

ingest/settings.py (1 change: 0 additions & 1 deletion)
@@ -14,7 +14,6 @@ class Settings(BaseSettings):
     DATABASE_PORT: int
     DATABASE_READ_URL: Union[str, None]
     DATABASE_WRITE_URL: Union[str, None]
-    FASTAPI_URL: str
     DRYRUN: bool = False
     FETCH_BUCKET: str
     ETL_BUCKET: str
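
After this commit the Settings class around the hunk above reduces to something like the sketch below; the field names are taken from the diff, while the imports, the pydantic v1 BaseSettings base, and the module-level settings instance are assumptions inferred from how the other files reference settings.FETCH_BUCKET.

# Hedged sketch of the trimmed settings module; only the field list is taken
# from the hunk above, everything else is assumed.
from typing import Union

from pydantic import BaseSettings  # pydantic v1; v2 moves this to pydantic_settings


class Settings(BaseSettings):
    DATABASE_PORT: int
    DATABASE_READ_URL: Union[str, None]
    DATABASE_WRITE_URL: Union[str, None]
    DRYRUN: bool = False  # FASTAPI_URL: str sat here before this commit
    FETCH_BUCKET: str
    ETL_BUCKET: str


# Values are read from the environment, e.g. FETCH_BUCKET=<bucket-name>
settings = Settings()
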
ingest/utils.py (12 changes: 6 additions & 6 deletions)
Expand Up @@ -230,7 +230,7 @@ def deconstruct_path(key: str):
path["key"] = "/".join(p)
else:
# use the current bucket from settings
path["bucket"] = settings.ETL_BUCKET
path["bucket"] = settings.FETCH_BUCKET
path["key"] = key

logger.debug(path)
@@ -263,7 +263,7 @@ def get_data(key: str):
         key = "/".join(path)
     else:
         # use the current bucket from settings
-        bucket = settings.ETL_BUCKET
+        bucket = settings.FETCH_BUCKET

     # stream the file
     logger.debug(f"streaming s3 file data from s3://{bucket}/{key}")
@@ -289,7 +289,7 @@ def get_file(filepath: str):

 def get_object(
     key: str,
-    bucket: str = settings.ETL_BUCKET
+    bucket: str = settings.FETCH_BUCKET
 ):
     key = unquote_plus(key)
     text = ''
@@ -310,7 +310,7 @@ def get_object(
 def put_object(
     data: str,
     key: str,
-    bucket: str = settings.ETL_BUCKET
+    bucket: str = settings.FETCH_BUCKET
 ):
     out = io.BytesIO()
     with gzip.GzipFile(fileobj=out, mode='wb') as gz:
@@ -362,7 +362,7 @@ def select_object(key: str):
     content = ""
     logger.debug(f"Getting object: {key}, {output_serialization}")
     resp = s3.select_object_content(
-        Bucket=settings.ETL_BUCKET,
+        Bucket=settings.FETCH_BUCKET,
         Key=key,
         ExpressionType="SQL",
         Expression="""
@@ -662,7 +662,7 @@ def crawl(bucket, prefix):


 def crawl_lcs():
-    crawl(settings.ETL_BUCKET, "lcs-etl-pipeline/")
+    crawl(settings.FETCH_BUCKET, "lcs-etl-pipeline/")


 def crawl_fetch():
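
Taken together, the utils.py changes move every default bucket from ETL_BUCKET to FETCH_BUCKET; a hedged usage sketch follows, with assumed import paths and purely illustrative object keys (only the helper names and signatures come from the hunks above).

# Usage sketch under assumptions: the ingest.utils / ingest.settings import
# paths and the object keys are illustrative, not taken from the repository.
from ingest.settings import settings
from ingest.utils import crawl_lcs, get_object, put_object

# Reads now default to settings.FETCH_BUCKET instead of settings.ETL_BUCKET
text = get_object("lcs-etl-pipeline/stations/example.json")

# Writes pick up the same default unless a bucket is passed explicitly
put_object(data=text, key="lcs-etl-pipeline/stations/example-copy.json")

# The old bucket is still reachable by passing it explicitly
legacy = get_object("lcs-etl-pipeline/stations/example.json", bucket=settings.ETL_BUCKET)

# crawl_lcs() now walks the fetch bucket under the lcs-etl-pipeline/ prefix
crawl_lcs()
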
