chore: migrate tests from test_lambda to test_vectorizer_cli
Askir committed Nov 8, 2024
1 parent 63d0dbd commit f815594
Showing 5 changed files with 431 additions and 412 deletions.
80 changes: 0 additions & 80 deletions projects/pgai/pgai/vectorizer/lambda_handler.py

This file was deleted.

6 changes: 6 additions & 0 deletions projects/pgai/pgai/vectorizer/vectorizer.py
@@ -1,4 +1,5 @@
 import asyncio
+import os
 import threading
 import time
 from collections.abc import Callable
@@ -747,3 +748,8 @@ def _vectorizer_error_record(
 }
 ),
 )
+
+
+TIKTOKEN_CACHE_DIR = os.path.join(
+    os.path.dirname(os.path.abspath(__file__)), "tiktoken_cache"
+)
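
The new module-level constant points at a tiktoken_cache directory shipped alongside vectorizer.py. tiktoken treats the TIKTOKEN_CACHE_DIR environment variable as an override for its encoding cache location, so exporting this path before any encoding is loaded lets the model spec resolve from disk instead of an HTTP fetch. A minimal sketch of that usage (illustration only, not code from this commit; it assumes the bundled cache already contains the cl100k_base files):

import os

import tiktoken

from pgai.vectorizer.vectorizer import TIKTOKEN_CACHE_DIR

# Point tiktoken at the bundled cache before any encoding is requested,
# so the encoding blob is read from disk rather than downloaded.
os.environ["TIKTOKEN_CACHE_DIR"] = TIKTOKEN_CACHE_DIR

encoding = tiktoken.get_encoding("cl100k_base")
print(len(encoding.encode("hello world")))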
11 changes: 5 additions & 6 deletions projects/pgai/tests/vectorizer/conftest.py
@@ -10,21 +10,20 @@
 from psycopg import sql
 from testcontainers.postgres import PostgresContainer # type:ignore
 
+from pgai.vectorizer.vectorizer import TIKTOKEN_CACHE_DIR
+
 DIMENSION_COUNT = 1536
 
 
 @pytest.fixture(autouse=True)
-def __env_setup(monkeypatch): # type:ignore
+def __env_setup(): # type:ignore
     # Capture the current environment variables to restore after the test. The
     # lambda function sets an evironment variable for using the secrets. We
     # need to clear the environment after a test runs.
     original_env = os.environ.copy()
 
-    # OpenAI uses tiktoken to get the model encoding specification to use when
-    # tokenizing. The model spec is fetched with a GET request and stored in a
-    # cache directory. We need to create a temp dir as cache, to trigger a
-    # request on every test.
-    monkeypatch.setenv("TIKTOKEN_CACHE_DIR", "") # type:ignore
+    # Use the existing tiktoken cache
+    os.environ["TIKTOKEN_CACHE_DIR"] = TIKTOKEN_CACHE_DIR
     yield
 
     tiktoken.registry.ENCODINGS = {}
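
With the autouse fixture now exporting the bundled cache path, tests that tokenize no longer need a throwaway cache directory or a live network fetch. A hypothetical test sketching what this enables (not part of this commit; the test name and assertions are illustrative):

import os

import tiktoken

from pgai.vectorizer.vectorizer import TIKTOKEN_CACHE_DIR


def test_tiktoken_reads_bundled_cache():
    # The autouse __env_setup fixture has already exported the cache path.
    assert os.environ["TIKTOKEN_CACHE_DIR"] == TIKTOKEN_CACHE_DIR

    # Loading an encoding should work from the on-disk cache alone,
    # assuming the cl100k_base blob is present in tiktoken_cache.
    encoding = tiktoken.get_encoding("cl100k_base")
    assert encoding.encode("pgai") != []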
