
Commit

fix tests
aaronsteers committed Feb 22, 2024
1 parent 6e5ad29 commit e34d494
Showing 3 changed files with 13 additions and 14 deletions.
14 changes: 6 additions & 8 deletions airbyte/caches/_catalog_manager.py
@@ -116,19 +116,17 @@ def save_state(
     def get_state(
         self,
         source_name: str,
-        streams: list[str],
+        streams: list[str] | None = None,
     ) -> list[dict] | None:
         self._ensure_internal_tables()
         engine = self._engine
         with Session(engine) as session:
-            states = (
-                session.query(StreamState)
-                .filter(
-                    StreamState.source_name == source_name,
-                    StreamState.stream_name.in_([*streams, *GLOBAL_STATE_STREAM_NAMES]),
+            query = session.query(StreamState).filter(StreamState.source_name == source_name)
+            if streams:
+                query = query.filter(
+                    StreamState.stream_name.in_([*streams, *GLOBAL_STATE_STREAM_NAMES])
                 )
-                .all()
-            )
+            states = query.all()
         if not states:
             return None
         # Only return the states if the table name matches what the current cache
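
Note: with this change, `get_state` treats `streams` as an optional filter. A minimal usage sketch, assuming a `CatalogManager` instance is already constructed as `catalog_manager` (that variable name and the "purchases" stream are illustrative, not part of the diff):

    # Fetch all saved state artifacts for a source, regardless of stream
    # (streams defaults to None, so no stream-name filter is applied).
    all_states = catalog_manager.get_state("source-faker")

    # Fetch state only for the listed streams, plus any global state entries.
    purchase_states = catalog_manager.get_state("source-faker", streams=["purchases"])
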
9 changes: 5 additions & 4 deletions tests/integration_tests/test_source_faker_integration.py
@@ -220,7 +220,7 @@ def test_incremental_sync(
     assert len(list(result1.cache.streams["purchases"])) == FAKER_SCALE_A
     assert result1.processed_records == NUM_PRODUCTS + FAKER_SCALE_A * 2
 
-    assert not duckdb_cache._catalog_manager._get_state() == []
+    assert not duckdb_cache.processor._catalog_manager.get_state("source-faker") == []
 
     # Second run should not return records as it picks up the state and knows it's up to date.
     result2 = source_faker_seed_b.read(duckdb_cache)
@@ -249,7 +249,8 @@ def test_incremental_state_cache_persistence(
     result2 = source_faker_seed_b.read(second_cache)
     assert result2.processed_records == 0
 
-    assert second_cache._catalog_manager and second_cache._catalog_manager._get_state()
+    assert second_cache.processor._catalog_manager and \
+        second_cache.processor._catalog_manager.get_state("source-faker")
     assert len(list(result2.cache.streams["products"])) == NUM_PRODUCTS
     assert len(list(result2.cache.streams["purchases"])) == FAKER_SCALE_A
 
@@ -266,8 +267,8 @@ def test_incremental_state_prefix_isolation(
     source_faker_seed_a.set_config(config_a)
     cache_name = str(ulid.ULID())
     db_path = Path(f"./.cache/{cache_name}.duckdb")
-    cache = ab.DuckDBCacheInstance(config=ab.DuckDBCache(db_path=db_path, table_prefix="prefix_"))
-    different_prefix_cache = ab.DuckDBCacheInstance(config=ab.DuckDBCache(db_path=db_path, table_prefix="different_prefix_"))
+    cache = ab.DuckDBCache(db_path=db_path, table_prefix="prefix_")
+    different_prefix_cache = ab.DuckDBCache(db_path=db_path, table_prefix="different_prefix_")
 
     result = source_faker_seed_a.read(cache)
     assert result.processed_records == NUM_PRODUCTS + FAKER_SCALE_A * 2
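
Note: the last hunk above constructs the DuckDB cache directly from `ab.DuckDBCache` instead of wrapping a config object in a separate `DuckDBCacheInstance`. A minimal sketch of the new-style construction, with an illustrative path and prefix:

    import airbyte as ab

    # Construct the cache directly; the db_path and table_prefix values here are illustrative.
    cache = ab.DuckDBCache(db_path="./.cache/example.duckdb", table_prefix="prefix_")
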
4 changes: 2 additions & 2 deletions tests/unit_tests/test_caches.py
@@ -40,7 +40,7 @@ def test_duck_db_cache_config_get_database_name():
     assert config.get_database_name() == 'test_db'
 
 def test_duck_db_cache_base_inheritance():
-    assert issubclass(DuckDBCache, SQLCacheInstanceBase)
+    assert issubclass(DuckDBCache, SQLCacheBase)
 
 def test_duck_db_cache_config_default_schema_name():
     config = DuckDBCache(db_path='test_path')
@@ -58,4 +58,4 @@ def test_duck_db_cache_config_inheritance_from_sql_cache_config_base():
     assert issubclass(DuckDBCache, SQLCacheBase)
 
 def test_duck_db_cache_config_inheritance_from_parquet_writer_config():
-    assert issubclass(DuckDBCacheConfig, JsonlWriterConfig)
+    assert issubclass(DuckDBCache, JsonlWriterConfig)
