
Commit

chore: fix alias store export
makkus committed Feb 3, 2024
1 parent f707c00 commit e552b5a
Showing 6 changed files with 88 additions and 52 deletions.
33 changes: 20 additions & 13 deletions src/kiara/interfaces/cli/data/commands.py
@@ -544,7 +544,7 @@ def export_data_store(
compression: str,
force: bool,
):
"""Export one or several values into a new data store."""
"""Export one or several values into a new data data_store."""

from kiara.utils.stores import create_new_archive

@@ -575,14 +575,14 @@ def export_data_store(
if not path:
base_path = "."
file_name = f"{archive_alias}.kiarchive"
terminal_print(f"Creating new store '{file_name}'...")
terminal_print(f"Creating new data_store '{file_name}'...")
else:
base_path = os.path.dirname(path)
file_name = os.path.basename(path)
if "." not in file_name:
file_name = f"{file_name}.kiarchive"

terminal_print(f"Creating new store '{path}'...")
terminal_print(f"Creating new data_store '{path}'...")

full_path = Path(base_path) / file_name
if full_path.is_file() and force:
@@ -592,29 +592,37 @@ def export_data_store(
terminal_print(f"[red]Error[/red]: File '{full_path}' already exists.")
sys.exit(1)

store: DataArchive = create_new_archive( # type: ignore
data_store: DataStore = create_new_archive( # type: ignore

Check failure (GitHub Actions / linting-linux, Ruff F821): src/kiara/interfaces/cli/data/commands.py:595:17: Undefined name `DataStore`
archive_alias=archive_alias,
store_base_path=base_path,
store_type="sqlite_data_store",
file_name=file_name,
default_chunk_compression=compression,
allow_write_access=True,
)
archive_store = create_new_archive(
archive_alias=archive_alias,
store_base_path=base_path,
store_type="sqlite_alias_store",
file_name=file_name,
allow_write_access=True,
)

terminal_print("Registering store...")
store_alias = kiara_api.context.data_registry.register_data_archive(store)
terminal_print("Registering data store...")
data_store_alias = kiara_api.context.data_registry.register_data_archive(data_store)
alias_store_alias = kiara_api.context.alias_registry.register_archive(archive_store)

terminal_print("Exporting value into new store...")
terminal_print("Exporting value into new data_store...")

no_default_value = False

if not no_default_value:
try:
store.set_archive_metadata_value(
data_store.set_archive_metadata_value(
"default_value", str(values[0][0].value_id)
)
except Exception as e:
store.delete_archive(archive_id=store.archive_id)
data_store.delete_archive(archive_id=data_store.archive_id)
log_exception(e)
terminal_print(f"[red]Error setting value[/red]: {e}")
sys.exit(1)
@@ -627,21 +635,20 @@ def export_data_store(
if value_alias:
alias_map[key] = value_alias

alias_store_alias = None
try:

persisted_data = kiara_api.store_values(
values=values_to_store,
alias_map=alias_map,
data_store=store_alias,
alias_store_id=alias_store_alias,
data_store=data_store_alias,
alias_store=alias_store_alias,
)

dbg(persisted_data)

Check failure (GitHub Actions / linting-linux, Ruff F821): src/kiara/interfaces/cli/data/commands.py:647:9: Undefined name `dbg`
terminal_print("Done.")

except Exception as e:
store.delete_archive(archive_id=store.archive_id)
data_store.delete_archive(archive_id=data_store.archive_id)
log_exception(e)
terminal_print(f"[red]Error saving results[/red]: {e}")
sys.exit(1)
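The hunk above now creates two sibling archives in the same '.kiarchive' file, a 'sqlite_data_store' and a 'sqlite_alias_store', and routes values and aliases to them explicitly. The following is a minimal sketch of that flow under the signatures shown in the diff; 'export_to_kiarchive' is an illustrative name, the compression setting and archive alias are placeholders, and 'kiara_api', 'values_to_store' and 'alias_map' are assumed to be prepared by the caller.

from kiara.utils.stores import create_new_archive

def export_to_kiarchive(kiara_api, values_to_store, alias_map, archive_alias="my_export"):
    file_name = f"{archive_alias}.kiarchive"

    # one .kiarchive file backs two archives: a data store and an alias store
    data_store = create_new_archive(
        archive_alias=archive_alias,
        store_base_path=".",
        store_type="sqlite_data_store",
        file_name=file_name,
        default_chunk_compression="zstd",  # placeholder compression setting
        allow_write_access=True,
    )
    alias_archive = create_new_archive(
        archive_alias=archive_alias,
        store_base_path=".",
        store_type="sqlite_alias_store",
        file_name=file_name,
        allow_write_access=True,
    )

    # register both archives, then address them explicitly when storing
    data_store_alias = kiara_api.context.data_registry.register_data_archive(data_store)
    alias_store_alias = kiara_api.context.alias_registry.register_archive(alias_archive)

    return kiara_api.store_values(
        values=values_to_store,
        alias_map=alias_map,
        data_store=data_store_alias,
        alias_store=alias_store_alias,
    )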
12 changes: 9 additions & 3 deletions src/kiara/interfaces/python_api/__init__.py
@@ -1641,6 +1641,7 @@ def store_value(
alias: Union[str, Iterable[str], None],
allow_overwrite: bool = True,
data_store: Union[str, None] = None,
alias_store: Union[str, None] = None,
) -> StoreValueResult:
"""
Store the specified value in the (default) value store.
@@ -1664,7 +1665,10 @@
)
if alias:
self.context.alias_registry.register_aliases(
value_obj.value_id, *alias, allow_overwrite=allow_overwrite
value_obj.value_id,
*alias,
allow_overwrite=allow_overwrite,
alias_store=alias_store,
)
result = StoreValueResult(
value=value_obj,
@@ -1693,6 +1697,7 @@ def store_values(
alias_map: Union[Mapping[str, Iterable[str]], bool, str] = False,
allow_overwrite: bool = True,
data_store: Union[str, None] = None,
alias_store: Union[str, None] = None,
) -> StoreValuesResult:
"""
Store multiple values into the (default) kiara value store.
@@ -1729,11 +1734,11 @@
value=value_obj,
alias=None,
allow_overwrite=allow_overwrite,
store_id=data_store,
data_store=data_store,
alias_store=alias_store,
)
result[f"value_{idx}"] = store_result
else:

for field_name, value in values.items():
if alias_map is False:
aliases = None
@@ -1750,6 +1755,7 @@
alias=aliases,
allow_overwrite=allow_overwrite,
data_store=data_store,
alias_store=alias_store,
)
result[field_name] = store_result

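With 'alias_store' threaded through store_value and store_values, a caller can now direct aliases into a specific alias store. A hypothetical single-value call, assuming a configured API instance and the placeholder store aliases registered earlier:

def store_table_value(kiara_api):
    # store one value and register its alias in a specific (non-default) alias store
    return kiara_api.store_value(
        value="my_table",          # alias or id of a value already in the context
        alias="my_table",          # plain alias, so no mountpoint lookup happens
        allow_overwrite=True,
        data_store="my_export",    # data store alias registered beforehand (placeholder)
        alias_store="my_export",   # alias store alias, new in this commit (placeholder)
    )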
15 changes: 7 additions & 8 deletions src/kiara/registries/__init__.py
@@ -391,27 +391,26 @@ def create_new_store_config(

archive_path = os.path.abspath(os.path.join(store_base_path, file_name))

if os.path.exists(archive_path):
raise Exception(f"Archive path '{archive_path}' already exists.")

Path(archive_path).parent.mkdir(exist_ok=True, parents=True)

# Connect to the SQLite database (or create it if it doesn't exist)
import sqlite3

if not os.path.exists(archive_path):
Path(archive_path).parent.mkdir(exist_ok=True, parents=True)

conn = sqlite3.connect(archive_path)

# Create a cursor object
c = conn.cursor()

# Create table
c.execute(
"""CREATE TABLE archive_metadata
"""CREATE TABLE IF NOT EXISTS archive_metadata
(key text PRIMARY KEY , value text NOT NULL)"""
)

# Insert a row of data
c.execute(f"INSERT INTO archive_metadata VALUES ('archive_id','{store_id}')")
c.execute(
f"INSERT OR IGNORE INTO archive_metadata VALUES ('archive_id','{store_id}')"
)

# Save (commit) the changes
conn.commit()
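As a companion to the now-idempotent table setup, a small sketch that reads the archive id back out of the 'archive_metadata' table; the path is a placeholder and the archive file is assumed to have been created as above.

import sqlite3

archive_path = "my_export.kiarchive"  # placeholder path to an existing archive file

conn = sqlite3.connect(archive_path)
try:
    # the schema above stores a single ('archive_id', <store_id>) row
    row = conn.execute(
        "SELECT value FROM archive_metadata WHERE key = ?", ("archive_id",)
    ).fetchone()
    archive_id = row[0] if row else None
    print(archive_id)
finally:
    conn.close()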
18 changes: 13 additions & 5 deletions src/kiara/registries/aliases/__init__.py
@@ -11,10 +11,11 @@
Callable,
Dict,
Iterable,
List,
Mapping,
NamedTuple,
Set,
Union, List,
Union,
)

import structlog
@@ -89,6 +90,7 @@ class AliasRegistry(object):
There is also a 'default' alias store, which is used when the alias provided does not contain a '#' indicating a
mountpoint.
"""

def __init__(self, kiara: "Kiara"):

self._kiara: Kiara = kiara
@@ -169,7 +171,6 @@ def register_archive(
self._dynamic_stores = None
self._cached_dynamic_aliases = None


event = AliasArchiveAddedEvent(
kiara_id=self._kiara.id,
alias_archive_id=archive.archive_id,
@@ -380,7 +381,9 @@ def register_aliases(
mountpoint, alias_name = alias.split("#", maxsplit=1)
alias_store_alias = self._mountpoints.get(mountpoint, None)
if alias_store_alias is None:
raise Exception(f"Invalid mountpoint: '{mountpoint}' not registered.")
raise Exception(
f"Invalid mountpoint: '{mountpoint}' not registered."
)

if alias_store and alias_store != alias_store_alias:
raise Exception(
@@ -391,7 +394,10 @@
alias_store_alias = alias_store

else:
alias_store_alias = self.default_alias_store
if alias_store:
alias_store_alias = alias_store
else:
alias_store_alias = self.default_alias_store
alias_name = alias

if alias_name in INVALID_ALIAS_NAMES:
Expand Down Expand Up @@ -432,7 +438,9 @@ def register_aliases(
if store is None:
raise Exception(f"Invalid alias store: '{store_alias}' not registered.")
if not store.is_writeable():
raise Exception(f"Can't register aliases in store '{store_alias}': store is read-only.")
raise Exception(
f"Can't register aliases in store '{store_alias}': store is read-only."
)

for store_alias, aliases_for_store in aliases_to_store.items():

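The routing rules that register_aliases applies after this change can be restated as a standalone helper; 'resolve_alias_store' is an illustrative name, not part of kiara. A '<mountpoint>#<alias>' prefix selects the store registered for that mountpoint (and must agree with an explicit 'alias_store', if one is passed), while a plain alias falls back to the explicit 'alias_store' or, failing that, the default alias store.

from typing import Dict, Optional, Tuple

def resolve_alias_store(
    alias: str,
    mountpoints: Dict[str, str],
    alias_store: Optional[str],
    default_alias_store: str,
) -> Tuple[str, str]:
    # return the (store alias, alias name) pair an alias should be registered under
    if "#" in alias:
        mountpoint, alias_name = alias.split("#", maxsplit=1)
        store_alias = mountpoints.get(mountpoint)
        if store_alias is None:
            raise Exception(f"Invalid mountpoint: '{mountpoint}' not registered.")
        if alias_store and alias_store != store_alias:
            raise Exception("Explicit alias store conflicts with the alias' mountpoint.")
        return store_alias, alias_name
    # no mountpoint: an explicit alias store takes precedence over the default
    return (alias_store or default_alias_store), alias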
12 changes: 10 additions & 2 deletions src/kiara/registries/aliases/sqlite_store.py
@@ -45,10 +45,18 @@ def _load_store_config(
# config = SqliteArchiveConfig(sqlite_db_path=store_uri)
return {"sqlite_db_path": store_uri}

def __init__(self, archive_alias: str, archive_config: SqliteArchiveConfig):
def __init__(
self,
archive_alias: str,
archive_config: SqliteArchiveConfig,
force_read_only: bool = False,
):

AliasArchive.__init__(
self, archive_alias=archive_alias, archive_config=archive_config
self,
archive_alias=archive_alias,
archive_config=archive_config,
force_read_only=force_read_only,
)
self._db_path: Union[Path, None] = None
self._cached_engine: Union[Engine, None] = None
50 changes: 29 additions & 21 deletions src/kiara/registries/data/__init__.py
@@ -335,39 +335,39 @@ def data_archives(self) -> Mapping[str, DataArchive]:
return self._data_archives

def get_archive(
self, archive_id: Union[None, uuid.UUID, str] = None
self, archive_id_or_alias: Union[None, uuid.UUID, str] = None
) -> DataArchive:

if archive_id is None:
archive_id = self.default_data_store
if archive_id is None:
if archive_id_or_alias is None:
archive_id_or_alias = self.default_data_store
if archive_id_or_alias is None:
raise Exception("Can't retrieve default data archive, none set (yet).")

if isinstance(archive_id, uuid.UUID):
if isinstance(archive_id_or_alias, uuid.UUID):
for archive in self._data_archives.values():
if archive.archive_id == archive_id:
if archive.archive_id == archive_id_or_alias:
return archive

raise Exception(
f"Can't retrieve archive with id '{archive_id}': no archive with that id registered."
f"Can't retrieve archive with id '{archive_id_or_alias}': no archive with that id registered."
)

if archive_id in self._data_archives.keys():
return self._data_archives[archive_id]
if archive_id_or_alias in self._data_archives.keys():
return self._data_archives[archive_id_or_alias]
else:
try:
_archive_id = uuid.UUID(archive_id)
_archive_id = uuid.UUID(archive_id_or_alias)
for archive in self._data_archives.values():
if archive.archive_id == _archive_id:
return archive
raise Exception(
f"Can't retrieve archive with id '{archive_id}': no archive with that id registered."
f"Can't retrieve archive with id '{archive_id_or_alias}': no archive with that id registered."
)
except Exception:
pass

raise Exception(
f"Can't retrieve archive with id '{archive_id}': no archive with that id registered."
f"Can't retrieve archive with id '{archive_id_or_alias}': no archive with that id registered."
)

def find_store_id_for_value(self, value_id: uuid.UUID) -> Union[str, None]:
@@ -427,7 +427,7 @@ def get_value(self, value: Union[uuid.UUID, ValueLink, str, Path]) -> Value:
return _value

default_store: DataArchive = self.get_archive(
archive_id=self.default_data_store
archive_id_or_alias=self.default_data_store
)
if not default_store.has_value(value_id=_value_id):

@@ -464,34 +464,42 @@ def get_value(self, value: Union[uuid.UUID, ValueLink, str, Path]) -> Value:
def store_value(
self,
value: Union[ValueLink, uuid.UUID, str],
store_id: Union[str, None] = None,
data_store: Union[str, uuid.UUID, None] = None,
) -> Union[PersistedData, None]:
"""Store a value into a data store.
If 'data_store' is not provided, the default data store is used. If the 'data_store' argument is of
type uuid, the archive_id is used, if string, first it will be converted to an uuid, if that works,
again, the archive_id is used, if not, the string is used as the archive alias.
"""

if store_id is None:
store_id = self.default_data_store
if data_store is None:
data_store = self.default_data_store

_value = self.get_value(value)

store: DataStore = self.get_archive(archive_id=store_id) # type: ignore
store: DataStore = self.get_archive(archive_id_or_alias=data_store) # type: ignore
if not isinstance(store, DataStore):
raise Exception(f"Can't store value into store '{store_id}': not writable.")
raise Exception(
f"Can't store value into store '{data_store}': not writable."
)

# make sure all property values are available
if _value.pedigree != ORPHAN:
for value_id in _value.pedigree.inputs.values():
self.store_value(value=value_id, store_id=store_id)
self.store_value(value=value_id, data_store=data_store)

if not store.has_value(_value.value_id):
event = ValuePreStoreEvent(kiara_id=self._kiara.id, value=_value)
self._event_callback(event)
persisted_value = store.store_value(_value)
_value._is_stored = True
self._value_archive_lookup_map[_value.value_id] = store_id
self._value_archive_lookup_map[_value.value_id] = data_store
self._persisted_value_descs[_value.value_id] = persisted_value
property_values = _value.property_values

for property, property_value in property_values.items():
self.store_value(value=property_value, store_id=store_id)
self.store_value(value=property_value, data_store=data_store)
else:
persisted_value = None

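Since get_archive now accepts either an archive id or an alias, the lookup order it implements can be summarised in a standalone sketch; 'resolve_archive' is an illustrative name, and 'DataArchive' stands for any object exposing an 'archive_id' attribute.

import uuid
from typing import Dict, Optional, Union

def resolve_archive(
    archives: Dict[str, "DataArchive"],
    default_alias: Optional[str],
    archive_id_or_alias: Union[None, uuid.UUID, str] = None,
) -> "DataArchive":
    if archive_id_or_alias is None:
        if default_alias is None:
            raise Exception("Can't retrieve default data archive, none set (yet).")
        archive_id_or_alias = default_alias

    # a uuid always refers to an archive id
    if isinstance(archive_id_or_alias, uuid.UUID):
        for archive in archives.values():
            if archive.archive_id == archive_id_or_alias:
                return archive
        raise Exception(f"No archive with id '{archive_id_or_alias}' registered.")

    # a string is tried as a registered alias first, then as a uuid
    if archive_id_or_alias in archives:
        return archives[archive_id_or_alias]

    try:
        _archive_id = uuid.UUID(archive_id_or_alias)
    except Exception:
        _archive_id = None
    if _archive_id is not None:
        for archive in archives.values():
            if archive.archive_id == _archive_id:
                return archive

    raise Exception(f"No archive with id or alias '{archive_id_or_alias}' registered.")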
