
Commit

feat: table_spec: add new table_from_dict and table_dump_astuple APIs, update table_dump_* and table_from_* method signatures (#51)

Detailed changes include:
1. table_spec: add table_from_dict and table_dump_astuple APIs.
2. table_spec: table_from_* APIs now take a with_validation arg that allows skipping pydantic validation when importing.
3. table_spec: table_dump_* and table_from_* APIs now accept kwargs, which are passed through to the underlying pydantic model_dump / model_validate APIs.
4. add tests for the table_from_* and table_dump_* series of APIs.

Other minor changes:
1. table_spec: remove debug logging from this module.
pga2rn authored Dec 14, 2024
1 parent 853de73 commit 5044f51
Showing 3 changed files with 157 additions and 17 deletions.
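
Taken together, the changes give TableSpec symmetric import/export helpers. Below is a minimal usage sketch; the MyTable model and its columns are illustrative only (they mirror the style of the new tests), while the table_from_* / table_dump_* calls and the with_validation flag come from this commit:

from typing import Optional

from typing_extensions import Annotated

from simple_sqlite3_orm import ConstrainRepr, TableSpec, TypeAffinityRepr


class MyTable(TableSpec):
    # illustrative columns, following the pattern used in tests/test__table_spec.py
    id: Annotated[int, TypeAffinityRepr(int), ConstrainRepr("PRIMARY KEY")]
    name: Annotated[str, TypeAffinityRepr(str), ConstrainRepr("NOT NULL")]
    score: Annotated[Optional[float], TypeAffinityRepr(float)] = None


# importing: from a raw row tuple or a mapping; with_validation=False skips pydantic validation
row = MyTable.table_from_tuple((1, "alice", 0.5))
row_fast = MyTable.table_from_dict({"id": 1, "name": "alice", "score": 0.5}, with_validation=False)

# exporting: extra kwargs are forwarded to pydantic's model_dump underneath
as_dict = row.table_dump_asdict()                # {'id': 1, 'name': 'alice', 'score': 0.5}
as_tuple = row.table_dump_astuple("id", "name")  # (1, 'alice')
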
src/simple_sqlite3_orm/_orm/_base.py (2 changes: 1 addition & 1 deletion)
@@ -102,7 +102,7 @@ def __class_getitem__(cls, params: Any | type[Any] | type[TableSpecType]) -> Any
     def orm_con(self) -> sqlite3.Connection:
         """A reference to the underlying sqlite3.Connection.
-        This is for advanced database execution.
+        This is for directly executing sql stmts.
         """
         return self._con

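For context on the docstring change above: orm_con exposes the wrapped sqlite3.Connection so callers can run statements the ORM does not wrap. A rough sketch under that assumption, taking an already-constructed ORMBase instance (how the instance is built is not part of this diff):

import sqlite3


def tune_connection(orm) -> None:
    # orm is assumed to be an ORMBase instance; orm_con is the property shown in the hunk above
    con: sqlite3.Connection = orm.orm_con
    con.execute("PRAGMA journal_mode=WAL;")  # directly executing a sql stmt, per the new docstring
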
src/simple_sqlite3_orm/_table_spec.py (68 changes: 52 additions & 16 deletions)
@@ -1,7 +1,7 @@
 from __future__ import annotations

-import logging
 import sqlite3
+from collections.abc import Mapping
 from typing import Any, Iterable, Literal, TypeVar

 from pydantic import BaseModel
@@ -12,7 +12,6 @@
     INSERT_OR,
     ORDER_DIRECTION,
     SQLiteBuiltInFuncs,
-    SQLiteStorageClass,
 )
 from simple_sqlite3_orm._utils import (
     ConstrainRepr,
@@ -21,8 +20,6 @@
     lru_cache,
 )

-logger = logging.getLogger(__name__)
-

 class TableSpec(BaseModel):
     """Define table as pydantic model, with specific APIs."""
@@ -118,7 +115,6 @@ def table_dump_column(cls, column_name: str) -> str:
             raise ValueError("data affinity must be set")

         res = f"{column_name} {datatype_name} {constrain}".strip()
-        logger.debug(f"{column_name=}: {res}")
         return res

     @classmethod
@@ -154,7 +150,6 @@ def table_create_stmt(
             f"{table_name} ({cols_spec})",
             f"{','.join(table_options)}",
         )
-        logger.debug(res)
         return res

     @classmethod
@@ -198,7 +193,6 @@ def table_create_index_stmt(
             f"{index_name}",
             f"ON {table_name} {indexed_columns_stmt}",
         )
-        logger.debug(res)
         return res

     @classmethod
@@ -221,16 +215,44 @@ def table_row_factory(
         return cls.model_validate(dict(zip(_fields, _row)))

     @classmethod
-    def table_from_tuple(cls, _row: Iterable[Any]) -> Self:
+    def table_from_tuple(
+        cls, _row: Iterable[Any], *, with_validation: bool = True, **kwargs
+    ) -> Self:
         """A raw row_factory that converts the input _row to a TableSpec instance.

         Args:
             _row (tuple[Any, ...]): the raw table row as a tuple.
+            with_validation (bool): if set to False, use pydantic model_construct to construct
+                the instance directly without validation. Defaults to True.
+            **kwargs: extra kwargs passed to the pydantic model_validate API. Note that the
+                kwargs are only used when with_validation is True.
+
+        Returns:
+            An instance of self.
+        """
+        if with_validation:
+            return cls.model_validate(dict(zip(cls.model_fields, _row)), **kwargs)
+        return cls.model_construct(**dict(zip(cls.model_fields, _row)))
+
+    @classmethod
+    def table_from_dict(
+        cls, _map: Mapping[str, Any], *, with_validation: bool = True, **kwargs
+    ) -> Self:
+        """A raw row_factory that converts the input mapping to a TableSpec instance.
+
+        Args:
+            _map (Mapping[str, Any]): the raw table row as a dict.
+            with_validation (bool, optional): if set to False, use pydantic model_construct to
+                construct the instance directly without validation. Defaults to True.
+            **kwargs: extra kwargs passed to the pydantic model_validate API. Note that the
+                kwargs are only used when with_validation is True.

         Returns:
             An instance of self.
         """
-        return cls.model_validate(dict(zip(cls.model_fields, _row)))
+        if with_validation:
+            return cls.model_validate(_map, **kwargs)
+        return cls.model_construct(**_map)

     @classmethod
     @lru_cache
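
Both table_from_* variants above are shaped like row factories, which suggests wiring them straight into sqlite3. A sketch under that assumption, reusing the illustrative MyTable model from the earlier example (the my_table name and schema are made up for the example):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE my_table (id INTEGER PRIMARY KEY, name TEXT NOT NULL, score REAL);")
con.execute("INSERT INTO my_table VALUES (1, 'alice', 0.5);")

cur = con.cursor()
# with_validation=False routes through model_construct, skipping pydantic validation on bulk reads
cur.row_factory = lambda _cur, _row: MyTable.table_from_tuple(_row, with_validation=False)
rows = cur.execute("SELECT * FROM my_table;").fetchall()  # -> [MyTable(id=1, name='alice', score=0.5)]
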
@@ -290,7 +312,6 @@ def table_insert_stmt(
             gen_insert_value_stmt,
             gen_returning_stmt,
         )
-        logger.debug(res)
         return res

     @classmethod
@@ -348,7 +369,6 @@ def table_select_all_stmt(
             gen_order_by_stmt,
             gen_pagination,
         )
-        logger.debug(res)
         return res

     @classmethod
@@ -415,7 +435,6 @@ def table_select_stmt(
             gen_order_by_stmt,
             gen_limit_stmt,
         )
-        logger.debug(res)
         return res

     @classmethod
@@ -481,16 +500,18 @@ def table_delete_stmt(
             gen_order_by_stmt,
             gen_limit_stmt,
         )
-        logger.debug(res)
         return res

-    def table_dump_asdict(self, *cols: str) -> dict[str, SQLiteStorageClass]:
-        """Dump self to a dict containing all col values.
+    def table_dump_asdict(self, *cols: str, **kwargs) -> dict[str, Any]:
+        """Dump self as a dict, containing all cols or the specified cols.

         Under the hood this method calls pydantic model_dump on self.
         The dumped dict can be used to directly insert into the table.

         Args:
             *cols: which cols to export; if not specified, export all cols.
+            **kwargs: any other kwargs passed to the pydantic model_dump method.
+                Note that the include kwarg is used to specify which cols to dump.

         Raises:
             ValueError if failed to serialize the model, wrapping underlying
@@ -501,9 +522,24 @@ def table_dump_asdict(self, *cols: str) -> dict[str, SQLiteStorageClass]:
         """
         try:
             _included_cols = set(cols) if cols else None
-            return self.model_dump(include=_included_cols)
+            return self.model_dump(include=_included_cols, **kwargs)
         except Exception as e:
             raise ValueError(f"failed to dump as dict: {e!r}") from e

+    def table_dump_astuple(self, *cols: str, **kwargs) -> tuple[Any, ...]:
+        """Dump self's values as a tuple, containing all cols or the specified cols.
+
+        This method is basically the same as table_dump_asdict, but returns a
+        tuple of the dumped values instead.
+
+        Returns:
+            A tuple of dumped col values.
+        """
+        try:
+            _included_cols = set(cols) if cols else None
+            return tuple(self.model_dump(include=_included_cols, **kwargs).values())
+        except Exception as e:
+            raise ValueError(f"failed to dump as tuple: {e!r}") from e


 TableSpecType = TypeVar("TableSpecType", bound=TableSpec)
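
On the dump side, the two methods pair naturally with sqlite3's two placeholder styles: table_dump_asdict with named placeholders and table_dump_astuple with positional ones. A small sketch continuing the illustrative con / MyTable / my_table setup from the row-factory example above:

entries = [MyTable(id=i, name=f"user-{i}") for i in range(2, 5)]

# the dict form feeds named placeholders; extra kwargs would be forwarded to model_dump
con.executemany(
    "INSERT INTO my_table (id, name, score) VALUES (:id, :name, :score);",
    [entry.table_dump_asdict() for entry in entries],
)

# the tuple form feeds positional placeholders, in model field order
con.executemany(
    "INSERT OR REPLACE INTO my_table (id, name, score) VALUES (?, ?, ?);",
    [entry.table_dump_astuple() for entry in entries],
)
con.commit()
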
tests/test__table_spec.py (104 changes: 104 additions & 0 deletions)
@@ -0,0 +1,104 @@
from __future__ import annotations

from collections.abc import Mapping
from typing import Any, Iterable, Optional

import pytest
from typing_extensions import Annotated

from simple_sqlite3_orm import ConstrainRepr, TableSpec, TypeAffinityRepr


class SimpleTableForTest(TableSpec):
    id: Annotated[
        int,
        TypeAffinityRepr(int),
        ConstrainRepr("PRIMARY KEY"),
    ]

    id_str: Annotated[
        str,
        TypeAffinityRepr(str),
        ConstrainRepr("NOT NULL"),
    ]

    extra: Annotated[
        Optional[float],
        TypeAffinityRepr(float),
    ] = None


@pytest.mark.parametrize(
    "_in, _validate, _expected",
    (
        ([1, "1", 1.0], True, SimpleTableForTest(id=1, id_str="1", extra=1.0)),
        ([1, "1", 1.0], False, SimpleTableForTest(id=1, id_str="1", extra=1.0)),
    ),
)
def test_table_from_tuple(
    _in: Iterable[Any], _validate: bool, _expected: SimpleTableForTest
):
    assert (
        SimpleTableForTest.table_from_tuple(_in, with_validation=_validate) == _expected
    )


@pytest.mark.parametrize(
    "_in, _validate, _expected",
    (
        (
            {"id": 1, "id_str": "1", "extra": 1.0},
            True,
            SimpleTableForTest(id=1, id_str="1", extra=1.0),
        ),
        (
            {"id": 1, "id_str": "1", "extra": 1.0},
            False,
            SimpleTableForTest(id=1, id_str="1", extra=1.0),
        ),
    ),
)
def test_table_from_dict(
    _in: Mapping[str, Any], _validate: bool, _expected: SimpleTableForTest
):
    assert (
        SimpleTableForTest.table_from_dict(_in, with_validation=_validate) == _expected
    )


@pytest.mark.parametrize(
    "_in, _expected",
    (
        (
            SimpleTableForTest(id=1, id_str="1", extra=1.0),
            {"id": 1, "id_str": "1", "extra": 1.0},
        ),
        (
            SimpleTableForTest(id=1, id_str="1", extra=1.0),
            {"id": 1, "extra": 1.0},
        ),
    ),
)
def test_table_dump_asdict(_in: SimpleTableForTest, _expected: dict[str, Any]):
    assert _in.table_dump_asdict(*_expected) == _expected


@pytest.mark.parametrize(
    "_in, _cols, _expected",
    (
        (
            SimpleTableForTest(id=1, id_str="1", extra=1.0),
            ["id", "id_str", "extra"],
            (1, "1", 1.0),
        ),
        (
            SimpleTableForTest(id=1, id_str="1", extra=1.0),
            ["extra"],
            (1.0,),
        ),
    ),
)
def test_table_dump_astuple(
    _in: SimpleTableForTest, _cols: tuple[str, ...], _expected: tuple[Any, ...]
):
    assert _in.table_dump_astuple(*_cols) == _expected
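
The new tests cover the round-trip happy path for both validation modes. A possible companion test (not part of this commit, but it could be appended to this module, which already has pytest and SimpleTableForTest in scope) would pin down what with_validation=False actually changes: model_construct accepts values that validation would reject.

def test_table_from_tuple_with_validation_flag():
    bad_row = ("not-an-int", "1", 1.0)

    # model_construct stores the raw value as-is when validation is skipped
    inst = SimpleTableForTest.table_from_tuple(bad_row, with_validation=False)
    assert inst.id == "not-an-int"

    # the same row fails pydantic validation when with_validation=True (the default)
    with pytest.raises(Exception):
        SimpleTableForTest.table_from_tuple(bad_row, with_validation=True)
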

1 comment on commit 5044f51

@github-actions



Coverage Report
File                              Stmts   Miss   Cover   Missing
src/simple_sqlite3_orm
   __init__.py                        7      0    100%
   _sqlite_spec.py                   36      0    100%
   _table_spec.py                   185     34     81%   49, 58, 69, 73–74, 86, 98, 115, 174, 179–181, 296, 298, 300–301, 349, 352, 356–358, 360–364, 366, 372, 415–416, 526–527, 541–542
   _types.py                         22      1     95%   31
   _utils.py                         60      5     91%   52, 65, 68, 78, 91
   utils.py                         120     36     70%   127, 134–135, 170–173, 219, 232, 252–256, 282, 286, 327, 332–339, 351–353, 355–356, 359, 361–363, 372–373
src/simple_sqlite3_orm/_orm
   __init__.py                        4      0    100%
   _async.py                         75     11     85%   62, 93, 95–96, 105–106, 108, 143, 172, 227, 234
   _base.py                         124     10     91%   38, 40–41, 44–45, 143, 267, 307, 430, 458
   _multi_thread.py                  95     12     87%   35, 80, 162, 164–165, 174–175, 177, 213, 233, 256, 264
TOTAL                              728    109     85%

Tests: 71 | Skipped: 0 💤 | Failures: 0 ❌ | Errors: 0 🔥 | Time: 2m 5s ⏱️
