Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

remove unused import #2295

Merged
merged 5 commits into from
Jul 15, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Fix the bug where the query itself changes after encoding
- Fix the dependency error in copy_vectors within vector_index.
- Fix Template substitutions

- Remove the unused `import_` helper function

## [0.2.0](https://github.com/superduper-io/superduper/compare/0.1.3...0.2.0]) (2024-Jun-21)

Expand Down
2 changes: 1 addition & 1 deletion deploy/databases/azuresql/example.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,4 @@
port = 1433
host = 'localhost'

db = superduper(f"mssql://{user}:{password}@{host}:{port}")
db = superduper(f"mssql://{user}:{password}@{host}:{port}")
4 changes: 3 additions & 1 deletion deploy/databases/clickhouse/example.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,6 @@
port = 8123
host = 'localhost'

db = superduper(f"clickhouse://{user}:{password}@{host}:{port}", metadata_store=f'mongomock://meta')
db = superduper(
f"clickhouse://{user}:{password}@{host}:{port}", metadata_store=f'mongomock://meta'
)
2 changes: 1 addition & 1 deletion deploy/databases/mongodb/example.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@
host = 'localhost'
database = 'test_db'

db = superduper(f"mongodb://{user}:{password}@{host}:{port}/{database}")
db = superduper(f"mongodb://{user}:{password}@{host}:{port}/{database}")
2 changes: 1 addition & 1 deletion deploy/databases/mssql/example.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,4 @@
port = 1433
host = 'localhost'

db = superduper(f"mssql://{user}:{password}@{host}:{port}")
db = superduper(f"mssql://{user}:{password}@{host}:{port}")
2 changes: 1 addition & 1 deletion deploy/databases/mysql/example.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@
host = 'localhost'
database = 'test_db'

db = superduper(f"mysql://{user}:{password}@{host}:{port}/{database}")
db = superduper(f"mysql://{user}:{password}@{host}:{port}/{database}")
14 changes: 7 additions & 7 deletions deploy/databases/oracle/example.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,16 +8,16 @@

# OK
con = ibis.oracle.connect(
user = 'superduper',
password = 'superduper',
port = 1521,
host = 'localhost',
database = 'test_db',
user='superduper',
password='superduper',
port=1521,
host='localhost',
database='test_db',
)

# ERROR AttributeError: 'str' object has no attribute 'username'
#con = ibis.connect(f"oracle://{user}:{password}@{host}:{port}/{database}")
# con = ibis.connect(f"oracle://{user}:{password}@{host}:{port}/{database}")

# ERROR ModuleNotFoundError: No module named 'ibis.backends.base'
# from superduper import superduper
#db = superduper(f"oracle://{user}:{password}@{host}:{port}/{database}")
# db = superduper(f"oracle://{user}:{password}@{host}:{port}/{database}")
2 changes: 1 addition & 1 deletion deploy/databases/postgresql/example.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,4 @@
database = 'test_db'
db_uri = f"postgres://{user}:{password}@{host}:{port}/{database}"

db = superduper(db_uri, metadata_store=db_uri.replace('postgres://', 'postgresql://'))
db = superduper(db_uri, metadata_store=db_uri.replace('postgres://', 'postgresql://'))
8 changes: 6 additions & 2 deletions docs/content/build_api_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,11 @@
sys.path.insert(0, os.getcwd())

from superduper.misc.annotations import extract_parameters, replace_parameters
from test.unittest.test_docstrings import FUNCTION_TEST_CASES, CLASS_TEST_CASES, TEST_CASES
from test.unittest.test_docstrings import (
FUNCTION_TEST_CASES,
CLASS_TEST_CASES,
TEST_CASES,
)

lookup = defaultdict(list)

Expand All @@ -24,6 +28,7 @@

import re


def format_docstring(docstring):
"""
Formats a docstring by creating a markdown table for parameters,
Expand Down Expand Up @@ -107,4 +112,3 @@ def format_docstring(docstring):
os.makedirs('docs/content/api/' + parent, exist_ok=True)
with open(f'docs/content/api/{parent}/{child}.md', 'w') as f:
f.write(content)

23 changes: 11 additions & 12 deletions docs/content/build_use_cases.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,8 @@ def build_use_case(path):

for cell in nb['cells']:
if (
cell['cell_type'] == 'raw'
and cell['source']
cell['cell_type'] == 'raw'
and cell['source']
and cell['source'][0].startswith('<snippet:')
):
snippet, tabs = re.match(
Expand All @@ -67,10 +67,9 @@ def build_use_cases():
built = build_use_case(f'./use_cases/{file}')
with open(f'./use_cases/{file[1:]}', 'w') as f:
json.dump(built, f)


def get_snippet(snippet_nb_cells, snippet_tab):

def get_snippet(snippet_nb_cells, snippet_tab):
snippet_cells = []
snippet_tab_cell = None
for cell in snippet_nb_cells:
Expand Down Expand Up @@ -103,11 +102,10 @@ def build_notebook_from_tabs(path, selected_tabs):

for cell in nb['cells']:
if (
cell['cell_type'] == 'raw'
and cell['source']
cell['cell_type'] == 'raw'
and cell['source']
and cell['source'][0].startswith('<snippet:')
):

if non_snippet_group:
snippets_group.append(non_snippet_group)
non_snippet_group = []
Expand All @@ -131,13 +129,13 @@ def build_notebook_from_tabs(path, selected_tabs):
if non_snippet_group:
snippets_group.append(non_snippet_group)

snippets_group.append((cell, ))
snippets_group.append((cell,))
non_snippet_group = []
else:
if non_snippet_group:
snippets_group.append(non_snippet_group)
non_snippet_group = []
snippets_group.append((cell, ))
snippets_group.append((cell,))

if non_snippet_group:
snippets_group.append(non_snippet_group)
Expand All @@ -151,28 +149,29 @@ def build_notebook_from_tabs(path, selected_tabs):
built_nb['cells'].append(cell)
ix += 1


notebook_bytes = json.dumps(built_nb).encode('utf-8')
notebook_bytes = json.dumps(built_nb).encode('utf-8')
return Response(content=notebook_bytes, media_type="application/octet-stream")


def serve_notebook_builder():
app = FastAPI()

app.add_middleware(
CORSMiddleware,
allow_origins=["*"], # Allows all origins
allow_credentials=True,
allow_methods=["*"], # Allows all methods
allow_headers=["*"], # Allows all headers
)

@app.post("/build_notebook")
def build(usecase_path: str, tabs: t.List[str]):
return build_notebook_from_tabs(usecase_path, tabs)

if __name__ == "__main__":
uvicorn.run(app, host="0.0.0.0", port=8000)


if __name__ == '__main__':
if not sys.argv[1:] or sys.argv[1] == 'build':
build_use_cases()
Expand Down
32 changes: 18 additions & 14 deletions docs/content/to_docusaurus_markdown.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
{body}
</Tabs>
"""

TABS_ITEM = """ <TabItem value="{title}" label="{title}" default>
{content}
</TabItem>"""
Expand All @@ -43,9 +43,7 @@ def render_notebook_as_chunks(nb):
chunks = []
for cell in nb['cells']:
if cell['cell_type'] == 'markdown':
chunks.append({
'content': cell['source'] + ['\n', '\n']
})
chunks.append({'content': cell['source'] + ['\n', '\n']})

elif cell['cell_type'] == 'code':
if not cell['source']:
Expand All @@ -57,24 +55,30 @@ def render_notebook_as_chunks(nb):
title = match.groups()[1].strip()
if 'tabs' not in chunks[-1]:
chunks.append({'tabs': []})
chunks[-1]['tabs'].append({
'content': [CODE_BLOCK[0], *cell['source'][1:], CODE_BLOCK[-1]],
'title': title,
})
chunks[-1]['tabs'].append(
{
'content': [CODE_BLOCK[0], *cell['source'][1:], CODE_BLOCK[-1]],
'title': title,
}
)
elif match: # testing not handled yet
continue
else:
chunks.append({
'content': [CODE_BLOCK[0], *cell['source'], CODE_BLOCK[-1].replace(' ', '') + '\n\n']
}
)
chunks.append(
{
'content': [
CODE_BLOCK[0],
*cell['source'],
CODE_BLOCK[-1].replace(' ', '') + '\n\n',
]
}
)
else:
raise Exception(f'Unknown source type {cell["cell_type"]}, {cell}')

return chunks



if __name__ == '__main__':
directory = sys.argv[1]

Expand All @@ -95,7 +99,7 @@ def render_notebook_as_chunks(nb):
chunks = render_notebook_as_chunks(content)
md = render_chunks_as_md(chunks)
target_filename = file.replace('.ipynb', '.md')

md = f'---\nsidebar_label: {title}\nfilename: {target_filename}\n---\n' + md

with open(f'{directory}/{target_filename}', 'w') as f:
Expand Down
2 changes: 1 addition & 1 deletion docs/content/tutorials/my_jupyter_convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,4 +48,4 @@
text += '\n' + ''.join(cell['source']) + '\n'

with open(sys.argv[1].replace('.ipynb', '.md'), "w") as f:
f.write(text)
f.write(text)
7 changes: 4 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ quiet = false
verbose = 0
whitelist-regex = []

[tool.ruff]
[tool.ruff.lint]
extend-select = [
"I", # Missing required import (auto-fixable)
"F", # PyFlakes
Expand All @@ -117,14 +117,15 @@ ignore = [
"D104", # Missing docstring in public package
"D107", # Missing docstring in __init__
"D105", # Missing docstring in magic method
"D203", # 1 blank line required before class docstring
"D212", # Multi-line docstring summary should start at the first line
"D213", # Multi-line docstring summary should start at the second line
"D401",
"E402",
]

[tool.ruff.isort]
[tool.ruff.lint.isort]
combine-as-imports = true

[tool.ruff.per-file-ignores]
[tool.ruff.lint.per-file-ignores]
"test/**" = ["D"]
8 changes: 5 additions & 3 deletions superduper/backends/mongodb/query.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,9 +53,11 @@ def extract_character(s):
new_dict[new_key] = _serialize_special_character(value, to=to)
elif isinstance(value, list):
new_dict[new_key] = [
_serialize_special_character(item, to=to)
if isinstance(item, dict)
else item
(
_serialize_special_character(item, to=to)
if isinstance(item, dict)
else item
)
for item in value
]
else:
Expand Down
8 changes: 5 additions & 3 deletions superduper/base/document.py
Original file line number Diff line number Diff line change
Expand Up @@ -493,9 +493,11 @@ def _deep_flat_decode(r, builds, getters: _Getters, db: t.Optional['Datalayer']
if isinstance(r, dict):
literals = r.get('_literals', [])
return {
k: _deep_flat_decode(v, builds, getters=getters, db=db)
if k not in literals
else v
k: (
_deep_flat_decode(v, builds, getters=getters, db=db)
if k not in literals
else v
)
for k, v in r.items()
}
if isinstance(r, str) and r.startswith('?'):
Expand Down
27 changes: 1 addition & 26 deletions superduper/components/component.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
import yaml

from superduper import logging
from superduper.base.constant import KEY_BLOBS, KEY_BUILDS, KEY_FILES
from superduper.base.constant import KEY_BLOBS, KEY_FILES
from superduper.base.leaf import Leaf
from superduper.jobs.job import ComponentJob, Job

Expand Down Expand Up @@ -60,31 +60,6 @@ def _build_info_from_path(path: str):
return config_object


def import_(r=None, path=None, db=None):
"""Helper function for importing component JSONs, YAMLs, etc.

:param r: Object to be imported.
:param path: Components directory.
:param db: Datalayer instance.
"""
from superduper.base.document import _build_leaves

if r is None:
try:
with open(f'{path}/component.json') as f:
r = json.load(f)
except FileNotFoundError:
with open(f'{path}/component.yaml') as f:
r = yaml.safe_load(f)
for id_ in os.listdir(path):
if id_ == 'component.json' or id_ == 'component.yaml':
continue
with open(f'{path}/{id_}', 'rb') as f:
bytes[id_] = f.read()
r[KEY_BUILDS] = _build_leaves(r[KEY_BUILDS], db=db)[0]
return r[KEY_BUILDS][r['_base']]


def getdeepattr(obj, attr):
"""Get nested attribute with dot notation.

Expand Down
2 changes: 0 additions & 2 deletions test/unittest/misc/test_downloaders.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import os
import tempfile
import uuid
from test.db_config import DBConfig

import pytest
Expand All @@ -21,7 +20,6 @@ def test_s3_and_web():

@pytest.fixture
def patch_cfg_downloads(monkeypatch):
td = str(uuid.uuid4())
with tempfile.TemporaryDirectory() as td:
monkeypatch.setattr(CFG.downloads, 'folder', td)
yield
Expand Down
Loading