diff --git a/BodoSQL/bodosql/bodosql_types/snowflake_catalog.py b/BodoSQL/bodosql/bodosql_types/snowflake_catalog.py
index 4213fde9dd..269a1af06f 100644
--- a/BodoSQL/bodosql/bodosql_types/snowflake_catalog.py
+++ b/BodoSQL/bodosql/bodosql_types/snowflake_catalog.py
@@ -179,8 +179,7 @@ def from_conn_str(cls, conn_str: str) -> "SnowflakeCatalog":
             raise BodoError(
                 f"SnowflakeCatalog.from_conn_str: `conn_str` must contain a user login name. {ref_str}"
             )
-
-
+
         if (password := conn_contents.pop("password", None)) is None:
             password = ""
         if (account := conn_contents.pop("account", None)) is None:
diff --git a/BodoSQL/bodosql/tests/test_literals.py b/BodoSQL/bodosql/tests/test_literals.py
index 880eb35a4c..0021bd2edf 100644
--- a/BodoSQL/bodosql/tests/test_literals.py
+++ b/BodoSQL/bodosql/tests/test_literals.py
@@ -563,7 +563,7 @@ def test_backslash_literals(spark_info, memory_leak_check):
 def test_large_day_literals(bodosql_date_types, memory_leak_check):
     """
     tests that Interval literals with large offsets are handled by BodoSQL.
-
+
     """
     query = "select A + Interval '180 Days' as output from table1"
     expected_output = pd.DataFrame(
diff --git a/BodoSQL/bodosql/tests/test_types/test_snowflake_catalog_basic.py b/BodoSQL/bodosql/tests/test_types/test_snowflake_catalog_basic.py
index 62ddb397bd..33aa000899 100644
--- a/BodoSQL/bodosql/tests/test_types/test_snowflake_catalog_basic.py
+++ b/BodoSQL/bodosql/tests/test_types/test_snowflake_catalog_basic.py
@@ -127,8 +127,6 @@ def impl4():
         "snowflake://myusername:mypassword@myaccount/mydatabase/myschema?role=USERADMIN&warehouse=mywarehouse",
         # Missing Password
         "snowflake://myusername@myaccount/mydatabase?warehouse=mywarehouse",
-
-
     ],
 )
 def test_snowflake_catalog_from_conn_str(conn_str: str):
diff --git a/README_pypi.md b/README_pypi.md
deleted file mode 100644
index a7a6c19709..0000000000
--- a/README_pypi.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# Bodo
-
-## Bodo: Extreme Performance Python Analytics Engine
-
-Bodo is a new compute engine using a novel JIT inferential compiler technology that brings supercomputing-like performance and scalability to native Python analytics code. Bodo automatically parallelizes Python/Pandas code allowing applications to scale to 10,000+ cores and petabytes of data.
-
-Bodo Documentation: https://docs.bodo.ai
diff --git a/bodo/hiframes/pd_groupby_ext.py b/bodo/hiframes/pd_groupby_ext.py
index 0a380cbff0..357d9d55d3 100644
--- a/bodo/hiframes/pd_groupby_ext.py
+++ b/bodo/hiframes/pd_groupby_ext.py
@@ -1657,7 +1657,7 @@ def resolve_window_funcs(
             args[3] if len(args) > 3 else kws.pop("na_position", default_tuple)
         )
         # We currently require only a single order by column as that satisfies the initial
-
+
         if not (
             isinstance(order_by, tuple)
             and all(isinstance(col_name, str) for col_name in order_by)
diff --git a/bodo/libs/array_kernels.py b/bodo/libs/array_kernels.py
index f32aaa42b6..d4f3119a16 100644
--- a/bodo/libs/array_kernels.py
+++ b/bodo/libs/array_kernels.py
@@ -4354,7 +4354,7 @@ def np_interp(x, xp, fp, left=None, right=None, period=None):
 
     # Using objmode since Numpy's implementation is optimized:
     # https://github.com/numpy/numpy/blob/1f82da745496092d85b402b1703877462a7c2de2/numpy/core/src/multiarray/compiled_base.c#L492
-
+
     def impl(x, xp, fp, left=None, right=None, period=None):  # pragma: no cover
         with bodo.objmode(A=out_type):
             A = np.interp(x, xp, fp, left, right, period)
diff --git a/bodo/tests/test_snowflake_read.py b/bodo/tests/test_snowflake_read.py
index 16256e24eb..5cca7d2cd2 100644
--- a/bodo/tests/test_snowflake_read.py
+++ b/bodo/tests/test_snowflake_read.py
@@ -3089,8 +3089,8 @@ def impl(query, conn_str):
 def test_snowflake_filter_pushdown_edgecase(memory_leak_check):
     """
     Test that filter pushdown works for an edge case
-
-
+
+
     """
 
     @bodo.jit(inline="never")
@@ -3153,7 +3153,7 @@ def impl_should_not_do_filter_pushdown(conn):
 def test_snowflake_filter_pushdown_edgecase_2(memory_leak_check):
     """
     Test that filter pushdown works for a specific edge case
-
+
     Originally, this would throw a compile time error in the filter
     pushdown code.
     """
diff --git a/buildscripts/bodo/conda-recipe/meta.yaml b/buildscripts/bodo/conda-recipe/meta.yaml
index 4741da13b0..8229f3f50c 100644
--- a/buildscripts/bodo/conda-recipe/meta.yaml
+++ b/buildscripts/bodo/conda-recipe/meta.yaml
@@ -98,4 +98,4 @@ about:
   home: https://bodo.ai
   license: Apache-2.0
   license_file: LICENSE
-  summary: Python Supercomputing Analytics Platform
+  summary: High-Performance Python Compute Engine for Data and AI
diff --git a/e2e-tests/search_grids/search.py b/e2e-tests/search_grids/search.py
index 42879f9eba..6ccd86e051 100644
--- a/e2e-tests/search_grids/search.py
+++ b/e2e-tests/search_grids/search.py
@@ -11,12 +11,12 @@
 import numpy as np
 import pandas as pd
 
-import bodo
-from bodo import prange
-
 # Local imports
 from tools import search
 
+import bodo
+from bodo import prange
+
 
 @bodo.jit(cache=True, spawn=True, replicated=["df_prod", "grids"])
 def search_all(categories, strategies, df_prod, grids, results_file):
diff --git a/pyproject.toml b/pyproject.toml
index 8f765e9b64..c2940ff72c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,8 +13,8 @@ build-backend = "scikit_build_core.build"
 [project]
 name = "bodo"
 dynamic = ["version"]
-description = "The Python Supercomputing Analytics Platform"
-readme = "README_pypi.md"
+description = "High-Performance Python Compute Engine for Data and AI"
+readme = "README.md"
 requires-python = ">=3.10,<3.13"
 keywords = ["data", "analytics", "cluster"]
 authors = [{ name = "Bodo.ai" }]