From 9500853a8f3433b115fe428d532347f95d4c422f Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Mon, 24 Mar 2025 13:20:18 -0700 Subject: [PATCH 01/26] wip: adding Text and Hybrid queries --- redisvl/query/query.py | 212 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 210 insertions(+), 2 deletions(-) diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 7dbddc91..e9038d02 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -91,7 +91,8 @@ def __init__( num_results (Optional[int], optional): The number of results to return. Defaults to 10. dialect (int, optional): The query dialect. Defaults to 2. sort_by (Optional[str], optional): The field to order the results by. Defaults to None. - in_order (bool, optional): Requires the terms in the field to have the same order as the terms in the query filter. Defaults to False. + in_order (bool, optional): Requires the terms in the field to have the same order as the + terms in the query filter. Defaults to False. params (Optional[Dict[str, Any]], optional): The parameters for the query. Defaults to None. Raises: @@ -136,7 +137,8 @@ def __init__( """A query for a simple count operation provided some filter expression. Args: - filter_expression (Optional[Union[str, FilterExpression]]): The filter expression to query with. Defaults to None. + filter_expression (Optional[Union[str, FilterExpression]]): The filter expression to + query with. Defaults to None. params (Optional[Dict[str, Any]], optional): The parameters for the query. Defaults to None. Raises: @@ -214,6 +216,7 @@ def __init__( "float32". num_results (int, optional): The top k results to return from the vector search. Defaults to 10. + return_score (bool, optional): Whether to return the vector distance. Defaults to True. dialect (int, optional): The RediSearch query dialect. @@ -647,3 +650,208 @@ def params(self) -> Dict[str, Any]: class RangeQuery(VectorRangeQuery): # keep for backwards compatibility pass + + +class TextQuery(FilterQuery): + def __init__( + self, + text: str, + text_field: str, + text_scorer: str = "TFIDF", + return_fields: Optional[List[str]] = None, + filter_expression: Optional[Union[str, FilterExpression]] = None, + num_results: int = 10, + return_score: bool = True, + dialect: int = 2, + sort_by: Optional[str] = None, + in_order: bool = False, + ): + """A query for running a full text and vector search, along with an optional + filter expression. + + Args: + text (str): The text string to perform the text search with. + text_field (str): The name of the document field to perform text search on. + text_scorer (str, optional): The text scoring algorithm to use. + Defaults to TFIDF. Options are {TFIDF, BM25, DOCNORM, DISMAX, DOCSCORE}. + See https://redis.io/docs/latest/develop/interact/search-and-query/advanced-concepts/scoring/ + return_fields (List[str]): The declared fields to return with search + results. + filter_expression (Union[str, FilterExpression], optional): A filter to apply + along with the vector search. Defaults to None. + num_results (int, optional): The top k results to return from the + search. Defaults to 10. + return_score (bool, optional): Whether to return the text score. + Defaults to True. + dialect (int, optional): The RediSearch query dialect. + Defaults to 2. + sort_by (Optional[str]): The field to order the results by. Defaults + to None. Results will be ordered by text score. 
+ in_order (bool): Requires the terms in the field to have + the same order as the terms in the query filter, regardless of + the offsets between them. Defaults to False. + + Raises: + TypeError: If filter_expression is not of type redisvl.query.FilterExpression + """ + self._text_field = text_field + self._num_results = num_results + self.set_filter(filter_expression) + query_string = self._build_query_string() + from nltk.corpus import stopwords + import nltk + + nltk.download('stopwords') + self._stopwords = set(stopwords.words('english')) + + + super().__init__(query_string) + + # Handle query modifiers + if return_fields: + self.return_fields(*return_fields) + + self.paging(0, self._num_results).dialect(dialect) + + if return_score: + self.return_fields(self.DISTANCE_ID) #TODO + + if sort_by: + self.sort_by(sort_by) + else: + self.sort_by(self.DISTANCE_ID) #TODO + + if in_order: + self.in_order() + + + def _tokenize_query(self, user_query: str) -> str: + """Convert a raw user query to a redis full text query joined by ORs""" + words = word_tokenize(user_query) + + tokens = [token.strip().strip(",").lower() for token in user_query.split()] + return " | ".join([token for token in tokens if token not in self._stopwords]) + + + def _build_query_string(self) -> str: + """Build the full query string for text search with optional filtering.""" + filter_expression = self._filter_expression + # TODO include text only + if isinstance(filter_expression, FilterExpression): + filter_expression = str(filter_expression) + + text = f"(~{Text(self._text_field) % self._tokenize_query(user_query)})" + + text_and_filter = text & self._filter_expression + + #TODO is this method even needed? use + return text_and_filter + + +class HybridQuery(VectorQuery, TextQuery): + def __init__(): + self, + text: str, + text_field: str, + vector: Union[List[float], bytes], + vector_field_name: str, + text_scorer: str = "TFIDF", + alpha: float = 0.7, + return_fields: Optional[List[str]] = None, + filter_expression: Optional[Union[str, FilterExpression]] = None, + dtype: str = "float32", + num_results: int = 10, + return_score: bool = True, + dialect: int = 2, + sort_by: Optional[str] = None, + in_order: bool = False, + ): + """A query for running a hybrid full text and vector search, along with + an optional filter expression. + + Args: + text (str): The text string to run text search with. + text_field (str): The name of the text field to search against. + vector (List[float]): The vector to perform the vector search with. + vector_field_name (str): The name of the vector field to search + against in the database. + text_scorer (str, optional): The text scoring algorithm to use. + Defaults to TFIDF. + alpha (float, optional): The amount to weight the vector similarity + score relative to the text similarity score. Defaults to 0.7 + return_fields (List[str]): The declared fields to return with search + results. + filter_expression (Union[str, FilterExpression], optional): A filter to apply + along with the vector search. Defaults to None. + dtype (str, optional): The dtype of the vector. Defaults to + "float32". + num_results (int, optional): The top k results to return from the + vector search. Defaults to 10. + return_score (bool, optional): Whether to return the vector + distance. Defaults to True. + dialect (int, optional): The RediSearch query dialect. + Defaults to 2. + sort_by (Optional[str]): The field to order the results by. Defaults + to None. Results will be ordered by vector distance. 
+ in_order (bool): Requires the terms in the field to have + the same order as the terms in the query filter, regardless of + the offsets between them. Defaults to False. + + Raises: + TypeError: If filter_expression is not of type redisvl.query.FilterExpression + + Note: + Learn more about vector queries in Redis: https://redis.io/docs/interact/search-and-query/search/vectors/#knn-search + """ + self._text = text + self._text_field_name = tex_field_name + self._vector = vector + self._vector_field_name = vector_field_name + self._dtype = dtype + self._num_results = num_results + self.set_filter(filter_expression) + query_string = self._build_query_string() + + # TODO how to handle multiple parents? call parent.__init__() manually? + super().__init__(query_string) + + # Handle query modifiers + if return_fields: + self.return_fields(*return_fields) + + self.paging(0, self._num_results).dialect(dialect) + + if return_score: + self.return_fields(self.DISTANCE_ID) + + if sort_by: + self.sort_by(sort_by) + else: + self.sort_by(self.DISTANCE_ID) + + if in_order: + self.in_order() + + + def _build_query_string(self) -> str: + """Build the full query string for hybrid search with optional filtering.""" + filter_expression = self._filter_expression + # TODO include hybrid + if isinstance(filter_expression, FilterExpression): + filter_expression = str(filter_expression) + return f"{filter_expression}=>[KNN {self._num_results} @{self._vector_field_name} ${self.VECTOR_PARAM} AS {self.DISTANCE_ID}]" + + @property + def params(self) -> Dict[str, Any]: + """Return the parameters for the query. + + Returns: + Dict[str, Any]: The parameters for the query. + """ + if isinstance(self._vector, bytes): + vector = self._vector + else: + vector = array_to_buffer(self._vector, dtype=self._dtype) + + return {self.VECTOR_PARAM: vector} + From 3fa93ff7cd89060946927ee0ad8add4925a96a2c Mon Sep 17 00:00:00 2001 From: Robert Shelton Date: Mon, 24 Mar 2025 16:44:29 -0400 Subject: [PATCH 02/26] tokenizer helper function --- redisvl/query/query.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/redisvl/query/query.py b/redisvl/query/query.py index e9038d02..68f740d0 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -704,7 +704,6 @@ def __init__( nltk.download('stopwords') self._stopwords = set(stopwords.words('english')) - super().__init__(query_string) # Handle query modifiers @@ -727,6 +726,7 @@ def __init__( def _tokenize_query(self, user_query: str) -> str: """Convert a raw user query to a redis full text query joined by ORs""" + words = word_tokenize(user_query) tokens = [token.strip().strip(",").lower() for token in user_query.split()] @@ -747,6 +747,12 @@ def _build_query_string(self) -> str: #TODO is this method even needed? 
use return text_and_filter +# from redisvl.utils.token_escaper import TokenEscaper +# escaper = TokenEscaper() +# def tokenize_and_escape_query(user_query: str) -> str: +# """Convert a raw user query to a redis full text query joined by ORs""" +# tokens = [escaper.escape(token.strip().strip(",").replace("“", "").replace("”", "").lower()) for token in user_query.split()] +# return " | ".join([token for token in tokens if token and token not in stopwords_en]) class HybridQuery(VectorQuery, TextQuery): def __init__(): From 3b8e2b6299377bd1b51daa46515673e594d47d6e Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Tue, 25 Mar 2025 19:30:13 -0700 Subject: [PATCH 03/26] adds TextQuery class --- redisvl/query/__init__.py | 4 + redisvl/query/query.py | 208 ++++++++++----------------------- tests/unit/test_query_types.py | 72 +++++++++++- 3 files changed, 134 insertions(+), 150 deletions(-) diff --git a/redisvl/query/__init__.py b/redisvl/query/__init__.py index 8246794f..900202ca 100644 --- a/redisvl/query/__init__.py +++ b/redisvl/query/__init__.py @@ -2,7 +2,9 @@ BaseQuery, CountQuery, FilterQuery, + HybridQuery, RangeQuery, + TextQuery, VectorQuery, VectorRangeQuery, ) @@ -14,4 +16,6 @@ "RangeQuery", "VectorRangeQuery", "CountQuery", + "TextQuery", + "HybridQuery", ] diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 68f740d0..ae8a9e43 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -1,6 +1,7 @@ from enum import Enum from typing import Any, Dict, List, Optional, Union +from redis.commands.search.aggregation import AggregateRequest, Desc from redis.commands.search.query import Query as RedisQuery from redisvl.query.filter import FilterExpression @@ -137,7 +138,7 @@ def __init__( """A query for a simple count operation provided some filter expression. Args: - filter_expression (Optional[Union[str, FilterExpression]]): The filter expression to + filter_expression (Optional[Union[str, FilterExpression]]): The filter expression to query with. Defaults to None. params (Optional[Dict[str, Any]], optional): The parameters for the query. Defaults to None. @@ -654,31 +655,32 @@ class RangeQuery(VectorRangeQuery): class TextQuery(FilterQuery): def __init__( - self, + self, text: str, text_field: str, - text_scorer: str = "TFIDF", - return_fields: Optional[List[str]] = None, + text_scorer: str = "BM25", filter_expression: Optional[Union[str, FilterExpression]] = None, + return_fields: Optional[List[str]] = None, num_results: int = 10, return_score: bool = True, dialect: int = 2, sort_by: Optional[str] = None, in_order: bool = False, + params: Optional[Dict[str, Any]] = None, ): """A query for running a full text and vector search, along with an optional filter expression. Args: - text (str): The text string to perform the text search with. + text (str): The text string to perform the text search with. text_field (str): The name of the document field to perform text search on. text_scorer (str, optional): The text scoring algorithm to use. - Defaults to TFIDF. Options are {TFIDF, BM25, DOCNORM, DISMAX, DOCSCORE}. + Defaults to BM25. Options are {TFIDF, BM25, DOCNORM, DISMAX, DOCSCORE}. See https://redis.io/docs/latest/develop/interact/search-and-query/advanced-concepts/scoring/ + filter_expression (Union[str, FilterExpression], optional): A filter to apply + along with the text search. Defaults to None. return_fields (List[str]): The declared fields to return with search results. 
- filter_expression (Union[str, FilterExpression], optional): A filter to apply - along with the vector search. Defaults to None. num_results (int, optional): The top k results to return from the search. Defaults to 10. return_score (bool, optional): Whether to return the text score. @@ -690,174 +692,82 @@ def __init__( in_order (bool): Requires the terms in the field to have the same order as the terms in the query filter, regardless of the offsets between them. Defaults to False. - - Raises: - TypeError: If filter_expression is not of type redisvl.query.FilterExpression + params (Optional[Dict[str, Any]], optional): The parameters for the query. + Defaults to None. """ + import nltk + from nltk.corpus import stopwords + + nltk.download("stopwords") + self._stopwords = set(stopwords.words("english")) + + self._text = text self._text_field = text_field - self._num_results = num_results + self._text_scorer = text_scorer + self.set_filter(filter_expression) - query_string = self._build_query_string() - from nltk.corpus import stopwords - import nltk + self._num_results = num_results - nltk.download('stopwords') - self._stopwords = set(stopwords.words('english')) + query_string = self._build_query_string() - super().__init__(query_string) + super().__init__( + query_string, + return_fields=return_fields, + num_results=num_results, + dialect=dialect, + sort_by=sort_by, + in_order=in_order, + params=params, + ) # Handle query modifiers - if return_fields: - self.return_fields(*return_fields) - + self.scorer(self._text_scorer) self.paging(0, self._num_results).dialect(dialect) if return_score: - self.return_fields(self.DISTANCE_ID) #TODO - - if sort_by: - self.sort_by(sort_by) - else: - self.sort_by(self.DISTANCE_ID) #TODO + self.with_scores() - if in_order: - self.in_order() - - - def _tokenize_query(self, user_query: str) -> str: + def tokenize_and_escape_query(self, user_query: str) -> str: """Convert a raw user query to a redis full text query joined by ORs""" + from redisvl.utils.token_escaper import TokenEscaper - words = word_tokenize(user_query) - - tokens = [token.strip().strip(",").lower() for token in user_query.split()] - return " | ".join([token for token in tokens if token not in self._stopwords]) + escaper = TokenEscaper() + tokens = [ + escaper.escape( + token.strip().strip(",").replace("“", "").replace("”", "").lower() + ) + for token in user_query.split() + ] + return " | ".join( + [token for token in tokens if token and token not in self._stopwords] + ) def _build_query_string(self) -> str: """Build the full query string for text search with optional filtering.""" filter_expression = self._filter_expression - # TODO include text only if isinstance(filter_expression, FilterExpression): filter_expression = str(filter_expression) + else: + filter_expression = "" - text = f"(~{Text(self._text_field) % self._tokenize_query(user_query)})" - - text_and_filter = text & self._filter_expression - - #TODO is this method even needed? 
use - return text_and_filter + text = f"(~@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" + if filter_expression and filter_expression != "*": + text += f"({filter_expression})" + return text -# from redisvl.utils.token_escaper import TokenEscaper -# escaper = TokenEscaper() -# def tokenize_and_escape_query(user_query: str) -> str: -# """Convert a raw user query to a redis full text query joined by ORs""" -# tokens = [escaper.escape(token.strip().strip(",").replace("“", "").replace("”", "").lower()) for token in user_query.split()] -# return " | ".join([token for token in tokens if token and token not in stopwords_en]) -class HybridQuery(VectorQuery, TextQuery): - def __init__(): - self, - text: str, - text_field: str, - vector: Union[List[float], bytes], - vector_field_name: str, - text_scorer: str = "TFIDF", - alpha: float = 0.7, - return_fields: Optional[List[str]] = None, - filter_expression: Optional[Union[str, FilterExpression]] = None, - dtype: str = "float32", - num_results: int = 10, - return_score: bool = True, - dialect: int = 2, - sort_by: Optional[str] = None, - in_order: bool = False, +class HybridQuery(AggregateRequest): + def __init__( + self, text_query: TextQuery, vector_query: VectorQuery, alpha: float = 0.7 ): - """A query for running a hybrid full text and vector search, along with - an optional filter expression. + """An aggregate query for running a hybrid full text and vector search. Args: - text (str): The text string to run text search with. - text_field (str): The name of the text field to search against. - vector (List[float]): The vector to perform the vector search with. - vector_field_name (str): The name of the vector field to search - against in the database. - text_scorer (str, optional): The text scoring algorithm to use. - Defaults to TFIDF. + text_query (TextQuery): The text query to run text search with. + vector_query (VectorQuery): The vector query to run vector search with. alpha (float, optional): The amount to weight the vector similarity score relative to the text similarity score. Defaults to 0.7 - return_fields (List[str]): The declared fields to return with search - results. - filter_expression (Union[str, FilterExpression], optional): A filter to apply - along with the vector search. Defaults to None. - dtype (str, optional): The dtype of the vector. Defaults to - "float32". - num_results (int, optional): The top k results to return from the - vector search. Defaults to 10. - return_score (bool, optional): Whether to return the vector - distance. Defaults to True. - dialect (int, optional): The RediSearch query dialect. - Defaults to 2. - sort_by (Optional[str]): The field to order the results by. Defaults - to None. Results will be ordered by vector distance. - in_order (bool): Requires the terms in the field to have - the same order as the terms in the query filter, regardless of - the offsets between them. Defaults to False. - - Raises: - TypeError: If filter_expression is not of type redisvl.query.FilterExpression - - Note: - Learn more about vector queries in Redis: https://redis.io/docs/interact/search-and-query/search/vectors/#knn-search - """ - self._text = text - self._text_field_name = tex_field_name - self._vector = vector - self._vector_field_name = vector_field_name - self._dtype = dtype - self._num_results = num_results - self.set_filter(filter_expression) - query_string = self._build_query_string() - - # TODO how to handle multiple parents? call parent.__init__() manually? 
- super().__init__(query_string) - - # Handle query modifiers - if return_fields: - self.return_fields(*return_fields) - self.paging(0, self._num_results).dialect(dialect) - - if return_score: - self.return_fields(self.DISTANCE_ID) - - if sort_by: - self.sort_by(sort_by) - else: - self.sort_by(self.DISTANCE_ID) - - if in_order: - self.in_order() - - - def _build_query_string(self) -> str: - """Build the full query string for hybrid search with optional filtering.""" - filter_expression = self._filter_expression - # TODO include hybrid - if isinstance(filter_expression, FilterExpression): - filter_expression = str(filter_expression) - return f"{filter_expression}=>[KNN {self._num_results} @{self._vector_field_name} ${self.VECTOR_PARAM} AS {self.DISTANCE_ID}]" - - @property - def params(self) -> Dict[str, Any]: - """Return the parameters for the query. - - Returns: - Dict[str, Any]: The parameters for the query. """ - if isinstance(self._vector, bytes): - vector = self._vector - else: - vector = array_to_buffer(self._vector, dtype=self._dtype) - - return {self.VECTOR_PARAM: vector} - + pass diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index bceaa215..e3ef5e96 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -3,7 +3,14 @@ from redis.commands.search.result import Result from redisvl.index.index import process_results -from redisvl.query import CountQuery, FilterQuery, RangeQuery, VectorQuery +from redisvl.query import ( + CountQuery, + FilterQuery, + HybridQuery, + RangeQuery, + TextQuery, + VectorQuery, +) from redisvl.query.filter import Tag from redisvl.query.query import VectorRangeQuery @@ -188,6 +195,69 @@ def test_range_query(): assert range_query._in_order +def test_text_query(): + text_string = "the toon squad play basketball against a gang of aliens" + text_field_name = "description" + return_fields = ["title", "genre", "rating"] + text_query = TextQuery( + text=text_string, + text_field=text_field_name, + return_fields=return_fields, + return_score=False, + ) + + # Check properties + assert text_query._return_fields == return_fields + assert text_query._num_results == 10 + assert ( + text_query.filter + == f"(~@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + ) + assert isinstance(text_query, Query) + assert isinstance(text_query.query, Query) + assert isinstance(text_query.params, dict) + assert text_query._text_scorer == "BM25" + assert text_query.params == {} + assert text_query._dialect == 2 + assert text_query._in_order == False + + # Test paging functionality + text_query.paging(5, 7) + assert text_query._offset == 5 + assert text_query._num == 7 + assert text_query._num_results == 10 + + # Test sort_by functionality + filter_expression = Tag("genre") == "comedy" + scorer = "TFIDF" + text_query = TextQuery( + text_string, + text_field_name, + scorer, + filter_expression, + return_fields, + num_results=10, + sort_by="rating", + ) + assert text_query._sortby is not None + + # Test in_order functionality + text_query = TextQuery( + text_string, + text_field_name, + scorer, + filter_expression, + return_fields, + num_results=10, + in_order=True, + ) + assert text_query._in_order + + +def test_hybrid_query(): + pass + + @pytest.mark.parametrize( "query", [ From 7e0f24d79047f7c167ad59dfa3fab90784afbbcb Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Thu, 27 Mar 2025 10:17:02 -0700 Subject: [PATCH 04/26] adds nltk requirement --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff 
--git a/pyproject.toml b/pyproject.toml index 2a897726..b9e0d63d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ tenacity = ">=8.2.2" tabulate = "^0.9.0" ml-dtypes = "^0.4.0" python-ulid = "^3.0.0" +nltk = { version = "^3.8.1", optional = true } openai = { version = "^1.13.0", optional = true } sentence-transformers = { version = "^3.4.0", optional = true } scipy = [ From 49b2aba14a803adca4a66f822c186f68a11f0d5a Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Thu, 27 Mar 2025 13:18:04 -0700 Subject: [PATCH 05/26] makes stopwords user defined in TextQuery --- poetry.lock | 545 +++++++++++++++++++++++++++++---- redisvl/query/query.py | 48 ++- tests/unit/test_query_types.py | 28 +- 3 files changed, 549 insertions(+), 72 deletions(-) diff --git a/poetry.lock b/poetry.lock index 85c682e7..4faca337 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -6,6 +6,7 @@ version = "0.0.5" description = "A collection of accessible pygments styles" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"}, {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"}, @@ -24,6 +25,8 @@ version = "2.4.6" description = "Happy Eyeballs for asyncio" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"voyageai\"" files = [ {file = "aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1"}, {file = "aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0"}, @@ -35,6 +38,8 @@ version = "3.11.13" description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"voyageai\"" files = [ {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d"}, {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef"}, @@ -130,7 +135,7 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiolimiter" @@ -138,6 +143,8 @@ version = "1.2.1" description = "asyncio rate limiter, a leaky bucket implementation" optional = true python-versions = "<4.0,>=3.8" +groups = ["main"] +markers = "extra == \"voyageai\"" files = [ {file = "aiolimiter-1.2.1-py3-none-any.whl", hash = "sha256:d3f249e9059a20badcb56b61601a83556133655c11d1eb3dd3e04ff069e5f3c7"}, {file = "aiolimiter-1.2.1.tar.gz", hash = "sha256:e02a37ea1a855d9e832252a105420ad4d15011505512a1a1d814647451b5cca9"}, @@ -149,6 +156,8 @@ version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"voyageai\"" files = [ {file = 
"aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, @@ -163,6 +172,7 @@ version = "0.7.16" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -174,6 +184,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -185,6 +196,8 @@ version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -198,7 +211,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -207,6 +220,8 @@ version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = ">=3.6" +groups = ["dev", "docs"] +markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -218,6 +233,7 @@ version = "3.3.8" description = "An abstract syntax tree for Python with inference support." 
optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c"}, {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, @@ -232,6 +248,7 @@ version = "3.0.0" description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -247,6 +264,8 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_full_version < \"3.11.3\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -258,18 +277,20 @@ version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs"] files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] +markers = {main = "extra == \"voyageai\""} [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] 
+tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "babel" @@ -277,13 +298,14 @@ version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "beautifulsoup4" @@ -291,10 +313,12 @@ version = "4.13.3" description = "Screen-scraping library" optional = false python-versions = ">=3.7.0" +groups = ["main", "docs"] files = [ {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, ] +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [package.dependencies] soupsieve = ">1.2" @@ -313,6 +337,7 @@ version = "25.1.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, @@ -359,6 +384,7 @@ version = "6.2.0" description = "An easy safelist-based HTML-sanitizing tool." optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, @@ -377,6 +403,8 @@ version = "1.36.0" description = "The AWS SDK for Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"bedrock\"" files = [ {file = "boto3-1.36.0-py3-none-any.whl", hash = "sha256:d0ca7a58ce25701a52232cc8df9d87854824f1f2964b929305722ebc7959d5a9"}, {file = "boto3-1.36.0.tar.gz", hash = "sha256:159898f51c2997a12541c0e02d6e5a8fe2993ddb307b9478fd9a339f98b57e00"}, @@ -396,6 +424,8 @@ version = "1.36.26" description = "Low-level, data-driven core of boto 3." 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"bedrock\"" files = [ {file = "botocore-1.36.26-py3-none-any.whl", hash = "sha256:4e3f19913887a58502e71ef8d696fe7eaa54de7813ff73390cd5883f837dfa6e"}, {file = "botocore-1.36.26.tar.gz", hash = "sha256:4a63bcef7ecf6146fd3a61dc4f9b33b7473b49bdaf1770e9aaca6eee0c9eab62"}, @@ -418,6 +448,8 @@ version = "5.5.2" description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, @@ -429,6 +461,8 @@ version = "1.0.0" description = "RFC 7049 - Concise Binary Object Representation" optional = true python-versions = "*" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cbor-1.0.0.tar.gz", hash = "sha256:13225a262ddf5615cbd9fd55a76a0d53069d18b07d2e9f19c39e6acb8609bbb6"}, ] @@ -439,6 +473,8 @@ version = "5.6.5" description = "CBOR (de)serializer with extensive tag support" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cbor2-5.6.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e16c4a87fc999b4926f5c8f6c696b0d251b4745bc40f6c5aee51d69b30b15ca2"}, {file = "cbor2-5.6.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87026fc838370d69f23ed8572939bd71cea2b3f6c8f8bb8283f573374b4d7f33"}, @@ -488,7 +524,7 @@ files = [ [package.extras] benchmarks = ["pytest-benchmark (==4.0.0)"] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)", "typing-extensions"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)", "typing-extensions ; python_version < \"3.12\""] test = ["coverage (>=7)", "hypothesis", "pytest"] [[package]] @@ -497,10 +533,12 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "dev", "docs"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "cffi" @@ -508,6 +546,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -577,6 +616,7 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] +markers = {dev = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"", docs = "implementation_name == \"pypy\""} [package.dependencies] pycparser = "*" @@ -587,6 +627,7 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -598,6 +639,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" +groups = ["main", "dev", "docs"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -692,6 +734,7 @@ files = [ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] +markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "click" @@ -699,6 +742,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "docs"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -713,6 +757,8 @@ version = "5.13.12" description = "" optional = true python-versions = "<4.0,>=3.9" +groups = ["main"] +markers = "extra == \"cohere\"" files = [ {file = "cohere-5.13.12-py3-none-any.whl", hash = "sha256:2a043591a3e5280b47716a6b311e4c7f58e799364113a9cb81b50cd4f6c95f7e"}, {file = "cohere-5.13.12.tar.gz", hash = "sha256:97bb9ac107e580780b941acbabd3aa5e71960e6835398292c46aaa8a0a4cab88"}, @@ -735,10 +781,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev", "docs"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\"", docs = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "coloredlogs" @@ -746,6 +794,7 @@ version = "15.0.1" description = "Colored terminal output for Python's logging module" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, @@ -763,6 +812,7 @@ version = "0.2.2" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, @@ -780,6 +830,8 @@ version = "1.3.1" description = "Python library for calculating contours of 2D quadrilateral grids" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -853,6 +905,7 @@ version = "7.6.12" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, @@ -920,7 +973,7 @@ files = [ ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cramjam" @@ -928,6 +981,8 @@ version = "2.9.1" description = "Thin Python bindings to de/compression algorithms in Rust" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cramjam-2.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8e82464d1e00fbbb12958999b8471ba5e9f3d9711954505a0a7b378762332e6f"}, {file = "cramjam-2.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d2df8a6511cc08ef1fccd2e0c65e2ebc9f57574ec8376052a76851af5398810"}, @@ -1030,6 +1085,7 @@ version = "44.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["dev"] files = [ {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, @@ -1068,10 +1124,10 @@ files = [ cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] @@ -1083,6 +1139,8 @@ version = "0.12.1" description = "Composable style cycles" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -1098,6 +1156,7 @@ version = "1.8.12" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "debugpy-1.8.12-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:a2ba7ffe58efeae5b8fad1165357edfe01464f9aef25e814e891ec690e7dd82a"}, {file = "debugpy-1.8.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd4149c4fc5e7d508ece083e78c17442ee13b0e69bfa6bd63003e486770f45"}, @@ -1133,6 +1192,7 @@ version = "5.2.1" description = "Decorators for Humans" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, @@ -1144,6 +1204,7 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["docs"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -1155,6 +1216,7 @@ version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, {file = "dill-0.3.9.tar.gz", hash = 
"sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, @@ -1170,6 +1232,7 @@ version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -1181,6 +1244,8 @@ version = "1.9.0" description = "Distro - an OS platform information API" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"openai\"" files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -1192,6 +1257,7 @@ version = "7.1.0" description = "A Python library for the Docker Engine API." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -1214,6 +1280,8 @@ version = "0.16" description = "Parse Python docstrings in reST, Google and Numpydoc format" optional = true python-versions = ">=3.6,<4.0" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, @@ -1225,6 +1293,7 @@ version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -1236,6 +1305,8 @@ version = "0.2.2" description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"mistralai\"" files = [ {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, @@ -1250,10 +1321,12 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev", "docs"] files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and python_version <= \"3.10\"", dev = "python_version <= \"3.10\"", docs = "python_version <= \"3.10\""} [package.extras] test = ["pytest (>=6)"] @@ -1264,6 +1337,7 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -1278,13 +1352,14 @@ version = "2.2.0" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "fastavro" @@ -1292,6 +1367,8 @@ version = "1.10.0" description = "Fast read/write of AVRO files" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"cohere\"" files = [ {file = "fastavro-1.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a9fe0672d2caf0fe54e3be659b13de3cad25a267f2073d6f4b9f8862acc31eb"}, {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86dd0410770e0c99363788f0584523709d85e57bb457372ec5c285a482c17fe6"}, @@ -1338,6 +1415,7 @@ version = "2.21.1" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" +groups = ["dev", "docs"] files = [ {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, @@ -1352,6 +1430,8 @@ version = "2024.11.0" description = "Python support for Parquet file format" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "fastparquet-2024.11.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:60ccf587410f0979105e17036df61bb60e1c2b81880dc91895cdb4ee65b71e7f"}, {file = "fastparquet-2024.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5ad5fc14b0567e700bea3cd528a0bd45a6f9371370b49de8889fb3d10a6574a"}, @@ -1412,15 +1492,17 @@ version = "3.17.0" description = "A platform independent file lock." optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, ] +markers = {main = "extra == \"sentence-transformers\" or extra == \"cohere\""} [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] -typing = ["typing-extensions (>=4.12.2)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] [[package]] name = "fonttools" @@ -1428,6 +1510,8 @@ version = "4.56.0" description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:331954d002dbf5e704c7f3756028e21db07097c19722569983ba4d74df014000"}, {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d1613abd5af2f93c05867b3a3759a56e8bf97eb79b1da76b2bc10892f96ff16"}, @@ -1482,18 +1566,18 @@ files = [ ] [package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "pycairo", "scipy"] +interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""] lxml = ["lxml (>=4.0)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] repacker = ["uharfbuzz (>=0.23.0)"] symfont = ["sympy"] -type1 = ["xattr"] +type1 = ["xattr ; sys_platform == \"darwin\""] ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.1.0)"] -woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] +unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""] +woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"] [[package]] name = "frozenlist" @@ -1501,6 +1585,8 @@ version = "1.5.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = true python-versions = ">=3.8" +groups 
= ["main"] +markers = "extra == \"voyageai\"" files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -1602,6 +1688,8 @@ version = "2025.2.0" description = "File-system specification" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and python_version >= \"3.10\"" files = [ {file = "fsspec-2025.2.0-py3-none-any.whl", hash = "sha256:9de2ad9ce1f85e1931858535bc882543171d197001a0a5eb2ddc04f1781ab95b"}, {file = "fsspec-2025.2.0.tar.gz", hash = "sha256:1c24b16eaa0a1798afa0337aa0db9b256718ab2a89c425371f5628d22c3b6afd"}, @@ -1641,6 +1729,8 @@ version = "2.24.1" description = "Google API client core library" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_api_core-2.24.1-py3-none-any.whl", hash = "sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1"}, {file = "google_api_core-2.24.1.tar.gz", hash = "sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a"}, @@ -1650,15 +1740,15 @@ files = [ google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] proto-plus = [ - {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""}, + {version = ">=1.22.3,<2.0.0dev"}, {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" @@ -1666,7 +1756,7 @@ requests = ">=2.18.0,<3.0.0.dev0" [package.extras] async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] @@ -1676,6 +1766,8 @@ version = "2.38.0" description = "Google Authentication Library" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, {file = 
"google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, @@ -1700,6 +1792,8 @@ version = "1.82.0" description = "Vertex AI API client library" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_aiplatform-1.82.0-py2.py3-none-any.whl", hash = "sha256:13368a961b2bfa8f46ccd10371bb19bd5f946d8f29c411726061ed1a140ce890"}, {file = "google_cloud_aiplatform-1.82.0.tar.gz", hash = "sha256:b7ea7379249cc1821aa46300a16e4b15aa64aa22665e2536b2bcb7e473d7438e"}, @@ -1725,10 +1819,10 @@ ag2-testing = ["absl-py", "ag2[gemini]", "cloudpickle (>=3.0,<4.0)", "google-clo agent-engines = ["cloudpickle (>=3.0,<4.0)", "google-cloud-logging (<4)", "google-cloud-trace (<2)", "packaging (>=24.0)", "pydantic (>=2.10,<3)", "typing-extensions"] autologging = ["mlflow (>=1.27.0,<=2.16.0)"] cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] +datasets = ["pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\""] endpoint = ["requests (>=2.28.1)"] -evaluation = ["pandas (>=1.0.0)", "scikit-learn", "scikit-learn (<1.6.0)", "tqdm (>=4.23.0)"] -full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "scikit-learn", "scikit-learn (<1.6.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] +evaluation = ["pandas (>=1.0.0)", "scikit-learn (<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "tqdm (>=4.23.0)"] +full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\"", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.33.0) ; python_version == \"3.11\"", "requests (>=2.28.1)", "scikit-learn (<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0dev)", 
"tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] langchain = ["langchain (>=0.3,<0.4)", "langchain-core (>=0.3,<0.4)", "langchain-google-vertexai (>=2,<3)", "langgraph (>=0.2.45,<0.3)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)"] langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.3,<0.4)", "langchain-core (>=0.3,<0.4)", "langchain-google-vertexai (>=2,<3)", "langgraph (>=0.2.45,<0.3)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "typing-extensions"] lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] @@ -1736,11 +1830,11 @@ metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] pipelines = ["pyyaml (>=5.3.1,<7)"] prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.114.0)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] -ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "setuptools (<70.0.0)"] -ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "ray[train]", "scikit-learn (<1.6.0)", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.33.0) ; python_version == \"3.11\"", "setuptools (<70.0.0)"] +ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.33.0) ; python_version == \"3.11\"", "ray[train]", "scikit-learn (<1.6.0)", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "typing-extensions"] -tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -testing = ["aiohttp", "bigframes", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml 
(>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "scikit-learn (<1.6.0)", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] +tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +testing = ["aiohttp", "bigframes ; python_version >= \"3.10\"", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\"", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.33.0) ; python_version == \"3.11\"", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn (<1.6.0) ; python_version <= \"3.10\"", "scikit-learn ; python_version > \"3.10\"", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (==2.13.0) ; python_version <= \"3.11\"", "tensorflow (==2.16.1) ; python_version > \"3.11\"", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0) ; python_version <= \"3.11\"", "torch (>=2.2.0) ; python_version > \"3.11\"", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] tokenization = ["sentencepiece (>=0.2.0)"] vizier = ["google-vizier (>=0.1.6)"] xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] @@ -1751,6 +1845,8 @@ version = "3.30.0" description = "Google BigQuery API client library" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"}, {file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"}, @@ -1768,12 +1864,12 @@ requests = ">=2.21.0,<3.0.0dev" [package.extras] all = ["google-cloud-bigquery[bigquery-v2,bqstorage,geopandas,ipython,ipywidgets,opentelemetry,pandas,tqdm]"] bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] 
+bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "pyarrow (>=3.0.0)"] geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<2.0dev)"] ipython = ["bigquery-magics (>=0.1.0)"] ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pandas-gbq (>=0.26.1)", "pyarrow (>=3.0.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pandas-gbq (>=0.26.1) ; python_version >= \"3.8\"", "pyarrow (>=3.0.0)"] tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] @@ -1782,6 +1878,8 @@ version = "2.4.2" description = "Google Cloud API client core library" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_core-2.4.2-py2.py3-none-any.whl", hash = "sha256:7459c3e83de7cb8b9ecfec9babc910efb4314030c56dd798eaad12c426f7d180"}, {file = "google_cloud_core-2.4.2.tar.gz", hash = "sha256:a4fcb0e2fcfd4bfe963837fad6d10943754fd79c1a50097d68540b6eb3d67f35"}, @@ -1800,6 +1898,8 @@ version = "1.14.1" description = "Google Cloud Resource Manager API client library" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_resource_manager-1.14.1-py2.py3-none-any.whl", hash = "sha256:68340599f85ebf07a6e18487e460ea07cc15e132068f6b188786d01c2cf25518"}, {file = "google_cloud_resource_manager-1.14.1.tar.gz", hash = "sha256:41e9e546aaa03d5160cdfa2341dbe81ef7596706c300a89b94c429f1f3411f87"}, @@ -1810,7 +1910,7 @@ google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extr google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" grpc-google-iam-v1 = ">=0.14.0,<1.0.0dev" proto-plus = [ - {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""}, + {version = ">=1.22.3,<2.0.0dev"}, {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""}, ] protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" @@ -1821,6 +1921,8 @@ version = "2.19.0" description = "Google Cloud Storage API client library" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba"}, {file = "google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2"}, @@ -1844,6 +1946,8 @@ version = "1.6.0" description = "A python wrapper of the C library 'Google CRC32C'" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"}, {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"}, @@ -1883,6 +1987,8 @@ version = 
"2.7.2" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, @@ -1901,6 +2007,8 @@ version = "1.68.0" description = "Common protobufs used in Google APIs" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "googleapis_common_protos-1.68.0-py2.py3-none-any.whl", hash = "sha256:aaf179b2f81df26dfadac95def3b16a95064c76a5f45f07e4c68a21bb371c4ac"}, {file = "googleapis_common_protos-1.68.0.tar.gz", hash = "sha256:95d38161f4f9af0d9423eed8fb7b64ffd2568c3464eb542ff02c5bfa1953ab3c"}, @@ -1919,6 +2027,8 @@ version = "3.1.1" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" +groups = ["docs"] +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -2005,6 +2115,8 @@ version = "0.14.0" description = "IAM API client library" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "grpc_google_iam_v1-0.14.0-py2.py3-none-any.whl", hash = "sha256:fb4a084b30099ba3ab07d61d620a0d4429570b13ff53bd37bac75235f98b7da4"}, {file = "grpc_google_iam_v1-0.14.0.tar.gz", hash = "sha256:c66e07aa642e39bb37950f9e7f491f70dad150ac9801263b42b2814307c2df99"}, @@ -2021,6 +2133,8 @@ version = "1.70.0" description = "HTTP/2-based RPC framework" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "grpcio-1.70.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:95469d1977429f45fe7df441f586521361e235982a0b39e33841549143ae2851"}, {file = "grpcio-1.70.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:ed9718f17fbdb472e33b869c77a16d0b55e166b100ec57b016dc7de9c8d236bf"}, @@ -2088,6 +2202,8 @@ version = "1.70.0" description = "Status proto mapping for gRPC" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "grpcio_status-1.70.0-py3-none-any.whl", hash = "sha256:fc5a2ae2b9b1c1969cc49f3262676e6854aa2398ec69cb5bd6c47cd501904a85"}, {file = "grpcio_status-1.70.0.tar.gz", hash = "sha256:0e7b42816512433b18b9d764285ff029bde059e9d41f8fe10a60631bd8348101"}, @@ -2104,6 +2220,8 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = 
"sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -2115,6 +2233,8 @@ version = "1.0.7" description = "A minimal low-level HTTP client." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -2136,6 +2256,8 @@ version = "0.28.1" description = "The next generation HTTP client." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -2148,7 +2270,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -2160,6 +2282,8 @@ version = "0.4.0" description = "Consume Server-Sent Event (SSE) messages with HTTPX." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"cohere\"" files = [ {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, @@ -2171,6 +2295,8 @@ version = "0.29.1" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = true python-versions = ">=3.8.0" +groups = ["main"] +markers = "extra == \"sentence-transformers\" or extra == \"cohere\"" files = [ {file = "huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5"}, {file = "huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250"}, @@ -2205,6 +2331,7 @@ version = "10.0" description = "Human friendly output for text interfaces using Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, @@ -2219,6 +2346,7 @@ version = "2.6.8" description = "File identification library for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255"}, {file = "identify-2.6.8.tar.gz", hash = "sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc"}, @@ -2233,10 +2361,12 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev", "docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = 
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] @@ -2247,6 +2377,8 @@ version = "3.3.0" description = "Iterative JSON parser with standard Python iterator interfaces" optional = true python-versions = "*" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"}, {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"}, @@ -2350,6 +2482,7 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["docs"] files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -2361,21 +2494,23 @@ version = "8.6.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" +groups = ["dev", "docs"] files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] +markers = {dev = "python_version < \"3.10\""} [package.dependencies] zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -2384,6 +2519,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -2395,6 
+2531,8 @@ version = "2.5.3" description = "inscriptis - HTML to text converter." optional = true python-versions = "<4.0,>=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "inscriptis-2.5.3-py3-none-any.whl", hash = "sha256:25962cf5a60b1a8f33e7bfbbea08a29af82299702339b9b90c538653a5c7aa38"}, {file = "inscriptis-2.5.3.tar.gz", hash = "sha256:256043caa13e4995c71fafdeadec4ac42b57f3914cb41023ecbee8bc27ca1cc0"}, @@ -2413,6 +2551,7 @@ version = "6.29.5" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, @@ -2446,6 +2585,7 @@ version = "8.18.1" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" +groups = ["dev", "docs"] files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, @@ -2483,6 +2623,8 @@ version = "0.5.9" description = "provides a common interface to many IR ad-hoc ranking benchmarks, training datasets, etc." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "ir_datasets-0.5.9-py3-none-any.whl", hash = "sha256:07c9bed07f31031f1da1bc02afc7a1077b1179a3af402d061f83bf6fb833b90a"}, {file = "ir_datasets-0.5.9.tar.gz", hash = "sha256:35c90980fbd0f4ea8fe22a1ab16d2bb6be3dc373cbd6dfab1d905f176a70e5ac"}, @@ -2510,6 +2652,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -2524,6 +2667,7 @@ version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" +groups = ["dev", "docs"] files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -2543,6 +2687,7 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main", "docs"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -2560,6 +2705,8 @@ version = "0.8.2" description = "Fast iterable JSON parser." 
optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"openai\"" files = [ {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, @@ -2645,6 +2792,8 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"bedrock\"" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -2656,6 +2805,7 @@ version = "1.4.2" description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.8" +groups = ["main"] files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -2667,6 +2817,8 @@ version = "1.0.6" description = "A more powerful JSONPath implementation in modern python" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"mistralai\"" files = [ {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, @@ -2678,6 +2830,7 @@ version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -2699,6 +2852,7 @@ version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" +groups = ["dev", "docs"] files = [ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, @@ -2713,6 +2867,7 @@ version = "1.0.1" description = "A defined interface for working with a cache of jupyter notebooks." 
optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "jupyter_cache-1.0.1-py3-none-any.whl", hash = "sha256:9c3cafd825ba7da8b5830485343091143dff903e4d8c69db9349b728b140abf6"}, {file = "jupyter_cache-1.0.1.tar.gz", hash = "sha256:16e808eb19e3fb67a223db906e131ea6e01f03aa27f49a7214ce6a5fec186fb9"}, @@ -2740,6 +2895,7 @@ version = "8.6.3" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, @@ -2755,7 +2911,7 @@ traitlets = ">=5.3" [package.extras] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \"win32\"", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-core" @@ -2763,6 +2919,7 @@ version = "5.7.2" description = "Jupyter core package. A base package on which Jupyter projects rely." optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, @@ -2783,6 +2940,7 @@ version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, @@ -2794,6 +2952,8 @@ version = "1.4.8" description = "A fast implementation of the Cassowary constraint solver" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, @@ -2883,6 +3043,8 @@ version = "0.44.0" description = "lightweight wrapper around basic LLVM functionality" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "llvmlite-0.44.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9fbadbfba8422123bab5535b293da1cf72f9f478a65645ecd73e781f962ca614"}, {file = "llvmlite-0.44.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cccf8eb28f24840f2689fb1a45f9c0f7e582dd24e088dcf96e424834af11f791"}, @@ -2913,6 +3075,8 @@ version = "5.3.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b"}, {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b"}, @@ -3067,6 +3231,8 @@ version = "4.4.3" description = "LZ4 Bindings for Python" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "lz4-4.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1ebf23ffd36b32b980f720a81990fcfdeadacafe7498fbeff7a8e058259d4e58"}, {file = "lz4-4.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8fe3caea61427057a9e3697c69b2403510fdccfca4483520d02b98ffae74531e"}, @@ -3112,10 +3278,12 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main", "docs"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, ] +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [package.dependencies] mdurl = ">=0.1,<1.0" @@ -3136,6 +3304,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" +groups = ["main", "docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -3206,6 +3375,8 @@ version = "3.10.1" description = "Python plotting package" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "matplotlib-3.10.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ff2ae14910be903f4a24afdbb6d7d3a6c44da210fc7d42790b87aeac92238a16"}, {file = "matplotlib-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0721a3fd3d5756ed593220a8b86808a36c5031fce489adb5b31ee6dbb47dd5b2"}, @@ -3263,6 +3434,7 @@ version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, @@ -3277,6 +3449,7 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -3288,6 +3461,7 @@ version = "0.4.2" description = "Collection of plugins for markdown-it-py" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = 
"sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, @@ -3307,10 +3481,12 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main", "docs"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [[package]] name = "mistralai" @@ -3318,6 +3494,8 @@ version = "1.5.0" description = "Python Client SDK for the Mistral AI API." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"mistralai\"" files = [ {file = "mistralai-1.5.0-py3-none-any.whl", hash = "sha256:9372537719f87bd6f9feef4747d0bf1f4fbe971f8c02945ca4b4bf3c94571c97"}, {file = "mistralai-1.5.0.tar.gz", hash = "sha256:fd94bc93bc25aad9c6dd8005b1a0bc4ba1250c6b3fbf855a49936989cc6e5c0d"}, @@ -3340,6 +3518,7 @@ version = "3.1.2" description = "A sane and fast Markdown parser with useful plugins and renderers" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "mistune-3.1.2-py3-none-any.whl", hash = "sha256:4b47731332315cdca99e0ded46fc0004001c1299ff773dfb48fbe1fd226de319"}, {file = "mistune-3.1.2.tar.gz", hash = "sha256:733bf018ba007e8b5f2d3a9eb624034f6ee26c4ea769a98ec533ee111d504dff"}, @@ -3354,6 +3533,7 @@ version = "0.4.1" description = "" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "ml_dtypes-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1fe8b5b5e70cd67211db94b05cfd58dace592f24489b038dc6f9fe347d2e07d5"}, {file = "ml_dtypes-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c09a6d11d8475c2a9fd2bc0695628aec105f97cab3b3a3fb7c9660348ff7d24"}, @@ -3376,9 +3556,9 @@ files = [ [package.dependencies] numpy = [ - {version = ">1.20", markers = "python_version < \"3.10\""}, - {version = ">=1.23.3", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, - {version = ">=1.21.2", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">1.20"}, + {version = ">=1.21.2", markers = "python_version >= \"3.10\""}, + {version = ">=1.23.3", markers = "python_version >= \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] @@ -3391,6 +3571,8 @@ version = "1.3.0" description = "Python library for arbitrary-precision floating-point arithmetic" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, @@ -3399,7 +3581,7 @@ files = [ [package.extras] develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] +gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""] tests = ["pytest (>=4.6)"] [[package]] @@ -3408,6 +3590,8 @@ version = "6.1.0" description = "multidict implementation" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"voyageai\"" files = [ {file = 
"multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -3512,6 +3696,7 @@ version = "1.9.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, @@ -3559,10 +3744,12 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" +groups = ["main", "dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +markers = {main = "extra == \"mistralai\""} [[package]] name = "myst-nb" @@ -3570,6 +3757,7 @@ version = "1.2.0" description = "A Jupyter Notebook Sphinx reader built on top of the MyST markdown parser." optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "myst_nb-1.2.0-py3-none-any.whl", hash = "sha256:0e09909877848c0cf45e1aecee97481512efa29a0c4caa37870a03bba11c56c1"}, {file = "myst_nb-1.2.0.tar.gz", hash = "sha256:af459ec753b341952182b45b0a80b4776cebf80c9ee6aaca2a3f4027b440c9de"}, @@ -3598,6 +3786,7 @@ version = "3.0.1" description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, @@ -3624,6 +3813,7 @@ version = "0.10.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false python-versions = ">=3.9.0" +groups = ["docs"] files = [ {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, @@ -3646,6 +3836,7 @@ version = "7.16.6" description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, @@ -3683,6 +3874,7 @@ version = "5.10.4" description = "The Jupyter Notebook format" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, @@ -3704,6 +3896,7 @@ version = "0.9.6" description = "Jupyter Notebook Tools for Sphinx" optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "nbsphinx-0.9.6-py3-none-any.whl", hash = "sha256:336b0b557945a7678ec7449b16449f854bc852a435bb53b8a72e6b5dc740d992"}, {file = "nbsphinx-0.9.6.tar.gz", hash = "sha256:c2b28a2d702f1159a95b843831798e86e60a17fc647b9bff9ba1585355de54e3"}, @@ -3723,6 +3916,7 @@ version = "0.11.0" description = "A py.test plugin to validate Jupyter notebooks" optional = false python-versions = ">=3.7, <4" +groups = ["dev"] files = [ {file = "nbval-0.11.0-py2.py3-none-any.whl", hash = "sha256:307aecc866c9a1e8a13bb5bbb008a702bacfda2394dff6fe504a3108a58042a0"}, {file = "nbval-0.11.0.tar.gz", hash = "sha256:77c95797607b0a968babd2597ee3494102d25c3ad37435debbdac0e46e379094"}, @@ -3741,6 +3935,7 @@ version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" +groups = ["dev", "docs"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -3752,6 +3947,8 @@ version = "3.2.1" description = "Python package for creating and manipulating graphs and networks" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, @@ -3764,12 +3961,39 @@ doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9. 
extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] +[[package]] +name = "nltk" +version = "3.9.1" +description = "Natural Language Toolkit" +optional = true +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, + {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, +] + +[package.dependencies] +click = "*" +joblib = "*" +regex = ">=2021.8.3" +tqdm = "*" + +[package.extras] +all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"] +corenlp = ["requests"] +machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"] +plot = ["matplotlib"] +tgrep = ["pyparsing"] +twitter = ["twython"] + [[package]] name = "nodeenv" version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -3781,6 +4005,8 @@ version = "0.61.0" description = "compiling Python code using LLVM" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "numba-0.61.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9cab9783a700fa428b1a54d65295122bc03b3de1d01fb819a6b9dbbddfdb8c43"}, {file = "numba-0.61.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46c5ae094fb3706f5adf9021bfb7fc11e44818d61afee695cdee4eadfed45e98"}, @@ -3815,6 +4041,8 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["main"] +markers = "python_version < \"3.12\"" files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -3860,6 +4088,8 @@ version = "2.1.3" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.12\"" files = [ {file = "numpy-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c894b4305373b9c5576d7a12b473702afdf48ce5369c074ba304cc5ad8730dff"}, {file = "numpy-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b47fbb433d3260adcd51eb54f92a2ffbc90a4595f8970ee00e064c644ac788f5"}, @@ -3924,6 +4154,8 @@ version = "12.4.5.8" description = "CUBLAS native runtime libraries" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0f8aa1706812e00b9f19dfe0cdb3999b092ccb8ca168c0db5b8ea712456fd9b3"}, {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl", hash = "sha256:2fc8da60df463fdefa81e323eef2e36489e1c94335b5358bcb38360adf75ac9b"}, @@ -3936,6 +4168,8 @@ version = "12.4.127" description = "CUDA 
profiling tools runtime libs." optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:79279b35cf6f91da114182a5ce1864997fd52294a87a16179ce275773799458a"}, {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9dec60f5ac126f7bb551c055072b69d85392b13311fcc1bcda2202d172df30fb"}, @@ -3948,6 +4182,8 @@ version = "12.4.127" description = "NVRTC native runtime libraries" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0eedf14185e04b76aa05b1fea04133e59f465b6f960c0cbf4e37c3cb6b0ea198"}, {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a178759ebb095827bd30ef56598ec182b85547f1508941a3d560eb7ea1fbf338"}, @@ -3960,6 +4196,8 @@ version = "12.4.127" description = "CUDA Runtime native Libraries" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:961fe0e2e716a2a1d967aab7caee97512f71767f852f67432d572e36cb3a11f3"}, {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:64403288fa2136ee8e467cdc9c9427e0434110899d07c779f25b5c068934faa5"}, @@ -3972,6 +4210,8 @@ version = "9.1.0.70" description = "cuDNN runtime libraries" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"}, {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"}, @@ -3986,6 +4226,8 @@ version = "11.2.1.3" description = "CUFFT native runtime libraries" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5dad8008fc7f92f5ddfa2101430917ce2ffacd86824914c82e28990ad7f00399"}, {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f083fc24912aa410be21fa16d157fed2055dab1cc4b6934a0e03cba69eb242b9"}, @@ -4001,6 +4243,8 @@ version = "10.3.5.147" description = "CURAND native runtime libraries" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1f173f09e3e3c76ab084aba0de819c49e56614feae5c12f69883f4ae9bb5fad9"}, {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a88f583d4e0bb643c49743469964103aa59f7f708d862c3ddb0fc07f851e3b8b"}, @@ -4013,6 +4257,8 @@ version = "11.6.1.9" description = "CUDA solver 
native runtime libraries" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d338f155f174f90724bbde3758b7ac375a70ce8e706d70b018dd3375545fc84e"}, {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:19e33fa442bcfd085b3086c4ebf7e8debc07cfe01e11513cc6d332fd918ac260"}, @@ -4030,6 +4276,8 @@ version = "12.3.1.170" description = "CUSPARSE native runtime libraries" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9d32f62896231ebe0480efd8a7f702e143c98cfaa0e8a76df3386c1ba2b54df3"}, {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1"}, @@ -4045,6 +4293,8 @@ version = "0.6.2" description = "NVIDIA cuSPARSELt" optional = true python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:067a7f6d03ea0d4841c85f0c6f1991c5dda98211f6302cb83a4ab234ee95bef8"}, {file = "nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:df2c24502fd76ebafe7457dbc4716b2fec071aabaed4fb7691a201cde03704d9"}, @@ -4057,6 +4307,8 @@ version = "2.21.5" description = "NVIDIA Collective Communication Library (NCCL) Runtime" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_nccl_cu12-2.21.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8579076d30a8c24988834445f8d633c697d42397e92ffc3f63fa26766d25e0a0"}, ] @@ -4067,7 +4319,10 @@ version = "12.4.127" description = "Nvidia JIT LTO Library" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ + {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83"}, {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"}, {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:fd9020c501d27d135f983c6d3e244b197a7ccad769e34df53a42e276b0e25fa1"}, ] @@ -4078,6 +4333,8 @@ version = "12.4.127" description = "NVIDIA Tools Extension" optional = true python-versions = ">=3" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7959ad635db13edf4fc65c06a6e9f9e55fc2f92596db928d169c0bb031e88ef3"}, {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:781e950d9b9f60d8241ccea575b32f5105a5baf4c2351cab5256a24869f12a1a"}, @@ -4090,6 +4347,8 @@ version = "1.65.1" description = "The official Python library for 
the openai API" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"openai\"" files = [ {file = "openai-1.65.1-py3-none-any.whl", hash = "sha256:396652a6452dd42791b3ad8a3aab09b1feb7c1c4550a672586fb300760a8e204"}, {file = "openai-1.65.1.tar.gz", hash = "sha256:9d9370a20d2b8c3ce319fd2194c2eef5eab59effbcc5b04ff480977edc530fba"}, @@ -4115,6 +4374,8 @@ version = "3.10.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, @@ -4203,10 +4464,12 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] +markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "pandas" @@ -4214,6 +4477,8 @@ version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -4300,6 +4565,7 @@ version = "1.5.1" description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["docs"] files = [ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, @@ -4311,6 +4577,7 @@ version = "0.8.4" description = "A Python Parser" optional = false python-versions = ">=3.6" +groups = ["dev", "docs"] files = [ {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, @@ -4326,6 +4593,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -4337,6 +4605,8 @@ version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." optional = false python-versions = "*" +groups = ["dev", "docs"] +markers = "sys_platform != \"win32\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -4351,6 +4621,8 @@ version = "11.1.0" description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "(python_version >= \"3.10\" or extra == \"sentence-transformers\") and (python_version == \"3.10\" or python_version == \"3.11\" or extra == \"sentence-transformers\" or extra == \"ranx\") and (extra == \"ranx\" or extra == \"sentence-transformers\")" files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -4430,7 +4702,7 @@ docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] -typing = ["typing-extensions"] +typing = ["typing-extensions ; python_version < \"3.10\""] xmp = ["defusedxml"] [[package]] @@ -4439,6 +4711,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -4455,6 +4728,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -4470,6 +4744,7 @@ version = "4.1.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b"}, {file = "pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4"}, @@ -4488,6 +4763,7 @@ version = "3.0.50" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.8.0" +groups = ["dev", "docs"] files = [ {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, @@ -4502,6 +4778,8 @@ version = "0.3.0" description = "Accelerated property cache" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"voyageai\"" files = [ {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d"}, {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c"}, @@ -4609,6 +4887,8 @@ version = "1.26.0" description = "Beautiful, Pythonic protocol buffers" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"}, {file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"}, @@ -4626,6 +4906,8 @@ version = "5.29.3" description = "" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, @@ -4646,6 +4928,7 @@ version = "7.0.0" description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
optional = false python-versions = ">=3.6" +groups = ["dev", "docs"] files = [ {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, @@ -4669,6 +4952,8 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" +groups = ["dev", "docs"] +markers = "sys_platform != \"win32\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -4680,6 +4965,7 @@ version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" +groups = ["dev", "docs"] files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -4694,6 +4980,8 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -4705,6 +4993,8 @@ version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, @@ -4719,10 +5009,12 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +markers = {dev = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"", docs = "implementation_name == \"pypy\""} [[package]] name = "pydantic" @@ -4730,6 +5022,7 @@ version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -4742,7 +5035,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -4750,6 +5043,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false 
python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -4862,6 +5156,7 @@ version = "0.15.4" description = "Bootstrap-based Sphinx theme from the PyData community" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "pydata_sphinx_theme-0.15.4-py3-none-any.whl", hash = "sha256:2136ad0e9500d0949f96167e63f3e298620040aea8f9c74621959eda5d4cf8e6"}, {file = "pydata_sphinx_theme-0.15.4.tar.gz", hash = "sha256:7762ec0ac59df3acecf49fd2f889e1b4565dbce8b88b2e29ee06fdd90645a06d"}, @@ -4890,10 +5185,12 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [package.extras] windows-terminal = ["colorama (>=0.4.6)"] @@ -4904,6 +5201,7 @@ version = "3.3.4" description = "python code static checker" optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "pylint-3.3.4-py3-none-any.whl", hash = "sha256:289e6a1eb27b453b08436478391a48cd53bb0efb824873f949e709350f3de018"}, {file = "pylint-3.3.4.tar.gz", hash = "sha256:74ae7a38b177e69a9b525d0794bd8183820bfa7eb68cc1bee6e8ed22a42be4ce"}, @@ -4914,7 +5212,7 @@ astroid = ">=3.3.8,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<7" @@ -4934,6 +5232,8 @@ version = "3.2.1" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, @@ -4948,6 +5248,8 @@ version = "3.5.4" description = "A python implementation of GNU readline." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, @@ -4962,6 +5264,7 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -4984,6 +5287,7 @@ version = "0.23.8" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, @@ -5002,6 +5306,7 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -5023,10 +5328,12 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev", "docs"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] +markers = {main = "(extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\") or (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and python_version >= \"3.10\""} [package.dependencies] six = ">=1.5" @@ -5037,6 +5344,7 @@ version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -5051,6 +5359,7 @@ version = "3.0.0" description = "Universally unique lexicographically sortable identifier" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, @@ -5065,6 +5374,8 @@ version = "2025.1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" +groups = ["main"] 
+markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, @@ -5076,6 +5387,7 @@ version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["dev", "docs"] files = [ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, @@ -5096,6 +5408,7 @@ files = [ {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] +markers = {dev = "sys_platform == \"win32\"", docs = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} [[package]] name = "pyyaml" @@ -5103,6 +5416,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -5165,6 +5479,7 @@ version = "26.2.1" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" +groups = ["dev", "docs"] files = [ {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, @@ -5286,6 +5601,8 @@ version = "0.3.20" description = "ranx: A Blazing-Fast Python Library for Ranking Evaluation, Comparison, and Fusion" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "ranx-0.3.20-py3-none-any.whl", hash = "sha256:e056e4d5981b0328b045868cc7064fc57a545f36009fbe9bb602295ec33335de"}, {file = "ranx-0.3.20.tar.gz", hash = "sha256:8afc6f2042c40645e5d1fd80c35ed75a885e18bd2db7e95cc7ec32a0b41e59ea"}, @@ -5312,6 +5629,7 @@ version = "5.2.1" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, @@ -5330,6 +5648,7 @@ version = "0.36.2" description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" +groups = ["dev", "docs"] files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, @@ -5346,6 +5665,7 @@ version = "2024.11.6" description = "Alternative regular expression module, to replace re." 
optional = true python-versions = ">=3.8" +groups = ["main"] files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -5449,10 +5769,12 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] +markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [package.dependencies] certifi = ">=2017.4.17" @@ -5470,6 +5792,8 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = true python-versions = ">=3.8.0" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -5489,6 +5813,7 @@ version = "0.23.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" +groups = ["dev", "docs"] files = [ {file = "rpds_py-0.23.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed"}, {file = "rpds_py-0.23.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c"}, @@ -5601,6 +5926,8 @@ version = "4.9" description = "Pure-Python RSA implementation" optional = true python-versions = ">=3.6,<4" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -5615,6 +5942,8 @@ version = "0.11.3" description = "An Amazon S3 Transfer Manager" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"bedrock\"" files = [ {file = "s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:ca855bdeb885174b5ffa95b9913622459d4ad8e331fc98eb01e6d5eb6a30655d"}, {file = "s3transfer-0.11.3.tar.gz", hash = "sha256:edae4977e3a122445660c7c114bba949f9d191bae3b34a096f18a1c8c354527a"}, @@ -5632,6 +5961,8 @@ version = "0.5.3" description = "" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073"}, {file = "safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7"}, @@ -5669,6 +6000,8 @@ version = "1.6.1" description = "A set of python modules for machine learning and data mining" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}, {file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}, @@ -5723,6 +6056,8 @@ version = "1.13.1" description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version < \"3.10\" and extra == \"sentence-transformers\"" files = [ {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, @@ -5765,6 +6100,8 @@ version = "1.15.2" description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.10\" and (extra == \"ranx\" or extra == \"sentence-transformers\")" files = [ {file = "scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9"}, {file = "scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5"}, @@ -5820,7 +6157,7 @@ numpy = ">=1.23.5,<2.5" [package.extras] dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.16.5)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "seaborn" @@ -5828,6 +6165,8 @@ version = "0.13.2" description = "Statistical data visualization" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987"}, {file = "seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7"}, @@ -5849,6 +6188,8 @@ version = "3.4.1" description = "State-of-the-Art Text Embeddings" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "sentence_transformers-3.4.1-py3-none-any.whl", hash = "sha256:e026dc6d56801fd83f74ad29a30263f401b4b522165c19386d8bc10dcca805da"}, {file = 
"sentence_transformers-3.4.1.tar.gz", hash = "sha256:68daa57504ff548340e54ff117bd86c1d2f784b21e0fb2689cf3272b8937b24b"}, @@ -5876,19 +6217,21 @@ version = "75.8.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.12\" and extra == \"sentence-transformers\"" files = [ {file = "setuptools-75.8.2-py3-none-any.whl", hash = "sha256:558e47c15f1811c1fa7adbd0096669bf76c1d3f433f58324df69f3f5ecac4e8f"}, {file = "setuptools-75.8.2.tar.gz", hash = "sha256:4880473a969e5f23f2a2be3646b2dfd84af9028716d398e46192f84bc36900d2"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "shapely" @@ -5896,6 +6239,8 @@ version = "2.0.7" description = "Manipulation and analysis of geometric objects" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"vertexai\"" files = [ {file = "shapely-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:33fb10e50b16113714ae40adccf7670379e9ccf5b7a41d0002046ba2b8f0f691"}, {file = "shapely-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:f44eda8bd7a4bccb0f281264b34bf3518d8c4c9a8ffe69a1a05dabf6e8461147"}, @@ -5954,10 +6299,12 @@ version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev", "docs"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] +markers = {main = "(extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\") or (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "sniffio" @@ -5965,6 +6312,8 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -5976,6 +6325,7 @@ version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." optional = false python-versions = "*" +groups = ["docs"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -5987,10 +6337,12 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" +groups = ["main", "docs"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [[package]] name = "sphinx" @@ -5998,6 +6350,7 @@ version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, @@ -6034,6 +6387,7 @@ version = "0.5.2" description = "Add a copy button to each of your code cells." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, @@ -6052,6 +6406,7 @@ version = "0.5.0" description = "A sphinx extension for designing beautiful, view size responsive web components." 
optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "sphinx_design-0.5.0-py3-none-any.whl", hash = "sha256:1af1267b4cea2eedd6724614f19dcc88fe2e15aff65d06b2f6252cee9c4f4c1e"}, {file = "sphinx_design-0.5.0.tar.gz", hash = "sha256:e8e513acea6f92d15c6de3b34e954458f245b8e761b45b63950f65373352ab00"}, @@ -6075,6 +6430,7 @@ version = "1.0.1" description = "Sphinx Extension adding support for custom favicons" optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "sphinx-favicon-1.0.1.tar.gz", hash = "sha256:df796de32125609c1b4a8964db74270ebf4502089c27cd53f542354dc0b57e8e"}, {file = "sphinx_favicon-1.0.1-py3-none-any.whl", hash = "sha256:7c93d6b634cb4c9687ceab67a8526f05d3b02679df94e273e51a43282e6b034c"}, @@ -6094,6 +6450,7 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -6110,6 +6467,7 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -6126,6 +6484,7 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -6142,6 +6501,7 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" +groups = ["docs"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -6156,6 +6516,7 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -6172,6 +6533,7 @@ version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", 
hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -6188,6 +6550,7 @@ version = "2.0.38" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, @@ -6283,6 +6646,7 @@ version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = false python-versions = "*" +groups = ["dev", "docs"] files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -6302,6 +6666,8 @@ version = "1.13.1" description = "Computer algebra system (CAS) in Python" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "sympy-1.13.1-py3-none-any.whl", hash = "sha256:db36cdc64bf61b9b24578b6f7bab1ecdd2452cf008f34faa33776680c26d66f8"}, {file = "sympy-1.13.1.tar.gz", hash = "sha256:9cebf7e04ff162015ce31c9c6c9144daa34a93bd082f54fd8f12deca4f47515f"}, @@ -6319,6 +6685,7 @@ version = "0.9.0" description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" +groups = ["main", "docs"] files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -6333,6 +6700,7 @@ version = "9.0.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -6348,6 +6716,7 @@ version = "4.9.1" description = "Python library for throwaway instances of anything that can run in a Docker container" optional = false python-versions = "<4.0,>=3.9" +groups = ["dev"] files = [ {file = "testcontainers-4.9.1-py3-none-any.whl", hash = "sha256:315fb94b42a383872df530aa45319745278ef0cc18b9cfcdc231a75d14afa5a0"}, {file = "testcontainers-4.9.1.tar.gz", hash = "sha256:37fe9a222549ddb788463935965b16f91809e9a8d654f437d6a59eac9b77f76f"}, @@ -6401,6 +6770,8 @@ version = "3.5.0" description = "threadpoolctl" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -6412,6 +6783,7 @@ version = "1.4.0" description = "A tiny CSS parser" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, @@ 
-6430,6 +6802,8 @@ version = "0.21.0" description = "" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"sentence-transformers\" or extra == \"cohere\"" files = [ {file = "tokenizers-0.21.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2"}, {file = "tokenizers-0.21.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e"}, @@ -6462,6 +6836,8 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] +markers = "python_version <= \"3.10\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -6503,6 +6879,7 @@ version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, @@ -6514,6 +6891,8 @@ version = "2.6.0" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = true python-versions = ">=3.9.0" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "torch-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6860df13d9911ac158f4c44031609700e1eba07916fff62e21e6ffa0a9e01961"}, {file = "torch-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c4f103a49830ce4c7561ef4434cc7926e5a5fe4e5eb100c19ab36ea1e2b634ab"}, @@ -6570,6 +6949,7 @@ version = "6.4.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, @@ -6590,6 +6970,7 @@ version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" +groups = ["main"] files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -6611,6 +6992,7 @@ version = "5.14.3" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -6626,6 +7008,8 @@ version = "4.49.0" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = true python-versions = ">=3.9.0" +groups = ["main"] +markers = "extra == \"sentence-transformers\"" files = [ {file = "transformers-4.49.0-py3-none-any.whl", hash = "sha256:6b4fded1c5fee04d384b1014495b4235a2b53c87503d7d592423c06128cbbe03"}, {file = "transformers-4.49.0.tar.gz", hash = "sha256:7e40e640b5b8dc3f48743f5f5adbdce3660c82baafbd3afdfc04143cdbd2089e"}, @@ -6695,6 +7079,8 @@ version = "2.6" description = "Support tools for TREC CAR participants. Also see trec-car.cs.unh.edu" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "trec-car-tools-2.6.tar.gz", hash = "sha256:2fce2de120224fd569b151d5bed358a4ed334e643889b9e3dfe3e5a3d15d21c8"}, {file = "trec_car_tools-2.6-py3-none-any.whl", hash = "sha256:e6f0373259e1c234222da7270ab54ca7af7a6f8d0dd32b13e158c1659d3991cf"}, @@ -6710,6 +7096,8 @@ version = "3.2.0" description = "A language and compiler for custom Deep Learning operations" optional = true python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "triton-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e54983cd51875855da7c68ec05c05cf8bb08df361b1d5b69e05e40b0c9bd62"}, {file = "triton-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8009a1fb093ee8546495e96731336a33fb8856a38e45bb4ab6affd6dbc3ba220"}, @@ -6729,6 +7117,7 @@ version = "1.16.0.20241221" description = "Typing stubs for cffi" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_cffi-1.16.0.20241221-py3-none-any.whl", hash = "sha256:e5b76b4211d7a9185f6ab8d06a106d56c7eb80af7cdb8bfcb4186ade10fb112f"}, {file = "types_cffi-1.16.0.20241221.tar.gz", hash = "sha256:1c96649618f4b6145f58231acb976e0b448be6b847f7ab733dabe62dfbff6591"}, @@ -6743,6 +7132,7 @@ version = "24.1.0.20240722" description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, {file = 
"types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, @@ -6758,6 +7148,7 @@ version = "6.0.12.20241230" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -6769,6 +7160,7 @@ version = "4.6.0.20241004" description = "Typing stubs for redis" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, {file = "types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, @@ -6784,6 +7176,8 @@ version = "2.31.0.6" description = "Typing stubs for requests" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "python_version < \"3.10\" and extra == \"cohere\"" files = [ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, @@ -6798,6 +7192,8 @@ version = "2.32.0.20241016" description = "Typing stubs for requests" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"cohere\"" files = [ {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -6812,6 +7208,7 @@ version = "75.8.0.20250225" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "types_setuptools-75.8.0.20250225-py3-none-any.whl", hash = "sha256:94c86b439cc60bcc68c1cda3fd2c301f007f8f9502f4fbb54c66cb5ce9b875af"}, {file = "types_setuptools-75.8.0.20250225.tar.gz", hash = "sha256:6038f7e983d55792a5f90d8fdbf5d4c186026214a16bb65dd6ae83c624ae9636"}, @@ -6823,6 +7220,7 @@ version = "0.9.0.20241207" description = "Typing stubs for tabulate" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_tabulate-0.9.0.20241207-py3-none-any.whl", hash = "sha256:b8dad1343c2a8ba5861c5441370c3e35908edd234ff036d4298708a1d4cf8a85"}, {file = "types_tabulate-0.9.0.20241207.tar.gz", hash = "sha256:ac1ac174750c0a385dfd248edc6279fa328aaf4ea317915ab879a2ec47833230"}, @@ -6834,6 +7232,8 @@ version = "1.26.25.14" description = "Typing stubs for urllib3" optional = true python-versions = "*" +groups = ["main"] +markers = "python_version < \"3.10\" and extra == \"cohere\"" files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, @@ -6845,6 +7245,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev", "docs"] files = [ {file = 
"typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -6856,6 +7257,8 @@ version = "0.9.0" description = "Runtime inspection utilities for typing module." optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"mistralai\"" files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, @@ -6871,6 +7274,8 @@ version = "2025.1" description = "Provider of IANA time zone data" optional = true python-versions = ">=2" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, @@ -6882,6 +7287,8 @@ version = "0.2.3" description = "Pure Python decompression module for .Z files compressed using Unix compress utility" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "unlzw3-0.2.3-py3-none-any.whl", hash = "sha256:7760fb4f3afa1225623944c061991d89a061f7fb78665dbc4cddfdb562bb4a8b"}, {file = "unlzw3-0.2.3.tar.gz", hash = "sha256:ede5d928c792fff9da406f20334f9739693327f448f383ae1df1774627197bbb"}, @@ -6897,14 +7304,16 @@ version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main", "dev", "docs"] files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] +markers = {main = "python_version < \"3.10\" and (extra == \"bedrock\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\")", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -6913,13 +7322,15 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" +groups = ["main", "dev", "docs"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] +markers = {main = "python_version >= \"3.10\" and (extra == \"ranx\" or extra == \"bedrock\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\")", dev = "python_version >= \"3.10\"", docs = "python_version >= \"3.10\""} [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -6930,6 +7341,7 @@ version = "20.29.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, @@ -6942,7 +7354,7 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] [[package]] name = "voyageai" @@ -6950,6 +7362,8 @@ version = "0.2.4" description = "" optional = true python-versions = "<4.0.0,>=3.7.1" +groups = ["main"] +markers = "extra == \"voyageai\"" files = [ {file = "voyageai-0.2.4-py3-none-any.whl", hash = "sha256:e3070e5c78dec89adae43231334b4637aa88933dad99b1c33d3219fdfc94dfa4"}, {file = "voyageai-0.2.4.tar.gz", hash = "sha256:b9911d8629e8a4e363291c133482fead49a3536afdf1e735f3ab3aaccd8d250d"}, @@ -6968,6 +7382,8 @@ version = "0.2.5" description = "Python library to work with ARC and WARC files" optional = true python-versions = "*" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "warc3_wet-0.2.5-py3-none-any.whl", hash = "sha256:5a9a525383fb1af159734baa75f349a7c4ec7bccd1b938681b5748515d2bf624"}, {file = "warc3_wet-0.2.5.tar.gz", hash = "sha256:15e50402dabaa1e95307f1e2a6169cfd5f137b70761d9f0b16a10aa6de227970"}, @@ -6979,6 +7395,8 @@ version = "0.2.5" description = "Python library to work with ARC and WARC files, with fixes for ClueWeb09" optional = true python-versions = "*" +groups = ["main"] +markers = 
"python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "warc3-wet-clueweb09-0.2.5.tar.gz", hash = "sha256:3054bfc07da525d5967df8ca3175f78fa3f78514c82643f8c81fbca96300b836"}, ] @@ -6989,6 +7407,7 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["dev", "docs"] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -7000,6 +7419,7 @@ version = "0.5.1" description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" +groups = ["docs"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -7011,6 +7431,7 @@ version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -7099,6 +7520,8 @@ version = "1.18.3" description = "Yet another URL library" optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"voyageai\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -7195,17 +7618,19 @@ version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" +groups = ["dev", "docs"] files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] +markers = {dev = "python_version < \"3.10\""} [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [[package]] @@ -7214,6 +7639,8 @@ version = "0.1.9" description = "Low-level interface to the zlib library that enables capturing the decoding state" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = 
"zlib_state-0.1.9-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97f45d0f80e9d7070229ecb36112eea6a17dc40053449a9c613ef837d9cb66b4"}, {file = "zlib_state-0.1.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3564eaa130f2533b87b82d0e622cfb5c25acec123e7bfe38d39db9ce6349cb52"}, @@ -7256,6 +7683,6 @@ vertexai = ["google-cloud-aiplatform", "protobuf"] voyageai = ["voyageai"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.9,<3.14" -content-hash = "8be5a998fb20c7b99f19af0112aa1c2c7e981f802c26e7a3bce08eeb61dfb741" +content-hash = "f1e335904a4234779fa7566c44cb28423c4677285c0236b0e19cbd19fad9d003" diff --git a/redisvl/query/query.py b/redisvl/query/query.py index ae8a9e43..99f178d9 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, Iterable, List, Optional, Union from redis.commands.search.aggregation import AggregateRequest, Desc from redis.commands.search.query import Query as RedisQuery @@ -658,7 +658,7 @@ def __init__( self, text: str, text_field: str, - text_scorer: str = "BM25", + text_scorer: str = "BM25STD", filter_expression: Optional[Union[str, FilterExpression]] = None, return_fields: Optional[List[str]] = None, num_results: int = 10, @@ -667,6 +667,7 @@ def __init__( sort_by: Optional[str] = None, in_order: bool = False, params: Optional[Dict[str, Any]] = None, + stopwords: Optional[Union[str, Iterable[str]]] = "english", ): """A query for running a full text and vector search, along with an optional filter expression. @@ -675,7 +676,7 @@ def __init__( text (str): The text string to perform the text search with. text_field (str): The name of the document field to perform text search on. text_scorer (str, optional): The text scoring algorithm to use. - Defaults to BM25. Options are {TFIDF, BM25, DOCNORM, DISMAX, DOCSCORE}. + Defaults to BM25STD. Options are {TFIDF, BM25STD, BM25, DOCNORM, DISMAX, DOCSCORE}. See https://redis.io/docs/latest/develop/interact/search-and-query/advanced-concepts/scoring/ filter_expression (Union[str, FilterExpression], optional): A filter to apply along with the text search. Defaults to None. @@ -694,19 +695,23 @@ def __init__( the offsets between them. Defaults to False. params (Optional[Dict[str, Any]], optional): The parameters for the query. Defaults to None. - """ - import nltk - from nltk.corpus import stopwords - - nltk.download("stopwords") - self._stopwords = set(stopwords.words("english")) + stopwords (Optional[Union[str, Iterable[str]]): The set of stop words to remove + from the query text. If a language like 'english' or 'spanish' is provided + a default set of stopwords for that language will be used. Users may specify + their own stop words by providing a List or Set of words. if set to None, + then no words will be removed. Defaults to 'english'. + Raises: + ValueError: if stopwords language string cannot be loaded. + TypeError: If stopwords is not a valid iterable set of strings. 
+ """ self._text = text self._text_field = text_field self._text_scorer = text_scorer + self._num_results = num_results + self.set_stopwords(stopwords) self.set_filter(filter_expression) - self._num_results = num_results query_string = self._build_query_string() @@ -727,6 +732,27 @@ def __init__( if return_score: self.with_scores() + @property + def stopwords(self): + return self._stopwords + + def set_stopwords(self, stopwords: Optional[Union[str, Iterable[str]]] = "english"): + if not stopwords: + self._stopwords = set() + elif isinstance(stopwords, str): + try: + import nltk + from nltk.corpus import stopwords as nltk_stopwords + + nltk.download("stopwords") + self._stopwords = set(nltk_stopwords.words(stopwords)) + except Exception as e: + raise ValueError(f"Error trying to load {stopwords} from nltk. {e}") + elif isinstance(stopwords, Iterable) and isinstance(stopwords[0], str): + self._stopwords = set(stopwords) + else: + raise TypeError("stopwords must be an Iterable set of strings") + def tokenize_and_escape_query(self, user_query: str) -> str: """Convert a raw user query to a redis full text query joined by ORs""" from redisvl.utils.token_escaper import TokenEscaper @@ -751,7 +777,7 @@ def _build_query_string(self) -> str: else: filter_expression = "" - text = f"(~@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" + text = f"(@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" if filter_expression and filter_expression != "*": text += f"({filter_expression})" return text diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index e3ef5e96..2294605d 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -211,12 +211,12 @@ def test_text_query(): assert text_query._num_results == 10 assert ( text_query.filter - == f"(~@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" ) assert isinstance(text_query, Query) assert isinstance(text_query.query, Query) assert isinstance(text_query.params, dict) - assert text_query._text_scorer == "BM25" + assert text_query._text_scorer == "BM25STD" assert text_query.params == {} assert text_query._dialect == 2 assert text_query._in_order == False @@ -253,6 +253,30 @@ def test_text_query(): ) assert text_query._in_order + # Test stopwords are configurable + text_query = TextQuery(text_string, text_field_name, stopwords=None) + assert text_query.stopwords == set([]) + assert ( + text_query.filter + == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + ) + + text_query = TextQuery(text_string, text_field_name, stopwords=["the", "a", "of"]) + assert text_query.stopwords == set(["the", "a", "of"]) + assert ( + text_query.filter + == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + ) + + text_query = TextQuery(text_string, text_field_name, stopwords="german") + assert text_query.stopwords != set([]) + + with pytest.raises(ValueError): + text_query = TextQuery(text_string, text_field_name, stopwords="gibberish") + + with pytest.raises(TypeError): + text_query = TextQuery(text_string, text_field_name, stopwords=[1, 2, 3]) + def test_hybrid_query(): pass From f7a4b9ebafc32f72dd1680d8812c899559f799c5 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Tue, 1 Apr 2025 12:23:28 -0700 Subject: [PATCH 06/26] adds hybrid aggregation query and tests. 
modifies search index to accept aggregate queries --- redisvl/index/index.py | 75 ++++++++- redisvl/query/__init__.py | 5 +- redisvl/query/aggregate.py | 221 ++++++++++++++++++++++++++ redisvl/query/query.py | 33 ++-- tests/conftest.py | 7 + tests/integration/test_aggregation.py | 184 +++++++++++++++++++++ tests/unit/test_aggregation_types.py | 118 ++++++++++++++ tests/unit/test_query_types.py | 13 +- 8 files changed, 617 insertions(+), 39 deletions(-) create mode 100644 redisvl/query/aggregate.py create mode 100644 tests/integration/test_aggregation.py create mode 100644 tests/unit/test_aggregation_types.py diff --git a/redisvl/index/index.py b/redisvl/index/index.py index 9bec205c..2186a907 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -16,6 +16,7 @@ Union, ) +from redisvl.redis.utils import convert_bytes, make_dict from redisvl.utils.utils import deprecated_argument, deprecated_function, sync_wrapper if TYPE_CHECKING: @@ -30,7 +31,13 @@ from redisvl.exceptions import RedisModuleVersionError, RedisSearchError from redisvl.index.storage import BaseStorage, HashStorage, JsonStorage -from redisvl.query import BaseQuery, CountQuery, FilterQuery +from redisvl.query import ( + AggregationQuery, + BaseQuery, + CountQuery, + FilterQuery, + HybridAggregationQuery, +) from redisvl.query.filter import FilterExpression from redisvl.redis.connection import ( RedisConnectionFactory, @@ -101,6 +108,34 @@ def _process(doc: "Document") -> Dict[str, Any]: return [_process(doc) for doc in results.docs] +def process_aggregate_results( + results: "AggregateResult", query: AggregationQuery, storage_type: StorageType +) -> List[Dict[str, Any]]: + """Convert an aggregate result object into a list of document dictionaries. + + This function processes results from Redis, handling different storage + types and query types. For JSON storage with empty return fields, it + unpacks the JSON object while retaining the document ID. The 'payload' + field is also removed from all resulting documents for consistency. + + Args: + results (AggregateResult): The aggregate results from Redis. + query (AggregationQuery): The aggregation query object used for the aggregation. + storage_type (StorageType): The storage type of the search + index (json or hash). + + Returns: + List[Dict[str, Any]]: A list of processed document dictionaries. + """ + + def _process(row): + result = make_dict(convert_bytes(row)) + result.pop("__score", None) + return result + + return [_process(r) for r in results.rows] + + class BaseSearchIndex: """Base search engine class""" @@ -628,6 +663,44 @@ def fetch(self, id: str) -> Optional[Dict[str, Any]]: return convert_bytes(obj[0]) return None + def aggregate_query( + self, aggregation_query: AggregationQuery + ) -> List[Dict[str, Any]]: + """Execute an aggregation query and process the results. + + This method takes an AggregationQuery object directly, runs the search, and + handles post-processing of the results. + + Args: + aggregation_query (AggregationQuery): The aggregation query to run. + + Returns: + List[Dict[str, Any]]: A list of search results. + + ..
code-block:: python + + from redisvl.query import HybridAggregationQuery + + aggregation = HybridAggregationQuery( + text="the text to search for", + text_field="description", + vector=[0.16, -0.34, 0.98, 0.23], + vector_field="embedding", + num_results=3 + ) + + results = index.aggregate_query(aggregation) + + """ + results = self.aggregate( + aggregation_query, query_params=aggregation_query.params + ) + return process_aggregate_results( + results, + query=aggregation_query, + storage_type=self.schema.index.storage_type, + ) + def aggregate(self, *args, **kwargs) -> "AggregateResult": """Perform an aggregation operation against the index. diff --git a/redisvl/query/__init__.py b/redisvl/query/__init__.py index 900202ca..13b3fb02 100644 --- a/redisvl/query/__init__.py +++ b/redisvl/query/__init__.py @@ -1,8 +1,8 @@ +from redisvl.query.aggregate import AggregationQuery, HybridAggregationQuery from redisvl.query.query import ( BaseQuery, CountQuery, FilterQuery, - HybridQuery, RangeQuery, TextQuery, VectorQuery, @@ -17,5 +17,6 @@ "VectorRangeQuery", "CountQuery", "TextQuery", - "HybridQuery", + "AggregationQuery", + "HybridAggregationQuery", ] diff --git a/redisvl/query/aggregate.py b/redisvl/query/aggregate.py new file mode 100644 index 00000000..067a9056 --- /dev/null +++ b/redisvl/query/aggregate.py @@ -0,0 +1,221 @@ +from typing import Any, Dict, List, Optional, Set, Tuple, Union + +import nltk +from nltk.corpus import stopwords as nltk_stopwords +from redis.commands.search.aggregation import AggregateRequest, Desc + +from redisvl.query.filter import FilterExpression +from redisvl.redis.utils import array_to_buffer +from redisvl.utils.token_escaper import TokenEscaper + + +# base class +class AggregationQuery(AggregateRequest): + """ + Base class for aggregation queries run against Redis. + """ + + def __init__(self, query_string): + super().__init__(query_string) + + +class HybridAggregationQuery(AggregationQuery): + """ + HybridAggregationQuery combines text and vector search in Redis. + It allows you to perform a hybrid search using both text and vector similarity. + It scores documents based on a weighted combination of text and vector similarity. + """ + + DISTANCE_ID: str = "vector_distance" + VECTOR_PARAM: str = "vector" + + def __init__( + self, + text: str, + text_field: str, + vector: Union[bytes, List[float]], + vector_field: str, + text_scorer: str = "BM25STD", + filter_expression: Optional[Union[str, FilterExpression]] = None, + alpha: float = 0.7, + dtype: str = "float32", + num_results: int = 10, + return_fields: Optional[List[str]] = None, + stopwords: Optional[Union[str, Set[str]]] = "english", + dialect: int = 4, + ): + """ + Instantiates a HybridAggregationQuery object. + + Args: + text (str): The text to search for. + text_field (str): The text field name to search in. + vector (Union[bytes, List[float]]): The vector to perform vector similarity search. + vector_field (str): The vector field name to search in. + text_scorer (str, optional): The text scorer to use. Options are {TFIDF, TFIDF.DOCNORM, + BM25, DISMAX, DOCSCORE, BM25STD}. Defaults to "BM25STD". + filter_expression (Optional[FilterExpression], optional): The filter expression to use. + Defaults to None. + alpha (float, optional): The weight of the vector similarity. Documents will be scored + as: hybrid_score = (alpha) * vector_score + (1-alpha) * text_score. + Defaults to 0.7. + dtype (str, optional): The data type of the vector. Defaults to "float32".
+ num_results (int, optional): The number of results to return. Defaults to 10. + return_fields (Optional[List[str]], optional): The fields to return. Defaults to None. + stopwords (Optional[Union[str, Set[str]]], optional): The stopwords to remove from the + provided text prior to searching. If a string such as "english" or "german" is + provided then a default set of stopwords for that language will be used. If a list, + set, or tuple of strings is provided then those will be used as stopwords. + Defaults to "english". If set to None then no stopwords will be removed. + dialect (int, optional): The Redis dialect version. Defaults to 4. + + Raises: + ValueError: If the text string is empty, or if the text string becomes empty after + stopwords are removed. + TypeError: If the stopwords are not a set, list, or tuple of strings. + + .. code-block:: python + from redisvl.query.aggregate import HybridAggregationQuery + from redisvl.index import SearchIndex + + index = SearchIndex("my_index") + + query = HybridAggregationQuery( + text="example text", + text_field="text_field", + vector=[0.1, 0.2, 0.3], + vector_field="vector_field", + text_scorer="BM25STD", + filter_expression=None, + alpha=0.7, + dtype="float32", + num_results=10, + return_fields=["field1", "field2"], + stopwords="english", + dialect=4, + ) + + results = index.aggregate_query(query) + """ + + if not text.strip(): + raise ValueError("text string cannot be empty") + + self._text = text + self._text_field = text_field + self._vector = vector + self._vector_field = vector_field + self._filter_expression = filter_expression + self._alpha = alpha + self._dtype = dtype + self._num_results = num_results + self.set_stopwords(stopwords) + + query_string = self._build_query_string() + super().__init__(query_string) + + self.scorer(text_scorer) + self.add_scores() + self.apply( + vector_similarity=f"(2 - @{self.DISTANCE_ID})/2", text_score="@__score" + ) + self.apply(hybrid_score=f"{1-alpha}*@text_score + {alpha}*@vector_similarity") + self.sort_by(Desc("@hybrid_score"), max=num_results) + self.dialect(dialect) + + if return_fields: + self.load(*return_fields) + + @property + def params(self) -> Dict[str, Any]: + """Return the parameters for the aggregation. + + Returns: + Dict[str, Any]: The parameters for the aggregation. + """ + if isinstance(self._vector, bytes): + vector = self._vector + else: + vector = array_to_buffer(self._vector, dtype=self._dtype) + + params = {self.VECTOR_PARAM: vector} + + return params + + @property + def stopwords(self) -> Set[str]: + """Return the stopwords used in the query. + Returns: + Set[str]: The stopwords used in the query. + """ + return self._stopwords.copy() if self._stopwords else set() + + def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): + """Set the stopwords to use in the query. + Args: + stopwords (Optional[Union[str, Set[str]]]): The stopwords to use. If a string + such as "english" or "german" is provided then a default set of stopwords for that + language will be used. If a list, set, or tuple of strings is provided then those + will be used as stopwords. Defaults to "english". If set to None then no stopwords + will be removed. + Raises: + TypeError: If the stopwords are not a set, list, or tuple of strings.
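To make the alpha weighting documented above concrete, here is a small worked sketch using the same expressions the constructor applies (the input numbers are made up):

.. code-block:: python

    alpha = 0.7
    vector_distance = 0.2                          # cosine distance returned by the KNN clause
    vector_similarity = (2 - vector_distance) / 2  # 0.9, mirrors the apply() expression
    text_score = 3.1                               # BM25STD score exposed as @__score
    hybrid_score = (1 - alpha) * text_score + alpha * vector_similarity
    # 0.3 * 3.1 + 0.7 * 0.9 = 1.56; results are sorted by hybrid_score, descending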
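And a hedged sketch of overriding the stopword handling at construction time, mirroring the integration test added later in this patch (the field names follow that test fixture):

.. code-block:: python

    query = HybridAggregationQuery(
        text="a medical professional with expertise in lung cancer",
        text_field="description",
        vector=[0.1, 0.1, 0.5],
        vector_field="user_embedding",
        alpha=0.5,
        stopwords=["medical", "expertise"],  # these tokens are dropped before the text clause is built
    )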
+ """ + if not stopwords: + self._stopwords = set() + elif isinstance(stopwords, str): + try: + nltk.download("stopwords") + self._stopwords = set(nltk_stopwords.words(stopwords)) + except Exception as e: + raise ValueError(f"Error trying to load {stopwords} from nltk. {e}") + elif isinstance(stopwords, (Set, List, Tuple)) and all( # type: ignore + isinstance(word, str) for word in stopwords + ): + self._stopwords = set(stopwords) + else: + raise TypeError("stopwords must be a set, list, or tuple of strings") + + def tokenize_and_escape_query(self, user_query: str) -> str: + """Convert a raw user query to a redis full text query joined by ORs + Args: + user_query (str): The user query to tokenize and escape. + + Returns: + str: The tokenized and escaped query string. + Raises: + ValueError: If the text string becomes empty after stopwords are removed. + """ + + escaper = TokenEscaper() + + tokens = [ + escaper.escape( + token.strip().strip(",").replace("“", "").replace("”", "").lower() + ) + for token in user_query.split() + ] + tokenized = " | ".join( + [token for token in tokens if token and token not in self._stopwords] + ) + + if not tokenized: + raise ValueError("text string cannot be empty after removing stopwords") + return tokenized + + def _build_query_string(self) -> str: + """Build the full query string for text search with optional filtering.""" + if isinstance(self._filter_expression, FilterExpression): + filter_expression = str(self._filter_expression) + else: + filter_expression = "" + + # base KNN query + knn_query = f"KNN {self._num_results} @{self._vector_field} ${self.VECTOR_PARAM} AS {self.DISTANCE_ID}" + + text = f"(~@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" + + if filter_expression and filter_expression != "*": + text += f"({filter_expression})" + + return f"{text}=>[{knn_query}]" diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 99f178d9..a9d334bc 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -1,11 +1,11 @@ from enum import Enum -from typing import Any, Dict, Iterable, List, Optional, Union +from typing import Any, Dict, List, Optional, Set, Tuple, Union -from redis.commands.search.aggregation import AggregateRequest, Desc from redis.commands.search.query import Query as RedisQuery from redisvl.query.filter import FilterExpression from redisvl.redis.utils import array_to_buffer +from redisvl.utils.token_escaper import TokenEscaper class BaseQuery(RedisQuery): @@ -667,7 +667,7 @@ def __init__( sort_by: Optional[str] = None, in_order: bool = False, params: Optional[Dict[str, Any]] = None, - stopwords: Optional[Union[str, Iterable[str]]] = "english", + stopwords: Optional[Union[str, Set[str]]] = "english", ): """A query for running a full text and vector search, along with an optional filter expression. @@ -695,7 +695,7 @@ def __init__( the offsets between them. Defaults to False. params (Optional[Dict[str, Any]], optional): The parameters for the query. Defaults to None. - stopwords (Optional[Union[str, Iterable[str]]): The set of stop words to remove + stopwords (Optional[Union[str, Set[str]]): The set of stop words to remove from the query text. If a language like 'english' or 'spanish' is provided a default set of stopwords for that language will be used. Users may specify their own stop words by providing a List or Set of words. 
if set to None, @@ -736,7 +736,7 @@ def __init__( def stopwords(self): return self._stopwords - def set_stopwords(self, stopwords: Optional[Union[str, Iterable[str]]] = "english"): + def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): if not stopwords: self._stopwords = set() elif isinstance(stopwords, str): @@ -748,14 +748,15 @@ def set_stopwords(self, stopwords: Optional[Union[str, Iterable[str]]] = "englis self._stopwords = set(nltk_stopwords.words(stopwords)) except Exception as e: raise ValueError(f"Error trying to load {stopwords} from nltk. {e}") - elif isinstance(stopwords, Iterable) and isinstance(stopwords[0], str): + elif isinstance(stopwords, (Set, List, Tuple)) and all( # type: ignore + isinstance(word, str) for word in stopwords + ): self._stopwords = set(stopwords) else: - raise TypeError("stopwords must be an Iterable set of strings") + raise TypeError("stopwords must be a set, list, or tuple of strings") def tokenize_and_escape_query(self, user_query: str) -> str: """Convert a raw user query to a redis full text query joined by ORs""" - from redisvl.utils.token_escaper import TokenEscaper escaper = TokenEscaper() @@ -781,19 +782,3 @@ def _build_query_string(self) -> str: if filter_expression and filter_expression != "*": text += f"({filter_expression})" return text - - -class HybridQuery(AggregateRequest): - def __init__( - self, text_query: TextQuery, vector_query: VectorQuery, alpha: float = 0.7 - ): - """An aggregate query for running a hybrid full text and vector search. - - Args: - text_query (TextQuery): The text query to run text search with. - vector_query (VectorQuery): The vector query to run vector search with. - alpha (float, optional): The amount to weight the vector similarity - score relative to the text similarity score. 
Defaults to 0.7 - - """ - pass diff --git a/tests/conftest.py b/tests/conftest.py index 24da05e5..4d5483e4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -84,6 +84,7 @@ def sample_data(sample_datetimes): "user": "john", "age": 18, "job": "engineer", + "description": "engineers conduct trains that ride on train tracks", "last_updated": sample_datetimes["low"].timestamp(), "credit_score": "high", "location": "-122.4194,37.7749", @@ -93,6 +94,7 @@ def sample_data(sample_datetimes): "user": "mary", "age": 14, "job": "doctor", + "description": "a medical professional who treats diseases and helps people stay healthy", "last_updated": sample_datetimes["low"].timestamp(), "credit_score": "low", "location": "-122.4194,37.7749", @@ -102,6 +104,7 @@ def sample_data(sample_datetimes): "user": "nancy", "age": 94, "job": "doctor", + "description": "a research scientist specializing in cancers and diseases of the lungs", "last_updated": sample_datetimes["mid"].timestamp(), "credit_score": "high", "location": "-122.4194,37.7749", @@ -111,6 +114,7 @@ def sample_data(sample_datetimes): "user": "tyler", "age": 100, "job": "engineer", + "description": "a software developer with expertise in mathematics and computer science", "last_updated": sample_datetimes["mid"].timestamp(), "credit_score": "high", "location": "-110.0839,37.3861", @@ -120,6 +124,7 @@ def sample_data(sample_datetimes): "user": "tim", "age": 12, "job": "dermatologist", + "description": "a medical professional specializing in diseases of the skin", "last_updated": sample_datetimes["mid"].timestamp(), "credit_score": "high", "location": "-110.0839,37.3861", @@ -129,6 +134,7 @@ def sample_data(sample_datetimes): "user": "taimur", "age": 15, "job": "CEO", + "description": "high stress, but financially rewarding position at the head of a company", "last_updated": sample_datetimes["high"].timestamp(), "credit_score": "low", "location": "-110.0839,37.3861", @@ -138,6 +144,7 @@ def sample_data(sample_datetimes): "user": "joe", "age": 35, "job": "dentist", + "description": "like the tooth fairy because they'll take your teeth, but you have to pay them!", "last_updated": sample_datetimes["high"].timestamp(), "credit_score": "medium", "location": "-110.0839,37.3861", diff --git a/tests/integration/test_aggregation.py b/tests/integration/test_aggregation.py new file mode 100644 index 00000000..46c07474 --- /dev/null +++ b/tests/integration/test_aggregation.py @@ -0,0 +1,184 @@ +from datetime import timedelta + +import pytest +from redis.commands.search.aggregation import AggregateResult +from redis.commands.search.result import Result + +from redisvl.index import SearchIndex +from redisvl.query import HybridAggregationQuery +from redisvl.query.filter import ( + FilterExpression, + Geo, + GeoRadius, + Num, + Tag, + Text, + Timestamp, +) +from redisvl.redis.utils import array_to_buffer + +# TODO expand to multiple schema types and sync + async + +vector = ([0.1, 0.1, 0.5],) +vector_field_name = ("user_embedding",) +return_fields = ( + [ + "user", + "credit_score", + "age", + "job", + "location", + "last_updated", + ], +) +filter_expression = (Tag("credit_score") == "high",) +distance_threshold = (0.2,) + + +@pytest.fixture +def index(sample_data, redis_url): + # construct a search index from the schema + index = SearchIndex.from_dict( + { + "index": { + "name": "user_index", + "prefix": "v1", + "storage_type": "hash", + }, + "fields": [ + {"name": "credit_score", "type": "tag"}, + {"name": "job", "type": "text"}, + {"name": "description", 
"type": "text"}, + {"name": "age", "type": "numeric"}, + {"name": "last_updated", "type": "numeric"}, + {"name": "location", "type": "geo"}, + { + "name": "user_embedding", + "type": "vector", + "attrs": { + "dims": 3, + "distance_metric": "cosine", + "algorithm": "flat", + "datatype": "float32", + }, + }, + ], + }, + redis_url=redis_url, + ) + + # create the index (no data yet) + index.create(overwrite=True) + + # Prepare and load the data + def hash_preprocess(item: dict) -> dict: + return { + **item, + "user_embedding": array_to_buffer(item["user_embedding"], "float32"), + } + + index.load(sample_data, preprocess=hash_preprocess) + + # run the test + yield index + + # clean up + index.delete(drop=True) + + +def test_aggregation_query(index): + # *=>[KNN 7 @user_embedding $vector AS vector_distance] + text = "a medical professional with expertise in lung cancer" + text_field = "description" + vector = [0.1, 0.1, 0.5] + vector_field = "user_embedding" + return_fields = ["user", "credit_score", "age", "job", "location", "description"] + + hybrid_query = HybridAggregationQuery( + text=text, + text_field=text_field, + vector=vector, + vector_field=vector_field, + return_fields=return_fields, + ) + + results = index.aggregate_query(hybrid_query) + assert isinstance(results, list) + assert len(results) == 7 + for doc in results: + # ensure all return fields present + assert doc["user"] in [ + "john", + "derrick", + "nancy", + "tyler", + "tim", + "taimur", + "joe", + "mary", + ] + assert int(doc["age"]) in [18, 14, 94, 100, 12, 15, 35] + assert doc["job"] in ["engineer", "doctor", "dermatologist", "CEO", "dentist"] + assert doc["credit_score"] in ["high", "low", "medium"] + + # test num_results + hybrid_query = HybridAggregationQuery( + text=text, + text_field=text_field, + vector=vector, + vector_field=vector_field, + num_results=3, + ) + + results = index.aggregate_query(hybrid_query) + assert len(results) == 3 + assert ( + results[0]["hybrid_score"] + >= results[1]["hybrid_score"] + >= results[2]["hybrid_score"] + ) + + +def test_empty_query_string(index): + text = "" + text_field = "description" + vector = [0.1, 0.1, 0.5] + vector_field = "user_embedding" + return_fields = ["user", "credit_score", "age", "job", "location", "description"] + + # test if text is empty + with pytest.raises(ValueError): + hybrid_query = HybridAggregationQuery( + text=text, text_field=text_field, vector=vector, vector_field=vector_field + ) + + # test if text becomes empty after stopwords are removed + text = "with a for but and" # will all be removed as default stopwords + with pytest.raises(ValueError): + hybrid_query = HybridAggregationQuery( + text=text, text_field=text_field, vector=vector, vector_field=vector_field + ) + + +def test_aggregate_query_stopwords(index): + text = "a medical professional with expertise in lung cancer" + text_field = "description" + vector = [0.1, 0.1, 0.5] + vector_field = "user_embedding" + return_fields = ["user", "credit_score", "age", "job", "location", "description"] + return + # test num_results + hybrid_query = HybridAggregationQuery( + text=text, + text_field=text_field, + vector=vector, + vector_field=vector_field, + alpha=0.5, + stopwords=["medical", "expertise"], + ) + + results = index.aggregate_query(hybrid_query) + assert len(results) == 7 + for r in results: + assert r["text_score"] == 0 + assert r["hybrid_score"] == 0.5 * r["vector_similarity"] diff --git a/tests/unit/test_aggregation_types.py b/tests/unit/test_aggregation_types.py new file mode 100644 index 
00000000..a128ffc1 --- /dev/null +++ b/tests/unit/test_aggregation_types.py @@ -0,0 +1,118 @@ +import pytest +from redis.commands.search.aggregation import AggregateRequest +from redis.commands.search.query import Query +from redis.commands.search.result import Result + +from redisvl.index.index import process_results +from redisvl.query.aggregate import HybridAggregationQuery +from redisvl.query.filter import Tag + +# Sample data for testing +sample_vector = [0.1, 0.2, 0.3, 0.4] +sample_text = "the toon squad play basketball against a gang of aliens" + + +# Test Cases + + +def test_aggregate_hybrid_query(): + text_field_name = "description" + vector_field_name = "embedding" + + hybrid_query = HybridAggregationQuery( + text=sample_text, + text_field=text_field_name, + vector=sample_vector, + vector_field=vector_field_name, + ) + + assert isinstance(hybrid_query, AggregateRequest) + + # Check defaut properties + assert hybrid_query._text == sample_text + assert hybrid_query._text_field == text_field_name + assert hybrid_query._vector == sample_vector + assert hybrid_query._vector_field == vector_field_name + assert hybrid_query._scorer == "BM25STD" + # assert ( + # hybrid_query.filter + # == f"(@{text_field_name}:({hybrid_query.tokenize_and_escape_query(text_string)}))" + # ) + assert hybrid_query._filter_expression == None + assert hybrid_query._alpha == 0.7 + assert hybrid_query._num_results == 10 + assert hybrid_query._loadfields == [] + assert hybrid_query._dialect == 4 + + # Check specifying properties + scorer = "TFIDF" + filter_expression = Tag("genre") == "comedy" + alpha = 0.5 + num_results = 8 + return_fields = ["title", "genre", "rating"] + stopwords = [] + dialect = 2 + + hybrid_query = HybridAggregationQuery( + text=sample_text, + text_field=text_field_name, + vector=sample_vector, + vector_field=vector_field_name, + text_scorer=scorer, + filter_expression=filter_expression, + alpha=alpha, + num_results=num_results, + return_fields=return_fields, + stopwords=stopwords, + dialect=dialect, + ) + + assert hybrid_query._text == sample_text + assert hybrid_query._text_field == text_field_name + assert hybrid_query._vector == sample_vector + assert hybrid_query._vector_field == vector_field_name + assert hybrid_query._scorer == scorer + # assert ( + # hybrid_query.filter + # == f"(@{text_field_name}:({hybrid_query.tokenize_and_escape_query(text_string)}))" + # ) + assert hybrid_query._filter_expression == filter_expression + assert hybrid_query._alpha == 0.5 + assert hybrid_query._num_results == 8 + assert hybrid_query._loadfields == return_fields + assert hybrid_query._dialect == 2 + assert hybrid_query.stopwords == set() + + return + + # Test stopwords are configurable + hybrid_query = HybridAggregationQuery(text_string, text_field_name, stopwords=None) + assert hybrid_query.stopwords == set([]) + # assert ( + # hyrid_query.filter + # == f"(@{text_field_name}:({hyrid_query.tokenize_and_escape_query(text_string)}))" + # ) + + hyrid_query = HybridAggregationQuery( + text_string, text_field_name, stopwords=["the", "a", "of"] + ) + assert hybrid_query.stopwords == set(["the", "a", "of"]) + assert ( + hybrid_query.filter + == f"(@{text_field_name}:({hybrid_query.tokenize_and_escape_query(text_string)}))" + ) + + hybrid_query = HybridAggregationQuery( + text_string, text_field_name, stopwords="german" + ) + assert hybrid_query.stopwords != set([]) + + with pytest.raises(ValueError): + hybrid_query = HybridAggregationQuery( + text_string, text_field_name, stopwords="gibberish" + ) + + 
with pytest.raises(TypeError): + hybrid_query = HybridAggregationQuery( + text_string, text_field_name, stopwords=[1, 2, 3] + ) diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index 2294605d..dea7d113 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -3,14 +3,7 @@ from redis.commands.search.result import Result from redisvl.index.index import process_results -from redisvl.query import ( - CountQuery, - FilterQuery, - HybridQuery, - RangeQuery, - TextQuery, - VectorQuery, -) +from redisvl.query import CountQuery, FilterQuery, RangeQuery, TextQuery, VectorQuery from redisvl.query.filter import Tag from redisvl.query.query import VectorRangeQuery @@ -278,10 +271,6 @@ def test_text_query(): text_query = TextQuery(text_string, text_field_name, stopwords=[1, 2, 3]) -def test_hybrid_query(): - pass - - @pytest.mark.parametrize( "query", [ From 6e007f7b9a49a9d0b7b90820e18e8017b28e5a03 Mon Sep 17 00:00:00 2001 From: Andrew Brookins Date: Fri, 21 Mar 2025 05:41:05 -0700 Subject: [PATCH 07/26] Validate passed-in Redis clients (#296) Prior to RedisVL 0.4.0, we validated passed-in Redis clients when the user called `set_client()`. This PR reintroduces similar behavior by validating all clients, whether we created them or not, on first access through the lazy-client mechanism. Closes RAAE-694. --- redisvl/index/index.py | 13 +++++++- redisvl/redis/connection.py | 2 +- tests/integration/test_async_search_index.py | 34 +++++++++++++++++--- tests/integration/test_search_index.py | 28 +++++++++++++++- 4 files changed, 70 insertions(+), 7 deletions(-) diff --git a/redisvl/index/index.py b/redisvl/index/index.py index 2186a907..d9d9bba8 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -317,6 +317,7 @@ def __init__( self._connection_kwargs = connection_kwargs or {} self._lock = threading.Lock() + self._validated_client = False self._owns_redis_client = redis_client is None if self._owns_redis_client: weakref.finalize(self, self.disconnect) @@ -396,6 +397,12 @@ def _redis_client(self) -> Optional[redis.Redis]: redis_url=self._redis_url, **self._connection_kwargs, ) + if not self._validated_client: + RedisConnectionFactory.validate_sync_redis( + self.__redis_client, + self._lib_name, + ) + self._validated_client = True return self.__redis_client @deprecated_function("connect", "Pass connection parameters in __init__.") @@ -931,6 +938,7 @@ def __init__( self._connection_kwargs = connection_kwargs or {} self._lock = asyncio.Lock() + self._validated_client = False self._owns_redis_client = redis_client is None if self._owns_redis_client: weakref.finalize(self, sync_wrapper(self.disconnect)) @@ -1027,9 +1035,12 @@ async def _get_client(self) -> aredis.Redis: self._redis_client = ( await RedisConnectionFactory._get_aredis_connection(**kwargs) ) + if not self._validated_client: await RedisConnectionFactory.validate_async_redis( - self._redis_client, self._lib_name + self._redis_client, + self._lib_name, ) + self._validated_client = True return self._redis_client async def _validate_client( diff --git a/redisvl/redis/connection.py b/redisvl/redis/connection.py index a558fe4b..9690fb56 100644 --- a/redisvl/redis/connection.py +++ b/redisvl/redis/connection.py @@ -159,7 +159,7 @@ def validate_modules( required_modules: List of required modules. Raises: - ValueError: If required Redis modules are not installed. + RedisModuleVersionError: If required Redis modules are not installed. 
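A hedged sketch of what this validation looks like from the caller's side, based on the regression tests below (the schema dict and URL are illustrative):

.. code-block:: python

    from redis import Redis

    from redisvl.exceptions import RedisModuleVersionError
    from redisvl.index import SearchIndex
    from redisvl.schema import IndexSchema

    # assume this server is missing the required search module
    client = Redis.from_url("redis://localhost:6379")

    index = SearchIndex(
        schema=IndexSchema.from_dict(
            {"index": {"name": "my_index"}, "fields": [{"name": "test", "type": "tag"}]}
        ),
        redis_client=client,
    )

    try:
        # first access to the lazy client triggers validate_sync_redis()
        index.create(overwrite=True, drop=True)
    except RedisModuleVersionError:
        print("passed-in client is missing required Redis modules")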
""" required_modules = required_modules or DEFAULT_REQUIRED_MODULES diff --git a/tests/integration/test_async_search_index.py b/tests/integration/test_async_search_index.py index 2dd97e36..d1b42235 100644 --- a/tests/integration/test_async_search_index.py +++ b/tests/integration/test_async_search_index.py @@ -1,10 +1,11 @@ import warnings +from unittest import mock import pytest from redis import Redis as SyncRedis -from redis.asyncio import Redis +from redis.asyncio import Redis as AsyncRedis -from redisvl.exceptions import RedisSearchError +from redisvl.exceptions import RedisModuleVersionError, RedisSearchError from redisvl.index import AsyncSearchIndex from redisvl.query import VectorQuery from redisvl.redis.utils import convert_bytes @@ -172,12 +173,12 @@ async def test_search_index_set_client(client, redis_url, index_schema): with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=DeprecationWarning) await async_index.create(overwrite=True, drop=True) - assert isinstance(async_index.client, Redis) + assert isinstance(async_index.client, AsyncRedis) # Tests deprecated sync -> async conversion behavior assert isinstance(client, SyncRedis) await async_index.set_client(client) - assert isinstance(async_index.client, Redis) + assert isinstance(async_index.client, AsyncRedis) await async_index.disconnect() assert async_index.client is None @@ -410,3 +411,28 @@ async def test_search_index_that_owns_client_disconnect_sync(index_schema, redis await async_index.create(overwrite=True, drop=True) await async_index.disconnect() assert async_index._redis_client is None + + +@pytest.mark.asyncio +async def test_async_search_index_validates_redis_modules(redis_url): + """ + A regression test for RAAE-694: we should validate that a passed-in + Redis client has the correct modules installed. + """ + client = AsyncRedis.from_url(redis_url) + with mock.patch( + "redisvl.index.index.RedisConnectionFactory.validate_async_redis" + ) as mock_validate_async_redis: + mock_validate_async_redis.side_effect = RedisModuleVersionError( + "Required modules not installed" + ) + with pytest.raises(RedisModuleVersionError): + index = AsyncSearchIndex( + schema=IndexSchema.from_dict( + {"index": {"name": "my_index"}, "fields": fields} + ), + redis_client=client, + ) + await index.create(overwrite=True, drop=True) + + mock_validate_async_redis.assert_called_once() diff --git a/tests/integration/test_search_index.py b/tests/integration/test_search_index.py index fab4a591..368c048a 100644 --- a/tests/integration/test_search_index.py +++ b/tests/integration/test_search_index.py @@ -1,8 +1,10 @@ import warnings +from unittest import mock import pytest +from redis import Redis -from redisvl.exceptions import RedisSearchError +from redisvl.exceptions import RedisModuleVersionError, RedisSearchError from redisvl.index import SearchIndex from redisvl.query import VectorQuery from redisvl.redis.utils import convert_bytes @@ -363,3 +365,27 @@ def test_search_index_that_owns_client_disconnect(index_schema, redis_url): index.create(overwrite=True, drop=True) index.disconnect() assert index.client is None + + +def test_search_index_validates_redis_modules(redis_url): + """ + A regression test for RAAE-694: we should validate that a passed-in + Redis client has the correct modules installed. 
+ """ + client = Redis.from_url(redis_url) + with mock.patch( + "redisvl.index.index.RedisConnectionFactory.validate_sync_redis" + ) as mock_validate_sync_redis: + mock_validate_sync_redis.side_effect = RedisModuleVersionError( + "Required modules not installed" + ) + with pytest.raises(RedisModuleVersionError): + index = SearchIndex( + schema=IndexSchema.from_dict( + {"index": {"name": "my_index"}, "fields": fields} + ), + redis_client=client, + ) + index.create(overwrite=True, drop=True) + + mock_validate_sync_redis.assert_called_once() From 298d0557e6bb68b11e3736c47ea2fd4b92499ada Mon Sep 17 00:00:00 2001 From: Andrew Brookins Date: Fri, 28 Mar 2025 19:01:14 -0700 Subject: [PATCH 08/26] Add batch_search to sync Index (#305) Add `batch_search` and `batch_query` methods to `SearchIndex` and `AsyncSearchIndex`. These methods run search commands in a pipeline and return correctly-parsed and post-processed results. --- .github/workflows/test.yml | 100 ++++++++-- redisvl/index/index.py | 187 ++++++++++++++++++- tests/integration/test_async_search_index.py | 115 ++++++++++++ tests/integration/test_search_index.py | 107 +++++++++++ 4 files changed, 493 insertions(+), 16 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c174973a..cd4eafe7 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -17,14 +17,76 @@ env: POETRY_VERSION: "1.8.3" jobs: + prime-cache: + name: Prime HuggingFace Model Cache + runs-on: ubuntu-latest + env: + HF_HOME: ${{ github.workspace }}/hf_cache + steps: + - name: Check out repository + uses: actions/checkout@v3 + + - name: Cache HuggingFace Models + id: hf-cache + uses: actions/cache@v3 + with: + path: hf_cache + key: ${{ runner.os }}-hf-cache + + - name: Set HuggingFace token + run: | + mkdir -p ~/.huggingface + echo '{"token":"${{ secrets.HF_TOKEN }}"}' > ~/.huggingface/token + + - name: Set up Python 3.9 + uses: actions/setup-python@v4 + with: + python-version: 3.9 + cache: pip + + - name: Install Poetry + uses: snok/install-poetry@v1 + with: + version: ${{ env.POETRY_VERSION }} + + - name: Install dependencies + run: | + poetry install --all-extras + + - name: Authenticate to Google Cloud + uses: google-github-actions/auth@v1 + with: + credentials_json: ${{ secrets.GOOGLE_CREDENTIALS }} + + - name: Run full test suite to prime cache + env: + HF_TOKEN: ${{ secrets.HF_TOKEN }} + HF_HOME: ${{ github.workspace }}/hf_cache + OPENAI_API_KEY: ${{ secrets.OPENAI_KEY }} + GCP_LOCATION: ${{ secrets.GCP_LOCATION }} + GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }} + COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }} + MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }} + VOYAGE_API_KEY: ${{ secrets.VOYAGE_API_KEY }} + AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} + AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} + AZURE_OPENAI_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_DEPLOYMENT_NAME }} + OPENAI_API_VERSION: ${{ secrets.OPENAI_API_VERSION }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + run: | + make test-all + test: name: Python ${{ matrix.python-version }} - ${{ matrix.connection }} [redis ${{ matrix.redis-version }}] runs-on: ubuntu-latest - + needs: prime-cache + env: + HF_HOME: ${{ github.workspace }}/hf_cache strategy: fail-fast: false matrix: - python-version: [3.9, '3.10', 3.11, 3.12, 3.13] + python-version: ['3.10', '3.11', 3.12, 3.13] connection: ['hiredis', 'plain'] redis-version: ['6.2.6-v9', 'latest', '8.0-M03'] @@ -32,11 
+94,17 @@ jobs: - name: Check out repository uses: actions/checkout@v3 + - name: Cache HuggingFace Models + uses: actions/cache@v3 + with: + path: hf_cache + key: ${{ runner.os }}-hf-cache + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - cache: 'pip' + cache: pip - name: Install Poetry uses: snok/install-poetry@v1 @@ -68,22 +136,23 @@ jobs: - name: Run tests if: matrix.connection == 'plain' && matrix.redis-version == 'latest' env: + HF_HOME: ${{ github.workspace }}/hf_cache OPENAI_API_KEY: ${{ secrets.OPENAI_KEY }} GCP_LOCATION: ${{ secrets.GCP_LOCATION }} GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }} COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }} MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }} VOYAGE_API_KEY: ${{ secrets.VOYAGE_API_KEY }} - AZURE_OPENAI_API_KEY: ${{secrets.AZURE_OPENAI_API_KEY}} - AZURE_OPENAI_ENDPOINT: ${{secrets.AZURE_OPENAI_ENDPOINT}} - AZURE_OPENAI_DEPLOYMENT_NAME: ${{secrets.AZURE_OPENAI_DEPLOYMENT_NAME}} - OPENAI_API_VERSION: ${{secrets.OPENAI_API_VERSION}} + AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} + AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} + AZURE_OPENAI_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_DEPLOYMENT_NAME }} + OPENAI_API_VERSION: ${{ secrets.OPENAI_API_VERSION }} AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} run: | make test-all - - name: Run tests + - name: Run tests (alternate) if: matrix.connection != 'plain' || matrix.redis-version != 'latest' run: | make test @@ -91,16 +160,17 @@ jobs: - name: Run notebooks if: matrix.connection == 'plain' && matrix.redis-version == 'latest' env: + HF_HOME: ${{ github.workspace }}/hf_cache OPENAI_API_KEY: ${{ secrets.OPENAI_KEY }} GCP_LOCATION: ${{ secrets.GCP_LOCATION }} GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }} COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }} MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }} VOYAGE_API_KEY: ${{ secrets.VOYAGE_API_KEY }} - AZURE_OPENAI_API_KEY: ${{secrets.AZURE_OPENAI_API_KEY}} - AZURE_OPENAI_ENDPOINT: ${{secrets.AZURE_OPENAI_ENDPOINT}} - AZURE_OPENAI_DEPLOYMENT_NAME: ${{secrets.AZURE_OPENAI_DEPLOYMENT_NAME}} - OPENAI_API_VERSION: ${{secrets.OPENAI_API_VERSION}} + AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} + AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} + AZURE_OPENAI_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_DEPLOYMENT_NAME }} + OPENAI_API_VERSION: ${{ secrets.OPENAI_API_VERSION }} AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} run: | @@ -121,17 +191,17 @@ jobs: uses: actions/setup-python@v4 with: python-version: ${{ env.PYTHON_VERSION }} - cache: 'pip' + cache: pip - name: Install Poetry uses: snok/install-poetry@v1 with: version: ${{ env.POETRY_VERSION }} - + - name: Install dependencies run: | poetry install --all-extras - name: Build docs run: | - make docs-build \ No newline at end of file + make docs-build diff --git a/redisvl/index/index.py b/redisvl/index/index.py index d9d9bba8..65d736e3 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -1,6 +1,7 @@ import asyncio import json import threading +import time import warnings import weakref from typing import ( @@ -13,6 +14,7 @@ Iterable, List, Optional, + Tuple, Union, ) @@ -27,6 +29,8 @@ import redis import redis.asyncio as aredis +from redis.client import NEVER_DECODE +from redis.commands.helpers import get_protocol_version # type: 
ignore from redis.commands.search.indexDefinition import IndexDefinition from redisvl.exceptions import RedisModuleVersionError, RedisSearchError @@ -55,6 +59,14 @@ {"name": "searchlight", "ver": 20810}, ] +SearchParams = Union[ + Tuple[ + Union[str, BaseQuery], + Union[Dict[str, Union[str, int, float, bytes]], None], + ], + Union[str, BaseQuery], +] + def process_results( results: "Result", query: BaseQuery, storage_type: StorageType @@ -384,7 +396,7 @@ def client(self) -> Optional[redis.Redis]: return self.__redis_client @property - def _redis_client(self) -> Optional[redis.Redis]: + def _redis_client(self) -> redis.Redis: """ Get a Redis client instance. @@ -725,6 +737,73 @@ def aggregate(self, *args, **kwargs) -> "AggregateResult": except Exception as e: raise RedisSearchError(f"Error while aggregating: {str(e)}") from e + def batch_search( + self, + queries: List[SearchParams], + batch_size: int = 10, + ) -> List["Result"]: + """Perform a search against the index for multiple queries. + + This method takes a list of queries and optionally query params and + returns a list of Result objects for each query. Results are + returned in the same order as the queries. + + Args: + queries (List[SearchParams]): The queries to search for. batch_size + (int, optional): The number of queries to search for at a time. + Defaults to 10. + + Returns: + List[Result]: The search results for each query. + """ + all_parsed = [] + search = self._redis_client.ft(self.schema.index.name) + options = {} + if get_protocol_version(self._redis_client) not in ["3", 3]: + options[NEVER_DECODE] = True + + for i in range(0, len(queries), batch_size): + batch_queries = queries[i : i + batch_size] + + # redis-py doesn't support calling `search` in a pipeline, + # so we need to manually execute each command in a pipeline + # and parse the results + with self._redis_client.pipeline(transaction=False) as pipe: + batch_built_queries = [] + for query in batch_queries: + if isinstance(query, tuple): + query_args, q = search._mk_query_args( # type: ignore + query[0], query_params=query[1] + ) + else: + query_args, q = search._mk_query_args( # type: ignore + query, query_params=None + ) + batch_built_queries.append(q) + pipe.execute_command( + "FT.SEARCH", + *query_args, + **options, + ) + + st = time.time() + results = pipe.execute() + + # We don't know how long each query took, so we'll use the total time + # for all queries in the batch as the duration for each query + duration = (time.time() - st) * 1000.0 + + for i, query_results in enumerate(results): + _built_query = batch_built_queries[i] + parsed_result = search._parse_search( # type: ignore + query_results, + query=_built_query, + duration=duration, + ) + # Return a parsed Result object for each query + all_parsed.append(parsed_result) + return all_parsed + def search(self, *args, **kwargs) -> "Result": """Perform a search against the index. 
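A hedged usage sketch of the two batch helpers added above, modeled on the accompanying tests (index creation and data loading are assumed to have already happened):

.. code-block:: python

    from redisvl.query import FilterQuery

    # raw query strings, or (query, params) tuples, go through batch_search
    raw_results = index.batch_search(["@test:{foo}", "@test:{bar}"], batch_size=2)

    # BaseQuery objects go through batch_query and come back post-processed:
    # one list of result dicts per query, in the same order as the input
    queries = [
        FilterQuery(filter_expression="@test:{foo}"),
        FilterQuery(filter_expression="@test:{bar}"),
    ]
    parsed = index.batch_query(queries, batch_size=2)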
@@ -742,6 +821,26 @@ def search(self, *args, **kwargs) -> "Result": except Exception as e: raise RedisSearchError(f"Error while searching: {str(e)}") from e + def batch_query( + self, queries: List[BaseQuery], batch_size: int = 10 + ) -> List[List[Dict[str, Any]]]: + """Execute a batch of queries and process results.""" + results = self.batch_search( + [(query.query, query.params) for query in queries], batch_size=batch_size + ) + all_parsed = [] + for query, batch_results in zip(queries, results): + parsed = process_results( + batch_results, + query=query, + storage_type=self.schema.index.storage_type, + ) + # Create separate lists of parsed results for each query + # passed in to the batch_search method, so that callers can + # access the results for each query individually + all_parsed.append(parsed) + return all_parsed + def _query(self, query: BaseQuery) -> List[Dict[str, Any]]: """Execute a query and process results.""" results = self.search(query.query, query_params=query.params) @@ -1284,6 +1383,71 @@ async def aggregate(self, *args, **kwargs) -> "AggregateResult": except Exception as e: raise RedisSearchError(f"Error while aggregating: {str(e)}") from e + async def batch_search( + self, queries: List[SearchParams], batch_size: int = 10 + ) -> List["Result"]: + """Perform a search against the index for multiple queries. + + This method takes a list of queries and returns a list of Result objects + for each query. Results are returned in the same order as the queries. + + Args: + queries (List[SearchParams]): The queries to search for. batch_size + (int, optional): The number of queries to search for at a time. + Defaults to 10. + + Returns: + List[Result]: The search results for each query. + """ + all_results = [] + client = await self._get_client() + search = client.ft(self.schema.index.name) + options = {} + if get_protocol_version(client) not in ["3", 3]: + options[NEVER_DECODE] = True + + for i in range(0, len(queries), batch_size): + batch_queries = queries[i : i + batch_size] + + # redis-py doesn't support calling `search` in a pipeline, + # so we need to manually execute each command in a pipeline + # and parse the results + async with client.pipeline(transaction=False) as pipe: + batch_built_queries = [] + for query in batch_queries: + if isinstance(query, tuple): + query_args, q = search._mk_query_args( # type: ignore + query[0], query_params=query[1] + ) + else: + query_args, q = search._mk_query_args( # type: ignore + query, query_params=None + ) + batch_built_queries.append(q) + pipe.execute_command( + "FT.SEARCH", + *query_args, + **options, + ) + + st = time.time() + results = await pipe.execute() + + # We don't know how long each query took, so we'll use the total time + # for all queries in the batch as the duration for each query + duration = (time.time() - st) * 1000.0 + + for i, query_results in enumerate(results): + _built_query = batch_built_queries[i] + parsed_result = search._parse_search( # type: ignore + query_results, + query=_built_query, + duration=duration, + ) + # Return a parsed Result object for each query + all_results.append(parsed_result) + return all_results + async def search(self, *args, **kwargs) -> "Result": """Perform a search on this index. 
@@ -1300,6 +1464,27 @@ async def search(self, *args, **kwargs) -> "Result": except Exception as e: raise RedisSearchError(f"Error while searching: {str(e)}") from e + async def batch_query( + self, queries: List[BaseQuery], batch_size: int = 10 + ) -> List[List[Dict[str, Any]]]: + """Asynchronously execute a batch of queries and process results.""" + results = await self.batch_search( + [(query.query, query.params) for query in queries], batch_size=batch_size + ) + all_parsed = [] + for query, batch_results in zip(queries, results): + parsed = process_results( + batch_results, + query=query, + storage_type=self.schema.index.storage_type, + ) + # Create separate lists of parsed results for each query + # passed in to the batch_search method, so that callers can + # access the results for each query individually + all_parsed.append(parsed) + + return all_parsed + async def _query(self, query: BaseQuery) -> List[Dict[str, Any]]: """Asynchronously execute a query and process results.""" results = await self.search(query.query, query_params=query.params) diff --git a/tests/integration/test_async_search_index.py b/tests/integration/test_async_search_index.py index d1b42235..edc6c01a 100644 --- a/tests/integration/test_async_search_index.py +++ b/tests/integration/test_async_search_index.py @@ -8,6 +8,7 @@ from redisvl.exceptions import RedisModuleVersionError, RedisSearchError from redisvl.index import AsyncSearchIndex from redisvl.query import VectorQuery +from redisvl.query.query import FilterQuery from redisvl.redis.utils import convert_bytes from redisvl.schema import IndexSchema, StorageType @@ -436,3 +437,117 @@ async def test_async_search_index_validates_redis_modules(redis_url): await index.create(overwrite=True, drop=True) mock_validate_async_redis.assert_called_once() + + +@pytest.mark.asyncio +async def test_batch_search(async_index): + await async_index.create(overwrite=True, drop=True) + data = [{"id": "1", "test": "foo"}, {"id": "2", "test": "bar"}] + await async_index.load(data, id_field="id") + + results = await async_index.batch_search(["@test:{foo}", "@test:{bar}"]) + assert len(results) == 2 + assert results[0].total == 1 + assert results[0].docs[0]["id"] == "rvl:1" + assert results[1].total == 1 + assert results[1].docs[0]["id"] == "rvl:2" + + +@pytest.mark.parametrize( + "queries", + [ + [ + [ + FilterQuery(filter_expression="@test:{foo}"), + FilterQuery(filter_expression="@test:{bar}"), + ], + [ + FilterQuery(filter_expression="@test:{foo}"), + FilterQuery(filter_expression="@test:{bar}"), + FilterQuery(filter_expression="@test:{baz}"), + FilterQuery(filter_expression="@test:{foo}"), + FilterQuery(filter_expression="@test:{bar}"), + FilterQuery(filter_expression="@test:{baz}"), + ], + ], + [ + [ + "@test:{foo}", + "@test:{bar}", + ], + [ + "@test:{foo}", + "@test:{bar}", + "@test:{baz}", + "@test:{foo}", + "@test:{bar}", + "@test:{baz}", + ], + ], + ], +) +@pytest.mark.asyncio +async def test_batch_search_with_multiple_batches(async_index, queries): + await async_index.create(overwrite=True, drop=True) + data = [{"id": "1", "test": "foo"}, {"id": "2", "test": "bar"}] + await async_index.load(data, id_field="id") + + results = await async_index.batch_search(queries[0]) + assert len(results) == 2 + assert results[0].total == 1 + assert results[0].docs[0]["id"] == "rvl:1" + assert results[1].total == 1 + assert results[1].docs[0]["id"] == "rvl:2" + + results = await async_index.batch_search( + queries[1], + batch_size=2, + ) + assert len(results) == 6 + + # First (and only) 
result for the first query + assert results[0].total == 1 + assert results[0].docs[0]["id"] == "rvl:1" + + # Second (and only) result for the second query + assert results[1].total == 1 + assert results[1].docs[0]["id"] == "rvl:2" + + # Third query should have zero results because there is no baz + assert results[2].total == 0 + + # Then the pattern repeats + assert results[3].total == 1 + assert results[3].docs[0]["id"] == "rvl:1" + assert results[4].total == 1 + assert results[4].docs[0]["id"] == "rvl:2" + assert results[5].total == 0 + + +@pytest.mark.asyncio +async def test_batch_query(async_index): + await async_index.create(overwrite=True, drop=True) + data = [{"id": "1", "test": "foo"}, {"id": "2", "test": "bar"}] + await async_index.load(data, id_field="id") + + query = FilterQuery(filter_expression="@test:{foo}") + results = await async_index.batch_query([query]) + + assert len(results) == 1 + assert results[0][0]["id"] == "rvl:1" + + +@pytest.mark.asyncio +async def test_batch_query_with_multiple_batches(async_index): + await async_index.create(overwrite=True, drop=True) + data = [{"id": "1", "test": "foo"}, {"id": "2", "test": "bar"}] + await async_index.load(data, id_field="id") + + queries = [ + FilterQuery(filter_expression="@test:{foo}"), + FilterQuery(filter_expression="@test:{bar}"), + ] + results = await async_index.batch_query(queries, batch_size=1) + assert len(results) == 2 + assert results[0][0]["id"] == "rvl:1" + assert results[1][0]["id"] == "rvl:2" diff --git a/tests/integration/test_search_index.py b/tests/integration/test_search_index.py index 368c048a..800f6a06 100644 --- a/tests/integration/test_search_index.py +++ b/tests/integration/test_search_index.py @@ -7,6 +7,7 @@ from redisvl.exceptions import RedisModuleVersionError, RedisSearchError from redisvl.index import SearchIndex from redisvl.query import VectorQuery +from redisvl.query.query import FilterQuery from redisvl.redis.utils import convert_bytes from redisvl.schema import IndexSchema, StorageType @@ -389,3 +390,109 @@ def test_search_index_validates_redis_modules(redis_url): index.create(overwrite=True, drop=True) mock_validate_sync_redis.assert_called_once() + + +def test_batch_search(index): + index.create(overwrite=True, drop=True) + data = [{"id": "1", "test": "foo"}, {"id": "2", "test": "bar"}] + index.load(data, id_field="id") + + results = index.batch_search(["@test:{foo}", "@test:{bar}"]) + assert len(results) == 2 + assert results[0].total == 1 + assert results[0].docs[0]["id"] == "rvl:1" + assert results[1].total == 1 + assert results[1].docs[0]["id"] == "rvl:2" + + +@pytest.mark.parametrize( + "queries", + [ + [ + [ + FilterQuery(filter_expression="@test:{foo}"), + FilterQuery(filter_expression="@test:{bar}"), + ], + [ + FilterQuery(filter_expression="@test:{foo}"), + FilterQuery(filter_expression="@test:{bar}"), + FilterQuery(filter_expression="@test:{baz}"), + FilterQuery(filter_expression="@test:{foo}"), + FilterQuery(filter_expression="@test:{bar}"), + FilterQuery(filter_expression="@test:{baz}"), + ], + ], + [ + [ + "@test:{foo}", + "@test:{bar}", + ], + [ + "@test:{foo}", + "@test:{bar}", + "@test:{baz}", + "@test:{foo}", + "@test:{bar}", + "@test:{baz}", + ], + ], + ], +) +def test_batch_search_with_multiple_batches(index, queries): + index.create(overwrite=True, drop=True) + data = [{"id": "1", "test": "foo"}, {"id": "2", "test": "bar"}] + index.load(data, id_field="id") + + results = index.batch_search(queries[0]) + assert len(results) == 2 + assert results[0].total == 1 + assert 
results[0].docs[0]["id"] == "rvl:1" + assert results[1].total == 1 + assert results[1].docs[0]["id"] == "rvl:2" + + results = index.batch_search( + queries[1], + batch_size=2, + ) + assert len(results) == 6 + + # First (and only) result for the first query + assert results[0].docs[0]["id"] == "rvl:1" + + # Second (and only) result for the second query + assert results[1].docs[0]["id"] == "rvl:2" + + # Third query should have zero results because there is no baz + assert results[2].total == 0 + + # Then the pattern repeats + assert results[3].docs[0]["id"] == "rvl:1" + assert results[4].docs[0]["id"] == "rvl:2" + assert results[5].total == 0 + + +def test_batch_query(index): + index.create(overwrite=True, drop=True) + data = [{"id": "1", "test": "foo"}, {"id": "2", "test": "bar"}] + index.load(data, id_field="id") + + query = FilterQuery(filter_expression="@test:{foo}") + results = index.batch_query([query]) + + assert len(results) == 1 + assert results[0][0]["id"] == "rvl:1" + + +def test_batch_query_with_multiple_batches(index): + index.create(overwrite=True, drop=True) + data = [{"id": "1", "test": "foo"}, {"id": "2", "test": "bar"}] + index.load(data, id_field="id") + + queries = [ + FilterQuery(filter_expression="@test:{foo}"), + FilterQuery(filter_expression="@test:{bar}"), + ] + results = index.batch_query(queries, batch_size=1) + assert len(results) == 2 + assert results[0][0]["id"] == "rvl:1" + assert results[1][0]["id"] == "rvl:2" From 94eea5219dbd53ea274a72a9f02beea6902f9459 Mon Sep 17 00:00:00 2001 From: Tyler Hutcherson Date: Mon, 31 Mar 2025 14:48:11 -0400 Subject: [PATCH 09/26] Support client-side schema validation using Pydantic (#304) This PR implements a layered architecture for managing and validating searchable data in Redis, with clear separation of concerns between schema definition, data validation, and storage operations. - `IndexSchema` provides the blueprint for data structure and constraints - Defines fields with specific types (TEXT, TAG, NUMERIC, GEO, VECTOR) - Supports different storage types (HASH, JSON) with appropriate configuration - `SchemaModelGenerator` dynamically creates Pydantic models from schema definitions - Implements a caching mechanism to avoid redundant model generation - Maps Redis field types to appropriate Python/Pydantic types - Provides type-specific validators: - VECTOR: validates dimensions and value ranges (e.g., INT8 range checks) - GEO: validates geographic coordinate format - NUMERIC: prevents boolean values - `BaseStorage` is the abstract class provides the foundation for Redis operations - Specialized implementations (HashStorage, JsonStorage) for different Redis data types - Enforces schema validation during write operations when set to True - Implements optimized batch operations using Redis pipelines - Supports both synchronous and asynchronous interfaces - Handles key generation, preprocessing, and error handling The `SearchIndex` contains the setting `validate_on_load`, which defaults on `False`. 
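A minimal usage sketch of this setting (the schema, field names, and values below are illustrative only and are not part of this patch):

```python
from redis import Redis

from redisvl.exceptions import SchemaValidationError
from redisvl.index import SearchIndex

# Illustrative schema dict; any valid RedisVL schema works here.
schema = {
    "index": {"name": "user_simple", "prefix": "user_simple_docs"},
    "fields": [
        {"name": "user", "type": "tag"},
        {"name": "age", "type": "numeric"},
    ],
}

client = Redis.from_url("redis://localhost:6379")

# validate_on_load defaults to False; opting in enables Pydantic-based
# validation of every object passed to load().
index = SearchIndex.from_dict(schema, redis_client=client, validate_on_load=True)
index.create(overwrite=True, drop=True)

index.load([{"user": "john", "age": 1}])  # conforms to the schema, loads fine

try:
    # A NUMERIC field given a non-numeric string is expected to be rejected.
    index.load([{"user": "mary", "age": "not-a-number"}])
except SchemaValidationError as e:
    print(f"Rejected invalid object: {e}")
```

When validation fails, the raised `SchemaValidationError` identifies the position of the offending object in the batch (e.g. "Validation failed for object at index 0").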
Objects are preprocessed and validated against the schema Objects are prepared with appropriate keys Batch writing occurs using Redis pipelines for efficiency TTL (expiration) can be applied if specified Keys are fetched in batches using pipelines Data is converted from Redis format to Python objects Bytes are automatically converted to appropriate types --- .github/workflows/test.yml | 7 + docs/user_guide/01_getting_started.ipynb | 238 +++--- poetry.lock | 445 +++++++---- pyproject.toml | 1 + redisvl/exceptions.py | 34 +- redisvl/extensions/llmcache/semantic.py | 13 +- redisvl/extensions/router/semantic.py | 13 +- .../session_manager/semantic_session.py | 9 +- redisvl/index/index.py | 117 ++- redisvl/index/storage.py | 320 ++++---- redisvl/schema/__init__.py | 34 +- redisvl/schema/fields.py | 4 +- redisvl/schema/schema.py | 62 +- redisvl/schema/type_utils.py | 63 ++ redisvl/schema/validation.py | 277 +++++++ tests/conftest.py | 10 + tests/integration/test_async_search_index.py | 10 +- .../test_cross_encoder_reranker.py | 0 tests/integration/test_flow_async.py | 2 +- tests/integration/test_search_index.py | 6 +- tests/integration/test_threshold_optimizer.py | 12 +- tests/unit/test_fields.py | 2 + tests/unit/test_storage.py | 143 +++- tests/unit/test_utils.py | 3 - tests/unit/test_validation.py | 692 ++++++++++++++++++ 25 files changed, 1936 insertions(+), 581 deletions(-) create mode 100644 redisvl/schema/type_utils.py create mode 100644 redisvl/schema/validation.py rename tests/{unit => integration}/test_cross_encoder_reranker.py (100%) create mode 100644 tests/unit/test_validation.py diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index cd4eafe7..48b27711 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -133,6 +133,11 @@ jobs: with: credentials_json: ${{ secrets.GOOGLE_CREDENTIALS }} + - name: Set HuggingFace token + run: | + mkdir -p ~/.huggingface + echo '{"token":"${{ secrets.HF_TOKEN }}"}' > ~/.huggingface/token + - name: Run tests if: matrix.connection == 'plain' && matrix.redis-version == 'latest' env: @@ -149,6 +154,7 @@ jobs: OPENAI_API_VERSION: ${{ secrets.OPENAI_API_VERSION }} AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HF_TOKEN: ${{ secrets.HF_TOKEN }} run: | make test-all @@ -173,6 +179,7 @@ jobs: OPENAI_API_VERSION: ${{ secrets.OPENAI_API_VERSION }} AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + HF_TOKEN: ${{ secrets.HF_TOKEN }} run: | docker run -d --name redis -p 6379:6379 redis/redis-stack-server:latest if [[ "${{ matrix.python-version }}" > "3.9" ]]; then diff --git a/docs/user_guide/01_getting_started.ipynb b/docs/user_guide/01_getting_started.ipynb index 6130f589..bf097415 100644 --- a/docs/user_guide/01_getting_started.ipynb +++ b/docs/user_guide/01_getting_started.ipynb @@ -81,7 +81,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -126,7 +126,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -173,29 +173,7 @@ "source": [ "## Create a `SearchIndex`\n", "\n", - "With the schema and sample dataset ready, instantiate a `SearchIndex`:" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [], - "source": [ - "from redisvl.index import SearchIndex\n", - "\n", - "index = SearchIndex.from_dict(schema)\n", - 
"# or use .from_yaml('schema_file.yaml')" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now we also need to facilitate a Redis connection. There are a few ways to do this:\n", - "\n", - "- Create & manage your own client connection (recommended)\n", - "- Provide a Redis URL and let RedisVL connect on your behalf (by default, it will connect to \"redis://localhost:6379\")" + "With the schema and sample dataset ready, create a `SearchIndex`." ] }, { @@ -209,31 +187,15 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 11, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ + "from redisvl.index import SearchIndex\n", "from redis import Redis\n", "\n", "client = Redis.from_url(\"redis://localhost:6379\")\n", - "index = SearchIndex.from_dict(schema, redis_client=client)\n", - "\n", - "# alternatively, provide an async Redis client object to enable async index operations\n", - "# from redis.asyncio import Redis\n", - "# from redisvl.index import AsyncSearchIndex\n", - "# client = Redis.from_url(\"redis://localhost:6379\")\n", - "# index = AsyncSearchIndex.from_dict(schema, redis_client=client)\n" + "index = SearchIndex.from_dict(schema, redis_client=client, validate_on_load=True)" ] }, { @@ -262,24 +224,24 @@ } ], "source": [ - "index = SearchIndex.from_dict(schema, redis_url=\"redis://localhost:6379\")\n", + "index = SearchIndex.from_dict(schema, redis_url=\"redis://localhost:6379\", validate_on_load=True)\n", "\n", "# If you don't specify a client or Redis URL, the index will attempt to\n", - "# connect to Redis at the default address (\"redis://localhost:6379\")." + "# connect to Redis at the default address \"redis://localhost:6379\"." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Create the underlying index\n", + "### Create the index\n", "\n", "Now that we are connected to Redis, we need to run the create command." ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -303,15 +265,15 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[32m11:50:15\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m Indices:\n", - "\u001b[32m11:50:15\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m 1. user_simple\n" + "\u001b[32m10:59:25\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m Indices:\n", + "\u001b[32m10:59:25\u001b[0m \u001b[34m[RedisVL]\u001b[0m \u001b[1;30mINFO\u001b[0m 1. user_simple\n" ] } ], @@ -321,7 +283,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 14, "metadata": {}, "outputs": [ { @@ -359,19 +321,22 @@ "source": [ "## Load Data to `SearchIndex`\n", "\n", - "Load the sample dataset to Redis:" + "Load the sample dataset to Redis.\n", + "\n", + "### Validate data entries on load\n", + "RedisVL uses pydantic validation under the hood to ensure loaded data is valid and confirms to your schema. This setting is optional and can be configured in the `SearchIndex` class." 
] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 15, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "['user_simple_docs:01JM2NWFWNH0BNA640MT5DS8BD', 'user_simple_docs:01JM2NWFWNF4S2V4E4HYG25CVA', 'user_simple_docs:01JM2NWFWNBFXJJ4PV9F4KMJSE']\n" + "['user_simple_docs:01JQ9FEZ4GAAYT9W7BWAF7CV18', 'user_simple_docs:01JQ9FEZ4JCE5FD1D5QY6BAJ0J', 'user_simple_docs:01JQ9FEZ4KF9AZYBKMYNMYBZ5A']\n" ] } ], @@ -388,6 +353,98 @@ ">By default, `load` will create a unique Redis key as a combination of the index key `prefix` and a random ULID. You can also customize the key by providing direct keys or pointing to a specified `id_field` on load." ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Load invalid data\n", + "This will raise a `SchemaValidationError` if `validate_on_load` is set to true in the `SearchIndex` class." + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "11:00:03 redisvl.index.index ERROR Schema validation error while loading data\n", + "Traceback (most recent call last):\n", + " File \"/Users/tyler.hutcherson/Documents/AppliedAI/redis-vl-python/redisvl/index/storage.py\", line 204, in _preprocess_and_validate_objects\n", + " processed_obj = self._validate(processed_obj)\n", + " File \"/Users/tyler.hutcherson/Documents/AppliedAI/redis-vl-python/redisvl/index/storage.py\", line 160, in _validate\n", + " return validate_object(self.index_schema, obj)\n", + " File \"/Users/tyler.hutcherson/Documents/AppliedAI/redis-vl-python/redisvl/schema/validation.py\", line 274, in validate_object\n", + " validated = model_class.model_validate(flat_obj)\n", + " File \"/Users/tyler.hutcherson/Library/Caches/pypoetry/virtualenvs/redisvl-VnTEShF2-py3.13/lib/python3.13/site-packages/pydantic/main.py\", line 627, in model_validate\n", + " return cls.__pydantic_validator__.validate_python(\n", + " ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^\n", + " obj, strict=strict, from_attributes=from_attributes, context=context\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " )\n", + " ^\n", + "pydantic_core._pydantic_core.ValidationError: 1 validation error for user_simple__PydanticModel\n", + "user_embedding\n", + " Input should be a valid bytes [type=bytes_type, input_value=True, input_type=bool]\n", + " For further information visit https://errors.pydantic.dev/2.10/v/bytes_type\n", + "\n", + "The above exception was the direct cause of the following exception:\n", + "\n", + "Traceback (most recent call last):\n", + " File \"/Users/tyler.hutcherson/Documents/AppliedAI/redis-vl-python/redisvl/index/index.py\", line 586, in load\n", + " return self._storage.write(\n", + " ~~~~~~~~~~~~~~~~~~~^\n", + " self._redis_client, # type: ignore\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " ...<6 lines>...\n", + " validate=self._validate_on_load,\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " )\n", + " ^\n", + " File \"/Users/tyler.hutcherson/Documents/AppliedAI/redis-vl-python/redisvl/index/storage.py\", line 265, in write\n", + " prepared_objects = self._preprocess_and_validate_objects(\n", + " list(objects), # Convert Iterable to List\n", + " ...<3 lines>...\n", + " validate=validate,\n", + " )\n", + " File \"/Users/tyler.hutcherson/Documents/AppliedAI/redis-vl-python/redisvl/index/storage.py\", line 211, in _preprocess_and_validate_objects\n", + " raise SchemaValidationError(str(e), 
index=i) from e\n", + "redisvl.exceptions.SchemaValidationError: Validation failed for object at index 0: 1 validation error for user_simple__PydanticModel\n", + "user_embedding\n", + " Input should be a valid bytes [type=bytes_type, input_value=True, input_type=bool]\n", + " For further information visit https://errors.pydantic.dev/2.10/v/bytes_type\n" + ] + }, + { + "ename": "SchemaValidationError", + "evalue": "Validation failed for object at index 0: 1 validation error for user_simple__PydanticModel\nuser_embedding\n Input should be a valid bytes [type=bytes_type, input_value=True, input_type=bool]\n For further information visit https://errors.pydantic.dev/2.10/v/bytes_type", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValidationError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m~/Documents/AppliedAI/redis-vl-python/redisvl/index/storage.py:204\u001b[0m, in \u001b[0;36mBaseStorage._preprocess_and_validate_objects\u001b[0;34m(self, objects, id_field, keys, preprocess, validate)\u001b[0m\n\u001b[1;32m 203\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m validate:\n\u001b[0;32m--> 204\u001b[0m processed_obj \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_validate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprocessed_obj\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 206\u001b[0m \u001b[38;5;66;03m# Store valid object with its key for writing\u001b[39;00m\n", + "File \u001b[0;32m~/Documents/AppliedAI/redis-vl-python/redisvl/index/storage.py:160\u001b[0m, in \u001b[0;36mBaseStorage._validate\u001b[0;34m(self, obj)\u001b[0m\n\u001b[1;32m 159\u001b[0m \u001b[38;5;66;03m# Pass directly to validation function and let any errors propagate\u001b[39;00m\n\u001b[0;32m--> 160\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mvalidate_object\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mindex_schema\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mobj\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/AppliedAI/redis-vl-python/redisvl/schema/validation.py:274\u001b[0m, in \u001b[0;36mvalidate_object\u001b[0;34m(schema, obj)\u001b[0m\n\u001b[1;32m 273\u001b[0m \u001b[38;5;66;03m# Validate against model\u001b[39;00m\n\u001b[0;32m--> 274\u001b[0m validated \u001b[38;5;241m=\u001b[39m \u001b[43mmodel_class\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodel_validate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mflat_obj\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 275\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m validated\u001b[38;5;241m.\u001b[39mmodel_dump(exclude_none\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n", + "File \u001b[0;32m~/Library/Caches/pypoetry/virtualenvs/redisvl-VnTEShF2-py3.13/lib/python3.13/site-packages/pydantic/main.py:627\u001b[0m, in \u001b[0;36mBaseModel.model_validate\u001b[0;34m(cls, obj, strict, from_attributes, context)\u001b[0m\n\u001b[1;32m 626\u001b[0m __tracebackhide__ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m--> 627\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mcls\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__pydantic_validator__\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mvalidate_python\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 628\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mobj\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstrict\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstrict\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfrom_attributes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfrom_attributes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcontext\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcontext\u001b[49m\n\u001b[1;32m 629\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[0;31mValidationError\u001b[0m: 1 validation error for user_simple__PydanticModel\nuser_embedding\n Input should be a valid bytes [type=bytes_type, input_value=True, input_type=bool]\n For further information visit https://errors.pydantic.dev/2.10/v/bytes_type", + "\nThe above exception was the direct cause of the following exception:\n", + "\u001b[0;31mSchemaValidationError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[16], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m keys \u001b[38;5;241m=\u001b[39m \u001b[43mindex\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mload\u001b[49m\u001b[43m(\u001b[49m\u001b[43m[\u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43muser_embedding\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m}\u001b[49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/AppliedAI/redis-vl-python/redisvl/index/index.py:586\u001b[0m, in \u001b[0;36mSearchIndex.load\u001b[0;34m(self, data, id_field, keys, ttl, preprocess, batch_size)\u001b[0m\n\u001b[1;32m 556\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Load objects to the Redis database. Returns the list of keys loaded\u001b[39;00m\n\u001b[1;32m 557\u001b[0m \u001b[38;5;124;03mto Redis.\u001b[39;00m\n\u001b[1;32m 558\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 583\u001b[0m \u001b[38;5;124;03m RedisVLError: If there's an error loading data to Redis.\u001b[39;00m\n\u001b[1;32m 584\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 585\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 586\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_storage\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwrite\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 587\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_redis_client\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# type: ignore\u001b[39;49;00m\n\u001b[1;32m 588\u001b[0m \u001b[43m \u001b[49m\u001b[43mobjects\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 589\u001b[0m \u001b[43m \u001b[49m\u001b[43mid_field\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mid_field\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 590\u001b[0m \u001b[43m \u001b[49m\u001b[43mkeys\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkeys\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 591\u001b[0m \u001b[43m \u001b[49m\u001b[43mttl\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mttl\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 592\u001b[0m \u001b[43m \u001b[49m\u001b[43mpreprocess\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpreprocess\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 593\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mbatch_size\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mbatch_size\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 594\u001b[0m \u001b[43m \u001b[49m\u001b[43mvalidate\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_validate_on_load\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 595\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 596\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m SchemaValidationError:\n\u001b[1;32m 597\u001b[0m \u001b[38;5;66;03m# Pass through validation errors directly\u001b[39;00m\n\u001b[1;32m 598\u001b[0m logger\u001b[38;5;241m.\u001b[39mexception(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSchema validation error while loading data\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", + "File \u001b[0;32m~/Documents/AppliedAI/redis-vl-python/redisvl/index/storage.py:265\u001b[0m, in \u001b[0;36mBaseStorage.write\u001b[0;34m(self, redis_client, objects, id_field, keys, ttl, preprocess, batch_size, validate)\u001b[0m\n\u001b[1;32m 262\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m []\n\u001b[1;32m 264\u001b[0m \u001b[38;5;66;03m# Pass 1: Preprocess and validate all objects\u001b[39;00m\n\u001b[0;32m--> 265\u001b[0m prepared_objects \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_preprocess_and_validate_objects\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 266\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mlist\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mobjects\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Convert Iterable to List\u001b[39;49;00m\n\u001b[1;32m 267\u001b[0m \u001b[43m \u001b[49m\u001b[43mid_field\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mid_field\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 268\u001b[0m \u001b[43m \u001b[49m\u001b[43mkeys\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkeys\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 269\u001b[0m \u001b[43m \u001b[49m\u001b[43mpreprocess\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mpreprocess\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 270\u001b[0m \u001b[43m \u001b[49m\u001b[43mvalidate\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mvalidate\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 271\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 273\u001b[0m \u001b[38;5;66;03m# Pass 2: Write all valid objects in batches\u001b[39;00m\n\u001b[1;32m 274\u001b[0m added_keys \u001b[38;5;241m=\u001b[39m []\n", + "File \u001b[0;32m~/Documents/AppliedAI/redis-vl-python/redisvl/index/storage.py:211\u001b[0m, in \u001b[0;36mBaseStorage._preprocess_and_validate_objects\u001b[0;34m(self, objects, id_field, keys, preprocess, validate)\u001b[0m\n\u001b[1;32m 207\u001b[0m prepared_objects\u001b[38;5;241m.\u001b[39mappend((key, processed_obj))\n\u001b[1;32m 209\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m ValidationError \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 210\u001b[0m \u001b[38;5;66;03m# Convert Pydantic ValidationError to SchemaValidationError with index context\u001b[39;00m\n\u001b[0;32m--> 211\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m SchemaValidationError(\u001b[38;5;28mstr\u001b[39m(e), index\u001b[38;5;241m=\u001b[39mi) \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;21;01me\u001b[39;00m\n\u001b[1;32m 212\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m 
\u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 213\u001b[0m \u001b[38;5;66;03m# Capture other exceptions with context\u001b[39;00m\n\u001b[1;32m 214\u001b[0m object_id \u001b[38;5;241m=\u001b[39m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mat index \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mi\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n", + "\u001b[0;31mSchemaValidationError\u001b[0m: Validation failed for object at index 0: 1 validation error for user_simple__PydanticModel\nuser_embedding\n Input should be a valid bytes [type=bytes_type, input_value=True, input_type=bool]\n For further information visit https://errors.pydantic.dev/2.10/v/bytes_type" + ] + } + ], + "source": [ + "# NBVAL_SKIP\n", + "\n", + "keys = index.load([{\"user_embedding\": True}])" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -398,14 +455,14 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 17, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "['user_simple_docs:01JM2NWJGYMJ0QTR5YB4MB0BX9']\n" + "['user_simple_docs:01JQ9FHCB1B64GXF6WPK127VZ6']\n" ] } ], @@ -435,7 +492,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ @@ -460,20 +517,13 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 19, "metadata": {}, "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "*=>[KNN 3 @user_embedding $vector AS vector_distance] RETURN 6 user age job credit_score vector_distance vector_distance SORTBY vector_distance ASC DIALECT 2 LIMIT 0 3\n" - ] - }, { "data": { "text/html": [ - "table>vector_distanceuseragejobcredit_score0john1engineerhigh0mary2doctorlow0.0566299557686tyler9engineerhigh" + "
vector_distance | user | age | job | credit_score
0 | john | 1 | engineer | high
0 | mary | 2 | doctor | low
0.0566299557686 | tyler | 9 | engineer | high
" ], "text/plain": [ "" @@ -500,7 +550,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 20, "metadata": {}, "outputs": [ { @@ -519,7 +569,7 @@ " 'datatype': 'float32'}}]}" ] }, - "execution_count": 13, + "execution_count": 20, "metadata": {}, "output_type": "execute_result" } @@ -530,32 +580,20 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 21, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "from redisvl.index import AsyncSearchIndex\n", "from redis.asyncio import Redis\n", "\n", "client = Redis.from_url(\"redis://localhost:6379\")\n", - "\n", "index = AsyncSearchIndex.from_dict(schema, redis_client=client)" ] }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 22, "metadata": {}, "outputs": [ { @@ -596,7 +634,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 23, "metadata": {}, "outputs": [], "source": [ @@ -621,14 +659,14 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 24, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "11:28:32 redisvl.index.index INFO Index already exists, overwriting.\n" + "11:01:30 redisvl.index.index INFO Index already exists, overwriting.\n" ] } ], @@ -639,13 +677,13 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 25, "metadata": {}, "outputs": [ { "data": { "text/html": [ - "
vector_distance | user | age | job | credit_score
0 | mary | 2 | doctor | low
0 | john | 1 | engineer | high
0.0566299557686 | tyler | 9 | engineer | high
" + "
vector_distance | user | age | job | credit_score
0 | john | 1 | engineer | high
0 | mary | 2 | doctor | low
0.0566299557686 | tyler | 9 | engineer | high
" ], "text/plain": [ "" @@ -671,7 +709,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 26, "metadata": {}, "outputs": [ { @@ -684,9 +722,9 @@ "│ Stat Key │ Value │\n", "├─────────────────────────────┼─────────────┤\n", "│ num_docs │ 4 │\n", - "│ num_terms │ 4 │\n", + "│ num_terms │ 0 │\n", "│ max_doc_id │ 4 │\n", - "│ num_records │ 22 │\n", + "│ num_records │ 20 │\n", "│ percent_indexed │ 1 │\n", "│ hash_indexing_failures │ 0 │\n", "│ number_of_uses │ 2 │\n", @@ -699,9 +737,9 @@ "│ offsets_per_term_avg │ 0 │\n", "│ records_per_doc_avg │ 5 │\n", "│ sortable_values_size_mb │ 0 │\n", - "│ total_indexing_time │ 0.239 │\n", + "│ total_indexing_time │ 6.529 │\n", "│ total_inverted_index_blocks │ 11 │\n", - "│ vector_index_sz_mb │ 0.235603 │\n", + "│ vector_index_sz_mb │ 0.235947 │\n", "╰─────────────────────────────┴─────────────╯\n" ] } @@ -730,7 +768,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 27, "metadata": {}, "outputs": [ { @@ -739,7 +777,7 @@ "4" ] }, - "execution_count": 21, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } @@ -751,7 +789,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 28, "metadata": {}, "outputs": [ { @@ -760,7 +798,7 @@ "True" ] }, - "execution_count": 22, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } @@ -772,7 +810,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 29, "metadata": {}, "outputs": [], "source": [ diff --git a/poetry.lock b/poetry.lock index 4faca337..c8696428 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. 
[[package]] name = "accessible-pygments" @@ -7,6 +7,7 @@ description = "A collection of accessible pygments styles" optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"}, {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"}, @@ -26,7 +27,7 @@ description = "Happy Eyeballs for asyncio" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1"}, {file = "aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0"}, @@ -39,7 +40,7 @@ description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d"}, {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef"}, @@ -144,7 +145,7 @@ description = "asyncio rate limiter, a leaky bucket implementation" optional = true python-versions = "<4.0,>=3.8" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "aiolimiter-1.2.1-py3-none-any.whl", hash = "sha256:d3f249e9059a20badcb56b61601a83556133655c11d1eb3dd3e04ff069e5f3c7"}, {file = "aiolimiter-1.2.1.tar.gz", hash = "sha256:e02a37ea1a855d9e832252a105420ad4d15011505512a1a1d814647451b5cca9"}, @@ -157,7 +158,7 @@ description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, @@ -173,6 +174,7 @@ description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -185,6 +187,7 @@ description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = 
"annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -197,7 +200,7 @@ description = "High level compatibility layer for multiple asynchronous event lo optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" +markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -221,7 +224,7 @@ description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = ">=3.6" groups = ["dev", "docs"] -markers = "platform_system == \"Darwin\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -234,6 +237,7 @@ description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.9.0" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c"}, {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, @@ -249,6 +253,7 @@ description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -282,7 +287,7 @@ files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] -markers = {main = "extra == \"voyageai\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.extras] benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] @@ -299,6 +304,7 @@ description = "Internationalization utilities" optional = false python-versions = ">=3.8" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = 
"sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, @@ -318,7 +324,7 @@ files = [ {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, ] -markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.dependencies] soupsieve = ">1.2" @@ -338,6 +344,7 @@ description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, @@ -385,6 +392,7 @@ description = "An easy safelist-based HTML-sanitizing tool." optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, @@ -404,7 +412,7 @@ description = "The AWS SDK for Python" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"bedrock\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" files = [ {file = "boto3-1.36.0-py3-none-any.whl", hash = "sha256:d0ca7a58ce25701a52232cc8df9d87854824f1f2964b929305722ebc7959d5a9"}, {file = "boto3-1.36.0.tar.gz", hash = "sha256:159898f51c2997a12541c0e02d6e5a8fe2993ddb307b9478fd9a339f98b57e00"}, @@ -425,7 +433,7 @@ description = "Low-level, data-driven core of boto 3." 
optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"bedrock\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" files = [ {file = "botocore-1.36.26-py3-none-any.whl", hash = "sha256:4e3f19913887a58502e71ef8d696fe7eaa54de7813ff73390cd5883f837dfa6e"}, {file = "botocore-1.36.26.tar.gz", hash = "sha256:4a63bcef7ecf6146fd3a61dc4f9b33b7473b49bdaf1770e9aaca6eee0c9eab62"}, @@ -449,7 +457,7 @@ description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, @@ -462,7 +470,7 @@ description = "RFC 7049 - Concise Binary Object Representation" optional = true python-versions = "*" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "cbor-1.0.0.tar.gz", hash = "sha256:13225a262ddf5615cbd9fd55a76a0d53069d18b07d2e9f19c39e6acb8609bbb6"}, ] @@ -474,7 +482,7 @@ description = "CBOR (de)serializer with extensive tag support" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "cbor2-5.6.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e16c4a87fc999b4926f5c8f6c696b0d251b4745bc40f6c5aee51d69b30b15ca2"}, {file = "cbor2-5.6.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87026fc838370d69f23ed8572939bd71cea2b3f6c8f8bb8283f573374b4d7f33"}, @@ -538,7 +546,7 @@ files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] -markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "cffi" @@ -616,7 +624,7 @@ files 
= [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {dev = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"", docs = "implementation_name == \"pypy\""} +markers = {dev = "(implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", docs = "(python_version <= \"3.11\" or python_version >= \"3.12\") and implementation_name == \"pypy\""} [package.dependencies] pycparser = "*" @@ -628,6 +636,7 @@ description = "Validate configuration and produce human readable error messages. optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -734,7 +743,7 @@ files = [ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] -markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "click" @@ -742,7 +751,8 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["main", "dev", "docs"] +groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -758,7 +768,7 @@ description = "" optional = true python-versions = "<4.0,>=3.9" groups = ["main"] -markers = "extra == \"cohere\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" files = [ {file = "cohere-5.13.12-py3-none-any.whl", hash = "sha256:2a043591a3e5280b47716a6b311e4c7f58e799364113a9cb81b50cd4f6c95f7e"}, {file = "cohere-5.13.12.tar.gz", hash = "sha256:97bb9ac107e580780b941acbabd3aa5e71960e6835398292c46aaa8a0a4cab88"}, @@ -786,7 +796,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\"", docs = "platform_system == \"Windows\" or sys_platform == \"win32\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or extra == \"ranx\") and platform_system == \"Windows\" and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or python_version >= \"3.10\")", dev = "(platform_system == \"Windows\" or sys_platform == \"win32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", docs = "(platform_system == \"Windows\" or sys_platform == \"win32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"} [[package]] name = "coloredlogs" @@ -795,6 +805,7 @@ description = "Colored terminal output for Python's logging module" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, @@ -813,6 +824,7 @@ description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus- optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, @@ -831,7 +843,7 @@ description = "Python library for calculating contours of 2D quadrilateral grids optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -906,6 +918,7 @@ description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, @@ -982,7 +995,7 @@ description = "Thin Python bindings to de/compression algorithms in Rust" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = 
"cramjam-2.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8e82464d1e00fbbb12958999b8471ba5e9f3d9711954505a0a7b378762332e6f"}, {file = "cramjam-2.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d2df8a6511cc08ef1fccd2e0c65e2ebc9f57574ec8376052a76851af5398810"}, @@ -1086,6 +1099,7 @@ description = "cryptography is a package which provides cryptographic recipes an optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, @@ -1140,7 +1154,7 @@ description = "Composable style cycles" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -1157,6 +1171,7 @@ description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "debugpy-1.8.12-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:a2ba7ffe58efeae5b8fad1165357edfe01464f9aef25e814e891ec690e7dd82a"}, {file = "debugpy-1.8.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd4149c4fc5e7d508ece083e78c17442ee13b0e69bfa6bd63003e486770f45"}, @@ -1193,6 +1208,7 @@ description = "Decorators for Humans" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, @@ -1205,6 +1221,7 @@ description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -1217,6 +1234,7 @@ description = "serialize all of Python" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, @@ -1233,6 +1251,7 @@ description = "Distribution utilities" optional = false python-versions = "*" groups = ["dev"] +markers = 
"python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -1245,7 +1264,7 @@ description = "Distro - an OS platform information API" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"openai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"openai\"" files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -1258,6 +1277,7 @@ description = "A Python library for the Docker Engine API." optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -1281,7 +1301,7 @@ description = "Parse Python docstrings in reST, Google and Numpydoc format" optional = true python-versions = ">=3.6,<4.0" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, @@ -1294,6 +1314,7 @@ description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -1306,7 +1327,7 @@ description = "Like `typing._eval_type`, but lets older Python versions use newe optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"mistralai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" files = [ {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, @@ -1326,7 +1347,7 @@ files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] -markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and python_version <= \"3.10\"", dev = "python_version <= \"3.10\"", docs = "python_version <= \"3.10\""} +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and python_version < \"3.11\"", dev = "python_version < \"3.11\"", docs = "python_version < 
\"3.11\""} [package.extras] test = ["pytest (>=6)"] @@ -1338,6 +1359,7 @@ description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -1353,6 +1375,7 @@ description = "Get the currently executing AST node of a frame, and other inform optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, @@ -1368,7 +1391,7 @@ description = "Fast read/write of AVRO files" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"cohere\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" files = [ {file = "fastavro-1.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a9fe0672d2caf0fe54e3be659b13de3cad25a267f2073d6f4b9f8862acc31eb"}, {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86dd0410770e0c99363788f0584523709d85e57bb457372ec5c285a482c17fe6"}, @@ -1416,6 +1439,7 @@ description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, @@ -1431,7 +1455,7 @@ description = "Python support for Parquet file format" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "fastparquet-2024.11.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:60ccf587410f0979105e17036df61bb60e1c2b81880dc91895cdb4ee65b71e7f"}, {file = "fastparquet-2024.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5ad5fc14b0567e700bea3cd528a0bd45a6f9371370b49de8889fb3d10a6574a"}, @@ -1497,7 +1521,7 @@ files = [ {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, ] -markers = {main = "extra == \"sentence-transformers\" or extra == \"cohere\""} +markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] @@ -1511,7 +1535,7 @@ description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" 
and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:331954d002dbf5e704c7f3756028e21db07097c19722569983ba4d74df014000"}, {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d1613abd5af2f93c05867b3a3759a56e8bf97eb79b1da76b2bc10892f96ff16"}, @@ -1586,7 +1610,7 @@ description = "A list-like structure which implements collections.abc.MutableSeq optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -1689,7 +1713,7 @@ description = "File-system specification" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and python_version >= \"3.10\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or python_version >= \"3.10\")" files = [ {file = "fsspec-2025.2.0-py3-none-any.whl", hash = "sha256:9de2ad9ce1f85e1931858535bc882543171d197001a0a5eb2ddc04f1781ab95b"}, {file = "fsspec-2025.2.0.tar.gz", hash = "sha256:1c24b16eaa0a1798afa0337aa0db9b256718ab2a89c425371f5628d22c3b6afd"}, @@ -1730,7 +1754,7 @@ description = "Google API client core library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_api_core-2.24.1-py3-none-any.whl", hash = "sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1"}, {file = "google_api_core-2.24.1.tar.gz", hash = "sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a"}, @@ -1767,7 +1791,7 @@ description = "Google Authentication Library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, @@ -1793,7 +1817,7 @@ description = "Vertex AI API client library" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_cloud_aiplatform-1.82.0-py2.py3-none-any.whl", hash = "sha256:13368a961b2bfa8f46ccd10371bb19bd5f946d8f29c411726061ed1a140ce890"}, {file = "google_cloud_aiplatform-1.82.0.tar.gz", hash = 
"sha256:b7ea7379249cc1821aa46300a16e4b15aa64aa22665e2536b2bcb7e473d7438e"}, @@ -1846,7 +1870,7 @@ description = "Google BigQuery API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"}, {file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"}, @@ -1879,7 +1903,7 @@ description = "Google Cloud API client core library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_cloud_core-2.4.2-py2.py3-none-any.whl", hash = "sha256:7459c3e83de7cb8b9ecfec9babc910efb4314030c56dd798eaad12c426f7d180"}, {file = "google_cloud_core-2.4.2.tar.gz", hash = "sha256:a4fcb0e2fcfd4bfe963837fad6d10943754fd79c1a50097d68540b6eb3d67f35"}, @@ -1899,7 +1923,7 @@ description = "Google Cloud Resource Manager API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_cloud_resource_manager-1.14.1-py2.py3-none-any.whl", hash = "sha256:68340599f85ebf07a6e18487e460ea07cc15e132068f6b188786d01c2cf25518"}, {file = "google_cloud_resource_manager-1.14.1.tar.gz", hash = "sha256:41e9e546aaa03d5160cdfa2341dbe81ef7596706c300a89b94c429f1f3411f87"}, @@ -1922,7 +1946,7 @@ description = "Google Cloud Storage API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba"}, {file = "google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2"}, @@ -1947,7 +1971,7 @@ description = "A python wrapper of the C library 'Google CRC32C'" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"}, {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"}, @@ -1988,7 +2012,7 @@ description = "Utilities for Google Media Downloads and Resumable Uploads" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, @@ -2008,7 +2032,7 @@ description = "Common protobufs used in Google APIs" optional = true 
python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "googleapis_common_protos-1.68.0-py2.py3-none-any.whl", hash = "sha256:aaf179b2f81df26dfadac95def3b16a95064c76a5f45f07e4c68a21bb371c4ac"}, {file = "googleapis_common_protos-1.68.0.tar.gz", hash = "sha256:95d38161f4f9af0d9423eed8fb7b64ffd2568c3464eb542ff02c5bfa1953ab3c"}, @@ -2028,7 +2052,7 @@ description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" +markers = "(platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -2116,7 +2140,7 @@ description = "IAM API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "grpc_google_iam_v1-0.14.0-py2.py3-none-any.whl", hash = "sha256:fb4a084b30099ba3ab07d61d620a0d4429570b13ff53bd37bac75235f98b7da4"}, {file = "grpc_google_iam_v1-0.14.0.tar.gz", hash = "sha256:c66e07aa642e39bb37950f9e7f491f70dad150ac9801263b42b2814307c2df99"}, @@ -2134,7 +2158,7 @@ description = "HTTP/2-based RPC framework" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "grpcio-1.70.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:95469d1977429f45fe7df441f586521361e235982a0b39e33841549143ae2851"}, {file = "grpcio-1.70.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:ed9718f17fbdb472e33b869c77a16d0b55e166b100ec57b016dc7de9c8d236bf"}, @@ -2203,7 +2227,7 @@ description = "Status proto mapping for gRPC" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "extra == \"vertexai\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "grpcio_status-1.70.0-py3-none-any.whl", hash = "sha256:fc5a2ae2b9b1c1969cc49f3262676e6854aa2398ec69cb5bd6c47cd501904a85"}, {file = "grpcio_status-1.70.0.tar.gz", hash = "sha256:0e7b42816512433b18b9d764285ff029bde059e9d41f8fe10a60631bd8348101"}, @@ -2221,7 +2245,7 @@ description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" +markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -2234,7 +2258,7 @@ description = "A minimal low-level HTTP client." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" +markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -2257,7 +2281,7 @@ description = "The next generation HTTP client." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" +markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -2283,7 +2307,7 @@ description = "Consume Server-Sent Event (SSE) messages with HTTPX." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"cohere\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" files = [ {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, @@ -2296,7 +2320,7 @@ description = "Client library to download and publish models, datasets and other optional = true python-versions = ">=3.8.0" groups = ["main"] -markers = "extra == \"sentence-transformers\" or extra == \"cohere\"" +markers = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5"}, {file = "huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250"}, @@ -2332,6 +2356,7 @@ description = "Human friendly output for text interfaces using Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, @@ -2347,6 +2372,7 @@ description = "File identification library for Python" optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255"}, {file = "identify-2.6.8.tar.gz", hash = 
"sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc"}, @@ -2366,7 +2392,7 @@ files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] -markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] @@ -2378,7 +2404,7 @@ description = "Iterative JSON parser with standard Python iterator interfaces" optional = true python-versions = "*" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"}, {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"}, @@ -2483,6 +2509,7 @@ description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -2499,7 +2526,7 @@ files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] -markers = {dev = "python_version < \"3.10\""} +markers = {dev = "python_version < \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.dependencies] zipp = ">=3.20" @@ -2520,6 +2547,7 @@ description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -2532,7 +2560,7 @@ description = "inscriptis - HTML to text converter." optional = true python-versions = "<4.0,>=3.9" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "inscriptis-2.5.3-py3-none-any.whl", hash = "sha256:25962cf5a60b1a8f33e7bfbbea08a29af82299702339b9b90c538653a5c7aa38"}, {file = "inscriptis-2.5.3.tar.gz", hash = "sha256:256043caa13e4995c71fafdeadec4ac42b57f3914cb41023ecbee8bc27ca1cc0"}, @@ -2552,6 +2580,7 @@ description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, @@ -2586,6 +2615,7 @@ description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, @@ -2624,7 +2654,7 @@ description = "provides a common interface to many IR ad-hoc ranking benchmarks, optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "ir_datasets-0.5.9-py3-none-any.whl", hash = "sha256:07c9bed07f31031f1da1bc02afc7a1077b1179a3af402d061f83bf6fb833b90a"}, {file = "ir_datasets-0.5.9.tar.gz", hash = "sha256:35c90980fbd0f4ea8fe22a1ab16d2bb6be3dc373cbd6dfab1d905f176a70e5ac"}, @@ -2653,6 +2683,7 @@ description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -2668,6 +2699,7 @@ description = "An autocompletion tool for Python that can be used for text edito optional = false python-versions = ">=3.6" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -2688,6 +2720,7 @@ description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" groups = ["main", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -2706,7 +2739,7 @@ description = "Fast iterable JSON parser." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"openai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"openai\"" files = [ {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, @@ -2793,7 +2826,7 @@ description = "JSON Matching Expressions" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"bedrock\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -2806,11 +2839,29 @@ description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, ] +[[package]] +name = "jsonpath-ng" +version = "1.7.0" +description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." 
+optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"}, + {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"}, + {file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"}, +] + +[package.dependencies] +ply = "*" + [[package]] name = "jsonpath-python" version = "1.0.6" @@ -2818,7 +2869,7 @@ description = "A more powerful JSONPath implementation in modern python" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"mistralai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" files = [ {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, @@ -2831,6 +2882,7 @@ description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -2853,6 +2905,7 @@ description = "The JSON Schema meta-schemas and vocabularies, exposed as a Regis optional = false python-versions = ">=3.9" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, @@ -2868,6 +2921,7 @@ description = "A defined interface for working with a cache of jupyter notebooks optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jupyter_cache-1.0.1-py3-none-any.whl", hash = "sha256:9c3cafd825ba7da8b5830485343091143dff903e4d8c69db9349b728b140abf6"}, {file = "jupyter_cache-1.0.1.tar.gz", hash = "sha256:16e808eb19e3fb67a223db906e131ea6e01f03aa27f49a7214ce6a5fec186fb9"}, @@ -2896,6 +2950,7 @@ description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, @@ -2920,6 +2975,7 @@ description = "Jupyter core package. 
A base package on which Jupyter projects re optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, @@ -2941,6 +2997,7 @@ description = "Pygments theme using JupyterLab CSS variables" optional = false python-versions = ">=3.8" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, @@ -2953,7 +3010,7 @@ description = "A fast implementation of the Cassowary constraint solver" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, @@ -3044,7 +3101,7 @@ description = "lightweight wrapper around basic LLVM functionality" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "llvmlite-0.44.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9fbadbfba8422123bab5535b293da1cf72f9f478a65645ecd73e781f962ca614"}, {file = "llvmlite-0.44.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cccf8eb28f24840f2689fb1a45f9c0f7e582dd24e088dcf96e424834af11f791"}, @@ -3076,7 +3133,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li optional = true python-versions = ">=3.6" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b"}, {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b"}, @@ -3232,7 +3289,7 @@ description = "LZ4 Bindings for Python" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "lz4-4.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1ebf23ffd36b32b980f720a81990fcfdeadacafe7498fbeff7a8e058259d4e58"}, {file = "lz4-4.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8fe3caea61427057a9e3697c69b2403510fdccfca4483520d02b98ffae74531e"}, @@ -3283,7 +3340,7 @@ files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = 
"sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, ] -markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.dependencies] mdurl = ">=0.1,<1.0" @@ -3305,6 +3362,7 @@ description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" groups = ["main", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -3376,7 +3434,7 @@ description = "Python plotting package" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "matplotlib-3.10.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ff2ae14910be903f4a24afdbb6d7d3a6c44da210fc7d42790b87aeac92238a16"}, {file = "matplotlib-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0721a3fd3d5756ed593220a8b86808a36c5031fce489adb5b31ee6dbb47dd5b2"}, @@ -3435,6 +3493,7 @@ description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, @@ -3450,6 +3509,7 @@ description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -3462,6 +3522,7 @@ description = "Collection of plugins for markdown-it-py" optional = false python-versions = ">=3.8" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, @@ -3486,7 +3547,7 @@ files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] -markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= 
\"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "mistralai" @@ -3495,7 +3556,7 @@ description = "Python Client SDK for the Mistral AI API." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"mistralai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" files = [ {file = "mistralai-1.5.0-py3-none-any.whl", hash = "sha256:9372537719f87bd6f9feef4747d0bf1f4fbe971f8c02945ca4b4bf3c94571c97"}, {file = "mistralai-1.5.0.tar.gz", hash = "sha256:fd94bc93bc25aad9c6dd8005b1a0bc4ba1250c6b3fbf855a49936989cc6e5c0d"}, @@ -3519,6 +3580,7 @@ description = "A sane and fast Markdown parser with useful plugins and renderers optional = false python-versions = ">=3.8" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mistune-3.1.2-py3-none-any.whl", hash = "sha256:4b47731332315cdca99e0ded46fc0004001c1299ff773dfb48fbe1fd226de319"}, {file = "mistune-3.1.2.tar.gz", hash = "sha256:733bf018ba007e8b5f2d3a9eb624034f6ee26c4ea769a98ec533ee111d504dff"}, @@ -3534,6 +3596,7 @@ description = "" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ml_dtypes-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1fe8b5b5e70cd67211db94b05cfd58dace592f24489b038dc6f9fe347d2e07d5"}, {file = "ml_dtypes-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c09a6d11d8475c2a9fd2bc0695628aec105f97cab3b3a3fb7c9660348ff7d24"}, @@ -3556,9 +3619,9 @@ files = [ [package.dependencies] numpy = [ - {version = ">1.20"}, - {version = ">=1.21.2", markers = "python_version >= \"3.10\""}, - {version = ">=1.23.3", markers = "python_version >= \"3.11\""}, + {version = ">1.20", markers = "python_version < \"3.10\""}, + {version = ">=1.21.2", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.3", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] @@ -3572,7 +3635,7 @@ description = "Python library for arbitrary-precision floating-point arithmetic" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, @@ -3591,7 +3654,7 @@ description = "multidict implementation" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -3697,6 +3760,7 @@ description = "Optional static typing for Python" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = 
"mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, @@ -3749,7 +3813,7 @@ files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -markers = {main = "extra == \"mistralai\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "myst-nb" @@ -3758,6 +3822,7 @@ description = "A Jupyter Notebook Sphinx reader built on top of the MyST markdow optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "myst_nb-1.2.0-py3-none-any.whl", hash = "sha256:0e09909877848c0cf45e1aecee97481512efa29a0c4caa37870a03bba11c56c1"}, {file = "myst_nb-1.2.0.tar.gz", hash = "sha256:af459ec753b341952182b45b0a80b4776cebf80c9ee6aaca2a3f4027b440c9de"}, @@ -3787,6 +3852,7 @@ description = "An extended [CommonMark](https://spec.commonmark.org/) compliant optional = false python-versions = ">=3.8" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, @@ -3814,6 +3880,7 @@ description = "A client library for executing notebooks. Formerly nbconvert's Ex optional = false python-versions = ">=3.9.0" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, @@ -3837,6 +3904,7 @@ description = "Converting Jupyter Notebooks (.ipynb files) to other formats. 
Ou optional = false python-versions = ">=3.8" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, @@ -3875,6 +3943,7 @@ description = "The Jupyter Notebook format" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, @@ -3897,6 +3966,7 @@ description = "Jupyter Notebook Tools for Sphinx" optional = false python-versions = ">=3.6" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbsphinx-0.9.6-py3-none-any.whl", hash = "sha256:336b0b557945a7678ec7449b16449f854bc852a435bb53b8a72e6b5dc740d992"}, {file = "nbsphinx-0.9.6.tar.gz", hash = "sha256:c2b28a2d702f1159a95b843831798e86e60a17fc647b9bff9ba1585355de54e3"}, @@ -3917,6 +3987,7 @@ description = "A py.test plugin to validate Jupyter notebooks" optional = false python-versions = ">=3.7, <4" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbval-0.11.0-py2.py3-none-any.whl", hash = "sha256:307aecc866c9a1e8a13bb5bbb008a702bacfda2394dff6fe504a3108a58042a0"}, {file = "nbval-0.11.0.tar.gz", hash = "sha256:77c95797607b0a968babd2597ee3494102d25c3ad37435debbdac0e46e379094"}, @@ -3936,6 +4007,7 @@ description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -3948,7 +4020,7 @@ description = "Python package for creating and manipulating graphs and networks" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"sentence-transformers\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, @@ -3994,6 +4066,7 @@ description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -4006,7 +4079,7 @@ description = "compiling Python code using LLVM" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = 
"(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "numba-0.61.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9cab9783a700fa428b1a54d65295122bc03b3de1d01fb819a6b9dbbddfdb8c43"}, {file = "numba-0.61.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46c5ae094fb3706f5adf9021bfb7fc11e44818d61afee695cdee4eadfed45e98"}, @@ -4042,7 +4115,7 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version < \"3.12\"" +markers = "python_version <= \"3.11\"" files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -4155,7 +4228,7 @@ description = "CUBLAS native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0f8aa1706812e00b9f19dfe0cdb3999b092ccb8ca168c0db5b8ea712456fd9b3"}, {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl", hash = "sha256:2fc8da60df463fdefa81e323eef2e36489e1c94335b5358bcb38360adf75ac9b"}, @@ -4169,7 +4242,7 @@ description = "CUDA profiling tools runtime libs." optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:79279b35cf6f91da114182a5ce1864997fd52294a87a16179ce275773799458a"}, {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9dec60f5ac126f7bb551c055072b69d85392b13311fcc1bcda2202d172df30fb"}, @@ -4183,7 +4256,7 @@ description = "NVRTC native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0eedf14185e04b76aa05b1fea04133e59f465b6f960c0cbf4e37c3cb6b0ea198"}, {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a178759ebb095827bd30ef56598ec182b85547f1508941a3d560eb7ea1fbf338"}, @@ -4197,7 +4270,7 @@ description = "CUDA Runtime native Libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and 
(python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:961fe0e2e716a2a1d967aab7caee97512f71767f852f67432d572e36cb3a11f3"}, {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:64403288fa2136ee8e467cdc9c9427e0434110899d07c779f25b5c068934faa5"}, @@ -4211,7 +4284,7 @@ description = "cuDNN runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"}, {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"}, @@ -4227,7 +4300,7 @@ description = "CUFFT native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5dad8008fc7f92f5ddfa2101430917ce2ffacd86824914c82e28990ad7f00399"}, {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f083fc24912aa410be21fa16d157fed2055dab1cc4b6934a0e03cba69eb242b9"}, @@ -4244,7 +4317,7 @@ description = "CURAND native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1f173f09e3e3c76ab084aba0de819c49e56614feae5c12f69883f4ae9bb5fad9"}, {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a88f583d4e0bb643c49743469964103aa59f7f708d862c3ddb0fc07f851e3b8b"}, @@ -4258,7 +4331,7 @@ description = "CUDA solver native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d338f155f174f90724bbde3758b7ac375a70ce8e706d70b018dd3375545fc84e"}, {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:19e33fa442bcfd085b3086c4ebf7e8debc07cfe01e11513cc6d332fd918ac260"}, @@ -4277,7 +4350,7 @@ description = "CUSPARSE native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and 
platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9d32f62896231ebe0480efd8a7f702e143c98cfaa0e8a76df3386c1ba2b54df3"}, {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1"}, @@ -4294,7 +4367,7 @@ description = "NVIDIA cuSPARSELt" optional = true python-versions = "*" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:067a7f6d03ea0d4841c85f0c6f1991c5dda98211f6302cb83a4ab234ee95bef8"}, {file = "nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:df2c24502fd76ebafe7457dbc4716b2fec071aabaed4fb7691a201cde03704d9"}, @@ -4308,7 +4381,7 @@ description = "NVIDIA Collective Communication Library (NCCL) Runtime" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_nccl_cu12-2.21.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8579076d30a8c24988834445f8d633c697d42397e92ffc3f63fa26766d25e0a0"}, ] @@ -4320,7 +4393,7 @@ description = "Nvidia JIT LTO Library" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83"}, {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"}, @@ -4334,7 +4407,7 @@ description = "NVIDIA Tools Extension" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7959ad635db13edf4fc65c06a6e9f9e55fc2f92596db928d169c0bb031e88ef3"}, {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:781e950d9b9f60d8241ccea575b32f5105a5baf4c2351cab5256a24869f12a1a"}, @@ -4348,7 +4421,7 @@ description = "The official Python library for the openai API" optional = true python-versions = ">=3.8" groups = ["main"] -markers = 
"extra == \"openai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"openai\"" files = [ {file = "openai-1.65.1-py3-none-any.whl", hash = "sha256:396652a6452dd42791b3ad8a3aab09b1feb7c1c4550a672586fb300760a8e204"}, {file = "openai-1.65.1.tar.gz", hash = "sha256:9d9370a20d2b8c3ce319fd2194c2eef5eab59effbcc5b04ff480977edc530fba"}, @@ -4375,7 +4448,7 @@ description = "Fast, correct Python JSON library supporting dataclasses, datetim optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, @@ -4469,7 +4542,7 @@ files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] -markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"ranx\") and python_version >= \"3.10\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "pandas" @@ -4478,7 +4551,7 @@ description = "Powerful data structures for data analysis, time series, and stat optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -4566,6 +4639,7 @@ description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, @@ -4578,6 +4652,7 @@ description = "A Python Parser" optional = false python-versions = ">=3.6" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ 
{file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, @@ -4594,6 +4669,7 @@ description = "Utility library for gitignore style pattern matching of file path optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -4606,7 +4682,7 @@ description = "Pexpect allows easy control of interactive console applications." optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "sys_platform != \"win32\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and sys_platform != \"win32\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -4622,7 +4698,7 @@ description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version >= \"3.10\" or extra == \"sentence-transformers\") and (python_version == \"3.10\" or python_version == \"3.11\" or extra == \"sentence-transformers\" or extra == \"ranx\") and (extra == \"ranx\" or extra == \"sentence-transformers\")" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or python_version >= \"3.10\")" files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -4712,6 +4788,7 @@ description = "A small Python package for determining appropriate platform-speci optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -4729,6 +4806,7 @@ description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -4738,6 +4816,19 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "ply" +version = "3.11" +description = "Python Lex & Yacc" +optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "ply-3.11-py2.py3-none-any.whl", hash = 
"sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, + {file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, +] + [[package]] name = "pre-commit" version = "4.1.0" @@ -4745,6 +4836,7 @@ description = "A framework for managing and maintaining multi-language pre-commi optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b"}, {file = "pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4"}, @@ -4764,6 +4856,7 @@ description = "Library for building powerful interactive command lines in Python optional = false python-versions = ">=3.8.0" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, @@ -4779,7 +4872,7 @@ description = "Accelerated property cache" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d"}, {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c"}, @@ -4888,7 +4981,7 @@ description = "Beautiful, Pythonic protocol buffers" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"}, {file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"}, @@ -4907,7 +5000,7 @@ description = "" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, @@ -4929,6 +5022,7 @@ description = "Cross-platform lib for process and system monitoring in Python. 
optional = false python-versions = ">=3.6" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, @@ -4953,7 +5047,7 @@ description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "sys_platform != \"win32\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and sys_platform != \"win32\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -4966,6 +5060,7 @@ description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -4981,7 +5076,7 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -4994,7 +5089,7 @@ description = "A collection of ASN.1-based protocols modules" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, @@ -5014,7 +5109,7 @@ files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -markers = {dev = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"", docs = "implementation_name == \"pypy\""} +markers = {dev = "(implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", docs = "(python_version <= \"3.11\" or python_version >= \"3.12\") and implementation_name == \"pypy\""} [[package]] name = "pydantic" @@ -5023,6 +5118,7 @@ description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = 
"sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -5044,6 +5140,7 @@ description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -5157,6 +5254,7 @@ description = "Bootstrap-based Sphinx theme from the PyData community" optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydata_sphinx_theme-0.15.4-py3-none-any.whl", hash = "sha256:2136ad0e9500d0949f96167e63f3e298620040aea8f9c74621959eda5d4cf8e6"}, {file = "pydata_sphinx_theme-0.15.4.tar.gz", hash = "sha256:7762ec0ac59df3acecf49fd2f889e1b4565dbce8b88b2e29ee06fdd90645a06d"}, @@ -5190,7 +5288,7 @@ files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] -markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.extras] windows-terminal = ["colorama (>=0.4.6)"] @@ -5202,6 +5300,7 @@ description = "python code static checker" optional = false python-versions = ">=3.9.0" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pylint-3.3.4-py3-none-any.whl", hash = "sha256:289e6a1eb27b453b08436478391a48cd53bb0efb824873f949e709350f3de018"}, {file = "pylint-3.3.4.tar.gz", hash = "sha256:74ae7a38b177e69a9b525d0794bd8183820bfa7eb68cc1bee6e8ed22a42be4ce"}, @@ -5233,7 +5332,7 @@ description = "pyparsing module - Classes and methods to define and execute pars optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, @@ -5249,7 +5348,7 @@ description = "A python implementation of GNU readline." 
optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\"" +markers = "sys_platform == \"win32\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, @@ -5265,6 +5364,7 @@ description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -5288,6 +5388,7 @@ description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, @@ -5307,6 +5408,7 @@ description = "pytest xdist plugin for distributed testing, most importantly acr optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -5333,7 +5435,7 @@ files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] -markers = {main = "(extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\") or (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and python_version >= \"3.10\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.dependencies] six = ">=1.5" @@ -5345,6 +5447,7 @@ description = "Read key-value pairs from a .env file and set them as environment optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -5360,6 +5463,7 @@ description = "Universally unique lexicographically sortable identifier" 
optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, @@ -5375,7 +5479,7 @@ description = "World timezone definitions, modern and historical" optional = true python-versions = "*" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, @@ -5408,7 +5512,7 @@ files = [ {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] -markers = {dev = "sys_platform == \"win32\"", docs = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +markers = {dev = "(python_version <= \"3.11\" or python_version >= \"3.12\") and sys_platform == \"win32\"", docs = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\" and (python_version <= \"3.11\" or python_version >= \"3.12\")"} [[package]] name = "pyyaml" @@ -5417,6 +5521,7 @@ description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["main", "dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -5480,6 +5585,7 @@ description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, @@ -5602,7 +5708,7 @@ description = "ranx: A Blazing-Fast Python Library for Ranking Evaluation, Compa optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "ranx-0.3.20-py3-none-any.whl", hash = "sha256:e056e4d5981b0328b045868cc7064fc57a545f36009fbe9bb602295ec33335de"}, {file = "ranx-0.3.20.tar.gz", hash = "sha256:8afc6f2042c40645e5d1fd80c35ed75a885e18bd2db7e95cc7ec32a0b41e59ea"}, @@ -5630,6 +5736,7 @@ description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "redis-5.2.1-py3-none-any.whl", 
hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, @@ -5649,6 +5756,7 @@ description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, @@ -5666,6 +5774,7 @@ description = "Alternative regular expression module, to replace re." optional = true python-versions = ">=3.8" groups = ["main"] +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -5774,7 +5883,7 @@ files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] -markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.dependencies] certifi = ">=2017.4.17" @@ -5793,7 +5902,7 @@ description = "Render rich text, tables, progress bars, syntax highlighting, mar optional = true python-versions = ">=3.8.0" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -5814,6 +5923,7 @@ description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rpds_py-0.23.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed"}, {file = "rpds_py-0.23.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c"}, @@ -5927,7 +6037,7 @@ description = "Pure-Python RSA implementation" optional = true python-versions = ">=3.6,<4" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -5943,7 +6053,7 @@ description = "An Amazon S3 Transfer Manager" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"bedrock\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" files = [ {file = "s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:ca855bdeb885174b5ffa95b9913622459d4ad8e331fc98eb01e6d5eb6a30655d"}, {file = "s3transfer-0.11.3.tar.gz", hash = "sha256:edae4977e3a122445660c7c114bba949f9d191bae3b34a096f18a1c8c354527a"}, @@ -5962,7 +6072,7 @@ description = "" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"sentence-transformers\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073"}, {file = "safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7"}, @@ -6001,7 +6111,7 @@ description = "A set of python modules for machine learning and data mining" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"sentence-transformers\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}, {file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}, @@ -6057,7 +6167,7 @@ description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version < \"3.10\" and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\" and python_version < \"3.10\"" files = [ {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, @@ -6101,7 +6211,7 @@ description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\" and (extra == \"ranx\" or extra == \"sentence-transformers\")" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"ranx\") and (python_version >= \"3.10\" or extra == \"sentence-transformers\")" files = [ {file = "scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9"}, {file = 
"scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5"}, @@ -6166,7 +6276,7 @@ description = "Statistical data visualization" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987"}, {file = "seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7"}, @@ -6189,7 +6299,7 @@ description = "State-of-the-Art Text Embeddings" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"sentence-transformers\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "sentence_transformers-3.4.1-py3-none-any.whl", hash = "sha256:e026dc6d56801fd83f74ad29a30263f401b4b522165c19386d8bc10dcca805da"}, {file = "sentence_transformers-3.4.1.tar.gz", hash = "sha256:68daa57504ff548340e54ff117bd86c1d2f784b21e0fb2689cf3272b8937b24b"}, @@ -6218,7 +6328,7 @@ description = "Easily download, build, install, upgrade, and uninstall Python pa optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version >= \"3.12\" and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\" and python_version >= \"3.12\"" files = [ {file = "setuptools-75.8.2-py3-none-any.whl", hash = "sha256:558e47c15f1811c1fa7adbd0096669bf76c1d3f433f58324df69f3f5ecac4e8f"}, {file = "setuptools-75.8.2.tar.gz", hash = "sha256:4880473a969e5f23f2a2be3646b2dfd84af9028716d398e46192f84bc36900d2"}, @@ -6240,7 +6350,7 @@ description = "Manipulation and analysis of geometric objects" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"vertexai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" files = [ {file = "shapely-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:33fb10e50b16113714ae40adccf7670379e9ccf5b7a41d0002046ba2b8f0f691"}, {file = "shapely-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f44eda8bd7a4bccb0f281264b34bf3518d8c4c9a8ffe69a1a05dabf6e8461147"}, @@ -6304,7 +6414,7 @@ files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] -markers = {main = "(extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\") or (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and python_version >= \"3.10\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "sniffio" @@ -6313,7 +6423,7 @@ description = "Sniff out which async library your code is running 
under" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" +markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -6326,6 +6436,7 @@ description = "This package provides 29 stemmers for 28 languages generated from optional = false python-versions = "*" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -6342,7 +6453,7 @@ files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] -markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} +markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [[package]] name = "sphinx" @@ -6351,6 +6462,7 @@ description = "Python documentation generator" optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, @@ -6388,6 +6500,7 @@ description = "Add a copy button to each of your code cells." 
optional = false python-versions = ">=3.7" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, @@ -6407,6 +6520,7 @@ description = "A sphinx extension for designing beautiful, view size responsive optional = false python-versions = ">=3.8" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinx_design-0.5.0-py3-none-any.whl", hash = "sha256:1af1267b4cea2eedd6724614f19dcc88fe2e15aff65d06b2f6252cee9c4f4c1e"}, {file = "sphinx_design-0.5.0.tar.gz", hash = "sha256:e8e513acea6f92d15c6de3b34e954458f245b8e761b45b63950f65373352ab00"}, @@ -6431,6 +6545,7 @@ description = "Sphinx Extension adding support for custom favicons" optional = false python-versions = ">=3.7" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinx-favicon-1.0.1.tar.gz", hash = "sha256:df796de32125609c1b4a8964db74270ebf4502089c27cd53f542354dc0b57e8e"}, {file = "sphinx_favicon-1.0.1-py3-none-any.whl", hash = "sha256:7c93d6b634cb4c9687ceab67a8526f05d3b02679df94e273e51a43282e6b034c"}, @@ -6451,6 +6566,7 @@ description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -6468,6 +6584,7 @@ description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -6485,6 +6602,7 @@ description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML h optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -6502,6 +6620,7 @@ description = "A sphinx extension which renders display math in HTML via JavaScr optional = false python-versions = ">=3.5" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -6517,6 +6636,7 @@ description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp d optional = 
false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -6534,6 +6654,7 @@ description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs optional = false python-versions = ">=3.9" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -6551,6 +6672,7 @@ description = "Database Abstraction Library" optional = false python-versions = ">=3.7" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, @@ -6584,27 +6706,16 @@ files = [ {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86"}, {file = "SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120"}, {file = "SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40310db77a55512a18827488e592965d3dec6a3f1e3d8af3f8243134029daca3"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3043375dd5bbcb2282894cbb12e6c559654c67b5fffb462fda815a55bf93f7"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70065dfabf023b155a9c2a18f573e47e6ca709b9e8619b2e04c54d5bcf193178"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c058b84c3b24812c859300f3b5abf300daa34df20d4d4f42e9652a4d1c48c8a4"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0398361acebb42975deb747a824b5188817d32b5c8f8aba767d51ad0cc7bb08d"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-win32.whl", hash = "sha256:a2bc4e49e8329f3283d99840c136ff2cd1a29e49b5624a46a290f04dff48e079"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-win_amd64.whl", hash = "sha256:9cd136184dd5f58892f24001cdce986f5d7e96059d004118d5410671579834a4"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:665255e7aae5f38237b3a6eae49d2358d83a59f39ac21036413fab5d1e810578"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:92f99f2623ff16bd4aaf786ccde759c1f676d39c7bf2855eb0b540e1ac4530c8"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa498d1392216fae47eaf10c593e06c34476ced9549657fca713d0d1ba5f7248"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a9afbc3909d0274d6ac8ec891e30210563b2c8bdd52ebbda14146354e7a69373"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:57dd41ba32430cbcc812041d4de8d2ca4651aeefad2626921ae2a23deb8cd6ff"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3e35d5565b35b66905b79ca4ae85840a8d40d31e0b3e2990f2e7692071b179ca"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-win32.whl", hash = "sha256:f0d3de936b192980209d7b5149e3c98977c3810d401482d05fb6d668d53c1c63"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-win_amd64.whl", hash = "sha256:3868acb639c136d98107c9096303d2d8e5da2880f7706f9f8c06a7f961961149"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07258341402a718f166618470cde0c34e4cec85a39767dce4e24f61ba5e667ea"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a826f21848632add58bef4f755a33d45105d25656a0c849f2dc2df1c71f6f50"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:386b7d136919bb66ced64d2228b92d66140de5fefb3c7df6bd79069a269a7b06"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f2951dc4b4f990a4b394d6b382accb33141d4d3bd3ef4e2b27287135d6bdd68"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8bf312ed8ac096d674c6aa9131b249093c1b37c35db6a967daa4c84746bc1bc9"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6db316d6e340f862ec059dc12e395d71f39746a20503b124edc255973977b728"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-win32.whl", hash = "sha256:c09a6ea87658695e527104cf857c70f79f14e9484605e205217aae0ec27b45fc"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-win_amd64.whl", hash = "sha256:12f5c9ed53334c3ce719155424dc5407aaa4f6cadeb09c5b627e06abb93933a1"}, {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, @@ -6647,6 +6758,7 @@ description = "Extract data from python stack frames and tracebacks for informat optional = false python-versions = "*" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -6667,7 +6779,7 @@ description = "Computer algebra system (CAS) in Python" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sympy-1.13.1-py3-none-any.whl", hash = "sha256:db36cdc64bf61b9b24578b6f7bab1ecdd2452cf008f34faa33776680c26d66f8"}, {file = "sympy-1.13.1.tar.gz", hash = "sha256:9cebf7e04ff162015ce31c9c6c9144daa34a93bd082f54fd8f12deca4f47515f"}, @@ -6686,6 +6798,7 @@ description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" groups = ["main", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -6701,6 +6814,7 @@ description = "Retry code until it succeeds" optional = false 
python-versions = ">=3.8" groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -6717,6 +6831,7 @@ description = "Python library for throwaway instances of anything that can run i optional = false python-versions = "<4.0,>=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "testcontainers-4.9.1-py3-none-any.whl", hash = "sha256:315fb94b42a383872df530aa45319745278ef0cc18b9cfcdc231a75d14afa5a0"}, {file = "testcontainers-4.9.1.tar.gz", hash = "sha256:37fe9a222549ddb788463935965b16f91809e9a8d654f437d6a59eac9b77f76f"}, @@ -6771,7 +6886,7 @@ description = "threadpoolctl" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"sentence-transformers\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -6784,6 +6899,7 @@ description = "A tiny CSS parser" optional = false python-versions = ">=3.8" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, @@ -6803,7 +6919,7 @@ description = "" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"sentence-transformers\" or extra == \"cohere\"" +markers = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "tokenizers-0.21.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2"}, {file = "tokenizers-0.21.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e"}, @@ -6837,7 +6953,7 @@ description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.10\"" +markers = "python_version < \"3.11\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -6880,6 +6996,7 @@ description = "Style preserving TOML library" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, @@ -6892,7 +7009,7 @@ description = "Tensors and Dynamic neural networks in Python with strong GPU acc optional = true python-versions = ">=3.9.0" groups = 
["main"] -markers = "extra == \"sentence-transformers\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "torch-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6860df13d9911ac158f4c44031609700e1eba07916fff62e21e6ffa0a9e01961"}, {file = "torch-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c4f103a49830ce4c7561ef4434cc7926e5a5fe4e5eb100c19ab36ea1e2b634ab"}, @@ -6950,6 +7067,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, @@ -6971,6 +7089,7 @@ description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" groups = ["main"] +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or python_version >= \"3.10\")" files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -6993,6 +7112,7 @@ description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -7009,7 +7129,7 @@ description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow optional = true python-versions = ">=3.9.0" groups = ["main"] -markers = "extra == \"sentence-transformers\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "transformers-4.49.0-py3-none-any.whl", hash = "sha256:6b4fded1c5fee04d384b1014495b4235a2b53c87503d7d592423c06128cbbe03"}, {file = "transformers-4.49.0.tar.gz", hash = "sha256:7e40e640b5b8dc3f48743f5f5adbdce3660c82baafbd3afdfc04143cdbd2089e"}, @@ -7080,7 +7200,7 @@ description = "Support tools for TREC CAR participants. 
Also see trec-car.cs.unh optional = true python-versions = ">=3.6" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "trec-car-tools-2.6.tar.gz", hash = "sha256:2fce2de120224fd569b151d5bed358a4ed334e643889b9e3dfe3e5a3d15d21c8"}, {file = "trec_car_tools-2.6-py3-none-any.whl", hash = "sha256:e6f0373259e1c234222da7270ab54ca7af7a6f8d0dd32b13e158c1659d3991cf"}, @@ -7097,7 +7217,7 @@ description = "A language and compiler for custom Deep Learning operations" optional = true python-versions = "*" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "triton-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e54983cd51875855da7c68ec05c05cf8bb08df361b1d5b69e05e40b0c9bd62"}, {file = "triton-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8009a1fb093ee8546495e96731336a33fb8856a38e45bb4ab6affd6dbc3ba220"}, @@ -7118,6 +7238,7 @@ description = "Typing stubs for cffi" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_cffi-1.16.0.20241221-py3-none-any.whl", hash = "sha256:e5b76b4211d7a9185f6ab8d06a106d56c7eb80af7cdb8bfcb4186ade10fb112f"}, {file = "types_cffi-1.16.0.20241221.tar.gz", hash = "sha256:1c96649618f4b6145f58231acb976e0b448be6b847f7ab733dabe62dfbff6591"}, @@ -7133,6 +7254,7 @@ description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, @@ -7149,6 +7271,7 @@ description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -7161,6 +7284,7 @@ description = "Typing stubs for redis" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, {file = "types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, @@ -7193,7 +7317,7 @@ description = "Typing stubs for requests" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"cohere\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\" and python_version >= \"3.10\"" files = [ {file = 
"types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -7209,6 +7333,7 @@ description = "Typing stubs for setuptools" optional = false python-versions = ">=3.9" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_setuptools-75.8.0.20250225-py3-none-any.whl", hash = "sha256:94c86b439cc60bcc68c1cda3fd2c301f007f8f9502f4fbb54c66cb5ce9b875af"}, {file = "types_setuptools-75.8.0.20250225.tar.gz", hash = "sha256:6038f7e983d55792a5f90d8fdbf5d4c186026214a16bb65dd6ae83c624ae9636"}, @@ -7221,6 +7346,7 @@ description = "Typing stubs for tabulate" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_tabulate-0.9.0.20241207-py3-none-any.whl", hash = "sha256:b8dad1343c2a8ba5861c5441370c3e35908edd234ff036d4298708a1d4cf8a85"}, {file = "types_tabulate-0.9.0.20241207.tar.gz", hash = "sha256:ac1ac174750c0a385dfd248edc6279fa328aaf4ea317915ab879a2ec47833230"}, @@ -7246,6 +7372,7 @@ description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -7258,7 +7385,7 @@ description = "Runtime inspection utilities for typing module." 
optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"mistralai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, @@ -7275,7 +7402,7 @@ description = "Provider of IANA time zone data" optional = true python-versions = ">=2" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, @@ -7288,7 +7415,7 @@ description = "Pure Python decompression module for .Z files compressed using Un optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "unlzw3-0.2.3-py3-none-any.whl", hash = "sha256:7760fb4f3afa1225623944c061991d89a061f7fb78665dbc4cddfdb562bb4a8b"}, {file = "unlzw3-0.2.3.tar.gz", hash = "sha256:ede5d928c792fff9da406f20334f9739693327f448f383ae1df1774627197bbb"}, @@ -7309,7 +7436,7 @@ files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] -markers = {main = "python_version < \"3.10\" and (extra == \"bedrock\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\")", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} +markers = {main = "extra == \"sentence-transformers\" and python_version < \"3.10\" or extra == \"cohere\" and python_version < \"3.10\" or extra == \"vertexai\" and python_version < \"3.10\" or extra == \"voyageai\" and python_version < \"3.10\" or extra == \"bedrock\" and python_version < \"3.10\"", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} [package.extras] brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] @@ -7327,7 +7454,7 @@ files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] -markers = {main = "python_version >= \"3.10\" and (extra == \"ranx\" or extra == \"bedrock\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\")", dev = "python_version >= \"3.10\"", docs = "python_version >= \"3.10\""} +markers = {main = 
"(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\" or extra == \"bedrock\") and python_version >= \"3.10\"", dev = "python_version <= \"3.11\" and python_version >= \"3.10\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" and python_version >= \"3.10\" or python_version >= \"3.12\""} [package.extras] brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] @@ -7342,6 +7469,7 @@ description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, @@ -7363,7 +7491,7 @@ description = "" optional = true python-versions = "<4.0.0,>=3.7.1" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "voyageai-0.2.4-py3-none-any.whl", hash = "sha256:e3070e5c78dec89adae43231334b4637aa88933dad99b1c33d3219fdfc94dfa4"}, {file = "voyageai-0.2.4.tar.gz", hash = "sha256:b9911d8629e8a4e363291c133482fead49a3536afdf1e735f3ab3aaccd8d250d"}, @@ -7383,7 +7511,7 @@ description = "Python library to work with ARC and WARC files" optional = true python-versions = "*" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "warc3_wet-0.2.5-py3-none-any.whl", hash = "sha256:5a9a525383fb1af159734baa75f349a7c4ec7bccd1b938681b5748515d2bf624"}, {file = "warc3_wet-0.2.5.tar.gz", hash = "sha256:15e50402dabaa1e95307f1e2a6169cfd5f137b70761d9f0b16a10aa6de227970"}, @@ -7396,7 +7524,7 @@ description = "Python library to work with ARC and WARC files, with fixes for Cl optional = true python-versions = "*" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "warc3-wet-clueweb09-0.2.5.tar.gz", hash = "sha256:3054bfc07da525d5967df8ca3175f78fa3f78514c82643f8c81fbca96300b836"}, ] @@ -7408,6 +7536,7 @@ description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" groups = ["dev", "docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -7420,6 +7549,7 @@ description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" groups = ["docs"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = 
"sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -7432,6 +7562,7 @@ description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -7521,7 +7652,7 @@ description = "Yet another URL library" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"voyageai\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -7623,7 +7754,7 @@ files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] -markers = {dev = "python_version < \"3.10\""} +markers = {dev = "python_version < \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] @@ -7640,7 +7771,7 @@ description = "Low-level interface to the zlib library that enables capturing th optional = true python-versions = ">=3.6" groups = ["main"] -markers = "python_version >= \"3.10\" and extra == \"ranx\"" +markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" files = [ {file = "zlib_state-0.1.9-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97f45d0f80e9d7070229ecb36112eea6a17dc40053449a9c613ef837d9cb66b4"}, {file = "zlib_state-0.1.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3564eaa130f2533b87b82d0e622cfb5c25acec123e7bfe38d39db9ce6349cb52"}, @@ -7685,4 +7816,4 @@ voyageai = ["voyageai"] [metadata] lock-version = "2.1" python-versions = ">=3.9,<3.14" -content-hash = "f1e335904a4234779fa7566c44cb28423c4677285c0236b0e19cbd19fad9d003" +content-hash = "3bcbaaf402487a181810db22556d0207a555d5683984cc3af14a803974c8900e" diff --git a/pyproject.toml b/pyproject.toml index b9e0d63d..1da572ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ tabulate = "^0.9.0" ml-dtypes = "^0.4.0" python-ulid = "^3.0.0" nltk = { version = "^3.8.1", optional = true } +jsonpath-ng = "^1.5.0" openai = { version = "^1.13.0", optional = true } sentence-transformers = { version = "^3.4.0", optional = true } scipy = [ diff --git a/redisvl/exceptions.py b/redisvl/exceptions.py index e645e3e2..f8917c3d 100644 --- a/redisvl/exceptions.py +++ b/redisvl/exceptions.py @@ -1,10 +1,32 @@ -class RedisVLException(Exception): - """Base RedisVL exception""" +""" +RedisVL Exception Classes +This module defines all custom exceptions used throughout the RedisVL library. 
+""" -class RedisModuleVersionError(RedisVLException): - """Invalid module versions installed""" +class RedisVLError(Exception): + """Base exception for all RedisVL errors.""" -class RedisSearchError(RedisVLException): - """Error while performing a search or aggregate request""" + pass + + +class RedisModuleVersionError(RedisVLError): + """Error raised when required Redis modules are missing or have incompatible versions.""" + + pass + + +class RedisSearchError(RedisVLError): + """Error raised for Redis Search specific operations.""" + + pass + + +class SchemaValidationError(RedisVLError): + """Error when validating data against a schema.""" + + def __init__(self, message, index=None): + if index is not None: + message = f"Validation failed for object at index {index}: {message}" + super().__init__(message) diff --git a/redisvl/extensions/llmcache/semantic.py b/redisvl/extensions/llmcache/semantic.py index 2b70ae09..6e15c43b 100644 --- a/redisvl/extensions/llmcache/semantic.py +++ b/redisvl/extensions/llmcache/semantic.py @@ -95,12 +95,8 @@ def __init__( } # Use the index name as the key prefix by default - if "prefix" in kwargs: - prefix = kwargs["prefix"] - else: - prefix = name - - dtype = kwargs.get("dtype") + prefix = kwargs.pop("prefix", name) + dtype = kwargs.pop("dtype", None) # Validate a provided vectorizer or set the default if vectorizer: @@ -111,7 +107,10 @@ def __init__( f"Provided dtype {dtype} does not match vectorizer dtype {vectorizer.dtype}" ) else: - vectorizer_kwargs = {"dtype": dtype} if dtype else {} + vectorizer_kwargs = kwargs + + if dtype: + vectorizer_kwargs.update(**{"dtype": dtype}) vectorizer = HFTextVectorizer( model="sentence-transformers/all-mpnet-base-v2", diff --git a/redisvl/extensions/router/semantic.py b/redisvl/extensions/router/semantic.py index c06789e1..be83b447 100644 --- a/redisvl/extensions/router/semantic.py +++ b/redisvl/extensions/router/semantic.py @@ -72,7 +72,7 @@ def __init__( connection_kwargs (Dict[str, Any]): The connection arguments for the redis client. Defaults to empty {}. 
""" - dtype = kwargs.get("dtype") + dtype = kwargs.pop("dtype", None) # Validate a provided vectorizer or set the default if vectorizer: @@ -83,8 +83,15 @@ def __init__( f"Provided dtype {dtype} does not match vectorizer dtype {vectorizer.dtype}" ) else: - vectorizer_kwargs = {"dtype": dtype} if dtype else {} - vectorizer = HFTextVectorizer(**vectorizer_kwargs) + vectorizer_kwargs = kwargs + + if dtype: + vectorizer_kwargs.update(**{"dtype": dtype}) + + vectorizer = HFTextVectorizer( + model="sentence-transformers/all-mpnet-base-v2", + **vectorizer_kwargs, + ) if routing_config is None: routing_config = RoutingConfig() diff --git a/redisvl/extensions/session_manager/semantic_session.py b/redisvl/extensions/session_manager/semantic_session.py index 1aa15315..9497d06c 100644 --- a/redisvl/extensions/session_manager/semantic_session.py +++ b/redisvl/extensions/session_manager/semantic_session.py @@ -71,7 +71,7 @@ def __init__( super().__init__(name, session_tag) prefix = prefix or name - dtype = kwargs.get("dtype") + dtype = kwargs.pop("dtype", None) # Validate a provided vectorizer or set the default if vectorizer: @@ -82,10 +82,13 @@ def __init__( f"Provided dtype {dtype} does not match vectorizer dtype {vectorizer.dtype}" ) else: - vectorizer_kwargs = {"dtype": dtype} if dtype else {} + vectorizer_kwargs = kwargs + + if dtype: + vectorizer_kwargs.update(**{"dtype": dtype}) vectorizer = HFTextVectorizer( - model="sentence-transformers/msmarco-distilbert-cos-v5", + model="sentence-transformers/all-mpnet-base-v2", **vectorizer_kwargs, ) diff --git a/redisvl/index/index.py b/redisvl/index/index.py index 65d736e3..9b4cc6dd 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -33,7 +33,12 @@ from redis.commands.helpers import get_protocol_version # type: ignore from redis.commands.search.indexDefinition import IndexDefinition -from redisvl.exceptions import RedisModuleVersionError, RedisSearchError +from redisvl.exceptions import ( + RedisModuleVersionError, + RedisSearchError, + RedisVLError, + SchemaValidationError, +) from redisvl.index.storage import BaseStorage, HashStorage, JsonStorage from redisvl.query import ( AggregationQuery, @@ -165,8 +170,7 @@ def __init__(*args, **kwargs): def _storage(self) -> BaseStorage: """The storage type for the index schema.""" return self._STORAGE_MAP[self.schema.index.storage_type]( - prefix=self.schema.index.prefix, - key_separator=self.schema.index.key_separator, + index_schema=self.schema ) @property @@ -206,7 +210,7 @@ def from_yaml(cls, schema_path: str, **kwargs): from redisvl.index import SearchIndex - index = SearchIndex.from_yaml("schemas/schema.yaml") + index = SearchIndex.from_yaml("schemas/schema.yaml", redis_url="redis://localhost:6379") """ schema = IndexSchema.from_yaml(schema_path) return cls(schema=schema, **kwargs) @@ -234,7 +238,7 @@ def from_dict(cls, schema_dict: Dict[str, Any], **kwargs): "fields": [ {"name": "doc-id", "type": "tag"} ] - }) + }, redis_url="redis://localhost:6379") """ schema = IndexSchema.from_dict(schema_dict) @@ -278,10 +282,14 @@ class SearchIndex(BaseSearchIndex): from redisvl.index import SearchIndex # initialize the index object with schema from file - index = SearchIndex.from_yaml("schemas/schema.yaml", redis_url="redis://localhost:6379") + index = SearchIndex.from_yaml( + "schemas/schema.yaml", + redis_url="redis://localhost:6379", + validate_on_load=True + ) # create the index - index.create(overwrite=True) + index.create(overwrite=True, drop=False) # data is an iterable of dictionaries 
index.load(data) @@ -298,6 +306,7 @@ def __init__( redis_client: Optional[redis.Redis] = None, redis_url: Optional[str] = None, connection_kwargs: Optional[Dict[str, Any]] = None, + validate_on_load: bool = False, **kwargs, ): """Initialize the RedisVL search index with a schema, Redis client @@ -312,6 +321,8 @@ def __init__( connect to. connection_kwargs (Dict[str, Any], optional): Redis client connection args. + validate_on_load (bool, optional): Whether to validate data against schema + when loading. Defaults to False. """ if "connection_args" in kwargs: connection_kwargs = kwargs.pop("connection_args") @@ -320,7 +331,7 @@ def __init__( raise ValueError("Must provide a valid IndexSchema object") self.schema = schema - + self._validate_on_load = validate_on_load self._lib_name: Optional[str] = kwargs.pop("lib_name", None) # Store connection parameters @@ -435,11 +446,6 @@ def connect(self, redis_url: Optional[str] = None, **kwargs): ValueError: If the Redis URL is not provided nor accessible through the `REDIS_URL` environment variable. ModuleNotFoundError: If required Redis modules are not installed. - - .. code-block:: python - - index.connect(redis_url="redis://localhost:6379") - """ self.__redis_client = RedisConnectionFactory.get_redis_connection( redis_url=redis_url, **kwargs @@ -459,16 +465,6 @@ def set_client(self, redis_client: redis.Redis, **kwargs): Raises: TypeError: If the provided client is not valid. - - .. code-block:: python - - import redis - from redisvl.index import SearchIndex - - client = redis.Redis.from_url("redis://localhost:6379") - index = SearchIndex.from_yaml("schemas/schema.yaml") - index.set_client(client) - """ RedisConnectionFactory.validate_sync_redis(redis_client) self.__redis_client = redis_client @@ -627,27 +623,8 @@ def load( List[str]: List of keys loaded to Redis. Raises: - ValueError: If the length of provided keys does not match the length - of objects. - - .. code-block:: python - - data = [{"test": "foo"}, {"test": "bar"}] - - # simple case - keys = index.load(data) - - # set 360 second ttl policy on data - keys = index.load(data, ttl=360) - - # load data with predefined keys - keys = index.load(data, keys=["rvl:foo", "rvl:bar"]) - - # load data with preprocessing step - def add_field(d): - d["new_field"] = 123 - return d - keys = index.load(data, preprocess=add_field) + SchemaValidationError: If validation fails when validate_on_load is enabled. + RedisVLError: If there's an error loading data to Redis. """ try: return self._storage.write( @@ -658,10 +635,16 @@ def add_field(d): ttl=ttl, preprocess=preprocess, batch_size=batch_size, + validate=self._validate_on_load, ) - except: - logger.exception("Error while loading data to Redis") + except SchemaValidationError: + # Pass through validation errors directly + logger.exception("Schema validation error while loading data") raise + except Exception as e: + # Wrap other errors as general RedisVL errors + logger.exception("Error while loading data to Redis") + raise RedisVLError(f"Failed to load data: {str(e)}") from e def fetch(self, id: str) -> Optional[Dict[str, Any]]: """Fetch an object from Redis by id. 
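
A minimal sketch of the validate_on_load flow wired up above, assuming a local Redis Stack at redis://localhost:6379 and the schemas/schema.yaml file referenced in the docstring examples; the record fields below are placeholders and must match whatever fields that schema actually declares:

from redisvl.exceptions import RedisVLError, SchemaValidationError
from redisvl.index import SearchIndex

# Build the index from a YAML schema and opt in to per-record validation.
index = SearchIndex.from_yaml(
    "schemas/schema.yaml",
    redis_url="redis://localhost:6379",
    validate_on_load=True,
)
index.create(overwrite=True, drop=False)

records = [{"doc-id": "1"}, {"doc-id": "2"}]  # placeholder records

try:
    keys = index.load(records, id_field="doc-id")
except SchemaValidationError as err:
    # Caught first: it subclasses RedisVLError and reports the index of the offending record.
    print(f"Validation failed: {err}")
except RedisVLError as err:
    # Any other failure during load is wrapped in the library's base error.
    print(f"Load failed: {err}")
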
@@ -985,11 +968,12 @@ class AsyncSearchIndex(BaseSearchIndex): # initialize the index object with schema from file index = AsyncSearchIndex.from_yaml( "schemas/schema.yaml", - redis_url="redis://localhost:6379" + redis_url="redis://localhost:6379", + validate_on_load=True ) # create the index - await index.create(overwrite=True) + await index.create(overwrite=True, drop=False) # data is an iterable of dictionaries await index.load(data) @@ -1007,6 +991,7 @@ def __init__( redis_url: Optional[str] = None, redis_client: Optional[aredis.Redis] = None, connection_kwargs: Optional[Dict[str, Any]] = None, + validate_on_load: bool = False, **kwargs, ): """Initialize the RedisVL async search index with a schema. @@ -1019,6 +1004,8 @@ def __init__( instantiated redis client. connection_kwargs (Optional[Dict[str, Any]]): Redis client connection args. + validate_on_load (bool, optional): Whether to validate data against schema + when loading. Defaults to False. """ if "redis_kwargs" in kwargs: connection_kwargs = kwargs.pop("redis_kwargs") @@ -1028,7 +1015,7 @@ def __init__( raise ValueError("Must provide a valid IndexSchema object") self.schema = schema - + self._validate_on_load = validate_on_load self._lib_name: Optional[str] = kwargs.pop("lib_name", None) # Store connection parameters @@ -1276,6 +1263,7 @@ async def expire_keys( else: return await client.expire(keys, ttl) + @deprecated_argument("concurrency", "Use batch_size instead.") async def load( self, data: Iterable[Any], @@ -1284,9 +1272,10 @@ async def load( ttl: Optional[int] = None, preprocess: Optional[Callable] = None, concurrency: Optional[int] = None, + batch_size: Optional[int] = None, ) -> List[str]: - """Asynchronously load objects to Redis with concurrency control. - Returns the list of keys loaded to Redis. + """Asynchronously load objects to Redis. Returns the list of keys loaded + to Redis. RedisVL automatically handles constructing the object keys, batching, optional preprocessing steps, and setting optional expiration @@ -1301,18 +1290,18 @@ async def load( Must match the length of objects if provided. Defaults to None. ttl (Optional[int], optional): Time-to-live in seconds for each key. Defaults to None. - preprocess (Optional[Callable], optional): An async function to + preprocess (Optional[Callable], optional): A function to preprocess objects before storage. Defaults to None. - concurrency (Optional[int], optional): The maximum number of - concurrent write operations. Defaults to class's default - concurrency level. + batch_size (Optional[int], optional): Number of objects to write in + a single Redis pipeline execution. Defaults to class's + default batch size. Returns: List[str]: List of keys loaded to Redis. Raises: - ValueError: If the length of provided keys does not match the - length of objects. + SchemaValidationError: If validation fails when validate_on_load is enabled. + RedisVLError: If there's an error loading data to Redis. .. 
code-block:: python @@ -1328,7 +1317,7 @@ async def load( keys = await index.load(data, keys=["rvl:foo", "rvl:bar"]) # load data with preprocessing step - async def add_field(d): + def add_field(d): d["new_field"] = 123 return d keys = await index.load(data, preprocess=add_field) @@ -1343,11 +1332,17 @@ async def add_field(d): keys=keys, ttl=ttl, preprocess=preprocess, - concurrency=concurrency, + batch_size=batch_size, + validate=self._validate_on_load, ) - except: - logger.exception("Error while loading data to Redis") + except SchemaValidationError: + # Pass through validation errors directly + logger.exception("Schema validation error while loading data") raise + except Exception as e: + # Wrap other errors as general RedisVL errors + logger.exception("Error while loading data to Redis") + raise RedisVLError(f"Failed to load data: {str(e)}") from e async def fetch(self, id: str) -> Optional[Dict[str, Any]]: """Asynchronously etch an object from Redis by id. The id is typically diff --git a/redisvl/index/storage.py b/redisvl/index/storage.py index 2be386c0..792b6bc4 100644 --- a/redisvl/index/storage.py +++ b/redisvl/index/storage.py @@ -1,14 +1,19 @@ -import asyncio -from typing import Any, Callable, Dict, Iterable, List, Optional +from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple -from pydantic import BaseModel +from pydantic import BaseModel, ValidationError from redis import Redis from redis.asyncio import Redis as AsyncRedis from redis.commands.search.indexDefinition import IndexType +from redisvl.exceptions import SchemaValidationError from redisvl.redis.utils import convert_bytes +from redisvl.schema import IndexSchema +from redisvl.schema.validation import validate_object +from redisvl.utils.log import get_logger from redisvl.utils.utils import create_ulid +logger = get_logger(__name__) + class BaseStorage(BaseModel): """ @@ -20,14 +25,10 @@ class BaseStorage(BaseModel): type: IndexType """Type of index used in storage""" - prefix: str - """Prefix for Redis keys""" - key_separator: str - """Separator between prefix and key value""" + index_schema: IndexSchema + """Index schema definition""" default_batch_size: int = 200 """Default size for batch operations""" - default_write_concurrency: int = 20 - """Default concurrency for async ops""" @staticmethod def _key(id: str, prefix: str, key_separator: str) -> str: @@ -72,7 +73,9 @@ def _create_key(self, obj: Dict[str, Any], id_field: Optional[str] = None) -> st raise ValueError(f"Key field {id_field} not found in record {obj}") return self._key( - key_value, prefix=self.prefix, key_separator=self.key_separator + key_value, + prefix=self.index_schema.index.prefix, + key_separator=self.index_schema.index.key_separator, ) @staticmethod @@ -92,35 +95,6 @@ def _preprocess(obj: Any, preprocess: Optional[Callable] = None) -> Dict[str, An obj = preprocess(obj) return obj - @staticmethod - async def _apreprocess( - obj: Any, preprocess: Optional[Callable] = None - ) -> Dict[str, Any]: - """Asynchronously apply a preprocessing function to the object if - provided. - - Args: - preprocess (Optional[Callable], optional): Async function to - process the object. - obj (Any): Object to preprocess. - - Returns: - Dict[str, Any]: Processed object as a dictionary. - """ - # optionally async preprocess object - if preprocess: - obj = await preprocess(obj) - return obj - - def _validate(self, obj: Dict[str, Any]): - """Validate the object before writing to Redis. This method should be - implemented by subclasses. 
- - Args: - obj (Dict[str, Any]): The object to validate. - """ - raise NotImplementedError - @staticmethod def _set(client: Redis, key: str, obj: Dict[str, Any]): """Synchronously set the value in Redis for the given key. @@ -169,6 +143,81 @@ async def _aget(client: AsyncRedis, key: str) -> Dict[str, Any]: """ raise NotImplementedError + def _validate(self, obj: Dict[str, Any]) -> Dict[str, Any]: + """ + Validate an object against the schema using Pydantic-based validation. + + Args: + obj: The object to validate + + Returns: + Validated object with any type coercions applied + + Raises: + ValueError: If validation fails + """ + # Pass directly to validation function and let any errors propagate + return validate_object(self.index_schema, obj) + + def _preprocess_and_validate_objects( + self, + objects: Iterable[Any], + id_field: Optional[str] = None, + keys: Optional[Iterable[str]] = None, + preprocess: Optional[Callable] = None, + validate: bool = False, + ) -> List[Tuple[str, Dict[str, Any]]]: + """ + Preprocess and validate a list of objects with fail-fast approach. + + Args: + objects: List of objects to preprocess and validate + id_field: Field to use as the key + keys: Optional iterable of keys + preprocess: Optional preprocessing function + validate: Whether to validate against schema + + Returns: + List of tuples (key, processed_obj) for valid objects + + Raises: + SchemaValidationError: If validation fails, with context about which object failed + ValueError: If any other processing errors occur + """ + prepared_objects = [] + keys_iterator = iter(keys) if keys else None + + for i, obj in enumerate(objects): + try: + # Generate key + key = ( + next(keys_iterator) + if keys_iterator + else self._create_key(obj, id_field) + ) + + # Preprocess + processed_obj = self._preprocess(obj, preprocess) + + # Schema validation if enabled + if validate: + processed_obj = self._validate(processed_obj) + + # Store valid object with its key for writing + prepared_objects.append((key, processed_obj)) + + except ValidationError as e: + # Convert Pydantic ValidationError to SchemaValidationError with index context + raise SchemaValidationError(str(e), index=i) from e + except Exception as e: + # Capture other exceptions with context + object_id = f"at index {i}" + raise ValueError( + f"Error processing object {object_id}: {str(e)}" + ) from e + + return prepared_objects + def write( self, redis_client: Redis, @@ -178,6 +227,7 @@ def write( ttl: Optional[int] = None, preprocess: Optional[Callable] = None, batch_size: Optional[int] = None, + validate: bool = False, ) -> List[str]: """Write a batch of objects to Redis as hash entries. This method returns a list of Redis keys written to the database. @@ -195,44 +245,52 @@ def write( objects before storage. Defaults to None. batch_size (Optional[int], optional): Number of objects to write in a single Redis pipeline execution. + validate (bool, optional): Whether to validate objects against schema. + Defaults to False. Raises: ValueError: If the length of provided keys does not match the - length of objects. + length of objects, or if validation fails. 
""" if keys and len(keys) != len(objects): # type: ignore raise ValueError("Length of keys does not match the length of objects") if batch_size is None: - # Use default or calculate based on the input data batch_size = self.default_batch_size - keys_iterator = iter(keys) if keys else None - added_keys: List[str] = [] - - if objects: - with redis_client.pipeline(transaction=False) as pipe: - for i, obj in enumerate(objects, start=1): - # Construct key, validate, and write - key = ( - next(keys_iterator) - if keys_iterator - else self._create_key(obj, id_field) - ) - obj = self._preprocess(obj, preprocess) - self._validate(obj) - self._set(pipe, key, obj) - # Set TTL if provided - if ttl: - pipe.expire(key, ttl) - # Execute mini batch - if i % batch_size == 0: - pipe.execute() - added_keys.append(key) - # Clean up batches if needed - if i % batch_size != 0: + if not objects: + return [] + + # Pass 1: Preprocess and validate all objects + prepared_objects = self._preprocess_and_validate_objects( + list(objects), # Convert Iterable to List + id_field=id_field, + keys=keys, + preprocess=preprocess, + validate=validate, + ) + + # Pass 2: Write all valid objects in batches + added_keys = [] + + with redis_client.pipeline(transaction=False) as pipe: + for i, (key, obj) in enumerate(prepared_objects, start=1): + self._set(pipe, key, obj) + + # Set TTL if provided + if ttl: + pipe.expire(key, ttl) + + added_keys.append(key) + + # Execute in batches + if i % batch_size == 0: pipe.execute() + # Execute any remaining commands + if len(prepared_objects) % batch_size != 0: + pipe.execute() + return added_keys async def awrite( @@ -242,12 +300,12 @@ async def awrite( id_field: Optional[str] = None, keys: Optional[Iterable[str]] = None, ttl: Optional[int] = None, + batch_size: Optional[int] = None, preprocess: Optional[Callable] = None, - concurrency: Optional[int] = None, + validate: bool = False, ) -> List[str]: - """Asynchronously write objects to Redis as hash entries with - concurrency control. The method returns a list of keys written to the - database. + """Asynchronously write objects to Redis as hash entries using pipeline batching. + The method returns a list of keys written to the database. Args: redis_client (AsyncRedis): An asynchronous Redis client used @@ -259,47 +317,60 @@ async def awrite( Must match the length of objects if provided. ttl (Optional[int], optional): Time-to-live in seconds for each key. Defaults to None. + batch_size (Optional[int], optional): Number of objects to write + in a single Redis pipeline execution. preprocess (Optional[Callable], optional): An async function to preprocess objects before storage. Defaults to None. - concurrency (Optional[int], optional): The maximum number of - concurrent write operations. Defaults to class's default - concurrency level. + validate (bool, optional): Whether to validate objects against schema. + Defaults to False. Returns: List[str]: List of Redis keys loaded to the databases. Raises: ValueError: If the length of provided keys does not match the - length of objects. + length of objects, or if validation fails. 
""" if keys and len(keys) != len(objects): # type: ignore raise ValueError("Length of keys does not match the length of objects") - if not concurrency: - concurrency = self.default_write_concurrency + if batch_size is None: + batch_size = self.default_batch_size - semaphore = asyncio.Semaphore(concurrency) - keys_iterator = iter(keys) if keys else None + if not objects: + return [] + + # Pass 1: Preprocess and validate all objects + prepared_objects = self._preprocess_and_validate_objects( + list(objects), # Convert Iterable to List + id_field=id_field, + keys=keys, + preprocess=preprocess, + validate=validate, + ) - async def _load(obj: Dict[str, Any], key: Optional[str] = None) -> str: - async with semaphore: - if key is None: - key = self._create_key(obj, id_field) - obj = await self._apreprocess(obj, preprocess) - self._validate(obj) - await self._aset(redis_client, key, obj) + # Pass 2: Write all valid objects in batches using pipeline + added_keys = [] + + async with redis_client.pipeline(transaction=False) as pipe: + for i, (key, obj) in enumerate(prepared_objects, start=1): + await self._aset(pipe, key, obj) + + # Set TTL if provided if ttl: - await redis_client.expire(key, ttl) - return key + await pipe.expire(key, ttl) - if keys_iterator: - tasks = [ - asyncio.create_task(_load(obj, next(keys_iterator))) for obj in objects - ] - else: - tasks = [asyncio.create_task(_load(obj)) for obj in objects] + added_keys.append(key) - return await asyncio.gather(*tasks) + # Execute in batches + if i % batch_size == 0: + await pipe.execute() + + # Execute any remaining commands + if len(prepared_objects) % batch_size != 0: + await pipe.execute() + + return added_keys def get( self, redis_client: Redis, keys: Iterable[str], batch_size: Optional[int] = None @@ -325,9 +396,7 @@ def get( return [] if batch_size is None: - batch_size = ( - self.default_batch_size - ) # Use default or calculate based on the input data + batch_size = self.default_batch_size # Use a pipeline to batch the retrieval with redis_client.pipeline(transaction=False) as pipe: @@ -345,39 +414,42 @@ async def aget( self, redis_client: AsyncRedis, keys: Iterable[str], - concurrency: Optional[int] = None, + batch_size: Optional[int] = None, ) -> List[Dict[str, Any]]: - """Asynchronously retrieve objects from Redis by keys, with concurrency - control. + """Asynchronously retrieve objects from Redis by keys. Args: redis_client (AsyncRedis): Asynchronous Redis client. keys (Iterable[str]): Keys to retrieve from Redis. - concurrency (Optional[int], optional): The number of concurrent - requests to make. + batch_size (Optional[int], optional): Number of objects to write + in a single Redis pipeline execution. Defaults to class's + default batch size. Returns: Dict[str, Any]: Dictionary with keys and their corresponding objects. 
""" + results: List = [] + if not isinstance(keys, Iterable): # type: ignore raise TypeError("Keys must be an iterable of strings") if len(keys) == 0: # type: ignore return [] - if not concurrency: - concurrency = self.default_write_concurrency - - semaphore = asyncio.Semaphore(concurrency) + if batch_size is None: + batch_size = self.default_batch_size - async def _get(key: str) -> Dict[str, Any]: - async with semaphore: - result = await self._aget(redis_client, key) - return result + # Use a pipeline to batch the retrieval + async with redis_client.pipeline(transaction=False) as pipe: + for i, key in enumerate(keys, start=1): + await self._aget(pipe, key) + if i % batch_size == 0: + results.extend(await pipe.execute()) + if i % batch_size != 0: + results.extend(await pipe.execute()) - tasks = [asyncio.create_task(_get(key)) for key in keys] - results = await asyncio.gather(*tasks) + # Process results return convert_bytes(results) @@ -392,19 +464,6 @@ class HashStorage(BaseStorage): type: IndexType = IndexType.HASH """Hash data type for the index""" - def _validate(self, obj: Dict[str, Any]): - """Validate that the given object is a dictionary, suitable for storage - as a Redis hash. - - Args: - obj (Dict[str, Any]): The object to validate. - - Raises: - TypeError: If the object is not a dictionary. - """ - if not isinstance(obj, dict): - raise TypeError("Object must be a dictionary.") - @staticmethod def _set(client: Redis, key: str, obj: Dict[str, Any]): """Synchronously set a hash value in Redis for the given key. @@ -465,19 +524,6 @@ class JsonStorage(BaseStorage): type: IndexType = IndexType.JSON """JSON data type for the index""" - def _validate(self, obj: Dict[str, Any]): - """Validate that the given object is a dictionary, suitable for JSON - serialization. - - Args: - obj (Dict[str, Any]): The object to validate. - - Raises: - TypeError: If the object is not a dictionary. - """ - if not isinstance(obj, dict): - raise TypeError("Object must be a dictionary.") - @staticmethod def _set(client: Redis, key: str, obj: Dict[str, Any]): """Synchronously set a JSON obj in Redis for the given key. 
diff --git a/redisvl/schema/__init__.py b/redisvl/schema/__init__.py index 24f6b821..c835ccd5 100644 --- a/redisvl/schema/__init__.py +++ b/redisvl/schema/__init__.py @@ -1,3 +1,35 @@ +from redisvl.schema.fields import ( + BaseField, + FieldTypes, + FlatVectorField, + GeoField, + HNSWVectorField, + NumericField, + TagField, + TextField, + VectorDataType, + VectorDistanceMetric, + VectorIndexAlgorithm, +) from redisvl.schema.schema import IndexInfo, IndexSchema, StorageType -__all__ = ["StorageType", "IndexSchema", "IndexInfo"] +# Expose validation functionality +from redisvl.schema.validation import validate_object + +__all__ = [ + "IndexSchema", + "IndexInfo", + "StorageType", + "FieldTypes", + "VectorDistanceMetric", + "VectorDataType", + "VectorIndexAlgorithm", + "BaseField", + "TextField", + "TagField", + "NumericField", + "GeoField", + "FlatVectorField", + "HNSWVectorField", + "validate_object", +] diff --git a/redisvl/schema/fields.py b/redisvl/schema/fields.py index 17714480..b77188d7 100644 --- a/redisvl/schema/fields.py +++ b/redisvl/schema/fields.py @@ -164,12 +164,14 @@ class BaseField(BaseModel): """Specified field attributes""" def _handle_names(self) -> Tuple[str, Optional[str]]: + """Helper to handle field naming with path support""" if self.path: return self.path, self.name return self.name, None def as_redis_field(self) -> RedisField: - raise NotImplementedError + """Convert schema field to Redis Field object""" + raise NotImplementedError("Must be implemented by field subclasses") class TextField(BaseField): diff --git a/redisvl/schema/schema.py b/redisvl/schema/schema.py index 33dfd9c7..90617d18 100644 --- a/redisvl/schema/schema.py +++ b/redisvl/schema/schema.py @@ -8,6 +8,7 @@ from redis.commands.search.field import Field as RedisField from redisvl.schema.fields import BaseField, FieldFactory +from redisvl.schema.type_utils import TypeInferrer from redisvl.utils.log import get_logger from redisvl.utils.utils import model_to_dict @@ -455,64 +456,3 @@ def to_yaml(self, file_path: str, overwrite: bool = True) -> None: with open(fp, "w") as f: yaml_data = self.to_dict() yaml.dump(yaml_data, f, sort_keys=False) - - -class TypeInferrer: - """Infers the type of a field based on its value.""" - - GEO_PATTERN = re.compile( - r"^\s*[-+]?([1-8]?\d(\.\d+)?|90(\.0+)?),\s*[-+]?(180(\.0+)?|((1[0-7]\d)|([1-9]?\d))(\.\d+)?)\s*$" - ) - - TYPE_METHOD_MAP = { - "numeric": "_is_numeric", - "geo": "_is_geographic", - "tag": "_is_tag", - "text": "_is_text", - } - - @classmethod - def infer(cls, value: Any) -> str: - """Infers the field type for a given value. - - Args: - value: The value to infer the type of. - - Returns: - The inferred field type as a string. - - Raises: - ValueError: If the type cannot be inferred. 
- """ - for type_name, method_name in cls.TYPE_METHOD_MAP.items(): - if getattr(cls, method_name)(value): - return type_name - raise ValueError(f"Unable to infer type for value: {value}") - - @classmethod - def _is_numeric(cls, value: Any) -> bool: - """Check if the value is numeric.""" - if not isinstance(value, (int, float, str)): - return False - try: - float(value) - return True - except (ValueError, TypeError): - return False - - @classmethod - def _is_tag(cls, value: Any) -> bool: - """Check if the value is a tag.""" - return isinstance(value, (list, set, tuple)) and all( - isinstance(v, str) for v in value - ) - - @classmethod - def _is_text(cls, value: Any) -> bool: - """Check if the value is text.""" - return isinstance(value, str) - - @classmethod - def _is_geographic(cls, value: Any) -> bool: - """Check if the value is a geographic coordinate.""" - return isinstance(value, str) and cls.GEO_PATTERN.match(value) is not None diff --git a/redisvl/schema/type_utils.py b/redisvl/schema/type_utils.py new file mode 100644 index 00000000..83329961 --- /dev/null +++ b/redisvl/schema/type_utils.py @@ -0,0 +1,63 @@ +import re +from typing import Any + + +class TypeInferrer: + """Infers the type of a field based on its value.""" + + GEO_PATTERN = re.compile( + r"^\s*[-+]?([1-8]?\d(\.\d+)?|90(\.0+)?),\s*[-+]?(180(\.0+)?|((1[0-7]\d)|([1-9]?\d))(\.\d+)?)\s*$" + ) + + TYPE_METHOD_MAP = { + "numeric": "_is_numeric", + "geo": "_is_geographic", + "tag": "_is_tag", + "text": "_is_text", + } + + @classmethod + def infer(cls, value: Any) -> str: + """Infers the field type for a given value. + + Args: + value: The value to infer the type of. + + Returns: + The inferred field type as a string. + + Raises: + ValueError: If the type cannot be inferred. + """ + for type_name, method_name in cls.TYPE_METHOD_MAP.items(): + if getattr(cls, method_name)(value): + return type_name + raise ValueError(f"Unable to infer type for value: {value}") + + @classmethod + def _is_numeric(cls, value: Any) -> bool: + """Check if the value is numeric.""" + if not isinstance(value, (int, float, str)): + return False + try: + float(value) + return True + except (ValueError, TypeError): + return False + + @classmethod + def _is_tag(cls, value: Any) -> bool: + """Check if the value is a tag.""" + return isinstance(value, (list, set, tuple)) and all( + isinstance(v, str) for v in value + ) + + @classmethod + def _is_text(cls, value: Any) -> bool: + """Check if the value is text.""" + return isinstance(value, str) + + @classmethod + def _is_geographic(cls, value: Any) -> bool: + """Check if the value is a geographic coordinate.""" + return isinstance(value, str) and cls.GEO_PATTERN.match(value) is not None diff --git a/redisvl/schema/validation.py b/redisvl/schema/validation.py new file mode 100644 index 00000000..629b193b --- /dev/null +++ b/redisvl/schema/validation.py @@ -0,0 +1,277 @@ +""" +RedisVL Schema Validation Module + +This module provides utilities for validating data against RedisVL schemas +using dynamically generated Pydantic models. 
+""" + +import json +from typing import Any, Dict, List, Optional, Type, Union + +from jsonpath_ng import parse as jsonpath_parse +from pydantic import BaseModel, Field, field_validator + +from redisvl.schema import IndexSchema +from redisvl.schema.fields import BaseField, FieldTypes, VectorDataType +from redisvl.schema.schema import StorageType +from redisvl.schema.type_utils import TypeInferrer +from redisvl.utils.log import get_logger + +logger = get_logger(__name__) + + +class SchemaModelGenerator: + """ + Generates and caches Pydantic models based on Redis schema definitions. + + This class handles the conversion of RedisVL IndexSchema objects into + Pydantic models with appropriate field types and validators. + """ + + _model_cache: Dict[str, Type[BaseModel]] = {} + + @classmethod + def get_model_for_schema(cls, schema: IndexSchema) -> Type[BaseModel]: + """ + Get or create a Pydantic model for a schema. + + Args: + schema: The IndexSchema to convert to a Pydantic model + + Returns: + A Pydantic model class that can validate data against the schema + """ + # Use schema identifier as cache key + cache_key = str(hash(json.dumps(schema.to_dict(), sort_keys=True).encode())) + + if cache_key not in cls._model_cache: + cls._model_cache[cache_key] = cls._create_model(schema) + + return cls._model_cache[cache_key] + + @classmethod + def _map_field_to_pydantic_type( + cls, field: BaseField, storage_type: StorageType + ) -> Type[Any]: + """ + Map Redis field types to appropriate Pydantic types. + + Args: + field: The Redis field definition + storage_type: The storage type (HASH or JSON) + + Returns: + The Pydantic field type + + Raises: + ValueError: If the field type is not supported + """ + if field.type == FieldTypes.TEXT: + return str + elif field.type == FieldTypes.TAG: + return str + elif field.type == FieldTypes.NUMERIC: + return Union[int, float] # type: ignore + elif field.type == FieldTypes.GEO: + return str + elif field.type == FieldTypes.VECTOR: + # For JSON storage, vectors are always lists + if storage_type == StorageType.JSON: + # For int data types, vectors must be ints, otherwise floats + if field.attrs.datatype in ( # type: ignore + VectorDataType.INT8, + VectorDataType.UINT8, + ): + return List[int] + return List[float] + else: + return bytes + + # If we get here, the field type is not supported + raise ValueError(f"Unsupported field type: {field.type}") + + @classmethod + def _create_model(cls, schema: IndexSchema) -> Type[BaseModel]: + """ + Create a Pydantic model from schema definition using type() approach. 
+ + Args: + schema: The IndexSchema to convert + + Returns: + A Pydantic model class with appropriate fields and validators + """ + # Get storage type from schema + storage_type = schema.index.storage_type + + # Create annotations dictionary for the dynamic model + annotations: Dict[str, Any] = {} + class_dict: Dict[str, Any] = {} + + # Build annotations and field metadata + for field_name, field in schema.fields.items(): + field_type = cls._map_field_to_pydantic_type(field, storage_type) + + # Make all fields optional in the model + annotations[field_name] = Optional[field_type] + + # Add default=None to make fields truly optional (can be missing from input) + class_dict[field_name] = Field(default=None) + + # Register validators for GEO fields + if field.type == FieldTypes.GEO: + + def make_geo_validator(fname: str): + @field_validator(fname, mode="after") + def _validate_geo(cls, value): + # Skip validation for None values + if value is not None: + # Validate against pattern + if not TypeInferrer._is_geographic(value): + raise ValueError( + f"Geo field '{fname}' value '{value}' is not a valid 'lat,lon' format" + ) + return value + + return _validate_geo + + class_dict[f"validate_{field_name}"] = make_geo_validator(field_name) + + # Register validators for NUMERIC fields + elif field.type == FieldTypes.NUMERIC: + + def make_numeric_validator(fname: str): + # mode='before' so it catches bools before parsing + @field_validator(fname, mode="before") + def _disallow_bool(cls, value): + if isinstance(value, bool): + raise ValueError(f"Field '{fname}' cannot be boolean.") + return value + + return _disallow_bool + + class_dict[f"validate_{field_name}"] = make_numeric_validator( + field_name + ) + + # Register validators for VECTOR fields + elif field.type == FieldTypes.VECTOR: + dims = field.attrs.dims # type: ignore + datatype = field.attrs.datatype # type: ignore + + def make_vector_validator( + fname: str, dims: int, datatype: VectorDataType + ): + @field_validator(fname, mode="after") + def _validate_vector(cls, value): + # Skip validation for None values + if value is not None: + # Handle list representation + if isinstance(value, list): + # Validate dimensions + if len(value) != dims: + raise ValueError( + f"Vector field '{fname}' must have {dims} dimensions, got {len(value)}" + ) + # Validate data types + datatype_str = str(datatype).upper() + # Integer-based datatypes + if datatype_str in ("INT8", "UINT8"): + # Check range for INT8 + if datatype_str == "INT8": + if any(v < -128 or v > 127 for v in value): + raise ValueError( + f"Vector field '{fname}' contains values outside the INT8 range (-128 to 127)" + ) + # Check range for UINT8 + elif datatype_str == "UINT8": + if any(v < 0 or v > 255 for v in value): + raise ValueError( + f"Vector field '{fname}' contains values outside the UINT8 range (0 to 255)" + ) + return value + + return _validate_vector + + class_dict[f"validate_{field_name}"] = make_vector_validator( + field_name, dims, datatype + ) + + # Create class dictionary with annotations and field metadata + class_dict.update( + **{ + "__annotations__": annotations, + "model_config": {"arbitrary_types_allowed": True, "extra": "allow"}, + } + ) + + # Create the model class using type() + model_name = f"{schema.index.name}__PydanticModel" + return type(model_name, (BaseModel,), class_dict) + + +def extract_from_json_path(obj: Dict[str, Any], path: str) -> Any: + """ + Extract a value from a nested JSON object using a JSONPath expression. 
+ + Args: + obj: The object to extract values from + path: JSONPath expression (e.g., $.field.subfield, $.[*].name) + + Returns: + The extracted value or None if not found + + Notes: + This function uses the jsonpath-ng library for proper JSONPath parsing + and supports the full JSONPath specification including filters, wildcards, + and array indexing. + """ + # If path doesn't start with $, add it as per JSONPath spec + if not path.startswith("$"): + path = f"$.{path}" + + # Parse and find the JSONPath expression + jsonpath_expr = jsonpath_parse(path) + matches = jsonpath_expr.find(obj) + + # Return the first match value, or None if no matches + if matches: + return matches[0].value + return None + + +def validate_object(schema: IndexSchema, obj: Dict[str, Any]) -> Dict[str, Any]: + """ + Validate an object against a schema. + + Args: + schema: The IndexSchema to validate against + obj: The object to validate + + Returns: + Validated object with any type coercions applied + + Raises: + ValueError: If validation fails with enhanced error message + """ + # Get Pydantic model for this schema + model_class = SchemaModelGenerator.get_model_for_schema(schema) + + # Prepare object for validation + # Handle nested JSON if needed + if schema.index.storage_type == StorageType.JSON: + # Extract values from nested paths + flat_obj = {} + for field_name, field in schema.fields.items(): + if field.path: + value = extract_from_json_path(obj, field.path) + if value is not None: + flat_obj[field_name] = value + elif field_name in obj: + flat_obj[field_name] = obj[field_name] + else: + flat_obj = obj + + # Validate against model + validated = model_class.model_validate(flat_obj) + return validated.model_dump(exclude_none=True) diff --git a/tests/conftest.py b/tests/conftest.py index 4d5483e4..6dfe19a8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,7 @@ from testcontainers.compose import DockerCompose from redisvl.redis.connection import RedisConnectionFactory +from redisvl.utils.vectorize import HFTextVectorizer @pytest.fixture(autouse=True) @@ -68,6 +69,15 @@ def client(redis_url): yield conn +@pytest.fixture(scope="session", autouse=True) +def hf_vectorizer(): + return HFTextVectorizer( + model="sentence-transformers/all-mpnet-base-v2", + token=os.getenv("HF_TOKEN"), + cache_folder=os.getenv("SENTENCE_TRANSFORMERS_HOME"), + ) + + @pytest.fixture def sample_datetimes(): return { diff --git a/tests/integration/test_async_search_index.py b/tests/integration/test_async_search_index.py index edc6c01a..32a2e3d3 100644 --- a/tests/integration/test_async_search_index.py +++ b/tests/integration/test_async_search_index.py @@ -5,7 +5,7 @@ from redis import Redis as SyncRedis from redis.asyncio import Redis as AsyncRedis -from redisvl.exceptions import RedisModuleVersionError, RedisSearchError +from redisvl.exceptions import RedisModuleVersionError, RedisSearchError, RedisVLError from redisvl.index import AsyncSearchIndex from redisvl.query import VectorQuery from redisvl.query.query import FilterQuery @@ -269,7 +269,7 @@ async def test_search_index_load_preprocess(async_index): await async_index.create(overwrite=True, drop=True) data = [{"id": "1", "test": "foo"}] - async def preprocess(record): + def preprocess(record): record["test"] = "bar" return record @@ -281,10 +281,10 @@ async def preprocess(record): == "bar" ) - async def bad_preprocess(record): + def bad_preprocess(record): return 1 - with pytest.raises(TypeError): + with pytest.raises(RedisVLError): await async_index.load(data, 
id_field="id", preprocess=bad_preprocess) @@ -300,7 +300,7 @@ async def test_no_id_field(async_index): bad_data = [{"wrong_key": "1", "value": "test"}] # catch missing / invalid id_field - with pytest.raises(ValueError): + with pytest.raises(RedisVLError): await async_index.load(bad_data, id_field="key") diff --git a/tests/unit/test_cross_encoder_reranker.py b/tests/integration/test_cross_encoder_reranker.py similarity index 100% rename from tests/unit/test_cross_encoder_reranker.py rename to tests/integration/test_cross_encoder_reranker.py diff --git a/tests/integration/test_flow_async.py b/tests/integration/test_flow_async.py index a368f677..c727fd28 100644 --- a/tests/integration/test_flow_async.py +++ b/tests/integration/test_flow_async.py @@ -52,7 +52,7 @@ async def test_simple(async_client, schema, sample_data): await index.create(overwrite=True, drop=True) # Prepare and load the data based on storage type - async def hash_preprocess(item: dict) -> dict: + def hash_preprocess(item: dict) -> dict: return { **item, "user_embedding": array_to_buffer(item["user_embedding"], "float32"), diff --git a/tests/integration/test_search_index.py b/tests/integration/test_search_index.py index 800f6a06..875449be 100644 --- a/tests/integration/test_search_index.py +++ b/tests/integration/test_search_index.py @@ -4,7 +4,7 @@ import pytest from redis import Redis -from redisvl.exceptions import RedisModuleVersionError, RedisSearchError +from redisvl.exceptions import RedisModuleVersionError, RedisSearchError, RedisVLError from redisvl.index import SearchIndex from redisvl.query import VectorQuery from redisvl.query.query import FilterQuery @@ -268,7 +268,7 @@ def preprocess(record): def bad_preprocess(record): return 1 - with pytest.raises(TypeError): + with pytest.raises(RedisVLError): index.load(data, id_field="id", preprocess=bad_preprocess) @@ -277,7 +277,7 @@ def test_no_id_field(index): bad_data = [{"wrong_key": "1", "value": "test"}] # catch missing / invalid id_field - with pytest.raises(ValueError): + with pytest.raises(RedisVLError): index.load(bad_data, id_field="key") diff --git a/tests/integration/test_threshold_optimizer.py b/tests/integration/test_threshold_optimizer.py index 44871901..b510b038 100644 --- a/tests/integration/test_threshold_optimizer.py +++ b/tests/integration/test_threshold_optimizer.py @@ -35,10 +35,11 @@ def routes(): @pytest.fixture -def semantic_router(client, routes): +def semantic_router(client, routes, hf_vectorizer): router = SemanticRouter( name="test-router", routes=routes, + vectorizer=hf_vectorizer, routing_config=RoutingConfig(max_k=2), redis_client=client, overwrite=False, @@ -86,7 +87,7 @@ def test_data_optimization(): def test_routes_different_distance_thresholds_optimizer_default( - semantic_router, routes, redis_url, test_data_optimization + semantic_router, routes, redis_url, test_data_optimization, hf_vectorizer ): redis_version = semantic_router._index.client.info()["redis_version"] if not compare_versions(redis_version, "7.0.0"): @@ -101,6 +102,7 @@ def test_routes_different_distance_thresholds_optimizer_default( router = SemanticRouter( name="test_routes_different_distance_optimizer", routes=routes, + vectorizer=hf_vectorizer, redis_url=redis_url, overwrite=True, ) @@ -119,7 +121,7 @@ def test_routes_different_distance_thresholds_optimizer_default( def test_routes_different_distance_thresholds_optimizer_precision( - semantic_router, routes, redis_url, test_data_optimization + semantic_router, routes, redis_url, test_data_optimization, 
hf_vectorizer ): redis_version = semantic_router._index.client.info()["redis_version"] @@ -135,6 +137,7 @@ def test_routes_different_distance_thresholds_optimizer_precision( router = SemanticRouter( name="test_routes_different_distance_optimizer", routes=routes, + vectorizer=hf_vectorizer, redis_url=redis_url, overwrite=True, ) @@ -155,7 +158,7 @@ def test_routes_different_distance_thresholds_optimizer_precision( def test_routes_different_distance_thresholds_optimizer_recall( - semantic_router, routes, redis_url, test_data_optimization + semantic_router, routes, redis_url, test_data_optimization, hf_vectorizer ): redis_version = semantic_router._index.client.info()["redis_version"] if not compare_versions(redis_version, "7.0.0"): @@ -170,6 +173,7 @@ def test_routes_different_distance_thresholds_optimizer_recall( router = SemanticRouter( name="test_routes_different_distance_optimizer", routes=routes, + vectorizer=hf_vectorizer, redis_url=redis_url, overwrite=True, ) diff --git a/tests/unit/test_fields.py b/tests/unit/test_fields.py index f420afff..0c0d504e 100644 --- a/tests/unit/test_fields.py +++ b/tests/unit/test_fields.py @@ -1,3 +1,5 @@ +from typing import Any, Optional, Tuple + import pytest from redis.commands.search.field import GeoField as RedisGeoField from redis.commands.search.field import NumericField as RedisNumericField diff --git a/tests/unit/test_storage.py b/tests/unit/test_storage.py index 21637dd5..11f51e73 100644 --- a/tests/unit/test_storage.py +++ b/tests/unit/test_storage.py @@ -1,13 +1,99 @@ import pytest +from pydantic import ValidationError +from redisvl.exceptions import SchemaValidationError from redisvl.index.storage import BaseStorage, HashStorage, JsonStorage +from redisvl.schema import IndexSchema + + +@pytest.fixture +def sample_hash_schema(): + """Create a sample schema with HASH storage for testing.""" + schema_dict = { + "index": { + "name": "test-hash-index", + "prefix": "test", + "key_separator": ":", + "storage_type": "hash", + }, + "fields": [ + {"name": "test_id", "type": "tag"}, + {"name": "title", "type": "text"}, + {"name": "user", "type": "tag"}, + {"name": "rating", "type": "numeric"}, + {"name": "location", "type": "geo"}, + { + "name": "embedding", + "type": "vector", + "attrs": { + "algorithm": "flat", + "dims": 4, + "datatype": "float32", + "distance_metric": "cosine", + }, + }, + { + "name": "int_vector", + "type": "vector", + "attrs": { + "algorithm": "flat", + "dims": 3, + "datatype": "int8", + "distance_metric": "l2", + }, + }, + ], + } + return IndexSchema.from_dict(schema_dict) + + +@pytest.fixture +def sample_json_schema(): + """Create a sample schema with JSON storage for testing.""" + schema_dict = { + "index": { + "name": "test-json-index", + "prefix": "test", + "key_separator": ":", + "storage_type": "json", + }, + "fields": [ + {"name": "test_id", "type": "tag"}, + {"name": "user", "type": "tag"}, + {"name": "title", "type": "text"}, + {"name": "rating", "type": "numeric"}, + {"name": "location", "type": "geo"}, + { + "name": "embedding", + "type": "vector", + "attrs": { + "algorithm": "flat", + "dims": 4, + "datatype": "float32", + "distance_metric": "cosine", + }, + }, + { + "name": "int_vector", + "type": "vector", + "attrs": { + "algorithm": "flat", + "dims": 3, + "datatype": "int8", + "distance_metric": "l2", + }, + }, + ], + } + return IndexSchema.from_dict(schema_dict) @pytest.fixture(params=[JsonStorage, HashStorage]) -def storage_instance(request): +def storage_instance(request, sample_hash_schema, sample_json_schema): 
StorageClass = request.param - instance = StorageClass(prefix="test", key_separator=":") - return instance + if isinstance(StorageClass, JsonStorage): + return StorageClass(index_schema=sample_json_schema) + return StorageClass(index_schema=sample_hash_schema) def test_key_formatting(storage_instance): @@ -25,9 +111,7 @@ def test_key_formatting(storage_instance): def test_create_key(storage_instance): id_field = "id" obj = {id_field: "1234"} - expected_key = ( - f"{storage_instance.prefix}{storage_instance.key_separator}{obj[id_field]}" - ) + expected_key = f"{storage_instance.index_schema.index.prefix}{storage_instance.index_schema.index.key_separator}{obj[id_field]}" generated_key = storage_instance._create_key(obj, id_field) assert ( generated_key == expected_key @@ -35,46 +119,49 @@ def test_create_key(storage_instance): def test_validate_success(storage_instance): - data = {"foo": "bar"} try: - storage_instance._validate(data) + storage_instance._validate( + {"test_id": "1234", "rating": 5, "user": "john", "title": "engineer"} + ) except Exception as e: pytest.fail(f"_validate should not raise an exception here, but raised {e}") def test_validate_failure(storage_instance): - data = "Some invalid data type" - with pytest.raises(TypeError): - storage_instance._validate(data) - data = 12345 - with pytest.raises(TypeError): + data = {"title": 5} + with pytest.raises(ValidationError): storage_instance._validate(data) + data = {"user": [1]} + with pytest.raises(ValidationError): + storage_instance._validate(data) -def test_preprocess(storage_instance): - data = {"key": "value"} - preprocessed_data = storage_instance._preprocess(preprocess=None, obj=data) - assert preprocessed_data == data - def fn(d): - d["foo"] = "bar" - return d +def test_validate_preprocess_and_validate_failure(storage_instance): + data = {"title": 5} + data == storage_instance._preprocess_and_validate_objects( + objects=[data], validate=False + ) + with pytest.raises(SchemaValidationError): + storage_instance._preprocess_and_validate_objects(objects=[data], validate=True) - preprocessed_data = storage_instance._preprocess(fn, data) - assert "foo" in preprocessed_data - assert preprocessed_data["foo"] == "bar" + data = {"user": [1]} + data == storage_instance._preprocess_and_validate_objects( + objects=[data], validate=False + ) + with pytest.raises(SchemaValidationError): + storage_instance._preprocess_and_validate_objects(objects=[data], validate=True) -@pytest.mark.asyncio -async def test_preprocess(storage_instance): +def test_preprocess(storage_instance): data = {"key": "value"} - preprocessed_data = await storage_instance._apreprocess(preprocess=None, obj=data) + preprocessed_data = storage_instance._preprocess(obj=data, preprocess=None) assert preprocessed_data == data - async def fn(d): + def fn(d): d["foo"] = "bar" return d - preprocessed_data = await storage_instance._apreprocess(data, fn) + preprocessed_data = storage_instance._preprocess(obj=data, preprocess=fn) assert "foo" in preprocessed_data assert preprocessed_data["foo"] == "bar" diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 83300d0c..af0cc192 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -1,6 +1,3 @@ -import importlib -import io -import logging import re import sys from functools import wraps diff --git a/tests/unit/test_validation.py b/tests/unit/test_validation.py new file mode 100644 index 00000000..68933938 --- /dev/null +++ b/tests/unit/test_validation.py @@ -0,0 +1,692 @@ +""" +Tests for the 
RedisVL schema validation module. + +This module tests the core validation functionality: +1. Model generation from schemas +2. Field-specific validators +3. JSON path extraction +4. Validation of various field types +""" + +import re +from typing import Any, List, Optional, Tuple, Union + +import pytest + +from redisvl.schema import IndexSchema +from redisvl.schema.fields import FieldTypes, VectorDataType +from redisvl.schema.schema import StorageType +from redisvl.schema.type_utils import TypeInferrer +from redisvl.schema.validation import ( + SchemaModelGenerator, + extract_from_json_path, + validate_object, +) + +# -------------------- FIXTURES -------------------- + + +@pytest.fixture +def sample_hash_schema(): + """Create a sample schema with HASH storage for testing.""" + schema_dict = { + "index": { + "name": "test-hash-index", + "prefix": "test", + "key_separator": ":", + "storage_type": "hash", + }, + "fields": [ + {"name": "test_id", "type": "tag"}, + {"name": "title", "type": "text"}, + {"name": "rating", "type": "numeric"}, + {"name": "location", "type": "geo"}, + { + "name": "embedding", + "type": "vector", + "attrs": { + "algorithm": "flat", + "dims": 4, + "datatype": "float32", + "distance_metric": "cosine", + }, + }, + { + "name": "int_vector", + "type": "vector", + "attrs": { + "algorithm": "flat", + "dims": 3, + "datatype": "int8", + "distance_metric": "l2", + }, + }, + ], + } + return IndexSchema.from_dict(schema_dict) + + +@pytest.fixture +def sample_json_schema(): + """Create a sample schema with JSON storage for testing.""" + schema_dict = { + "index": { + "name": "test-json-index", + "prefix": "test", + "key_separator": ":", + "storage_type": "json", + }, + "fields": [ + {"name": "test_id", "type": "tag", "path": "$.test_id"}, + {"name": "user", "type": "tag", "path": "$.metadata.user"}, + {"name": "title", "type": "text", "path": "$.content.title"}, + {"name": "rating", "type": "numeric", "path": "$.metadata.rating"}, + { + "name": "embedding", + "type": "vector", + "path": "$.content.embedding", + "attrs": { + "algorithm": "flat", + "dims": 4, + "datatype": "float32", + "distance_metric": "cosine", + }, + }, + { + "name": "int_vector", + "type": "vector", + "path": "$.content.int_vector", + "attrs": { + "algorithm": "flat", + "dims": 3, + "datatype": "int8", + "distance_metric": "l2", + }, + }, + ], + } + return IndexSchema.from_dict(schema_dict) + + +@pytest.fixture +def valid_hash_data(): + """Sample valid data for testing HASH storage validation.""" + return { + "test_id": "doc1", + "title": "Test Document", + "rating": 4.5, + "location": "37.7749,-122.4194", + "embedding": b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", # Bytes for HASH + "int_vector": b"\x01\x02\x03", # Bytes for HASH + } + + +@pytest.fixture +def valid_json_data(): + """Sample valid data for testing JSON storage validation.""" + return { + "test_id": "doc1", + "metadata": {"user": "user123", "rating": 4.5}, + "content": { + "title": "Test Document", + "embedding": [0.1, 0.2, 0.3, 0.4], # List for JSON + "int_vector": [1, 2, 3], # List for JSON + }, + } + + +# -------------------- TEST HELPERS -------------------- + + +def validate_field( + schema: IndexSchema, + field_name: str, + value: Any, + should_pass: bool, + error_text: Optional[str] = None, +) -> Tuple[bool, Optional[str]]: + """ + Helper function to validate a field value against a schema. 
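As context for these tests, a minimal sketch of what validate_object does for JSON storage, assuming the validation module lands as in this patch (the two-field schema below is illustrative): nested values are pulled out via each field's JSONPath and only then validated against the generated model.

    from redisvl.schema import IndexSchema
    from redisvl.schema.validation import validate_object

    schema = IndexSchema.from_dict({
        "index": {"name": "demo-json", "prefix": "demo", "storage_type": "json"},
        "fields": [
            {"name": "test_id", "type": "tag", "path": "$.test_id"},
            {"name": "user", "type": "tag", "path": "$.metadata.user"},
        ],
    })
    # The nested "metadata.user" value is flattened onto the "user" field before validation
    validate_object(schema, {"test_id": "doc1", "metadata": {"user": "user123"}})
    # -> {"test_id": "doc1", "user": "user123"}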
+ + Args: + schema: The schema to validate against + field_name: The name of the field to validate + value: The value to validate + should_pass: Whether validation should pass + error_text: Expected error text if validation should fail + + Returns: + Tuple of (validation_success, error_message) + """ + # Get model for schema + model_class = SchemaModelGenerator.get_model_for_schema(schema) + + # Create test data with minimal viable fields + test_data = {field_name: value} + + # Try to validate + try: + validated = model_class.model_validate(test_data) + + # If we got here, validation passed + success = True + error_msg = None + + except Exception as e: + # Validation failed + success = False + error_msg = str(e) + + # Check if result matches expectation + if success != should_pass: + print("ERROR", error_msg, flush=True) + print(validated, flush=True) + assert ( + success == should_pass + ), f"Validation {'passed' if success else 'failed'} but expected {'pass' if should_pass else 'fail'}" + + # Check error text if specified and validation failed + if not success and error_text and error_msg: + assert ( + error_text in error_msg + ), f"Error '{error_msg}' does not contain expected text '{error_text}'" + + return success, error_msg + + +# -------------------- CATEGORY 1: BASIC UNIT TESTS -------------------- + + +class TestSchemaModelGenerator: + """Tests for the SchemaModelGenerator class.""" + + @pytest.mark.parametrize("schema_type", ["hash", "json"]) + def test_get_model_for_schema( + self, schema_type, sample_hash_schema, sample_json_schema + ): + """Test generating a model from a schema.""" + # Select schema based on type + schema = sample_hash_schema if schema_type == "hash" else sample_json_schema + + # Get model for schema + model_class = SchemaModelGenerator.get_model_for_schema(schema) + + # Verify model name matches the index name + assert model_class.__name__ == f"{schema.index.name}__PydanticModel" + + # Verify model has expected fields + for field_name in schema.field_names: + assert field_name in model_class.model_fields + + def test_model_caching(self, sample_hash_schema): + """Test that models are cached and reused.""" + # Get model twice + model1 = SchemaModelGenerator.get_model_for_schema(sample_hash_schema) + model2 = SchemaModelGenerator.get_model_for_schema(sample_hash_schema) + + # Verify same instance + assert model1 is model2 + + @pytest.mark.parametrize( + "field_type,storage_type,expected_type", + [ + (FieldTypes.TEXT, StorageType.HASH, str), + (FieldTypes.TAG, StorageType.HASH, str), + (FieldTypes.NUMERIC, StorageType.HASH, Union[int, float]), + (FieldTypes.GEO, StorageType.HASH, str), + (FieldTypes.VECTOR, StorageType.HASH, bytes), + (FieldTypes.TEXT, StorageType.JSON, str), + (FieldTypes.TAG, StorageType.JSON, str), + (FieldTypes.NUMERIC, StorageType.JSON, Union[int, float]), + (FieldTypes.GEO, StorageType.JSON, str), + (FieldTypes.VECTOR, StorageType.JSON, List[float]), + ], + ) + def test_type_mapping(self, field_type, storage_type, expected_type): + """Test mapping Redis field types to Pydantic types.""" + + # Create a basic field of the specified type + class SimpleField: + def __init__(self, ftype): + self.type = ftype + # Add attrs for vector fields + if ftype == FieldTypes.VECTOR: + + class Attrs: + dims = 4 + datatype = VectorDataType.FLOAT32 + + self.attrs = Attrs() + + field = SimpleField(field_type) + field_type_result = SchemaModelGenerator._map_field_to_pydantic_type( + field, storage_type + ) + + assert field_type_result == expected_type + + def 
test_unsupported_field_type(self): + """Test that an error is raised for unsupported field types.""" + + # Create a dummy field with unsupported type + class DummyField: + type = "unsupported_type" + + # Mapping should raise ValueError + with pytest.raises(ValueError) as exc_info: + SchemaModelGenerator._map_field_to_pydantic_type( + DummyField(), StorageType.HASH + ) + + assert "Unsupported field type" in str(exc_info.value) + + +class TestJsonPathExtraction: + """Tests for JSON path extraction functionality.""" + + @pytest.mark.parametrize( + "path,expected_value", + [ + ("$.test_id", "doc1"), + ("$.metadata.user", "user123"), + ("$.metadata.rating", 4.5), + ("$.content.title", "Test Document"), + ("$.content.embedding", [0.1, 0.2, 0.3, 0.4]), + ("metadata.user", "user123"), # alternate format + ("$.nonexistent", None), # nonexistent path + ("$.metadata.nonexistent", None), # nonexistent nested path + ], + ) + def test_extract_from_json_path(self, valid_json_data, path, expected_value): + """Test extracting values using JSON paths.""" + assert extract_from_json_path(valid_json_data, path) == expected_value + + +# # -------------------- CATEGORY 2: PARAMETRIZED VALIDATOR TESTS -------------------- + + +class TestBasicFieldValidation: + """Tests for validating non-vector field types.""" + + @pytest.mark.parametrize( + "field_type,field_name,valid_values,invalid_values", + [ + # TEXT fields + ( + "text", + "title", + [("Test Document", None), ("123", None), ("", None)], + [(123, "string"), (True, "string"), ([], "string")], + ), + # TAG fields + ( + "tag", + "test_id", + [("doc1", None), ("123", None), ("abc,def", None), ("", None)], + [ + (123, "string"), + (True, "string"), + ([], "string"), + ([1, 2, 3], "string"), + ], + ), + # NUMERIC fields + ( + "numeric", + "rating", + [(5, None), (4.5, None), (0, None), (-1.5, None), ("5.3", None)], + [("high", "number"), (True, "boolean"), ([], "number")], + ), + # GEO fields + ( + "geo", + "location", + [ + ("0,0", None), + ("90,-180", None), + ("-90,180", None), + ("37.7749,-122.4194", None), + ], + [ + ("invalid_geo", "lat,lon"), + ("37.7749", "lat,lon"), + ("37.7749,", "lat,lon"), + (",122.4194", "lat,lon"), + ("91,0", "lat,lon"), # Latitude > 90 + ("-91,0", "lat,lon"), # Latitude < -90 + ("0,181", "lat,lon"), # Longitude > 180 + ("0,-181", "lat,lon"), # Longitude < -180 + (123, "string"), + (True, "string"), + ], + ), + ], + ) + def test_basic_field_validation( + self, sample_hash_schema, field_type, field_name, valid_values, invalid_values + ): + """ + Test validation of basic field types (text, tag, numeric, geo). + + This test consolidates previously separate tests for different field types. 
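A note on the NUMERIC cases above: the string "5.3" counts as valid because the generated models validate in Pydantic's lax mode, which coerces numeric strings, while booleans are rejected only because of the mode="before" validator added earlier in this patch. A small standalone sketch of that distinction (illustrative model, not part of the change):

    from typing import Optional
    from pydantic import BaseModel, field_validator

    class NumericDemo(BaseModel):
        rating: Optional[float] = None

        @field_validator("rating", mode="before")
        @classmethod
        def _disallow_bool(cls, value):
            # bool is a subclass of int, so lax coercion would otherwise accept it
            if isinstance(value, bool):
                raise ValueError("rating cannot be boolean")
            return value

    NumericDemo.model_validate({"rating": "5.3"})    # coerced to 5.3
    # NumericDemo.model_validate({"rating": True}) raises a ValidationError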
+ """ + # Test valid values + for value, _ in valid_values: + validate_field(sample_hash_schema, field_name, value, True) + + # For GEO fields, also verify pattern + if field_type == "geo" and isinstance(value, str): + assert re.match(TypeInferrer.GEO_PATTERN.pattern, value) + + # Test invalid values + for value, error_text in invalid_values: + validate_field(sample_hash_schema, field_name, value, False, error_text) + + # For GEO fields, also verify pattern failure + if field_type == "geo" and isinstance(value, str): + assert not re.match(TypeInferrer.GEO_PATTERN.pattern, value) + + @pytest.mark.parametrize( + "test_case", + [ + # Valid cases for HASH storage (bytes) + { + "storage": StorageType.HASH, + "field_name": "embedding", + "value": b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "valid": True, + "error_text": None, + "description": "Valid bytes for HASH storage", + }, + { + "storage": StorageType.HASH, + "field_name": "int_vector", + "value": b"\x01\x02\x03", + "valid": True, + "error_text": None, + "description": "Valid bytes for HASH storage (int vector)", + }, + # Invalid cases for HASH storage (trying to use lists) + { + "storage": StorageType.HASH, + "field_name": "embedding", + "value": [0.1, 0.2, 0.3, 0.4], + "valid": False, + "error_text": "bytes", + "description": "List not valid for HASH storage", + }, + # Valid cases for JSON storage (lists) + { + "storage": StorageType.JSON, + "field_name": "embedding", + "value": [0.1, 0.2, 0.3, 0.4], + "valid": True, + "error_text": None, + "description": "Valid list for JSON storage", + }, + { + "storage": StorageType.JSON, + "field_name": "int_vector", + "value": [1, 2, 3], + "valid": True, + "error_text": None, + "description": "Valid int list for JSON storage", + }, + # Invalid cases for JSON storage (trying to use bytes) + { + "storage": StorageType.JSON, + "field_name": "embedding", + "value": b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "valid": False, + "error_text": "list", + "description": "Bytes not valid for JSON storage", + }, + # Dimension validation + { + "storage": StorageType.JSON, + "field_name": "embedding", + "value": [0.1, 0.2, 0.3], # Should be 4 dimensions + "valid": False, + "error_text": "dimensions", + "description": "Wrong dimensions for vector", + }, + # Type validation for int vectors + { + "storage": StorageType.JSON, + "field_name": "int_vector", + "value": [0.1, 0.2, 0.3], # Should be integers + "valid": False, + "error_text": "integer", + "description": "Float values in int vector", + }, + ], + ) + def test_vector_field_validation( + self, sample_hash_schema, sample_json_schema, test_case + ): + """Test validation of vector fields with storage-specific requirements.""" + # Select the appropriate schema based on storage type + schema = ( + sample_hash_schema + if test_case["storage"] == StorageType.HASH + else sample_json_schema + ) + + # Validate the field + validate_field( + schema, + test_case["field_name"], + test_case["value"], + test_case["valid"], + test_case["error_text"], + ) + + +class TestNestedJsonValidation: + """Tests for JSON path-based validation with nested structures.""" + + @pytest.mark.parametrize( + "test_case", + [ + # Complete valid data + { + "data": { + "test_id": "doc1", + "metadata": {"user": "user123", "rating": 4.5}, + "content": { + "title": "Test Document", + "embedding": [0.1, 0.2, 0.3, 0.4], + "int_vector": [1, 2, 3], + }, + }, + "expected_fields": [ + "test_id", + "user", + "title", + "rating", + "embedding", + 
"int_vector", + ], + "missing_fields": [], + }, + # Partial data - missing some fields + { + "data": { + "test_id": "doc1", + "metadata": {"user": "user123"}, + "content": {"title": "Test Document"}, + }, + "expected_fields": ["test_id", "user", "title"], + "missing_fields": ["rating", "embedding", "int_vector"], + }, + # Minimal data + { + "data": {"test_id": "doc1"}, + "expected_fields": ["test_id"], + "missing_fields": [ + "user", + "title", + "rating", + "embedding", + "int_vector", + ], + }, + ], + ) + def test_nested_json_validation(self, sample_json_schema, test_case): + """Test validating nested JSON with various data structures.""" + # Validate object + validated = validate_object(sample_json_schema, test_case["data"]) + + # Verify expected fields are present + for field in test_case["expected_fields"]: + assert field in validated + + # Verify missing fields are not present + for field in test_case["missing_fields"]: + assert field not in validated + + +class TestEndToEndValidation: + """End-to-end tests for complete object validation against schema.""" + + @pytest.mark.parametrize( + "schema_type,data,expected_result", + [ + # Valid HASH data + ( + "hash", + { + "test_id": "doc1", + "title": "Test Document", + "rating": 4.5, + "location": "37.7749,-122.4194", + "embedding": b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "int_vector": b"\x01\x02\x03", + }, + { + "success": True, + "fields": [ + "test_id", + "title", + "rating", + "location", + "embedding", + "int_vector", + ], + }, + ), + # Partial HASH data + ( + "hash", + {"test_id": "doc1", "title": "Test Document"}, + {"success": True, "fields": ["test_id", "title"]}, + ), + # Valid JSON data + ( + "json", + { + "test_id": "doc1", + "metadata": {"user": "user123", "rating": 4.5}, + "content": { + "title": "Test Document", + "embedding": [0.1, 0.2, 0.3, 0.4], + "int_vector": [1, 2, 3], + }, + }, + { + "success": True, + "fields": [ + "test_id", + "user", + "rating", + "title", + "embedding", + "int_vector", + ], + }, + ), + # Invalid HASH data - wrong vector type + ( + "hash", + { + "test_id": "doc1", + "embedding": [0.1, 0.2, 0.3, 0.4], # Should be bytes for HASH + }, + {"success": False, "error_field": "embedding"}, + ), + # Invalid JSON data - wrong vector type + ( + "json", + { + "test_id": "doc1", + "content": { + "embedding": b"\x00\x00\x00\x00" # Should be list for JSON + }, + }, + {"success": False, "error_field": "embedding"}, + ), + ], + ) + def test_end_to_end_validation( + self, sample_hash_schema, sample_json_schema, schema_type, data, expected_result + ): + """Test validating complete objects with various data scenarios.""" + # Select schema based on type + schema = sample_hash_schema if schema_type == "hash" else sample_json_schema + + if expected_result["success"]: + # Validation should succeed + validated = validate_object(schema, data) + + # Verify expected fields are present + for field in expected_result["fields"]: + assert field in validated + else: + # Validation should fail + with pytest.raises(ValueError) as exc_info: + validate_object(schema, data) + + # Error should mention the field + assert expected_result["error_field"] in str(exc_info.value) + + +# -------------------- ADDITIONAL TESTS -------------------- + + +class TestEdgeCases: + """Tests for edge cases and boundary conditions.""" + + def test_empty_object_validation(self, sample_hash_schema, sample_json_schema): + """Test validating an empty object.""" + # Empty object should validate for both storage types (all fields 
are optional) + # TODO confirm if this is indeed true + assert validate_object(sample_hash_schema, {}) == {} + assert validate_object(sample_json_schema, {}) == {} + + def test_additional_fields(self, sample_hash_schema, valid_hash_data): + """Test that additional fields not in schema are NOT ignored.""" + # Add extra field not in schema + data_with_extra = valid_hash_data.copy() + data_with_extra["extra_field"] = "some value" + + # Validation should succeed and ignore extra field + validated = validate_object(sample_hash_schema, data_with_extra) + assert "extra_field" in validated + + def test_explicit_none_fields_excluded(self, sample_hash_schema): + """Test that fields explicitly set to None are excluded.""" + # Data with explicit None values + data = { + "test_id": "doc1", + "title": "Test Document", + "rating": None, + "location": None, + } + + # Validate and check fields + validated = validate_object(sample_hash_schema, data) + assert "test_id" in validated + assert "title" in validated + assert "rating" not in validated + assert "location" not in validated From 123ee223fcefc2aa2e4f4cb4755201c7af86e7b5 Mon Sep 17 00:00:00 2001 From: Andrew Brookins Date: Mon, 31 Mar 2025 15:31:05 -0700 Subject: [PATCH 10/26] Run API tests once (#306) Run API-dependent tests once per matrix run. --- .github/workflows/test.yml | 37 +++++++++++-------------------------- 1 file changed, 11 insertions(+), 26 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 48b27711..95ed0122 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -17,8 +17,8 @@ env: POETRY_VERSION: "1.8.3" jobs: - prime-cache: - name: Prime HuggingFace Model Cache + service-tests: + name: Service Tests runs-on: ubuntu-latest env: HF_HOME: ${{ github.workspace }}/hf_cache @@ -38,10 +38,10 @@ jobs: mkdir -p ~/.huggingface echo '{"token":"${{ secrets.HF_TOKEN }}"}' > ~/.huggingface/token - - name: Set up Python 3.9 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: 3.11 cache: pip - name: Install Poetry @@ -58,7 +58,7 @@ jobs: with: credentials_json: ${{ secrets.GOOGLE_CREDENTIALS }} - - name: Run full test suite to prime cache + - name: Run full test suite and prime the HF cache env: HF_TOKEN: ${{ secrets.HF_TOKEN }} HF_HOME: ${{ github.workspace }}/hf_cache @@ -80,15 +80,16 @@ jobs: test: name: Python ${{ matrix.python-version }} - ${{ matrix.connection }} [redis ${{ matrix.redis-version }}] runs-on: ubuntu-latest - needs: prime-cache + needs: service-tests env: HF_HOME: ${{ github.workspace }}/hf_cache strategy: fail-fast: false matrix: - python-version: ['3.10', '3.11', 3.12, 3.13] - connection: ['hiredis', 'plain'] - redis-version: ['6.2.6-v9', 'latest', '8.0-M03'] + # 3.11 tests are run in the service-tests job + python-version: ["3.9", "3.10", 3.12, 3.13] + connection: ["hiredis", "plain"] + redis-version: ["6.2.6-v9", "latest", "8.0-M03"] steps: - name: Check out repository @@ -133,30 +134,14 @@ jobs: with: credentials_json: ${{ secrets.GOOGLE_CREDENTIALS }} - - name: Set HuggingFace token - run: | - mkdir -p ~/.huggingface - echo '{"token":"${{ secrets.HF_TOKEN }}"}' > ~/.huggingface/token - - name: Run tests if: matrix.connection == 'plain' && matrix.redis-version == 'latest' env: HF_HOME: ${{ github.workspace }}/hf_cache - OPENAI_API_KEY: ${{ secrets.OPENAI_KEY }} GCP_LOCATION: ${{ secrets.GCP_LOCATION }} GCP_PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }} - COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }} - MISTRAL_API_KEY: ${{ 
secrets.MISTRAL_API_KEY }} - VOYAGE_API_KEY: ${{ secrets.VOYAGE_API_KEY }} - AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} - AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }} - AZURE_OPENAI_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_DEPLOYMENT_NAME }} - OPENAI_API_VERSION: ${{ secrets.OPENAI_API_VERSION }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - HF_TOKEN: ${{ secrets.HF_TOKEN }} run: | - make test-all + make test - name: Run tests (alternate) if: matrix.connection != 'plain' || matrix.redis-version != 'latest' From 9025bfe513267f90fe7e8c6f6b6f8acb8e50c372 Mon Sep 17 00:00:00 2001 From: Robert Shelton Date: Mon, 31 Mar 2025 19:59:54 -0400 Subject: [PATCH 11/26] Add option to normalize vector distances on query (#298) This pr accomplishes 2 goals: 1. Add an option for users to easily get back a similarity value between 0 and 1 that they might expect to compare against other vector dbs. 2. Fix the current bug that `distance_threshold` is validated to be between 0 and 1 when in reality it can take values between 0 and 2. > Note: after much careful thought I believe it is best that for `0.5.0` we do **not** start enforcing all distance_thresholds between 0 and 1 and move to this option as default behavior. Ideally this metric would be consistent throughout our code and I don't love supporting this flag but I think it provides the value that is scoped for this ticket while inflicting the least amount of pain and confusion. Changes: 1. Adds the `normalize_vector_distance` flag to VectorQuery and VectorRangeQuery. Behavior: - If set to `True` it normalizes values returned from redis to a value between 0 and 1. - For cosine similarity, it applies `(2 - value)/2`. - For L2 distance, it applies normalization `(1/(1+value))`. - For IP, it does nothing and throws a warning since normalized IP is cosine by definition. - For VectorRangeQuery, if `normalize_vector_distance=True` the distance threshold is now validated to be between 0 and 1 and denormalized for execution against the database to make consistent. 2. Relaxes validation for semantic caching and routing to be between 0 and 2 fixing the current bug and aligning with how the database actually functions. --- redisvl/extensions/llmcache/semantic.py | 11 +- redisvl/extensions/router/schema.py | 2 +- redisvl/extensions/router/semantic.py | 6 +- redisvl/index/index.py | 40 ++++--- redisvl/query/__init__.py | 2 + redisvl/query/query.py | 35 +++++- redisvl/schema/fields.py | 8 ++ redisvl/utils/utils.py | 19 ++++ tests/conftest.py | 2 +- tests/integration/test_query.py | 145 ++++++++++++++++++++++-- tests/unit/test_utils.py | 19 ++++ 11 files changed, 257 insertions(+), 32 deletions(-) diff --git a/redisvl/extensions/llmcache/semantic.py b/redisvl/extensions/llmcache/semantic.py index 6e15c43b..13bab707 100644 --- a/redisvl/extensions/llmcache/semantic.py +++ b/redisvl/extensions/llmcache/semantic.py @@ -22,7 +22,7 @@ SemanticCacheIndexSchema, ) from redisvl.index import AsyncSearchIndex, SearchIndex -from redisvl.query import RangeQuery +from redisvl.query import VectorRangeQuery from redisvl.query.filter import FilterExpression from redisvl.query.query import BaseQuery from redisvl.redis.connection import RedisConnectionFactory @@ -237,9 +237,9 @@ def set_threshold(self, distance_threshold: float) -> None: Raises: ValueError: If the threshold is not between 0 and 1. 
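To make the new flag concrete, a usage sketch assuming an existing SearchIndex named `index` over a cosine vector field, as in the query tests later in this patch: with normalize_vector_distance=True the returned vector_distance values land in [0, 1], with 1 meaning most similar.

    from redisvl.query import VectorQuery

    query = VectorQuery(
        vector=[0.1, 0.1, 0.5],
        vector_field_name="user_embedding",
        return_fields=["user", "credit_score"],
        return_score=True,
        normalize_vector_distance=True,
    )
    for doc in index.query(query):              # `index` is an existing SearchIndex (assumed)
        assert 0.0 <= float(doc["vector_distance"]) <= 1.0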
""" - if not 0 <= float(distance_threshold) <= 1: + if not 0 <= float(distance_threshold) <= 2: raise ValueError( - f"Distance must be between 0 and 1, got {distance_threshold}" + f"Distance must be between 0 and 2, got {distance_threshold}" ) self._distance_threshold = float(distance_threshold) @@ -389,7 +389,7 @@ def check( vector = vector or self._vectorize_prompt(prompt) self._check_vector_dims(vector) - query = RangeQuery( + query = VectorRangeQuery( vector=vector, vector_field_name=CACHE_VECTOR_FIELD_NAME, return_fields=self.return_fields, @@ -472,7 +472,7 @@ async def acheck( vector = vector or await self._avectorize_prompt(prompt) self._check_vector_dims(vector) - query = RangeQuery( + query = VectorRangeQuery( vector=vector, vector_field_name=CACHE_VECTOR_FIELD_NAME, return_fields=self.return_fields, @@ -480,6 +480,7 @@ async def acheck( num_results=num_results, return_score=True, filter_expression=filter_expression, + normalize_vector_distance=True, ) # Search the cache! diff --git a/redisvl/extensions/router/schema.py b/redisvl/extensions/router/schema.py index 1b1d6dc8..d9b38677 100644 --- a/redisvl/extensions/router/schema.py +++ b/redisvl/extensions/router/schema.py @@ -18,7 +18,7 @@ class Route(BaseModel): """List of reference phrases for the route.""" metadata: Dict[str, Any] = Field(default={}) """Metadata associated with the route.""" - distance_threshold: Annotated[float, Field(strict=True, gt=0, le=1)] = 0.5 + distance_threshold: Annotated[float, Field(strict=True, gt=0, le=2)] = 0.5 """Distance threshold for matching the route.""" @field_validator("name") diff --git a/redisvl/extensions/router/semantic.py b/redisvl/extensions/router/semantic.py index be83b447..9ca886ab 100644 --- a/redisvl/extensions/router/semantic.py +++ b/redisvl/extensions/router/semantic.py @@ -17,7 +17,7 @@ SemanticRouterIndexSchema, ) from redisvl.index import SearchIndex -from redisvl.query import RangeQuery +from redisvl.query import VectorRangeQuery from redisvl.redis.utils import convert_bytes, hashify, make_dict from redisvl.utils.log import get_logger from redisvl.utils.utils import deprecated_argument, model_to_dict @@ -244,7 +244,7 @@ def _distance_threshold_filter(self) -> str: def _build_aggregate_request( self, - vector_range_query: RangeQuery, + vector_range_query: VectorRangeQuery, aggregation_method: DistanceAggregationMethod, max_k: int, ) -> AggregateRequest: @@ -286,7 +286,7 @@ def _get_route_matches( # therefore you might take the max_threshold and further refine from there. distance_threshold = max(route.distance_threshold for route in self.routes) - vector_range_query = RangeQuery( + vector_range_query = VectorRangeQuery( vector=vector, vector_field_name=ROUTE_VECTOR_FIELD_NAME, distance_threshold=float(distance_threshold), diff --git a/redisvl/index/index.py b/redisvl/index/index.py index 9b4cc6dd..39a57592 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -54,6 +54,7 @@ ) from redisvl.redis.utils import convert_bytes from redisvl.schema import IndexSchema, StorageType +from redisvl.schema.fields import VECTOR_NORM_MAP, VectorDistanceMetric from redisvl.utils.log import get_logger logger = get_logger(__name__) @@ -74,7 +75,7 @@ def process_results( - results: "Result", query: BaseQuery, storage_type: StorageType + results: "Result", query: BaseQuery, schema: IndexSchema ) -> List[Dict[str, Any]]: """Convert a list of search Result objects into a list of document dictionaries. 
@@ -99,11 +100,24 @@ def process_results( # Determine if unpacking JSON is needed unpack_json = ( - (storage_type == StorageType.JSON) + (schema.index.storage_type == StorageType.JSON) and isinstance(query, FilterQuery) and not query._return_fields # type: ignore ) + if (isinstance(query, BaseVectorQuery)) and query._normalize_vector_distance: + dist_metric = VectorDistanceMetric( + schema.fields[query._vector_field_name].attrs.distance_metric.upper() # type: ignore + ) + if dist_metric == VectorDistanceMetric.IP: + warnings.warn( + "Attempting to normalize inner product distance metric. Use cosine distance instead which is normalized inner product by definition." + ) + + norm_fn = VECTOR_NORM_MAP[dist_metric.value] + else: + norm_fn = None + # Process records def _process(doc: "Document") -> Dict[str, Any]: doc_dict = doc.__dict__ @@ -117,6 +131,12 @@ def _process(doc: "Document") -> Dict[str, Any]: return {"id": doc_dict.get("id"), **json_data} raise ValueError(f"Unable to parse json data from Redis {json_data}") + if norm_fn: + # convert float back to string to be consistent + doc_dict[query.DISTANCE_ID] = str( # type: ignore + norm_fn(float(doc_dict[query.DISTANCE_ID])) # type: ignore + ) + # Remove 'payload' if present doc_dict.pop("payload", None) @@ -813,11 +833,7 @@ def batch_query( ) all_parsed = [] for query, batch_results in zip(queries, results): - parsed = process_results( - batch_results, - query=query, - storage_type=self.schema.index.storage_type, - ) + parsed = process_results(batch_results, query=query, schema=self.schema) # Create separate lists of parsed results for each query # passed in to the batch_search method, so that callers can # access the results for each query individually @@ -827,9 +843,7 @@ def batch_query( def _query(self, query: BaseQuery) -> List[Dict[str, Any]]: """Execute a query and process results.""" results = self.search(query.query, query_params=query.params) - return process_results( - results, query=query, storage_type=self.schema.index.storage_type - ) + return process_results(results, query=query, schema=self.schema) def query(self, query: BaseQuery) -> List[Dict[str, Any]]: """Execute a query on the index. @@ -1471,7 +1485,7 @@ async def batch_query( parsed = process_results( batch_results, query=query, - storage_type=self.schema.index.storage_type, + schema=self.schema, ) # Create separate lists of parsed results for each query # passed in to the batch_search method, so that callers can @@ -1483,9 +1497,7 @@ async def batch_query( async def _query(self, query: BaseQuery) -> List[Dict[str, Any]]: """Asynchronously execute a query and process results.""" results = await self.search(query.query, query_params=query.params) - return process_results( - results, query=query, storage_type=self.schema.index.storage_type - ) + return process_results(results, query=query, schema=self.schema) async def query(self, query: BaseQuery) -> List[Dict[str, Any]]: """Asynchronously execute a query on the index. 
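On the warning above for IP: for unit-length vectors the inner product equals cosine similarity, so there is nothing left to normalize; that is the sense in which normalized inner product is cosine by definition. A quick self-contained check:

    import math

    def cosine_similarity(a, b):
        dot = sum(x * y for x, y in zip(a, b))
        return dot / (math.sqrt(sum(x * x for x in a)) * math.sqrt(sum(x * x for x in b)))

    a, b = [3.0, 4.0], [8.0, 6.0]
    a_unit = [x / math.sqrt(sum(v * v for v in a)) for x in a]
    b_unit = [x / math.sqrt(sum(v * v for v in b)) for x in b]
    inner_product = sum(x * y for x, y in zip(a_unit, b_unit))
    assert abs(inner_product - cosine_similarity(a, b)) < 1e-12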
diff --git a/redisvl/query/__init__.py b/redisvl/query/__init__.py index 13b3fb02..6245315f 100644 --- a/redisvl/query/__init__.py +++ b/redisvl/query/__init__.py @@ -1,6 +1,7 @@ from redisvl.query.aggregate import AggregationQuery, HybridAggregationQuery from redisvl.query.query import ( BaseQuery, + BaseVectorQuery, CountQuery, FilterQuery, RangeQuery, @@ -11,6 +12,7 @@ __all__ = [ "BaseQuery", + "BaseVectorQuery", "VectorQuery", "FilterQuery", "RangeQuery", diff --git a/redisvl/query/query.py b/redisvl/query/query.py index a9d334bc..9340b36b 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -4,7 +4,7 @@ from redis.commands.search.query import Query as RedisQuery from redisvl.query.filter import FilterExpression -from redisvl.redis.utils import array_to_buffer +from redisvl.redis.utils import array_to_buffer, denorm_cosine_distance from redisvl.utils.token_escaper import TokenEscaper @@ -178,6 +178,8 @@ class BaseVectorQuery: DISTANCE_ID: str = "vector_distance" VECTOR_PARAM: str = "vector" + _normalize_vector_distance: bool = False + class HybridPolicy(str, Enum): """Enum for valid hybrid policy options in vector queries.""" @@ -201,6 +203,7 @@ def __init__( in_order: bool = False, hybrid_policy: Optional[str] = None, batch_size: Optional[int] = None, + normalize_vector_distance: bool = False, ): """A query for running a vector search along with an optional filter expression. @@ -237,6 +240,12 @@ def __init__( of vectors to fetch in each batch. Larger values may improve performance at the cost of memory usage. Only applies when hybrid_policy="BATCHES". Defaults to None, which lets Redis auto-select an appropriate batch size. + normalize_vector_distance (bool): Redis supports 3 distance metrics: L2 (euclidean), + IP (inner product), and COSINE. By default, L2 distance returns an unbounded value. + COSINE distance returns a value between 0 and 2. IP returns a value determined by + the magnitude of the vector. Setting this flag to true converts COSINE and L2 distance + to a similarity score between 0 and 1. Note: setting this flag to true for IP will + throw a warning since by definition COSINE similarity is normalized IP. Raises: TypeError: If filter_expression is not of type redisvl.query.FilterExpression @@ -250,6 +259,7 @@ def __init__( self._num_results = num_results self._hybrid_policy: Optional[HybridPolicy] = None self._batch_size: Optional[int] = None + self._normalize_vector_distance = normalize_vector_distance self.set_filter(filter_expression) query_string = self._build_query_string() @@ -398,6 +408,7 @@ def __init__( in_order: bool = False, hybrid_policy: Optional[str] = None, batch_size: Optional[int] = None, + normalize_vector_distance: bool = False, ): """A query for running a filtered vector search based on semantic distance threshold. @@ -441,6 +452,19 @@ def __init__( of vectors to fetch in each batch. Larger values may improve performance at the cost of memory usage. Only applies when hybrid_policy="BATCHES". Defaults to None, which lets Redis auto-select an appropriate batch size. + normalize_vector_distance (bool): Redis supports 3 distance metrics: L2 (euclidean), + IP (inner product), and COSINE. By default, L2 distance returns an unbounded value. + COSINE distance returns a value between 0 and 2. IP returns a value determined by + the magnitude of the vector. Setting this flag to true converts COSINE and L2 distance + to a similarity score between 0 and 1. 
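To illustrate the L2 case just described: raw L2 distance is unbounded, and the 1 / (1 + d) mapping added later in this patch squashes it into (0, 1], with identical vectors scoring 1.

    def norm_l2_distance(d: float) -> float:   # mirrors redisvl/utils/utils.py in this patch
        return 1 / (1 + d)

    assert norm_l2_distance(0.0) == 1.0    # identical vectors
    assert norm_l2_distance(3.0) == 0.25   # larger distances shrink toward 0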
Note: setting this flag to true for IP will + throw a warning since by definition COSINE similarity is normalized IP. + + Raises: + TypeError: If filter_expression is not of type redisvl.query.FilterExpression + + Note: + Learn more about vector range queries: https://redis.io/docs/interact/search-and-query/search/vectors/#range-query + """ self._vector = vector self._vector_field_name = vector_field_name @@ -460,6 +484,7 @@ def __init__( if batch_size is not None: self.set_batch_size(batch_size) + self._normalize_vector_distance = normalize_vector_distance self.set_distance_threshold(distance_threshold) self.set_filter(filter_expression) query_string = self._build_query_string() @@ -497,6 +522,14 @@ def set_distance_threshold(self, distance_threshold: float): raise TypeError("distance_threshold must be of type float or int") if distance_threshold < 0: raise ValueError("distance_threshold must be non-negative") + if self._normalize_vector_distance: + if distance_threshold > 1: + raise ValueError( + "distance_threshold must be between 0 and 1 when normalize_vector_distance is set to True" + ) + + # User sets normalized value 0-1 denormalize for use in DB + distance_threshold = denorm_cosine_distance(distance_threshold) self._distance_threshold = distance_threshold # Reset the query string diff --git a/redisvl/schema/fields.py b/redisvl/schema/fields.py index b77188d7..1df294d8 100644 --- a/redisvl/schema/fields.py +++ b/redisvl/schema/fields.py @@ -16,6 +16,14 @@ from redis.commands.search.field import TextField as RedisTextField from redis.commands.search.field import VectorField as RedisVectorField +from redisvl.utils.utils import norm_cosine_distance, norm_l2_distance + +VECTOR_NORM_MAP = { + "COSINE": norm_cosine_distance, + "L2": norm_l2_distance, + "IP": None, # normalized inner product is cosine similarity by definition +} + class FieldTypes(str, Enum): TAG = "tag" diff --git a/redisvl/utils/utils.py b/redisvl/utils/utils.py index 4c40d41a..016c40fa 100644 --- a/redisvl/utils/utils.py +++ b/redisvl/utils/utils.py @@ -191,3 +191,22 @@ def wrapper(): return return wrapper + + +def norm_cosine_distance(value: float) -> float: + """ + Normalize the cosine distance to a similarity score between 0 and 1. + """ + return max((2 - value) / 2, 0) + + +def denorm_cosine_distance(value: float) -> float: + """Denormalize the distance threshold from [0, 1] to [0, 1] for our db.""" + return max(2 - 2 * value, 0) + + +def norm_l2_distance(value: float) -> float: + """ + Normalize the L2 distance. 
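A worked example of the two cosine helpers referenced above: a user-facing threshold t in [0, 1] is stored as the raw cosine distance 2 - 2t before being sent to Redis, and distances coming back are mapped with (2 - d) / 2, so the round trip is lossless.

    def norm_cosine_distance(d: float) -> float:     # as added in redisvl/utils/utils.py above
        return max((2 - d) / 2, 0)

    def denorm_cosine_distance(t: float) -> float:
        return max(2 - 2 * t, 0)

    assert denorm_cosine_distance(0.75) == 0.5                         # similarity 0.75 -> raw distance 0.5
    assert norm_cosine_distance(denorm_cosine_distance(0.75)) == 0.75  # round trip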
+ """ + return 1 / (1 + value) diff --git a/tests/conftest.py b/tests/conftest.py index 6dfe19a8..de6b901a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -158,7 +158,7 @@ def sample_data(sample_datetimes): "last_updated": sample_datetimes["high"].timestamp(), "credit_score": "medium", "location": "-110.0839,37.3861", - "user_embedding": [0.9, 0.9, 0.1], + "user_embedding": [-0.1, -0.1, -0.5], }, ] diff --git a/tests/integration/test_query.py b/tests/integration/test_query.py index 3f30c02e..9bee41f6 100644 --- a/tests/integration/test_query.py +++ b/tests/integration/test_query.py @@ -4,7 +4,7 @@ from redis.commands.search.result import Result from redisvl.index import SearchIndex -from redisvl.query import CountQuery, FilterQuery, RangeQuery, VectorQuery +from redisvl.query import CountQuery, FilterQuery, VectorQuery, VectorRangeQuery from redisvl.query.filter import ( FilterExpression, Geo, @@ -25,6 +25,7 @@ def vector_query(): return VectorQuery( vector=[0.1, 0.1, 0.5], vector_field_name="user_embedding", + return_score=True, return_fields=[ "user", "credit_score", @@ -53,6 +54,24 @@ def sorted_vector_query(): ) +@pytest.fixture +def normalized_vector_query(): + return VectorQuery( + vector=[0.1, 0.1, 0.5], + vector_field_name="user_embedding", + normalize_vector_distance=True, + return_score=True, + return_fields=[ + "user", + "credit_score", + "age", + "job", + "location", + "last_updated", + ], + ) + + @pytest.fixture def filter_query(): return FilterQuery( @@ -84,9 +103,21 @@ def sorted_filter_query(): ) +@pytest.fixture +def normalized_range_query(): + return VectorRangeQuery( + vector=[0.1, 0.1, 0.5], + vector_field_name="user_embedding", + normalize_vector_distance=True, + return_score=True, + return_fields=["user", "credit_score", "age", "job", "location"], + distance_threshold=0.2, + ) + + @pytest.fixture def range_query(): - return RangeQuery( + return VectorRangeQuery( vector=[0.1, 0.1, 0.5], vector_field_name="user_embedding", return_fields=["user", "credit_score", "age", "job", "location"], @@ -96,7 +127,7 @@ def range_query(): @pytest.fixture def sorted_range_query(): - return RangeQuery( + return VectorRangeQuery( vector=[0.1, 0.1, 0.5], vector_field_name="user_embedding", return_fields=["user", "credit_score", "age", "job", "location"], @@ -155,6 +186,56 @@ def hash_preprocess(item: dict) -> dict: index.delete(drop=True) +@pytest.fixture +def L2_index(sample_data, redis_url): + # construct a search index from the schema + index = SearchIndex.from_dict( + { + "index": { + "name": "L2_index", + "prefix": "L2_index", + "storage_type": "hash", + }, + "fields": [ + {"name": "credit_score", "type": "tag"}, + {"name": "job", "type": "text"}, + {"name": "age", "type": "numeric"}, + {"name": "last_updated", "type": "numeric"}, + {"name": "location", "type": "geo"}, + { + "name": "user_embedding", + "type": "vector", + "attrs": { + "dims": 3, + "distance_metric": "L2", + "algorithm": "flat", + "datatype": "float32", + }, + }, + ], + }, + redis_url=redis_url, + ) + + # create the index (no data yet) + index.create(overwrite=True) + + # Prepare and load the data + def hash_preprocess(item: dict) -> dict: + return { + **item, + "user_embedding": array_to_buffer(item["user_embedding"], "float32"), + } + + index.load(sample_data, preprocess=hash_preprocess) + + # run the test + yield index + + # clean up + index.delete(drop=True) + + def test_search_and_query(index): # *=>[KNN 7 @user_embedding $vector AS vector_distance] v = VectorQuery( @@ -191,7 +272,7 @@ def 
test_search_and_query(index): def test_range_query(index): - r = RangeQuery( + r = VectorRangeQuery( vector=[0.1, 0.1, 0.5], vector_field_name="user_embedding", return_fields=["user", "credit_score", "age", "job"], @@ -262,7 +343,7 @@ def search( assert doc.location == location # if range query, test results by distance threshold - if isinstance(query, RangeQuery): + if isinstance(query, VectorRangeQuery): for doc in results.docs: print(doc.vector_distance) assert float(doc.vector_distance) <= distance_threshold @@ -273,7 +354,7 @@ def search( # check results are in sorted order if sort: - if isinstance(query, RangeQuery): + if isinstance(query, VectorRangeQuery): assert [int(doc.age) for doc in results.docs] == [12, 14, 18, 100] else: assert [int(doc.age) for doc in results.docs] == [ @@ -289,7 +370,7 @@ def search( @pytest.fixture( params=["vector_query", "filter_query", "range_query"], - ids=["VectorQuery", "FilterQuery", "RangeQuery"], + ids=["VectorQuery", "FilterQuery", "VectorRangeQuery"], ) def query(request): return request.getfixturevalue(request.param) @@ -659,3 +740,53 @@ def test_range_query_with_filter_and_hybrid_policy(index): for result in results: assert result["credit_score"] == "high" assert float(result["vector_distance"]) <= 0.5 + + +def test_query_normalize_cosine_distance(index, normalized_vector_query): + + res = index.query(normalized_vector_query) + + for r in res: + assert 0 <= float(r["vector_distance"]) <= 1 + + +def test_query_cosine_distance_un_normalized(index, vector_query): + + res = index.query(vector_query) + + assert any(float(r["vector_distance"]) > 1 for r in res) + + +def test_query_l2_distance_un_normalized(L2_index, vector_query): + + res = L2_index.query(vector_query) + + assert any(float(r["vector_distance"]) > 1 for r in res) + + +def test_query_l2_distance_normalized(L2_index, normalized_vector_query): + + res = L2_index.query(normalized_vector_query) + + for r in res: + assert 0 <= float(r["vector_distance"]) <= 1 + + +def test_range_query_normalize_cosine_distance(index, normalized_range_query): + + res = index.query(normalized_range_query) + + for r in res: + assert 0 <= float(r["vector_distance"]) <= 1 + + +def test_range_query_normalize_bad_input(index): + with pytest.raises(ValueError): + VectorRangeQuery( + vector=[0.1, 0.1, 0.5], + vector_field_name="user_embedding", + normalize_vector_distance=True, + return_score=True, + return_fields=["user", "credit_score", "age", "job", "location"], + distance_threshold=1.2, + ) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index af0cc192..59a1abd8 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -13,11 +13,30 @@ ) from redisvl.utils.utils import ( assert_no_warnings, + denorm_cosine_distance, deprecated_argument, deprecated_function, + norm_cosine_distance, ) +def test_norm_cosine_distance(): + input = 2 + expected = 0 + assert norm_cosine_distance(input) == expected + + +def test_denorm_cosine_distance(): + input = 0 + expected = 2 + assert denorm_cosine_distance(input) == expected + + +def test_norm_denorm_cosine(): + input = 0.6 + assert input == round(denorm_cosine_distance(norm_cosine_distance(input)), 6) + + def test_even_number_of_elements(): """Test with an even number of elements""" values = ["key1", "value1", "key2", "value2"] From ae69ae965b6c14d3fa75abbf76865e5dc5f5b285 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Tue, 25 Mar 2025 19:30:13 -0700 Subject: [PATCH 12/26] adds TextQuery class --- redisvl/query/__init__.py | 1 + 
redisvl/query/query.py | 4 +++- tests/unit/test_query_types.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/redisvl/query/__init__.py b/redisvl/query/__init__.py index 6245315f..6231097f 100644 --- a/redisvl/query/__init__.py +++ b/redisvl/query/__init__.py @@ -4,6 +4,7 @@ BaseVectorQuery, CountQuery, FilterQuery, + HybridQuery, RangeQuery, TextQuery, VectorQuery, diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 9340b36b..6c12e87e 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -1,6 +1,7 @@ from enum import Enum from typing import Any, Dict, List, Optional, Set, Tuple, Union +from redis.commands.search.aggregation import AggregateRequest, Desc from redis.commands.search.query import Query as RedisQuery from redisvl.query.filter import FilterExpression @@ -790,6 +791,7 @@ def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): def tokenize_and_escape_query(self, user_query: str) -> str: """Convert a raw user query to a redis full text query joined by ORs""" + from redisvl.utils.token_escaper import TokenEscaper escaper = TokenEscaper() @@ -811,7 +813,7 @@ def _build_query_string(self) -> str: else: filter_expression = "" - text = f"(@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" + text = f"~(@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" if filter_expression and filter_expression != "*": text += f"({filter_expression})" return text diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index dea7d113..28172c3c 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -204,7 +204,7 @@ def test_text_query(): assert text_query._num_results == 10 assert ( text_query.filter - == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + == f"(~@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" ) assert isinstance(text_query, Query) assert isinstance(text_query.query, Query) From e403934bf0031b628f44c3a7b46b6326bc42ae87 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Thu, 27 Mar 2025 13:18:04 -0700 Subject: [PATCH 13/26] makes stopwords user defined in TextQuery --- poetry.lock | 417 +++++++++++++-------------------- redisvl/query/query.py | 1 - tests/unit/test_query_types.py | 19 +- 3 files changed, 175 insertions(+), 262 deletions(-) diff --git a/poetry.lock b/poetry.lock index c8696428..57d9d741 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
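For reference on the TextQuery change above: the raw text is tokenized, stopwords are dropped, RediSearch-special characters are escaped via TokenEscaper, and the remaining tokens are OR-ed inside an optional (~) clause. A small sketch with illustrative input and field name (the exact escaping set is whatever TokenEscaper defines):

    from redisvl.utils.token_escaper import TokenEscaper

    escaper = TokenEscaper()
    stopwords = {"a", "with", "the"}
    raw = "a wireless headset with good battery"
    tokens = [t.strip().strip(",").lower() for t in raw.split()]
    text_part = " | ".join(escaper.escape(t) for t in tokens if t not in stopwords)
    query_string = f"~(@description:({text_part}))"
    # -> "~(@description:(wireless | headset | good | battery))"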
[[package]] name = "accessible-pygments" @@ -7,7 +7,6 @@ description = "A collection of accessible pygments styles" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"}, {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"}, @@ -27,7 +26,7 @@ description = "Happy Eyeballs for asyncio" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1"}, {file = "aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0"}, @@ -40,7 +39,7 @@ description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d"}, {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef"}, @@ -145,7 +144,7 @@ description = "asyncio rate limiter, a leaky bucket implementation" optional = true python-versions = "<4.0,>=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "aiolimiter-1.2.1-py3-none-any.whl", hash = "sha256:d3f249e9059a20badcb56b61601a83556133655c11d1eb3dd3e04ff069e5f3c7"}, {file = "aiolimiter-1.2.1.tar.gz", hash = "sha256:e02a37ea1a855d9e832252a105420ad4d15011505512a1a1d814647451b5cca9"}, @@ -158,7 +157,7 @@ description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, @@ -174,7 +173,6 @@ description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -187,7 +185,6 @@ description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = 
"annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -200,7 +197,7 @@ description = "High level compatibility layer for multiple asynchronous event lo optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -224,7 +221,7 @@ description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = ">=3.6" groups = ["dev", "docs"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and platform_system == \"Darwin\"" +markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -237,7 +234,6 @@ description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.9.0" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c"}, {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, @@ -253,7 +249,6 @@ description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -287,7 +282,7 @@ files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "extra == \"voyageai\""} [package.extras] benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] @@ -304,7 +299,6 @@ description = "Internationalization utilities" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = 
"sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, @@ -324,7 +318,7 @@ files = [ {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [package.dependencies] soupsieve = ">1.2" @@ -344,7 +338,6 @@ description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, @@ -392,7 +385,6 @@ description = "An easy safelist-based HTML-sanitizing tool." optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, @@ -412,7 +404,7 @@ description = "The AWS SDK for Python" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" +markers = "extra == \"bedrock\"" files = [ {file = "boto3-1.36.0-py3-none-any.whl", hash = "sha256:d0ca7a58ce25701a52232cc8df9d87854824f1f2964b929305722ebc7959d5a9"}, {file = "boto3-1.36.0.tar.gz", hash = "sha256:159898f51c2997a12541c0e02d6e5a8fe2993ddb307b9478fd9a339f98b57e00"}, @@ -433,7 +425,7 @@ description = "Low-level, data-driven core of boto 3." 
optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" +markers = "extra == \"bedrock\"" files = [ {file = "botocore-1.36.26-py3-none-any.whl", hash = "sha256:4e3f19913887a58502e71ef8d696fe7eaa54de7813ff73390cd5883f837dfa6e"}, {file = "botocore-1.36.26.tar.gz", hash = "sha256:4a63bcef7ecf6146fd3a61dc4f9b33b7473b49bdaf1770e9aaca6eee0c9eab62"}, @@ -457,7 +449,7 @@ description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, @@ -470,7 +462,7 @@ description = "RFC 7049 - Concise Binary Object Representation" optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cbor-1.0.0.tar.gz", hash = "sha256:13225a262ddf5615cbd9fd55a76a0d53069d18b07d2e9f19c39e6acb8609bbb6"}, ] @@ -482,7 +474,7 @@ description = "CBOR (de)serializer with extensive tag support" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cbor2-5.6.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e16c4a87fc999b4926f5c8f6c696b0d251b4745bc40f6c5aee51d69b30b15ca2"}, {file = "cbor2-5.6.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87026fc838370d69f23ed8572939bd71cea2b3f6c8f8bb8283f573374b4d7f33"}, @@ -546,7 +538,7 @@ files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "cffi" @@ -624,7 +616,7 @@ files 
= [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {dev = "(implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", docs = "(python_version <= \"3.11\" or python_version >= \"3.12\") and implementation_name == \"pypy\""} +markers = {dev = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"", docs = "implementation_name == \"pypy\""} [package.dependencies] pycparser = "*" @@ -636,7 +628,6 @@ description = "Validate configuration and produce human readable error messages. optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -743,7 +734,7 @@ files = [ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "click" @@ -751,8 +742,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +groups = ["main", "dev", "docs"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -768,7 +758,7 @@ description = "" optional = true python-versions = "<4.0,>=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" +markers = "extra == \"cohere\"" files = [ {file = "cohere-5.13.12-py3-none-any.whl", hash = "sha256:2a043591a3e5280b47716a6b311e4c7f58e799364113a9cb81b50cd4f6c95f7e"}, {file = "cohere-5.13.12.tar.gz", hash = "sha256:97bb9ac107e580780b941acbabd3aa5e71960e6835398292c46aaa8a0a4cab88"}, @@ -796,7 +786,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or extra == \"ranx\") and platform_system == \"Windows\" and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or python_version >= \"3.10\")", dev = "(platform_system == \"Windows\" or sys_platform == \"win32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", docs = "(platform_system == \"Windows\" or sys_platform == \"win32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"} +markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\"", docs = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "coloredlogs" @@ -805,7 +795,6 @@ description = "Colored terminal output for Python's logging module" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, @@ -824,7 +813,6 @@ description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus- optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, @@ -843,7 +831,7 @@ description = "Python library for calculating contours of 2D quadrilateral grids optional = true python-versions = ">=3.10" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -918,7 +906,6 @@ description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, @@ -995,7 +982,7 @@ description = "Thin Python bindings to de/compression algorithms in Rust" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = 
"cramjam-2.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8e82464d1e00fbbb12958999b8471ba5e9f3d9711954505a0a7b378762332e6f"}, {file = "cramjam-2.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d2df8a6511cc08ef1fccd2e0c65e2ebc9f57574ec8376052a76851af5398810"}, @@ -1099,7 +1086,6 @@ description = "cryptography is a package which provides cryptographic recipes an optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, @@ -1154,7 +1140,7 @@ description = "Composable style cycles" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -1171,7 +1157,6 @@ description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "debugpy-1.8.12-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:a2ba7ffe58efeae5b8fad1165357edfe01464f9aef25e814e891ec690e7dd82a"}, {file = "debugpy-1.8.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd4149c4fc5e7d508ece083e78c17442ee13b0e69bfa6bd63003e486770f45"}, @@ -1208,7 +1193,6 @@ description = "Decorators for Humans" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, @@ -1221,7 +1205,6 @@ description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -1234,7 +1217,6 @@ description = "serialize all of Python" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, @@ -1251,7 +1233,6 @@ description = "Distribution utilities" optional = false python-versions = "*" groups = ["dev"] -markers = 
"python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -1264,7 +1245,7 @@ description = "Distro - an OS platform information API" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"openai\"" +markers = "extra == \"openai\"" files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -1277,7 +1258,6 @@ description = "A Python library for the Docker Engine API." optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -1301,7 +1281,7 @@ description = "Parse Python docstrings in reST, Google and Numpydoc format" optional = true python-versions = ">=3.6,<4.0" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, @@ -1314,7 +1294,6 @@ description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -1327,7 +1306,7 @@ description = "Like `typing._eval_type`, but lets older Python versions use newe optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" +markers = "extra == \"mistralai\"" files = [ {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, @@ -1347,7 +1326,7 @@ files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] -markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and python_version < \"3.11\"", dev = "python_version < \"3.11\"", docs = "python_version < \"3.11\""} +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and python_version <= \"3.10\"", dev = "python_version <= \"3.10\"", docs = "python_version <= 
\"3.10\""} [package.extras] test = ["pytest (>=6)"] @@ -1359,7 +1338,6 @@ description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -1375,7 +1353,6 @@ description = "Get the currently executing AST node of a frame, and other inform optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, @@ -1391,7 +1368,7 @@ description = "Fast read/write of AVRO files" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" +markers = "extra == \"cohere\"" files = [ {file = "fastavro-1.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a9fe0672d2caf0fe54e3be659b13de3cad25a267f2073d6f4b9f8862acc31eb"}, {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86dd0410770e0c99363788f0584523709d85e57bb457372ec5c285a482c17fe6"}, @@ -1439,7 +1416,6 @@ description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, @@ -1455,7 +1431,7 @@ description = "Python support for Parquet file format" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "fastparquet-2024.11.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:60ccf587410f0979105e17036df61bb60e1c2b81880dc91895cdb4ee65b71e7f"}, {file = "fastparquet-2024.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5ad5fc14b0567e700bea3cd528a0bd45a6f9371370b49de8889fb3d10a6574a"}, @@ -1521,7 +1497,7 @@ files = [ {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, ] -markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "extra == \"sentence-transformers\" or extra == \"cohere\""} [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] @@ -1535,7 +1511,7 @@ description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" 
or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:331954d002dbf5e704c7f3756028e21db07097c19722569983ba4d74df014000"}, {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d1613abd5af2f93c05867b3a3759a56e8bf97eb79b1da76b2bc10892f96ff16"}, @@ -1610,7 +1586,7 @@ description = "A list-like structure which implements collections.abc.MutableSeq optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -1713,7 +1689,7 @@ description = "File-system specification" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or python_version >= \"3.10\")" +markers = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and python_version >= \"3.10\"" files = [ {file = "fsspec-2025.2.0-py3-none-any.whl", hash = "sha256:9de2ad9ce1f85e1931858535bc882543171d197001a0a5eb2ddc04f1781ab95b"}, {file = "fsspec-2025.2.0.tar.gz", hash = "sha256:1c24b16eaa0a1798afa0337aa0db9b256718ab2a89c425371f5628d22c3b6afd"}, @@ -1754,7 +1730,7 @@ description = "Google API client core library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_api_core-2.24.1-py3-none-any.whl", hash = "sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1"}, {file = "google_api_core-2.24.1.tar.gz", hash = "sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a"}, @@ -1791,7 +1767,7 @@ description = "Google Authentication Library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, @@ -1817,7 +1793,7 @@ description = "Vertex AI API client library" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_aiplatform-1.82.0-py2.py3-none-any.whl", hash = "sha256:13368a961b2bfa8f46ccd10371bb19bd5f946d8f29c411726061ed1a140ce890"}, {file = "google_cloud_aiplatform-1.82.0.tar.gz", hash = 
"sha256:b7ea7379249cc1821aa46300a16e4b15aa64aa22665e2536b2bcb7e473d7438e"}, @@ -1870,7 +1846,7 @@ description = "Google BigQuery API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"}, {file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"}, @@ -1903,7 +1879,7 @@ description = "Google Cloud API client core library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_core-2.4.2-py2.py3-none-any.whl", hash = "sha256:7459c3e83de7cb8b9ecfec9babc910efb4314030c56dd798eaad12c426f7d180"}, {file = "google_cloud_core-2.4.2.tar.gz", hash = "sha256:a4fcb0e2fcfd4bfe963837fad6d10943754fd79c1a50097d68540b6eb3d67f35"}, @@ -1923,7 +1899,7 @@ description = "Google Cloud Resource Manager API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_resource_manager-1.14.1-py2.py3-none-any.whl", hash = "sha256:68340599f85ebf07a6e18487e460ea07cc15e132068f6b188786d01c2cf25518"}, {file = "google_cloud_resource_manager-1.14.1.tar.gz", hash = "sha256:41e9e546aaa03d5160cdfa2341dbe81ef7596706c300a89b94c429f1f3411f87"}, @@ -1946,7 +1922,7 @@ description = "Google Cloud Storage API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba"}, {file = "google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2"}, @@ -1971,7 +1947,7 @@ description = "A python wrapper of the C library 'Google CRC32C'" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"}, {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"}, @@ -2012,7 +1988,7 @@ description = "Utilities for Google Media Downloads and Resumable Uploads" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, @@ -2032,7 +2008,7 @@ description = "Common protobufs used in Google APIs" optional = true 
python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "googleapis_common_protos-1.68.0-py2.py3-none-any.whl", hash = "sha256:aaf179b2f81df26dfadac95def3b16a95064c76a5f45f07e4c68a21bb371c4ac"}, {file = "googleapis_common_protos-1.68.0.tar.gz", hash = "sha256:95d38161f4f9af0d9423eed8fb7b64ffd2568c3464eb542ff02c5bfa1953ab3c"}, @@ -2052,7 +2028,7 @@ description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "(platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -2140,7 +2116,7 @@ description = "IAM API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "grpc_google_iam_v1-0.14.0-py2.py3-none-any.whl", hash = "sha256:fb4a084b30099ba3ab07d61d620a0d4429570b13ff53bd37bac75235f98b7da4"}, {file = "grpc_google_iam_v1-0.14.0.tar.gz", hash = "sha256:c66e07aa642e39bb37950f9e7f491f70dad150ac9801263b42b2814307c2df99"}, @@ -2158,7 +2134,7 @@ description = "HTTP/2-based RPC framework" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "grpcio-1.70.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:95469d1977429f45fe7df441f586521361e235982a0b39e33841549143ae2851"}, {file = "grpcio-1.70.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:ed9718f17fbdb472e33b869c77a16d0b55e166b100ec57b016dc7de9c8d236bf"}, @@ -2227,7 +2203,7 @@ description = "Status proto mapping for gRPC" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vertexai\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"vertexai\"" files = [ {file = "grpcio_status-1.70.0-py3-none-any.whl", hash = "sha256:fc5a2ae2b9b1c1969cc49f3262676e6854aa2398ec69cb5bd6c47cd501904a85"}, {file = "grpcio_status-1.70.0.tar.gz", hash = "sha256:0e7b42816512433b18b9d764285ff029bde059e9d41f8fe10a60631bd8348101"}, @@ -2245,7 +2221,7 @@ description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -2258,7 +2234,7 @@ description = "A minimal low-level HTTP client." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -2281,7 +2257,7 @@ description = "The next generation HTTP client." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -2307,7 +2283,7 @@ description = "Consume Server-Sent Event (SSE) messages with HTTPX." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" +markers = "extra == \"cohere\"" files = [ {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, @@ -2320,7 +2296,7 @@ description = "Client library to download and publish models, datasets and other optional = true python-versions = ">=3.8.0" groups = ["main"] -markers = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"sentence-transformers\" or extra == \"cohere\"" files = [ {file = "huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5"}, {file = "huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250"}, @@ -2356,7 +2332,6 @@ description = "Human friendly output for text interfaces using Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, @@ -2372,7 +2347,6 @@ description = "File identification library for Python" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255"}, {file = "identify-2.6.8.tar.gz", hash = 
"sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc"}, @@ -2392,7 +2366,7 @@ files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] @@ -2404,7 +2378,7 @@ description = "Iterative JSON parser with standard Python iterator interfaces" optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"}, {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"}, @@ -2509,7 +2483,6 @@ description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -2526,7 +2499,7 @@ files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] -markers = {dev = "python_version < \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {dev = "python_version < \"3.10\""} [package.dependencies] zipp = ">=3.20" @@ -2547,7 +2520,6 @@ description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = 
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -2560,7 +2532,7 @@ description = "inscriptis - HTML to text converter." optional = true python-versions = "<4.0,>=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "inscriptis-2.5.3-py3-none-any.whl", hash = "sha256:25962cf5a60b1a8f33e7bfbbea08a29af82299702339b9b90c538653a5c7aa38"}, {file = "inscriptis-2.5.3.tar.gz", hash = "sha256:256043caa13e4995c71fafdeadec4ac42b57f3914cb41023ecbee8bc27ca1cc0"}, @@ -2580,7 +2552,6 @@ description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, @@ -2615,7 +2586,6 @@ description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, @@ -2654,7 +2624,7 @@ description = "provides a common interface to many IR ad-hoc ranking benchmarks, optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "ir_datasets-0.5.9-py3-none-any.whl", hash = "sha256:07c9bed07f31031f1da1bc02afc7a1077b1179a3af402d061f83bf6fb833b90a"}, {file = "ir_datasets-0.5.9.tar.gz", hash = "sha256:35c90980fbd0f4ea8fe22a1ab16d2bb6be3dc373cbd6dfab1d905f176a70e5ac"}, @@ -2683,7 +2653,6 @@ description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -2699,7 +2668,6 @@ description = "An autocompletion tool for Python that can be used for text edito optional = false python-versions = ">=3.6" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -2720,7 +2688,6 @@ description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" groups = ["main", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -2739,7 +2706,7 @@ description = "Fast iterable JSON parser." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"openai\"" +markers = "extra == \"openai\"" files = [ {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, @@ -2826,7 +2793,7 @@ description = "JSON Matching Expressions" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" +markers = "extra == \"bedrock\"" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -2839,7 +2806,6 @@ description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -2852,7 +2818,6 @@ description = "A final implementation of JSONPath for Python that aims to be sta optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"}, {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"}, @@ -2869,7 +2834,7 @@ description = "A more powerful JSONPath implementation in modern python" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" +markers = "extra == \"mistralai\"" files = [ {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, @@ -2882,7 +2847,6 @@ description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -2905,7 +2869,6 @@ description = "The JSON Schema meta-schemas and 
vocabularies, exposed as a Registry"

[poetry.lock, continued. The rest of this stretch of the lock-file diff only rewrites the `markers` environment metadata; package versions and the `files = [...]` wheel/sdist hash lists are unchanged context and are elided here.]

- The standalone marker `python_version <= "3.11" or python_version >= "3.12"` is dropped from the non-optional packages in the main, dev, and docs groups: jsonschema-specifications, jupyter-cache, jupyter-client, jupyter-core, jupyterlab-pygments, MarkupSafe, matplotlib-inline, mccabe, mdit-py-plugins, mistune, ml-dtypes, mypy, myst-nb, myst-parser, nbclient, nbconvert, nbformat, nbsphinx, nbval, nest-asyncio, nodeenv, pandocfilters, parso, pathspec, platformdirs, pluggy, ply, pre-commit, prompt-toolkit, psutil, pure-eval, pydantic, pydantic-core, pydata-sphinx-theme, pylint, pytest, pytest-asyncio, pytest-xdist, python-dotenv, python-ulid, PyYAML, pyzmq, redis, referencing, rpds-py, snowballstemmer, sphinx, sphinx-copybutton, sphinx-design, sphinx-favicon, sphinxcontrib-applehelp, sphinxcontrib-devhelp, sphinxcontrib-htmlhelp, and sphinxcontrib-jsmath. The regex entry loses its `(python_version <= "3.11" or python_version >= "3.12") and extra == "sentence-transformers"` marker line in the same hunk style.
- Packages pulled in by the `ranx` extra are simplified to `python_version >= "3.10" and extra == "ranx"`: kiwisolver, llvmlite, lxml, lz4, matplotlib, numba, orjson, pandas, pyparsing, pytz, ranx, rich, and seaborn, plus the main-group entries of markdown-it-py, mdurl, pygments, and soupsieve, whose per-group `markers = {...}` tables also drop the docs entry.
- Extras-only packages are reduced to plain `extra == "<name>"` markers: mistralai, mpmath, multidict, networkx, openai, propcache, proto-plus, protobuf, pyasn1, pyasn1-modules, rsa, s3transfer, safetensors, scikit-learn, sentence-transformers, shapely, and sniffio (which keeps `extra == "openai" or extra == "cohere" or extra == "mistralai"`).
- The nvidia-* CUDA wheels (nvidia-cublas-cu12 through nvidia-nvtx-cu12) drop the trailing Python-version clause and keep `platform_system == "Linux" and platform_machine == "x86_64" and extra == "sentence-transformers"`.
- The per-group `markers = {...}` tables of mypy-extensions, packaging, pycparser, python-dateutil, pywin32, requests, and six likewise lose the redundant Python-version clause.
- Remaining marker rewrites: numpy 1.26.4 becomes `python_version < "3.12"`; pexpect and ptyprocess become `sys_platform != "win32"`; pyreadline3 becomes `sys_platform == "win32"`; pillow's combined sentence-transformers/ranx marker is restructured; scipy 1.13.1 becomes `python_version < "3.10" and extra == "sentence-transformers"`; scipy 1.15.2 becomes `python_version >= "3.10" and (extra == "ranx" or extra == "sentence-transformers")`; setuptools becomes `python_version >= "3.12" and extra == "sentence-transformers"`; ml-dtypes' conditional numpy dependency drops its upper Python-version bounds.

@@ -6636,7 +6545,6 @@ description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional =
false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -6654,7 +6562,6 @@ description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -6672,7 +6579,6 @@ description = "Database Abstraction Library" optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, @@ -6706,16 +6612,27 @@ files = [ {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86"}, {file = "SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120"}, {file = "SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40310db77a55512a18827488e592965d3dec6a3f1e3d8af3f8243134029daca3"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3043375dd5bbcb2282894cbb12e6c559654c67b5fffb462fda815a55bf93f7"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70065dfabf023b155a9c2a18f573e47e6ca709b9e8619b2e04c54d5bcf193178"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c058b84c3b24812c859300f3b5abf300daa34df20d4d4f42e9652a4d1c48c8a4"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0398361acebb42975deb747a824b5188817d32b5c8f8aba767d51ad0cc7bb08d"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-win32.whl", hash = "sha256:a2bc4e49e8329f3283d99840c136ff2cd1a29e49b5624a46a290f04dff48e079"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-win_amd64.whl", hash = "sha256:9cd136184dd5f58892f24001cdce986f5d7e96059d004118d5410671579834a4"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:665255e7aae5f38237b3a6eae49d2358d83a59f39ac21036413fab5d1e810578"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:92f99f2623ff16bd4aaf786ccde759c1f676d39c7bf2855eb0b540e1ac4530c8"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa498d1392216fae47eaf10c593e06c34476ced9549657fca713d0d1ba5f7248"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a9afbc3909d0274d6ac8ec891e30210563b2c8bdd52ebbda14146354e7a69373"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:57dd41ba32430cbcc812041d4de8d2ca4651aeefad2626921ae2a23deb8cd6ff"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3e35d5565b35b66905b79ca4ae85840a8d40d31e0b3e2990f2e7692071b179ca"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-win32.whl", hash = "sha256:f0d3de936b192980209d7b5149e3c98977c3810d401482d05fb6d668d53c1c63"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-win_amd64.whl", hash = "sha256:3868acb639c136d98107c9096303d2d8e5da2880f7706f9f8c06a7f961961149"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07258341402a718f166618470cde0c34e4cec85a39767dce4e24f61ba5e667ea"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a826f21848632add58bef4f755a33d45105d25656a0c849f2dc2df1c71f6f50"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:386b7d136919bb66ced64d2228b92d66140de5fefb3c7df6bd79069a269a7b06"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f2951dc4b4f990a4b394d6b382accb33141d4d3bd3ef4e2b27287135d6bdd68"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8bf312ed8ac096d674c6aa9131b249093c1b37c35db6a967daa4c84746bc1bc9"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6db316d6e340f862ec059dc12e395d71f39746a20503b124edc255973977b728"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-win32.whl", hash = "sha256:c09a6ea87658695e527104cf857c70f79f14e9484605e205217aae0ec27b45fc"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-win_amd64.whl", hash = "sha256:12f5c9ed53334c3ce719155424dc5407aaa4f6cadeb09c5b627e06abb93933a1"}, {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, @@ -6758,7 +6675,6 @@ description = "Extract data from python stack frames and tracebacks for informat optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -6779,7 +6695,7 @@ description = "Computer algebra system (CAS) in Python" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"sentence-transformers\"" files = [ {file = "sympy-1.13.1-py3-none-any.whl", hash = "sha256:db36cdc64bf61b9b24578b6f7bab1ecdd2452cf008f34faa33776680c26d66f8"}, {file = "sympy-1.13.1.tar.gz", hash = "sha256:9cebf7e04ff162015ce31c9c6c9144daa34a93bd082f54fd8f12deca4f47515f"}, @@ -6798,7 +6714,6 @@ description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" groups = ["main", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -6814,7 +6729,6 @@ description = "Retry code until it succeeds" optional = false 
python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -6831,7 +6745,6 @@ description = "Python library for throwaway instances of anything that can run i optional = false python-versions = "<4.0,>=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "testcontainers-4.9.1-py3-none-any.whl", hash = "sha256:315fb94b42a383872df530aa45319745278ef0cc18b9cfcdc231a75d14afa5a0"}, {file = "testcontainers-4.9.1.tar.gz", hash = "sha256:37fe9a222549ddb788463935965b16f91809e9a8d654f437d6a59eac9b77f76f"}, @@ -6886,7 +6799,7 @@ description = "threadpoolctl" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -6899,7 +6812,6 @@ description = "A tiny CSS parser" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, @@ -6919,7 +6831,7 @@ description = "" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"sentence-transformers\" or extra == \"cohere\"" files = [ {file = "tokenizers-0.21.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2"}, {file = "tokenizers-0.21.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e"}, @@ -6953,7 +6865,7 @@ description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version < \"3.11\"" +markers = "python_version <= \"3.10\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -6996,7 +6908,6 @@ description = "Style preserving TOML library" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, @@ -7009,7 +6920,7 @@ description = "Tensors and Dynamic neural networks in Python with strong GPU acc optional = true python-versions = ">=3.9.0" groups = 
["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "torch-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6860df13d9911ac158f4c44031609700e1eba07916fff62e21e6ffa0a9e01961"}, {file = "torch-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c4f103a49830ce4c7561ef4434cc7926e5a5fe4e5eb100c19ab36ea1e2b634ab"}, @@ -7067,7 +6978,6 @@ description = "Tornado is a Python web framework and asynchronous networking lib optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, @@ -7089,7 +6999,6 @@ description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or python_version >= \"3.10\")" files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -7112,7 +7021,6 @@ description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -7129,7 +7037,7 @@ description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow optional = true python-versions = ">=3.9.0" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "transformers-4.49.0-py3-none-any.whl", hash = "sha256:6b4fded1c5fee04d384b1014495b4235a2b53c87503d7d592423c06128cbbe03"}, {file = "transformers-4.49.0.tar.gz", hash = "sha256:7e40e640b5b8dc3f48743f5f5adbdce3660c82baafbd3afdfc04143cdbd2089e"}, @@ -7200,7 +7108,7 @@ description = "Support tools for TREC CAR participants. 
Also see trec-car.cs.unh optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "trec-car-tools-2.6.tar.gz", hash = "sha256:2fce2de120224fd569b151d5bed358a4ed334e643889b9e3dfe3e5a3d15d21c8"}, {file = "trec_car_tools-2.6-py3-none-any.whl", hash = "sha256:e6f0373259e1c234222da7270ab54ca7af7a6f8d0dd32b13e158c1659d3991cf"}, @@ -7217,7 +7125,7 @@ description = "A language and compiler for custom Deep Learning operations" optional = true python-versions = "*" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "triton-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e54983cd51875855da7c68ec05c05cf8bb08df361b1d5b69e05e40b0c9bd62"}, {file = "triton-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8009a1fb093ee8546495e96731336a33fb8856a38e45bb4ab6affd6dbc3ba220"}, @@ -7238,7 +7146,6 @@ description = "Typing stubs for cffi" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_cffi-1.16.0.20241221-py3-none-any.whl", hash = "sha256:e5b76b4211d7a9185f6ab8d06a106d56c7eb80af7cdb8bfcb4186ade10fb112f"}, {file = "types_cffi-1.16.0.20241221.tar.gz", hash = "sha256:1c96649618f4b6145f58231acb976e0b448be6b847f7ab733dabe62dfbff6591"}, @@ -7254,7 +7161,6 @@ description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, @@ -7271,7 +7177,6 @@ description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -7284,7 +7189,6 @@ description = "Typing stubs for redis" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, {file = "types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, @@ -7317,7 +7221,7 @@ description = "Typing stubs for requests" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"cohere\"" files = [ {file = 
"types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -7333,7 +7237,6 @@ description = "Typing stubs for setuptools" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_setuptools-75.8.0.20250225-py3-none-any.whl", hash = "sha256:94c86b439cc60bcc68c1cda3fd2c301f007f8f9502f4fbb54c66cb5ce9b875af"}, {file = "types_setuptools-75.8.0.20250225.tar.gz", hash = "sha256:6038f7e983d55792a5f90d8fdbf5d4c186026214a16bb65dd6ae83c624ae9636"}, @@ -7346,7 +7249,6 @@ description = "Typing stubs for tabulate" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_tabulate-0.9.0.20241207-py3-none-any.whl", hash = "sha256:b8dad1343c2a8ba5861c5441370c3e35908edd234ff036d4298708a1d4cf8a85"}, {file = "types_tabulate-0.9.0.20241207.tar.gz", hash = "sha256:ac1ac174750c0a385dfd248edc6279fa328aaf4ea317915ab879a2ec47833230"}, @@ -7372,7 +7274,6 @@ description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -7385,7 +7286,7 @@ description = "Runtime inspection utilities for typing module." 
optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" +markers = "extra == \"mistralai\"" files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, @@ -7402,7 +7303,7 @@ description = "Provider of IANA time zone data" optional = true python-versions = ">=2" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, @@ -7415,7 +7316,7 @@ description = "Pure Python decompression module for .Z files compressed using Un optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "unlzw3-0.2.3-py3-none-any.whl", hash = "sha256:7760fb4f3afa1225623944c061991d89a061f7fb78665dbc4cddfdb562bb4a8b"}, {file = "unlzw3-0.2.3.tar.gz", hash = "sha256:ede5d928c792fff9da406f20334f9739693327f448f383ae1df1774627197bbb"}, @@ -7436,7 +7337,7 @@ files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] -markers = {main = "extra == \"sentence-transformers\" and python_version < \"3.10\" or extra == \"cohere\" and python_version < \"3.10\" or extra == \"vertexai\" and python_version < \"3.10\" or extra == \"voyageai\" and python_version < \"3.10\" or extra == \"bedrock\" and python_version < \"3.10\"", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} +markers = {main = "python_version < \"3.10\" and (extra == \"bedrock\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\")", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} [package.extras] brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] @@ -7454,7 +7355,7 @@ files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\" or extra == \"bedrock\") and python_version >= \"3.10\"", dev = "python_version <= \"3.11\" and 
python_version >= \"3.10\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" and python_version >= \"3.10\" or python_version >= \"3.12\""} +markers = {main = "python_version >= \"3.10\" and (extra == \"ranx\" or extra == \"bedrock\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\")", dev = "python_version >= \"3.10\"", docs = "python_version >= \"3.10\""} [package.extras] brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] @@ -7469,7 +7370,6 @@ description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, @@ -7491,7 +7391,7 @@ description = "" optional = true python-versions = "<4.0.0,>=3.7.1" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "voyageai-0.2.4-py3-none-any.whl", hash = "sha256:e3070e5c78dec89adae43231334b4637aa88933dad99b1c33d3219fdfc94dfa4"}, {file = "voyageai-0.2.4.tar.gz", hash = "sha256:b9911d8629e8a4e363291c133482fead49a3536afdf1e735f3ab3aaccd8d250d"}, @@ -7511,7 +7411,7 @@ description = "Python library to work with ARC and WARC files" optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "warc3_wet-0.2.5-py3-none-any.whl", hash = "sha256:5a9a525383fb1af159734baa75f349a7c4ec7bccd1b938681b5748515d2bf624"}, {file = "warc3_wet-0.2.5.tar.gz", hash = "sha256:15e50402dabaa1e95307f1e2a6169cfd5f137b70761d9f0b16a10aa6de227970"}, @@ -7524,7 +7424,7 @@ description = "Python library to work with ARC and WARC files, with fixes for Cl optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "warc3-wet-clueweb09-0.2.5.tar.gz", hash = "sha256:3054bfc07da525d5967df8ca3175f78fa3f78514c82643f8c81fbca96300b836"}, ] @@ -7536,7 +7436,6 @@ description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -7549,7 +7448,6 @@ description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = 
"sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -7562,7 +7460,6 @@ description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -7652,7 +7549,7 @@ description = "Yet another URL library" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -7754,7 +7651,7 @@ files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] -markers = {dev = "python_version < \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {dev = "python_version < \"3.10\""} [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] @@ -7771,7 +7668,7 @@ description = "Low-level interface to the zlib library that enables capturing th optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "zlib_state-0.1.9-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97f45d0f80e9d7070229ecb36112eea6a17dc40053449a9c613ef837d9cb66b4"}, {file = "zlib_state-0.1.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3564eaa130f2533b87b82d0e622cfb5c25acec123e7bfe38d39db9ce6349cb52"}, @@ -7816,4 +7713,4 @@ voyageai = ["voyageai"] [metadata] lock-version = "2.1" python-versions = ">=3.9,<3.14" -content-hash = "3bcbaaf402487a181810db22556d0207a555d5683984cc3af14a803974c8900e" +content-hash = "31caa5fcd726203ef2d950c07ae16690e85509f4e857696030fc091263fcd9f9" diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 6c12e87e..bdf50b6f 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -1,7 +1,6 @@ from enum import Enum from typing import Any, Dict, List, Optional, Set, Tuple, Union -from redis.commands.search.aggregation import AggregateRequest, Desc from redis.commands.search.query import Query as RedisQuery from redisvl.query.filter import FilterExpression diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index 28172c3c..d0772a20 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -204,7 +204,7 @@ def test_text_query(): assert text_query._num_results == 10 assert ( text_query.filter - == f"(~@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + == 
f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" ) assert isinstance(text_query, Query) assert isinstance(text_query.query, Query) @@ -271,6 +271,23 @@ def test_text_query(): text_query = TextQuery(text_string, text_field_name, stopwords=[1, 2, 3]) + text_query = TextQuery(text_string, text_field_name, stopwords=["the", "a", "of"]) + assert text_query.stopwords == set(["the", "a", "of"]) + assert ( + text_query.filter + == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + ) + + text_query = TextQuery(text_string, text_field_name, stopwords="german") + assert text_query.stopwords != set([]) + + with pytest.raises(ValueError): + text_query = TextQuery(text_string, text_field_name, stopwords="gibberish") + + with pytest.raises(TypeError): + text_query = TextQuery(text_string, text_field_name, stopwords=[1, 2, 3]) + + @pytest.mark.parametrize( "query", [ From 93485834a668a2f6bbe35ac037e75a62347158b2 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Tue, 1 Apr 2025 12:23:28 -0700 Subject: [PATCH 14/26] adds hybrid aggregation query and tests. modifies search index to accept aggregate queries --- redisvl/query/__init__.py | 1 - redisvl/query/query.py | 1 - 2 files changed, 2 deletions(-) diff --git a/redisvl/query/__init__.py b/redisvl/query/__init__.py index 6231097f..6245315f 100644 --- a/redisvl/query/__init__.py +++ b/redisvl/query/__init__.py @@ -4,7 +4,6 @@ BaseVectorQuery, CountQuery, FilterQuery, - HybridQuery, RangeQuery, TextQuery, VectorQuery, diff --git a/redisvl/query/query.py b/redisvl/query/query.py index bdf50b6f..06f82734 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -790,7 +790,6 @@ def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): def tokenize_and_escape_query(self, user_query: str) -> str: """Convert a raw user query to a redis full text query joined by ORs""" - from redisvl.utils.token_escaper import TokenEscaper escaper = TokenEscaper() From 10f447491545e3aeb2f8e25b9e6cc645ef06abe2 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Tue, 1 Apr 2025 17:42:08 -0700 Subject: [PATCH 15/26] cleans text and hybrid tests --- redisvl/index/index.py | 1 + redisvl/query/query.py | 5 +-- tests/unit/test_aggregation_types.py | 51 +++++++++++++--------------- tests/unit/test_query_types.py | 1 - 4 files changed, 28 insertions(+), 30 deletions(-) diff --git a/redisvl/index/index.py b/redisvl/index/index.py index 39a57592..710751f4 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -42,6 +42,7 @@ from redisvl.index.storage import BaseStorage, HashStorage, JsonStorage from redisvl.query import ( AggregationQuery, + BaseVectorQuery, BaseQuery, CountQuery, FilterQuery, diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 06f82734..a6b36a90 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -4,8 +4,9 @@ from redis.commands.search.query import Query as RedisQuery from redisvl.query.filter import FilterExpression -from redisvl.redis.utils import array_to_buffer, denorm_cosine_distance +from redisvl.redis.utils import array_to_buffer from redisvl.utils.token_escaper import TokenEscaper +from redisvl.utils.utils import denorm_cosine_distance class BaseQuery(RedisQuery): @@ -811,7 +812,7 @@ def _build_query_string(self) -> str: else: filter_expression = "" - text = f"~(@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" + text = f"(~@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" if 
filter_expression and filter_expression != "*": text += f"({filter_expression})" return text diff --git a/tests/unit/test_aggregation_types.py b/tests/unit/test_aggregation_types.py index a128ffc1..9e409d8e 100644 --- a/tests/unit/test_aggregation_types.py +++ b/tests/unit/test_aggregation_types.py @@ -13,8 +13,6 @@ # Test Cases - - def test_aggregate_hybrid_query(): text_field_name = "description" vector_field_name = "embedding" @@ -34,10 +32,6 @@ def test_aggregate_hybrid_query(): assert hybrid_query._vector == sample_vector assert hybrid_query._vector_field == vector_field_name assert hybrid_query._scorer == "BM25STD" - # assert ( - # hybrid_query.filter - # == f"(@{text_field_name}:({hybrid_query.tokenize_and_escape_query(text_string)}))" - # ) assert hybrid_query._filter_expression == None assert hybrid_query._alpha == 0.7 assert hybrid_query._num_results == 10 @@ -72,10 +66,6 @@ def test_aggregate_hybrid_query(): assert hybrid_query._vector == sample_vector assert hybrid_query._vector_field == vector_field_name assert hybrid_query._scorer == scorer - # assert ( - # hybrid_query.filter - # == f"(@{text_field_name}:({hybrid_query.tokenize_and_escape_query(text_string)}))" - # ) assert hybrid_query._filter_expression == filter_expression assert hybrid_query._alpha == 0.5 assert hybrid_query._num_results == 8 @@ -83,36 +73,43 @@ def test_aggregate_hybrid_query(): assert hybrid_query._dialect == 2 assert hybrid_query.stopwords == set() - return - # Test stopwords are configurable - hybrid_query = HybridAggregationQuery(text_string, text_field_name, stopwords=None) + hybrid_query = HybridAggregationQuery( + sample_text, text_field_name, sample_vector, vector_field_name, stopwords=None + ) assert hybrid_query.stopwords == set([]) - # assert ( - # hyrid_query.filter - # == f"(@{text_field_name}:({hyrid_query.tokenize_and_escape_query(text_string)}))" - # ) - hyrid_query = HybridAggregationQuery( - text_string, text_field_name, stopwords=["the", "a", "of"] + hybrid_query = HybridAggregationQuery( + sample_text, + text_field_name, + sample_vector, + vector_field_name, + stopwords=["the", "a", "of"], ) assert hybrid_query.stopwords == set(["the", "a", "of"]) - assert ( - hybrid_query.filter - == f"(@{text_field_name}:({hybrid_query.tokenize_and_escape_query(text_string)}))" - ) - hybrid_query = HybridAggregationQuery( - text_string, text_field_name, stopwords="german" + sample_text, + text_field_name, + sample_vector, + vector_field_name, + stopwords="german", ) assert hybrid_query.stopwords != set([]) with pytest.raises(ValueError): hybrid_query = HybridAggregationQuery( - text_string, text_field_name, stopwords="gibberish" + sample_text, + text_field_name, + sample_vector, + vector_field_name, + stopwords="gibberish", ) with pytest.raises(TypeError): hybrid_query = HybridAggregationQuery( - text_string, text_field_name, stopwords=[1, 2, 3] + sample_text, + text_field_name, + sample_vector, + vector_field_name, + stopwords=[1, 2, 3], ) diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index d0772a20..385c03b4 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -270,7 +270,6 @@ def test_text_query(): with pytest.raises(TypeError): text_query = TextQuery(text_string, text_field_name, stopwords=[1, 2, 3]) - text_query = TextQuery(text_string, text_field_name, stopwords=["the", "a", "of"]) assert text_query.stopwords == set(["the", "a", "of"]) assert ( From 35181211ff54006d3fe77449691e48e6962cf5dc Mon Sep 17 00:00:00 2001 From: Justin 
Cechmanek Date: Tue, 1 Apr 2025 18:34:15 -0700 Subject: [PATCH 16/26] updates lock file --- poetry.lock | 417 ++++++++++++++++++++-------------------------------- 1 file changed, 157 insertions(+), 260 deletions(-) diff --git a/poetry.lock b/poetry.lock index c8696428..57d9d741 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "accessible-pygments" @@ -7,7 +7,6 @@ description = "A collection of accessible pygments styles" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7"}, {file = "accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872"}, @@ -27,7 +26,7 @@ description = "Happy Eyeballs for asyncio" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "aiohappyeyeballs-2.4.6-py3-none-any.whl", hash = "sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1"}, {file = "aiohappyeyeballs-2.4.6.tar.gz", hash = "sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0"}, @@ -40,7 +39,7 @@ description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d"}, {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef"}, @@ -145,7 +144,7 @@ description = "asyncio rate limiter, a leaky bucket implementation" optional = true python-versions = "<4.0,>=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "aiolimiter-1.2.1-py3-none-any.whl", hash = "sha256:d3f249e9059a20badcb56b61601a83556133655c11d1eb3dd3e04ff069e5f3c7"}, {file = "aiolimiter-1.2.1.tar.gz", hash = "sha256:e02a37ea1a855d9e832252a105420ad4d15011505512a1a1d814647451b5cca9"}, @@ -158,7 +157,7 @@ description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, @@ -174,7 +173,6 @@ description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = 
"sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -187,7 +185,6 @@ description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -200,7 +197,7 @@ description = "High level compatibility layer for multiple asynchronous event lo optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, @@ -224,7 +221,7 @@ description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = ">=3.6" groups = ["dev", "docs"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and platform_system == \"Darwin\"" +markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -237,7 +234,6 @@ description = "An abstract syntax tree for Python with inference support." 
optional = false python-versions = ">=3.9.0" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c"}, {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, @@ -253,7 +249,6 @@ description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -287,7 +282,7 @@ files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "extra == \"voyageai\""} [package.extras] benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] @@ -304,7 +299,6 @@ description = "Internationalization utilities" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, @@ -324,7 +318,7 @@ files = [ {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [package.dependencies] soupsieve = ">1.2" @@ -344,7 +338,6 @@ description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, @@ -392,7 +385,6 @@ description = "An easy safelist-based HTML-sanitizing tool." 
optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, @@ -412,7 +404,7 @@ description = "The AWS SDK for Python" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" +markers = "extra == \"bedrock\"" files = [ {file = "boto3-1.36.0-py3-none-any.whl", hash = "sha256:d0ca7a58ce25701a52232cc8df9d87854824f1f2964b929305722ebc7959d5a9"}, {file = "boto3-1.36.0.tar.gz", hash = "sha256:159898f51c2997a12541c0e02d6e5a8fe2993ddb307b9478fd9a339f98b57e00"}, @@ -433,7 +425,7 @@ description = "Low-level, data-driven core of boto 3." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" +markers = "extra == \"bedrock\"" files = [ {file = "botocore-1.36.26-py3-none-any.whl", hash = "sha256:4e3f19913887a58502e71ef8d696fe7eaa54de7813ff73390cd5883f837dfa6e"}, {file = "botocore-1.36.26.tar.gz", hash = "sha256:4a63bcef7ecf6146fd3a61dc4f9b33b7473b49bdaf1770e9aaca6eee0c9eab62"}, @@ -457,7 +449,7 @@ description = "Extensible memoizing collections and decorators" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, @@ -470,7 +462,7 @@ description = "RFC 7049 - Concise Binary Object Representation" optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cbor-1.0.0.tar.gz", hash = "sha256:13225a262ddf5615cbd9fd55a76a0d53069d18b07d2e9f19c39e6acb8609bbb6"}, ] @@ -482,7 +474,7 @@ description = "CBOR (de)serializer with extensive tag support" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cbor2-5.6.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e16c4a87fc999b4926f5c8f6c696b0d251b4745bc40f6c5aee51d69b30b15ca2"}, {file = "cbor2-5.6.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87026fc838370d69f23ed8572939bd71cea2b3f6c8f8bb8283f573374b4d7f33"}, @@ -546,7 +538,7 @@ files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and 
(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "cffi" @@ -624,7 +616,7 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] -markers = {dev = "(implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", docs = "(python_version <= \"3.11\" or python_version >= \"3.12\") and implementation_name == \"pypy\""} +markers = {dev = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"", docs = "implementation_name == \"pypy\""} [package.dependencies] pycparser = "*" @@ -636,7 +628,6 @@ description = "Validate configuration and produce human readable error messages. optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -743,7 +734,7 @@ files = [ {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "click" @@ -751,8 +742,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" -groups = ["dev", "docs"] -markers = 
"python_version <= \"3.11\" or python_version >= \"3.12\"" +groups = ["main", "dev", "docs"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -768,7 +758,7 @@ description = "" optional = true python-versions = "<4.0,>=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" +markers = "extra == \"cohere\"" files = [ {file = "cohere-5.13.12-py3-none-any.whl", hash = "sha256:2a043591a3e5280b47716a6b311e4c7f58e799364113a9cb81b50cd4f6c95f7e"}, {file = "cohere-5.13.12.tar.gz", hash = "sha256:97bb9ac107e580780b941acbabd3aa5e71960e6835398292c46aaa8a0a4cab88"}, @@ -796,7 +786,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or extra == \"ranx\") and platform_system == \"Windows\" and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or python_version >= \"3.10\")", dev = "(platform_system == \"Windows\" or sys_platform == \"win32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", docs = "(platform_system == \"Windows\" or sys_platform == \"win32\") and (python_version <= \"3.11\" or python_version >= \"3.12\")"} +markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\"", docs = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "coloredlogs" @@ -805,7 +795,6 @@ description = "Colored terminal output for Python's logging module" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, @@ -824,7 +813,6 @@ description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus- optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, @@ -843,7 +831,7 @@ description = "Python library for calculating contours of 2D quadrilateral grids optional = true python-versions = ">=3.10" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab"}, {file = "contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124"}, @@ -918,7 +906,6 @@ description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, @@ -995,7 +982,7 @@ description = "Thin Python bindings to de/compression algorithms in Rust" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cramjam-2.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8e82464d1e00fbbb12958999b8471ba5e9f3d9711954505a0a7b378762332e6f"}, {file = "cramjam-2.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d2df8a6511cc08ef1fccd2e0c65e2ebc9f57574ec8376052a76851af5398810"}, @@ -1099,7 +1086,6 @@ description = "cryptography is a package which provides cryptographic recipes an optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, @@ -1154,7 +1140,7 @@ description = "Composable style cycles" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -1171,7 +1157,6 @@ description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "debugpy-1.8.12-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:a2ba7ffe58efeae5b8fad1165357edfe01464f9aef25e814e891ec690e7dd82a"}, {file = "debugpy-1.8.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd4149c4fc5e7d508ece083e78c17442ee13b0e69bfa6bd63003e486770f45"}, @@ -1208,7 +1193,6 @@ description = "Decorators for Humans" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, @@ -1221,7 +1205,6 @@ description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, 
!=3.2.*, !=3.3.*, !=3.4.*" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -1234,7 +1217,6 @@ description = "serialize all of Python" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, @@ -1251,7 +1233,6 @@ description = "Distribution utilities" optional = false python-versions = "*" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -1264,7 +1245,7 @@ description = "Distro - an OS platform information API" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"openai\"" +markers = "extra == \"openai\"" files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -1277,7 +1258,6 @@ description = "A Python library for the Docker Engine API." 
optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, @@ -1301,7 +1281,7 @@ description = "Parse Python docstrings in reST, Google and Numpydoc format" optional = true python-versions = ">=3.6,<4.0" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, @@ -1314,7 +1294,6 @@ description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -1327,7 +1306,7 @@ description = "Like `typing._eval_type`, but lets older Python versions use newe optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" +markers = "extra == \"mistralai\"" files = [ {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, @@ -1347,7 +1326,7 @@ files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] -markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and python_version < \"3.11\"", dev = "python_version < \"3.11\"", docs = "python_version < \"3.11\""} +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and python_version <= \"3.10\"", dev = "python_version <= \"3.10\"", docs = "python_version <= \"3.10\""} [package.extras] test = ["pytest (>=6)"] @@ -1359,7 +1338,6 @@ description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -1375,7 +1353,6 @@ description = "Get the currently executing AST node of a frame, and other inform optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, {file = 
"executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, @@ -1391,7 +1368,7 @@ description = "Fast read/write of AVRO files" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" +markers = "extra == \"cohere\"" files = [ {file = "fastavro-1.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a9fe0672d2caf0fe54e3be659b13de3cad25a267f2073d6f4b9f8862acc31eb"}, {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86dd0410770e0c99363788f0584523709d85e57bb457372ec5c285a482c17fe6"}, @@ -1439,7 +1416,6 @@ description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, @@ -1455,7 +1431,7 @@ description = "Python support for Parquet file format" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "fastparquet-2024.11.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:60ccf587410f0979105e17036df61bb60e1c2b81880dc91895cdb4ee65b71e7f"}, {file = "fastparquet-2024.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a5ad5fc14b0567e700bea3cd528a0bd45a6f9371370b49de8889fb3d10a6574a"}, @@ -1521,7 +1497,7 @@ files = [ {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, ] -markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "extra == \"sentence-transformers\" or extra == \"cohere\""} [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] @@ -1535,7 +1511,7 @@ description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:331954d002dbf5e704c7f3756028e21db07097c19722569983ba4d74df014000"}, {file = "fonttools-4.56.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d1613abd5af2f93c05867b3a3759a56e8bf97eb79b1da76b2bc10892f96ff16"}, @@ -1610,7 +1586,7 @@ description = "A list-like structure which implements collections.abc.MutableSeq optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, @@ -1713,7 +1689,7 @@ description = "File-system specification" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or python_version >= \"3.10\")" +markers = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and python_version >= \"3.10\"" files = [ {file = "fsspec-2025.2.0-py3-none-any.whl", hash = "sha256:9de2ad9ce1f85e1931858535bc882543171d197001a0a5eb2ddc04f1781ab95b"}, {file = "fsspec-2025.2.0.tar.gz", hash = "sha256:1c24b16eaa0a1798afa0337aa0db9b256718ab2a89c425371f5628d22c3b6afd"}, @@ -1754,7 +1730,7 @@ description = "Google API client core library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_api_core-2.24.1-py3-none-any.whl", hash = "sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1"}, {file = "google_api_core-2.24.1.tar.gz", hash = "sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a"}, @@ -1791,7 +1767,7 @@ description = "Google Authentication Library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, @@ -1817,7 +1793,7 @@ description = "Vertex AI API client library" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_aiplatform-1.82.0-py2.py3-none-any.whl", hash = "sha256:13368a961b2bfa8f46ccd10371bb19bd5f946d8f29c411726061ed1a140ce890"}, {file = "google_cloud_aiplatform-1.82.0.tar.gz", hash = "sha256:b7ea7379249cc1821aa46300a16e4b15aa64aa22665e2536b2bcb7e473d7438e"}, @@ -1870,7 +1846,7 @@ description = "Google BigQuery API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"}, {file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"}, @@ -1903,7 +1879,7 @@ description = "Google Cloud API client core library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ 
{file = "google_cloud_core-2.4.2-py2.py3-none-any.whl", hash = "sha256:7459c3e83de7cb8b9ecfec9babc910efb4314030c56dd798eaad12c426f7d180"}, {file = "google_cloud_core-2.4.2.tar.gz", hash = "sha256:a4fcb0e2fcfd4bfe963837fad6d10943754fd79c1a50097d68540b6eb3d67f35"}, @@ -1923,7 +1899,7 @@ description = "Google Cloud Resource Manager API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_resource_manager-1.14.1-py2.py3-none-any.whl", hash = "sha256:68340599f85ebf07a6e18487e460ea07cc15e132068f6b188786d01c2cf25518"}, {file = "google_cloud_resource_manager-1.14.1.tar.gz", hash = "sha256:41e9e546aaa03d5160cdfa2341dbe81ef7596706c300a89b94c429f1f3411f87"}, @@ -1946,7 +1922,7 @@ description = "Google Cloud Storage API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba"}, {file = "google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2"}, @@ -1971,7 +1947,7 @@ description = "A python wrapper of the C library 'Google CRC32C'" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"}, {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"}, @@ -2012,7 +1988,7 @@ description = "Utilities for Google Media Downloads and Resumable Uploads" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, @@ -2032,7 +2008,7 @@ description = "Common protobufs used in Google APIs" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "googleapis_common_protos-1.68.0-py2.py3-none-any.whl", hash = "sha256:aaf179b2f81df26dfadac95def3b16a95064c76a5f45f07e4c68a21bb371c4ac"}, {file = "googleapis_common_protos-1.68.0.tar.gz", hash = "sha256:95d38161f4f9af0d9423eed8fb7b64ffd2568c3464eb542ff02c5bfa1953ab3c"}, @@ -2052,7 +2028,7 @@ description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "(platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") and (python_version <= \"3.11\" or 
python_version >= \"3.12\")" +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" files = [ {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, @@ -2140,7 +2116,7 @@ description = "IAM API client library" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "grpc_google_iam_v1-0.14.0-py2.py3-none-any.whl", hash = "sha256:fb4a084b30099ba3ab07d61d620a0d4429570b13ff53bd37bac75235f98b7da4"}, {file = "grpc_google_iam_v1-0.14.0.tar.gz", hash = "sha256:c66e07aa642e39bb37950f9e7f491f70dad150ac9801263b42b2814307c2df99"}, @@ -2158,7 +2134,7 @@ description = "HTTP/2-based RPC framework" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "grpcio-1.70.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:95469d1977429f45fe7df441f586521361e235982a0b39e33841549143ae2851"}, {file = "grpcio-1.70.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:ed9718f17fbdb472e33b869c77a16d0b55e166b100ec57b016dc7de9c8d236bf"}, @@ -2227,7 +2203,7 @@ description = "Status proto mapping for gRPC" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"vertexai\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"vertexai\"" files = [ {file = "grpcio_status-1.70.0-py3-none-any.whl", hash = "sha256:fc5a2ae2b9b1c1969cc49f3262676e6854aa2398ec69cb5bd6c47cd501904a85"}, {file = "grpcio_status-1.70.0.tar.gz", hash = "sha256:0e7b42816512433b18b9d764285ff029bde059e9d41f8fe10a60631bd8348101"}, @@ -2245,7 +2221,7 @@ description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -2258,7 +2234,7 @@ description = "A minimal low-level HTTP client." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -2281,7 +2257,7 @@ description = "The next generation HTTP client." 
optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -2307,7 +2283,7 @@ description = "Consume Server-Sent Event (SSE) messages with HTTPX." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\"" +markers = "extra == \"cohere\"" files = [ {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, @@ -2320,7 +2296,7 @@ description = "Client library to download and publish models, datasets and other optional = true python-versions = ">=3.8.0" groups = ["main"] -markers = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"sentence-transformers\" or extra == \"cohere\"" files = [ {file = "huggingface_hub-0.29.1-py3-none-any.whl", hash = "sha256:352f69caf16566c7b6de84b54a822f6238e17ddd8ae3da4f8f2272aea5b198d5"}, {file = "huggingface_hub-0.29.1.tar.gz", hash = "sha256:9524eae42077b8ff4fc459ceb7a514eca1c1232b775276b009709fe2a084f250"}, @@ -2356,7 +2332,6 @@ description = "Human friendly output for text interfaces using Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, @@ -2372,7 +2347,6 @@ description = "File identification library for Python" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "identify-2.6.8-py2.py3-none-any.whl", hash = "sha256:83657f0f766a3c8d0eaea16d4ef42494b39b34629a4b3192a9d020d349b3e255"}, {file = "identify-2.6.8.tar.gz", hash = "sha256:61491417ea2c0c5c670484fd8abbb34de34cdae1e5f39a73ee65e48e4bb663fc"}, @@ -2392,7 +2366,7 @@ files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= 
\"3.12\""} +markers = {main = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\" or extra == \"sentence-transformers\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] @@ -2404,7 +2378,7 @@ description = "Iterative JSON parser with standard Python iterator interfaces" optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"}, {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"}, @@ -2509,7 +2483,6 @@ description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -2526,7 +2499,7 @@ files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] -markers = {dev = "python_version < \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {dev = "python_version < \"3.10\""} [package.dependencies] zipp = ">=3.20" @@ -2547,7 +2520,6 @@ description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -2560,7 +2532,7 @@ description = "inscriptis - HTML to text converter." 
optional = true python-versions = "<4.0,>=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "inscriptis-2.5.3-py3-none-any.whl", hash = "sha256:25962cf5a60b1a8f33e7bfbbea08a29af82299702339b9b90c538653a5c7aa38"}, {file = "inscriptis-2.5.3.tar.gz", hash = "sha256:256043caa13e4995c71fafdeadec4ac42b57f3914cb41023ecbee8bc27ca1cc0"}, @@ -2580,7 +2552,6 @@ description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, @@ -2615,7 +2586,6 @@ description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, @@ -2654,7 +2624,7 @@ description = "provides a common interface to many IR ad-hoc ranking benchmarks, optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "ir_datasets-0.5.9-py3-none-any.whl", hash = "sha256:07c9bed07f31031f1da1bc02afc7a1077b1179a3af402d061f83bf6fb833b90a"}, {file = "ir_datasets-0.5.9.tar.gz", hash = "sha256:35c90980fbd0f4ea8fe22a1ab16d2bb6be3dc373cbd6dfab1d905f176a70e5ac"}, @@ -2683,7 +2653,6 @@ description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -2699,7 +2668,6 @@ description = "An autocompletion tool for Python that can be used for text edito optional = false python-versions = ">=3.6" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -2720,7 +2688,6 @@ description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" groups = ["main", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -2739,7 +2706,7 @@ description = "Fast iterable JSON parser." 
optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"openai\"" +markers = "extra == \"openai\"" files = [ {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, @@ -2826,7 +2793,7 @@ description = "JSON Matching Expressions" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" +markers = "extra == \"bedrock\"" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -2839,7 +2806,6 @@ description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -2852,7 +2818,6 @@ description = "A final implementation of JSONPath for Python that aims to be sta optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"}, {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"}, @@ -2869,7 +2834,7 @@ description = "A more powerful JSONPath implementation in modern python" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" +markers = "extra == \"mistralai\"" files = [ {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, @@ -2882,7 +2847,6 @@ description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -2905,7 +2869,6 @@ description = "The JSON Schema meta-schemas and vocabularies, exposed as a Regis optional = false python-versions = ">=3.9" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = 
"sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, @@ -2921,7 +2884,6 @@ description = "A defined interface for working with a cache of jupyter notebooks optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jupyter_cache-1.0.1-py3-none-any.whl", hash = "sha256:9c3cafd825ba7da8b5830485343091143dff903e4d8c69db9349b728b140abf6"}, {file = "jupyter_cache-1.0.1.tar.gz", hash = "sha256:16e808eb19e3fb67a223db906e131ea6e01f03aa27f49a7214ce6a5fec186fb9"}, @@ -2950,7 +2912,6 @@ description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, @@ -2975,7 +2936,6 @@ description = "Jupyter core package. A base package on which Jupyter projects re optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, @@ -2997,7 +2957,6 @@ description = "Pygments theme using JupyterLab CSS variables" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, @@ -3010,7 +2969,7 @@ description = "A fast implementation of the Cassowary constraint solver" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db"}, {file = "kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b"}, @@ -3101,7 +3060,7 @@ description = "lightweight wrapper around basic LLVM functionality" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "llvmlite-0.44.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9fbadbfba8422123bab5535b293da1cf72f9f478a65645ecd73e781f962ca614"}, {file = "llvmlite-0.44.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cccf8eb28f24840f2689fb1a45f9c0f7e582dd24e088dcf96e424834af11f791"}, @@ -3133,7 +3092,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or 
python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b"}, {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b"}, @@ -3289,7 +3248,7 @@ description = "LZ4 Bindings for Python" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "lz4-4.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1ebf23ffd36b32b980f720a81990fcfdeadacafe7498fbeff7a8e058259d4e58"}, {file = "lz4-4.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8fe3caea61427057a9e3697c69b2403510fdccfca4483520d02b98ffae74531e"}, @@ -3340,7 +3299,7 @@ files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [package.dependencies] mdurl = ">=0.1,<1.0" @@ -3362,7 +3321,6 @@ description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" groups = ["main", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -3434,7 +3392,7 @@ description = "Python plotting package" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "matplotlib-3.10.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ff2ae14910be903f4a24afdbb6d7d3a6c44da210fc7d42790b87aeac92238a16"}, {file = "matplotlib-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0721a3fd3d5756ed593220a8b86808a36c5031fce489adb5b31ee6dbb47dd5b2"}, @@ -3493,7 +3451,6 @@ description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, @@ -3509,7 +3466,6 @@ description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -3522,7 +3478,6 @@ description = "Collection of plugins for markdown-it-py" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, @@ -3547,7 +3502,7 @@ files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [[package]] name = "mistralai" @@ -3556,7 +3511,7 @@ description = "Python Client SDK for the Mistral AI API." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" +markers = "extra == \"mistralai\"" files = [ {file = "mistralai-1.5.0-py3-none-any.whl", hash = "sha256:9372537719f87bd6f9feef4747d0bf1f4fbe971f8c02945ca4b4bf3c94571c97"}, {file = "mistralai-1.5.0.tar.gz", hash = "sha256:fd94bc93bc25aad9c6dd8005b1a0bc4ba1250c6b3fbf855a49936989cc6e5c0d"}, @@ -3580,7 +3535,6 @@ description = "A sane and fast Markdown parser with useful plugins and renderers optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mistune-3.1.2-py3-none-any.whl", hash = "sha256:4b47731332315cdca99e0ded46fc0004001c1299ff773dfb48fbe1fd226de319"}, {file = "mistune-3.1.2.tar.gz", hash = "sha256:733bf018ba007e8b5f2d3a9eb624034f6ee26c4ea769a98ec533ee111d504dff"}, @@ -3596,7 +3550,6 @@ description = "" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ml_dtypes-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1fe8b5b5e70cd67211db94b05cfd58dace592f24489b038dc6f9fe347d2e07d5"}, {file = "ml_dtypes-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c09a6d11d8475c2a9fd2bc0695628aec105f97cab3b3a3fb7c9660348ff7d24"}, @@ -3619,9 +3572,9 @@ files = [ [package.dependencies] numpy = [ - {version = ">1.20", markers = "python_version < \"3.10\""}, - {version = ">=1.21.2", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, - {version = ">=1.23.3", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, + {version = ">1.20"}, + {version = ">=1.21.2", markers = "python_version >= \"3.10\""}, + {version = ">=1.23.3", markers = "python_version >= \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] @@ -3635,7 +3588,7 @@ description = "Python library for arbitrary-precision floating-point arithmetic" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= 
\"3.12\")" +markers = "extra == \"sentence-transformers\"" files = [ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, @@ -3654,7 +3607,7 @@ description = "multidict implementation" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -3760,7 +3713,6 @@ description = "Optional static typing for Python" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, @@ -3813,7 +3765,7 @@ files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "extra == \"mistralai\""} [[package]] name = "myst-nb" @@ -3822,7 +3774,6 @@ description = "A Jupyter Notebook Sphinx reader built on top of the MyST markdow optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "myst_nb-1.2.0-py3-none-any.whl", hash = "sha256:0e09909877848c0cf45e1aecee97481512efa29a0c4caa37870a03bba11c56c1"}, {file = "myst_nb-1.2.0.tar.gz", hash = "sha256:af459ec753b341952182b45b0a80b4776cebf80c9ee6aaca2a3f4027b440c9de"}, @@ -3852,7 +3803,6 @@ description = "An extended [CommonMark](https://spec.commonmark.org/) compliant optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, @@ -3880,7 +3830,6 @@ description = "A client library for executing notebooks. Formerly nbconvert's Ex optional = false python-versions = ">=3.9.0" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, @@ -3904,7 +3853,6 @@ description = "Converting Jupyter Notebooks (.ipynb files) to other formats. 
Ou optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, @@ -3943,7 +3891,6 @@ description = "The Jupyter Notebook format" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, @@ -3966,7 +3913,6 @@ description = "Jupyter Notebook Tools for Sphinx" optional = false python-versions = ">=3.6" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbsphinx-0.9.6-py3-none-any.whl", hash = "sha256:336b0b557945a7678ec7449b16449f854bc852a435bb53b8a72e6b5dc740d992"}, {file = "nbsphinx-0.9.6.tar.gz", hash = "sha256:c2b28a2d702f1159a95b843831798e86e60a17fc647b9bff9ba1585355de54e3"}, @@ -3987,7 +3933,6 @@ description = "A py.test plugin to validate Jupyter notebooks" optional = false python-versions = ">=3.7, <4" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nbval-0.11.0-py2.py3-none-any.whl", hash = "sha256:307aecc866c9a1e8a13bb5bbb008a702bacfda2394dff6fe504a3108a58042a0"}, {file = "nbval-0.11.0.tar.gz", hash = "sha256:77c95797607b0a968babd2597ee3494102d25c3ad37435debbdac0e46e379094"}, @@ -4007,7 +3952,6 @@ description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -4020,7 +3964,7 @@ description = "Python package for creating and manipulating graphs and networks" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, @@ -4066,7 +4010,6 @@ description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -4079,7 +4022,7 @@ description = "compiling Python code using LLVM" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra 
== \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "numba-0.61.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9cab9783a700fa428b1a54d65295122bc03b3de1d01fb819a6b9dbbddfdb8c43"}, {file = "numba-0.61.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46c5ae094fb3706f5adf9021bfb7fc11e44818d61afee695cdee4eadfed45e98"}, @@ -4115,7 +4058,7 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\"" +markers = "python_version < \"3.12\"" files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -4228,7 +4171,7 @@ description = "CUBLAS native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0f8aa1706812e00b9f19dfe0cdb3999b092ccb8ca168c0db5b8ea712456fd9b3"}, {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl", hash = "sha256:2fc8da60df463fdefa81e323eef2e36489e1c94335b5358bcb38360adf75ac9b"}, @@ -4242,7 +4185,7 @@ description = "CUDA profiling tools runtime libs." optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:79279b35cf6f91da114182a5ce1864997fd52294a87a16179ce275773799458a"}, {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9dec60f5ac126f7bb551c055072b69d85392b13311fcc1bcda2202d172df30fb"}, @@ -4256,7 +4199,7 @@ description = "NVRTC native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0eedf14185e04b76aa05b1fea04133e59f465b6f960c0cbf4e37c3cb6b0ea198"}, {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a178759ebb095827bd30ef56598ec182b85547f1508941a3d560eb7ea1fbf338"}, @@ -4270,7 +4213,7 @@ description = "CUDA Runtime native Libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == 
\"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:961fe0e2e716a2a1d967aab7caee97512f71767f852f67432d572e36cb3a11f3"}, {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:64403288fa2136ee8e467cdc9c9427e0434110899d07c779f25b5c068934faa5"}, @@ -4284,7 +4227,7 @@ description = "cuDNN runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"}, {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"}, @@ -4300,7 +4243,7 @@ description = "CUFFT native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5dad8008fc7f92f5ddfa2101430917ce2ffacd86824914c82e28990ad7f00399"}, {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f083fc24912aa410be21fa16d157fed2055dab1cc4b6934a0e03cba69eb242b9"}, @@ -4317,7 +4260,7 @@ description = "CURAND native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1f173f09e3e3c76ab084aba0de819c49e56614feae5c12f69883f4ae9bb5fad9"}, {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a88f583d4e0bb643c49743469964103aa59f7f708d862c3ddb0fc07f851e3b8b"}, @@ -4331,7 +4274,7 @@ description = "CUDA solver native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d338f155f174f90724bbde3758b7ac375a70ce8e706d70b018dd3375545fc84e"}, {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:19e33fa442bcfd085b3086c4ebf7e8debc07cfe01e11513cc6d332fd918ac260"}, @@ -4350,7 +4293,7 @@ description = "CUSPARSE native runtime libraries" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == 
\"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9d32f62896231ebe0480efd8a7f702e143c98cfaa0e8a76df3386c1ba2b54df3"}, {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1"}, @@ -4367,7 +4310,7 @@ description = "NVIDIA cuSPARSELt" optional = true python-versions = "*" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:067a7f6d03ea0d4841c85f0c6f1991c5dda98211f6302cb83a4ab234ee95bef8"}, {file = "nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:df2c24502fd76ebafe7457dbc4716b2fec071aabaed4fb7691a201cde03704d9"}, @@ -4381,7 +4324,7 @@ description = "NVIDIA Collective Communication Library (NCCL) Runtime" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_nccl_cu12-2.21.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8579076d30a8c24988834445f8d633c697d42397e92ffc3f63fa26766d25e0a0"}, ] @@ -4393,7 +4336,7 @@ description = "Nvidia JIT LTO Library" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83"}, {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"}, @@ -4407,7 +4350,7 @@ description = "NVIDIA Tools Extension" optional = true python-versions = ">=3" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7959ad635db13edf4fc65c06a6e9f9e55fc2f92596db928d169c0bb031e88ef3"}, {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:781e950d9b9f60d8241ccea575b32f5105a5baf4c2351cab5256a24869f12a1a"}, @@ -4421,7 +4364,7 @@ description = "The official Python library for the openai API" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= 
\"3.11\" or python_version >= \"3.12\") and extra == \"openai\"" +markers = "extra == \"openai\"" files = [ {file = "openai-1.65.1-py3-none-any.whl", hash = "sha256:396652a6452dd42791b3ad8a3aab09b1feb7c1c4550a672586fb300760a8e204"}, {file = "openai-1.65.1.tar.gz", hash = "sha256:9d9370a20d2b8c3ce319fd2194c2eef5eab59effbcc5b04ff480977edc530fba"}, @@ -4448,7 +4391,7 @@ description = "Fast, correct Python JSON library supporting dataclasses, datetim optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, @@ -4542,7 +4485,7 @@ files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "pandas" @@ -4551,7 +4494,7 @@ description = "Powerful data structures for data analysis, time series, and stat optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -4639,7 +4582,6 @@ description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, @@ -4652,7 +4594,6 @@ description = "A Python Parser" optional = false python-versions = ">=3.6" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = 
"parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, @@ -4669,7 +4610,6 @@ description = "Utility library for gitignore style pattern matching of file path optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -4682,7 +4622,7 @@ description = "Pexpect allows easy control of interactive console applications." optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and sys_platform != \"win32\"" +markers = "sys_platform != \"win32\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -4698,7 +4638,7 @@ description = "Python Imaging Library (Fork)" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or python_version >= \"3.10\")" +markers = "(python_version >= \"3.10\" or extra == \"sentence-transformers\") and (python_version == \"3.10\" or python_version == \"3.11\" or extra == \"sentence-transformers\" or extra == \"ranx\") and (extra == \"ranx\" or extra == \"sentence-transformers\")" files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -4788,7 +4728,6 @@ description = "A small Python package for determining appropriate platform-speci optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -4806,7 +4745,6 @@ description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -4823,7 +4761,6 @@ description = "Python Lex & Yacc" optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"}, {file = "ply-3.11.tar.gz", hash = 
"sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"}, @@ -4836,7 +4773,6 @@ description = "A framework for managing and maintaining multi-language pre-commi optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pre_commit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d29e7cb346295bcc1cc75fc3e92e343495e3ea0196c9ec6ba53f49f10ab6ae7b"}, {file = "pre_commit-4.1.0.tar.gz", hash = "sha256:ae3f018575a588e30dfddfab9a05448bfbd6b73d78709617b5a2b853549716d4"}, @@ -4856,7 +4792,6 @@ description = "Library for building powerful interactive command lines in Python optional = false python-versions = ">=3.8.0" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, @@ -4872,7 +4807,7 @@ description = "Accelerated property cache" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d"}, {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c"}, @@ -4981,7 +4916,7 @@ description = "Beautiful, Pythonic protocol buffers" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"}, {file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"}, @@ -5000,7 +4935,7 @@ description = "" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, @@ -5022,7 +4957,6 @@ description = "Cross-platform lib for process and system monitoring in Python. 
optional = false python-versions = ">=3.6" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, @@ -5047,7 +4981,7 @@ description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and sys_platform != \"win32\"" +markers = "sys_platform != \"win32\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -5060,7 +4994,6 @@ description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -5076,7 +5009,7 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -5089,7 +5022,7 @@ description = "A collection of ASN.1-based protocols modules" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, @@ -5109,7 +5042,7 @@ files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -markers = {dev = "(implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", docs = "(python_version <= \"3.11\" or python_version >= \"3.12\") and implementation_name == \"pypy\""} +markers = {dev = "implementation_name == \"pypy\" or platform_python_implementation != \"PyPy\"", docs = "implementation_name == \"pypy\""} [[package]] name = "pydantic" @@ -5118,7 +5051,6 @@ description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = 
"sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -5140,7 +5072,6 @@ description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -5254,7 +5185,6 @@ description = "Bootstrap-based Sphinx theme from the PyData community" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydata_sphinx_theme-0.15.4-py3-none-any.whl", hash = "sha256:2136ad0e9500d0949f96167e63f3e298620040aea8f9c74621959eda5d4cf8e6"}, {file = "pydata_sphinx_theme-0.15.4.tar.gz", hash = "sha256:7762ec0ac59df3acecf49fd2f889e1b4565dbce8b88b2e29ee06fdd90645a06d"}, @@ -5288,7 +5218,7 @@ files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [package.extras] windows-terminal = ["colorama (>=0.4.6)"] @@ -5300,7 +5230,6 @@ description = "python code static checker" optional = false python-versions = ">=3.9.0" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pylint-3.3.4-py3-none-any.whl", hash = "sha256:289e6a1eb27b453b08436478391a48cd53bb0efb824873f949e709350f3de018"}, {file = "pylint-3.3.4.tar.gz", hash = "sha256:74ae7a38b177e69a9b525d0794bd8183820bfa7eb68cc1bee6e8ed22a42be4ce"}, @@ -5332,7 +5261,7 @@ description = "pyparsing module - Classes and methods to define and execute pars optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, @@ -5348,7 +5277,7 @@ description = "A python implementation of GNU readline." 
optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "sys_platform == \"win32\"" files = [ {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, @@ -5364,7 +5293,6 @@ description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -5388,7 +5316,6 @@ description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, @@ -5408,7 +5335,6 @@ description = "pytest xdist plugin for distributed testing, most importantly acr optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -5435,7 +5361,7 @@ files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\") or (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and python_version >= \"3.10\""} [package.dependencies] six = ">=1.5" @@ -5447,7 +5373,6 @@ description = "Read key-value pairs from a .env file and set them as environment optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -5463,7 +5388,6 @@ description = "Universally unique lexicographically sortable identifier" 
optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, @@ -5479,7 +5403,7 @@ description = "World timezone definitions, modern and historical" optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, @@ -5512,7 +5436,7 @@ files = [ {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] -markers = {dev = "(python_version <= \"3.11\" or python_version >= \"3.12\") and sys_platform == \"win32\"", docs = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\" and (python_version <= \"3.11\" or python_version >= \"3.12\")"} +markers = {dev = "sys_platform == \"win32\"", docs = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} [[package]] name = "pyyaml" @@ -5521,7 +5445,6 @@ description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["main", "dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -5585,7 +5508,6 @@ description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, @@ -5708,7 +5630,7 @@ description = "ranx: A Blazing-Fast Python Library for Ranking Evaluation, Compa optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "ranx-0.3.20-py3-none-any.whl", hash = "sha256:e056e4d5981b0328b045868cc7064fc57a545f36009fbe9bb602295ec33335de"}, {file = "ranx-0.3.20.tar.gz", hash = "sha256:8afc6f2042c40645e5d1fd80c35ed75a885e18bd2db7e95cc7ec32a0b41e59ea"}, @@ -5736,7 +5658,6 @@ description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "redis-5.2.1-py3-none-any.whl", 
hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, @@ -5756,7 +5677,6 @@ description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, @@ -5774,7 +5694,6 @@ description = "Alternative regular expression module, to replace re." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -5883,7 +5802,7 @@ files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\") or (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\") and python_version >= \"3.10\""} [package.dependencies] certifi = ">=2017.4.17" @@ -5902,7 +5821,7 @@ description = "Render rich text, tables, progress bars, syntax highlighting, mar optional = true python-versions = ">=3.8.0" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -5923,7 +5842,6 @@ description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rpds_py-0.23.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed"}, {file = "rpds_py-0.23.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c"}, @@ -6037,7 +5955,7 @@ description = "Pure-Python RSA implementation" optional = true python-versions = ">=3.6,<4" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -6053,7 +5971,7 @@ description = "An Amazon S3 Transfer Manager" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"bedrock\"" +markers = "extra == \"bedrock\"" files = [ {file = "s3transfer-0.11.3-py3-none-any.whl", hash = "sha256:ca855bdeb885174b5ffa95b9913622459d4ad8e331fc98eb01e6d5eb6a30655d"}, {file = "s3transfer-0.11.3.tar.gz", hash = "sha256:edae4977e3a122445660c7c114bba949f9d191bae3b34a096f18a1c8c354527a"}, @@ -6072,7 +5990,7 @@ description = "" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "safetensors-0.5.3-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd20eb133db8ed15b40110b7c00c6df51655a2998132193de2f75f72d99c7073"}, {file = "safetensors-0.5.3-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:21d01c14ff6c415c485616b8b0bf961c46b3b343ca59110d38d744e577f9cce7"}, @@ -6111,7 +6029,7 @@ description = "A set of python modules for machine learning and data mining" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "scikit_learn-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d056391530ccd1e501056160e3c9673b4da4805eb67eb2bdf4e983e1f9c9204e"}, {file = "scikit_learn-1.6.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:0c8d036eb937dbb568c6242fa598d551d88fb4399c0344d95c001980ec1c7d36"}, @@ -6167,7 +6085,7 @@ description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"sentence-transformers\" and python_version < \"3.10\"" +markers = "python_version < \"3.10\" and extra == \"sentence-transformers\"" files = [ {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, @@ -6211,7 +6129,7 @@ description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"ranx\") and (python_version >= \"3.10\" or extra == \"sentence-transformers\")" +markers = "python_version >= \"3.10\" and (extra == \"ranx\" or extra == \"sentence-transformers\")" files = [ {file = "scipy-1.15.2-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a2ec871edaa863e8213ea5df811cd600734f6400b4af272e1c011e69401218e9"}, {file = 
"scipy-1.15.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6f223753c6ea76983af380787611ae1291e3ceb23917393079dcc746ba60cfb5"}, @@ -6276,7 +6194,7 @@ description = "Statistical data visualization" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987"}, {file = "seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7"}, @@ -6299,7 +6217,7 @@ description = "State-of-the-Art Text Embeddings" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "sentence_transformers-3.4.1-py3-none-any.whl", hash = "sha256:e026dc6d56801fd83f74ad29a30263f401b4b522165c19386d8bc10dcca805da"}, {file = "sentence_transformers-3.4.1.tar.gz", hash = "sha256:68daa57504ff548340e54ff117bd86c1d2f784b21e0fb2689cf3272b8937b24b"}, @@ -6328,7 +6246,7 @@ description = "Easily download, build, install, upgrade, and uninstall Python pa optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"sentence-transformers\" and python_version >= \"3.12\"" +markers = "python_version >= \"3.12\" and extra == \"sentence-transformers\"" files = [ {file = "setuptools-75.8.2-py3-none-any.whl", hash = "sha256:558e47c15f1811c1fa7adbd0096669bf76c1d3f433f58324df69f3f5ecac4e8f"}, {file = "setuptools-75.8.2.tar.gz", hash = "sha256:4880473a969e5f23f2a2be3646b2dfd84af9028716d398e46192f84bc36900d2"}, @@ -6350,7 +6268,7 @@ description = "Manipulation and analysis of geometric objects" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"vertexai\"" +markers = "extra == \"vertexai\"" files = [ {file = "shapely-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:33fb10e50b16113714ae40adccf7670379e9ccf5b7a41d0002046ba2b8f0f691"}, {file = "shapely-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f44eda8bd7a4bccb0f281264b34bf3518d8c4c9a8ffe69a1a05dabf6e8461147"}, @@ -6414,7 +6332,7 @@ files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or python_version >= \"3.10\")", dev = "python_version <= \"3.11\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "(extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\") or (extra == \"vertexai\" or extra == \"mistralai\" or extra == \"bedrock\" or extra == \"ranx\") and python_version >= \"3.10\""} [[package]] name = "sniffio" @@ -6423,7 +6341,7 @@ description = "Sniff out which async library your code is running 
under" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"openai\" or extra == \"cohere\" or extra == \"mistralai\"" files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -6436,7 +6354,6 @@ description = "This package provides 29 stemmers for 28 languages generated from optional = false python-versions = "*" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -6453,7 +6370,7 @@ files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "python_version >= \"3.10\" and extra == \"ranx\""} [[package]] name = "sphinx" @@ -6462,7 +6379,6 @@ description = "Python documentation generator" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, @@ -6500,7 +6416,6 @@ description = "Add a copy button to each of your code cells." 
optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, @@ -6520,7 +6435,6 @@ description = "A sphinx extension for designing beautiful, view size responsive optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinx_design-0.5.0-py3-none-any.whl", hash = "sha256:1af1267b4cea2eedd6724614f19dcc88fe2e15aff65d06b2f6252cee9c4f4c1e"}, {file = "sphinx_design-0.5.0.tar.gz", hash = "sha256:e8e513acea6f92d15c6de3b34e954458f245b8e761b45b63950f65373352ab00"}, @@ -6545,7 +6459,6 @@ description = "Sphinx Extension adding support for custom favicons" optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinx-favicon-1.0.1.tar.gz", hash = "sha256:df796de32125609c1b4a8964db74270ebf4502089c27cd53f542354dc0b57e8e"}, {file = "sphinx_favicon-1.0.1-py3-none-any.whl", hash = "sha256:7c93d6b634cb4c9687ceab67a8526f05d3b02679df94e273e51a43282e6b034c"}, @@ -6566,7 +6479,6 @@ description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -6584,7 +6496,6 @@ description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -6602,7 +6513,6 @@ description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML h optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -6620,7 +6530,6 @@ description = "A sphinx extension which renders display math in HTML via JavaScr optional = false python-versions = ">=3.5" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -6636,7 +6545,6 @@ description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp d optional = 
false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -6654,7 +6562,6 @@ description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -6672,7 +6579,6 @@ description = "Database Abstraction Library" optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, @@ -6706,16 +6612,27 @@ files = [ {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86"}, {file = "SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120"}, {file = "SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40310db77a55512a18827488e592965d3dec6a3f1e3d8af3f8243134029daca3"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3043375dd5bbcb2282894cbb12e6c559654c67b5fffb462fda815a55bf93f7"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70065dfabf023b155a9c2a18f573e47e6ca709b9e8619b2e04c54d5bcf193178"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c058b84c3b24812c859300f3b5abf300daa34df20d4d4f42e9652a4d1c48c8a4"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0398361acebb42975deb747a824b5188817d32b5c8f8aba767d51ad0cc7bb08d"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-win32.whl", hash = "sha256:a2bc4e49e8329f3283d99840c136ff2cd1a29e49b5624a46a290f04dff48e079"}, {file = "SQLAlchemy-2.0.38-cp37-cp37m-win_amd64.whl", hash = "sha256:9cd136184dd5f58892f24001cdce986f5d7e96059d004118d5410671579834a4"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:665255e7aae5f38237b3a6eae49d2358d83a59f39ac21036413fab5d1e810578"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:92f99f2623ff16bd4aaf786ccde759c1f676d39c7bf2855eb0b540e1ac4530c8"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa498d1392216fae47eaf10c593e06c34476ced9549657fca713d0d1ba5f7248"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a9afbc3909d0274d6ac8ec891e30210563b2c8bdd52ebbda14146354e7a69373"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:57dd41ba32430cbcc812041d4de8d2ca4651aeefad2626921ae2a23deb8cd6ff"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3e35d5565b35b66905b79ca4ae85840a8d40d31e0b3e2990f2e7692071b179ca"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-win32.whl", hash = "sha256:f0d3de936b192980209d7b5149e3c98977c3810d401482d05fb6d668d53c1c63"}, {file = "SQLAlchemy-2.0.38-cp38-cp38-win_amd64.whl", hash = "sha256:3868acb639c136d98107c9096303d2d8e5da2880f7706f9f8c06a7f961961149"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07258341402a718f166618470cde0c34e4cec85a39767dce4e24f61ba5e667ea"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a826f21848632add58bef4f755a33d45105d25656a0c849f2dc2df1c71f6f50"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:386b7d136919bb66ced64d2228b92d66140de5fefb3c7df6bd79069a269a7b06"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f2951dc4b4f990a4b394d6b382accb33141d4d3bd3ef4e2b27287135d6bdd68"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8bf312ed8ac096d674c6aa9131b249093c1b37c35db6a967daa4c84746bc1bc9"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6db316d6e340f862ec059dc12e395d71f39746a20503b124edc255973977b728"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-win32.whl", hash = "sha256:c09a6ea87658695e527104cf857c70f79f14e9484605e205217aae0ec27b45fc"}, {file = "SQLAlchemy-2.0.38-cp39-cp39-win_amd64.whl", hash = "sha256:12f5c9ed53334c3ce719155424dc5407aaa4f6cadeb09c5b627e06abb93933a1"}, {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, @@ -6758,7 +6675,6 @@ description = "Extract data from python stack frames and tracebacks for informat optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -6779,7 +6695,7 @@ description = "Computer algebra system (CAS) in Python" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"sentence-transformers\"" files = [ {file = "sympy-1.13.1-py3-none-any.whl", hash = "sha256:db36cdc64bf61b9b24578b6f7bab1ecdd2452cf008f34faa33776680c26d66f8"}, {file = "sympy-1.13.1.tar.gz", hash = "sha256:9cebf7e04ff162015ce31c9c6c9144daa34a93bd082f54fd8f12deca4f47515f"}, @@ -6798,7 +6714,6 @@ description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" groups = ["main", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -6814,7 +6729,6 @@ description = "Retry code until it succeeds" optional = false 
python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, @@ -6831,7 +6745,6 @@ description = "Python library for throwaway instances of anything that can run i optional = false python-versions = "<4.0,>=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "testcontainers-4.9.1-py3-none-any.whl", hash = "sha256:315fb94b42a383872df530aa45319745278ef0cc18b9cfcdc231a75d14afa5a0"}, {file = "testcontainers-4.9.1.tar.gz", hash = "sha256:37fe9a222549ddb788463935965b16f91809e9a8d654f437d6a59eac9b77f76f"}, @@ -6886,7 +6799,7 @@ description = "threadpoolctl" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -6899,7 +6812,6 @@ description = "A tiny CSS parser" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, @@ -6919,7 +6831,7 @@ description = "" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(extra == \"sentence-transformers\" or extra == \"cohere\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "extra == \"sentence-transformers\" or extra == \"cohere\"" files = [ {file = "tokenizers-0.21.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2"}, {file = "tokenizers-0.21.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e"}, @@ -6953,7 +6865,7 @@ description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version < \"3.11\"" +markers = "python_version <= \"3.10\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -6996,7 +6908,6 @@ description = "Style preserving TOML library" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, @@ -7009,7 +6920,7 @@ description = "Tensors and Dynamic neural networks in Python with strong GPU acc optional = true python-versions = ">=3.9.0" groups = 
["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "torch-2.6.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:6860df13d9911ac158f4c44031609700e1eba07916fff62e21e6ffa0a9e01961"}, {file = "torch-2.6.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c4f103a49830ce4c7561ef4434cc7926e5a5fe4e5eb100c19ab36ea1e2b634ab"}, @@ -7067,7 +6978,6 @@ description = "Tornado is a Python web framework and asynchronous networking lib optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, @@ -7089,7 +6999,6 @@ description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or extra == \"ranx\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"openai\" or python_version >= \"3.10\")" files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -7112,7 +7021,6 @@ description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -7129,7 +7037,7 @@ description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow optional = true python-versions = ">=3.9.0" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"sentence-transformers\"" +markers = "extra == \"sentence-transformers\"" files = [ {file = "transformers-4.49.0-py3-none-any.whl", hash = "sha256:6b4fded1c5fee04d384b1014495b4235a2b53c87503d7d592423c06128cbbe03"}, {file = "transformers-4.49.0.tar.gz", hash = "sha256:7e40e640b5b8dc3f48743f5f5adbdce3660c82baafbd3afdfc04143cdbd2089e"}, @@ -7200,7 +7108,7 @@ description = "Support tools for TREC CAR participants. 
Also see trec-car.cs.unh optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "trec-car-tools-2.6.tar.gz", hash = "sha256:2fce2de120224fd569b151d5bed358a4ed334e643889b9e3dfe3e5a3d15d21c8"}, {file = "trec_car_tools-2.6-py3-none-any.whl", hash = "sha256:e6f0373259e1c234222da7270ab54ca7af7a6f8d0dd32b13e158c1659d3991cf"}, @@ -7217,7 +7125,7 @@ description = "A language and compiler for custom Deep Learning operations" optional = true python-versions = "*" groups = ["main"] -markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\" and (python_version <= \"3.11\" or python_version >= \"3.12\")" +markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and extra == \"sentence-transformers\"" files = [ {file = "triton-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e54983cd51875855da7c68ec05c05cf8bb08df361b1d5b69e05e40b0c9bd62"}, {file = "triton-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8009a1fb093ee8546495e96731336a33fb8856a38e45bb4ab6affd6dbc3ba220"}, @@ -7238,7 +7146,6 @@ description = "Typing stubs for cffi" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_cffi-1.16.0.20241221-py3-none-any.whl", hash = "sha256:e5b76b4211d7a9185f6ab8d06a106d56c7eb80af7cdb8bfcb4186ade10fb112f"}, {file = "types_cffi-1.16.0.20241221.tar.gz", hash = "sha256:1c96649618f4b6145f58231acb976e0b448be6b847f7ab733dabe62dfbff6591"}, @@ -7254,7 +7161,6 @@ description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, @@ -7271,7 +7177,6 @@ description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -7284,7 +7189,6 @@ description = "Typing stubs for redis" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, {file = "types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, @@ -7317,7 +7221,7 @@ description = "Typing stubs for requests" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"cohere\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"cohere\"" files = [ {file = 
"types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -7333,7 +7237,6 @@ description = "Typing stubs for setuptools" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_setuptools-75.8.0.20250225-py3-none-any.whl", hash = "sha256:94c86b439cc60bcc68c1cda3fd2c301f007f8f9502f4fbb54c66cb5ce9b875af"}, {file = "types_setuptools-75.8.0.20250225.tar.gz", hash = "sha256:6038f7e983d55792a5f90d8fdbf5d4c186026214a16bb65dd6ae83c624ae9636"}, @@ -7346,7 +7249,6 @@ description = "Typing stubs for tabulate" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "types_tabulate-0.9.0.20241207-py3-none-any.whl", hash = "sha256:b8dad1343c2a8ba5861c5441370c3e35908edd234ff036d4298708a1d4cf8a85"}, {file = "types_tabulate-0.9.0.20241207.tar.gz", hash = "sha256:ac1ac174750c0a385dfd248edc6279fa328aaf4ea317915ab879a2ec47833230"}, @@ -7372,7 +7274,6 @@ description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -7385,7 +7286,7 @@ description = "Runtime inspection utilities for typing module." 
optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"mistralai\"" +markers = "extra == \"mistralai\"" files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, @@ -7402,7 +7303,7 @@ description = "Provider of IANA time zone data" optional = true python-versions = ">=2" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, @@ -7415,7 +7316,7 @@ description = "Pure Python decompression module for .Z files compressed using Un optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "unlzw3-0.2.3-py3-none-any.whl", hash = "sha256:7760fb4f3afa1225623944c061991d89a061f7fb78665dbc4cddfdb562bb4a8b"}, {file = "unlzw3-0.2.3.tar.gz", hash = "sha256:ede5d928c792fff9da406f20334f9739693327f448f383ae1df1774627197bbb"}, @@ -7436,7 +7337,7 @@ files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] -markers = {main = "extra == \"sentence-transformers\" and python_version < \"3.10\" or extra == \"cohere\" and python_version < \"3.10\" or extra == \"vertexai\" and python_version < \"3.10\" or extra == \"voyageai\" and python_version < \"3.10\" or extra == \"bedrock\" and python_version < \"3.10\"", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} +markers = {main = "python_version < \"3.10\" and (extra == \"bedrock\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\")", dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""} [package.extras] brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] @@ -7454,7 +7355,7 @@ files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] -markers = {main = "(python_version <= \"3.11\" or python_version >= \"3.12\") and (extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\" or extra == \"ranx\" or extra == \"bedrock\") and python_version >= \"3.10\"", dev = "python_version <= \"3.11\" and 
python_version >= \"3.10\" or python_version >= \"3.12\"", docs = "python_version <= \"3.11\" and python_version >= \"3.10\" or python_version >= \"3.12\""} +markers = {main = "python_version >= \"3.10\" and (extra == \"ranx\" or extra == \"bedrock\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"vertexai\" or extra == \"voyageai\")", dev = "python_version >= \"3.10\"", docs = "python_version >= \"3.10\""} [package.extras] brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] @@ -7469,7 +7370,6 @@ description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, @@ -7491,7 +7391,7 @@ description = "" optional = true python-versions = "<4.0.0,>=3.7.1" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "voyageai-0.2.4-py3-none-any.whl", hash = "sha256:e3070e5c78dec89adae43231334b4637aa88933dad99b1c33d3219fdfc94dfa4"}, {file = "voyageai-0.2.4.tar.gz", hash = "sha256:b9911d8629e8a4e363291c133482fead49a3536afdf1e735f3ab3aaccd8d250d"}, @@ -7511,7 +7411,7 @@ description = "Python library to work with ARC and WARC files" optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "warc3_wet-0.2.5-py3-none-any.whl", hash = "sha256:5a9a525383fb1af159734baa75f349a7c4ec7bccd1b938681b5748515d2bf624"}, {file = "warc3_wet-0.2.5.tar.gz", hash = "sha256:15e50402dabaa1e95307f1e2a6169cfd5f137b70761d9f0b16a10aa6de227970"}, @@ -7524,7 +7424,7 @@ description = "Python library to work with ARC and WARC files, with fixes for Cl optional = true python-versions = "*" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "warc3-wet-clueweb09-0.2.5.tar.gz", hash = "sha256:3054bfc07da525d5967df8ca3175f78fa3f78514c82643f8c81fbca96300b836"}, ] @@ -7536,7 +7436,6 @@ description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" groups = ["dev", "docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -7549,7 +7448,6 @@ description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" groups = ["docs"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = 
"sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -7562,7 +7460,6 @@ description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, @@ -7652,7 +7549,7 @@ description = "Yet another URL library" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"voyageai\"" +markers = "extra == \"voyageai\"" files = [ {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, @@ -7754,7 +7651,7 @@ files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] -markers = {dev = "python_version < \"3.10\"", docs = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {dev = "python_version < \"3.10\""} [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] @@ -7771,7 +7668,7 @@ description = "Low-level interface to the zlib library that enables capturing th optional = true python-versions = ">=3.6" groups = ["main"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and extra == \"ranx\" and python_version >= \"3.10\"" +markers = "python_version >= \"3.10\" and extra == \"ranx\"" files = [ {file = "zlib_state-0.1.9-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97f45d0f80e9d7070229ecb36112eea6a17dc40053449a9c613ef837d9cb66b4"}, {file = "zlib_state-0.1.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3564eaa130f2533b87b82d0e622cfb5c25acec123e7bfe38d39db9ce6349cb52"}, @@ -7816,4 +7713,4 @@ voyageai = ["voyageai"] [metadata] lock-version = "2.1" python-versions = ">=3.9,<3.14" -content-hash = "3bcbaaf402487a181810db22556d0207a555d5683984cc3af14a803974c8900e" +content-hash = "31caa5fcd726203ef2d950c07ae16690e85509f4e857696030fc091263fcd9f9" From 091148cad5d000959b8308544e110362179d18c4 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Tue, 1 Apr 2025 18:40:04 -0700 Subject: [PATCH 17/26] mypy cannot find defined methods --- redisvl/index/index.py | 4 ++-- redisvl/query/aggregate.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/redisvl/index/index.py b/redisvl/index/index.py index 710751f4..4d5b1dbc 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -42,8 +42,8 @@ from redisvl.index.storage import BaseStorage, HashStorage, JsonStorage from redisvl.query import ( AggregationQuery, - BaseVectorQuery, BaseQuery, + BaseVectorQuery, CountQuery, FilterQuery, HybridAggregationQuery, @@ -716,7 +716,7 @@ def aggregate_query( """ results = self.aggregate( - aggregation_query, query_params=aggregation_query.params + aggregation_query, 
query_params=aggregation_query.params # type: ignore[attr-defined] ) return process_aggregate_results( results, diff --git a/redisvl/query/aggregate.py b/redisvl/query/aggregate.py index 067a9056..025e8d54 100644 --- a/redisvl/query/aggregate.py +++ b/redisvl/query/aggregate.py @@ -114,14 +114,14 @@ def __init__( query_string = self._build_query_string() super().__init__(query_string) - self.scorer(text_scorer) - self.add_scores() + self.scorer(text_scorer) # type: ignore[attr-defined] + self.add_scores() # type: ignore[attr-defined] self.apply( vector_similarity=f"(2 - @{self.DISTANCE_ID})/2", text_score="@__score" ) self.apply(hybrid_score=f"{1-alpha}*@text_score + {alpha}*@vector_similarity") self.sort_by(Desc("@hybrid_score"), max=num_results) - self.dialect(dialect) + self.dialect(dialect) # type: ignore[attr-defined] if return_fields: self.load(*return_fields) From 9069dd51eb60226e621681529a0e1292a47bd0c0 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Tue, 1 Apr 2025 18:53:03 -0700 Subject: [PATCH 18/26] updates nltk requirement --- poetry.lock | 10 ++++++++-- pyproject.toml | 1 + 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 57d9d741..16d7cd16 100644 --- a/poetry.lock +++ b/poetry.lock @@ -747,6 +747,7 @@ files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] +markers = {main = "extra == \"nltk\""} [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} @@ -786,7 +787,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\"", docs = "platform_system == \"Windows\" or sys_platform == \"win32\""} +markers = {main = "platform_system == \"Windows\" and (extra == \"nltk\" or extra == \"openai\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"nltk\" or extra == \"openai\" or extra == \"sentence-transformers\" or extra == \"cohere\" or python_version >= \"3.10\")", dev = "platform_system == \"Windows\" or sys_platform == \"win32\"", docs = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "coloredlogs" @@ -2806,6 +2807,7 @@ description = "Lightweight pipelining with Python functions" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "extra == \"nltk\" or extra == \"sentence-transformers\"" files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -3984,6 +3986,7 @@ description = "Natural Language Toolkit" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "extra == \"nltk\"" files = [ {file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"}, {file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"}, @@ -5694,6 +5697,7 @@ description = "Alternative regular 
expression module, to replace re." optional = true python-versions = ">=3.8" groups = ["main"] +markers = "extra == \"nltk\" or extra == \"sentence-transformers\"" files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -6999,6 +7003,7 @@ description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" groups = ["main"] +markers = "(extra == \"nltk\" or extra == \"openai\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and (extra == \"nltk\" or extra == \"openai\" or extra == \"sentence-transformers\" or extra == \"cohere\") or (extra == \"nltk\" or extra == \"openai\" or extra == \"sentence-transformers\" or extra == \"cohere\" or extra == \"ranx\") and python_version >= \"3.10\"" files = [ {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, @@ -7704,6 +7709,7 @@ files = [ bedrock = ["boto3"] cohere = ["cohere"] mistralai = ["mistralai"] +nltk = ["nltk"] openai = ["openai"] ranx = ["ranx"] sentence-transformers = ["scipy", "scipy", "sentence-transformers"] @@ -7713,4 +7719,4 @@ voyageai = ["voyageai"] [metadata] lock-version = "2.1" python-versions = ">=3.9,<3.14" -content-hash = "31caa5fcd726203ef2d950c07ae16690e85509f4e857696030fc091263fcd9f9" +content-hash = "52db593aa7e849c77d41201044adb215e57151dfe8c2da7be4ddfe381b27e230" diff --git a/pyproject.toml b/pyproject.toml index 1da572ce..eed0e0dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,6 +58,7 @@ mistralai = ["mistralai"] voyageai = ["voyageai"] ranx = ["ranx"] bedrock = ["boto3"] +nltk = ["nltk"] [tool.poetry.group.dev.dependencies] black = "^25.1.0" From c5ad6964691f4184560b4a7ee848813dfcf65465 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Tue, 1 Apr 2025 19:03:04 -0700 Subject: [PATCH 19/26] I swear I have changed this 4 times now --- redisvl/query/query.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/redisvl/query/query.py b/redisvl/query/query.py index a6b36a90..bf3d59c0 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -812,7 +812,7 @@ def _build_query_string(self) -> str: else: filter_expression = "" - text = f"(~@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" + text = f"(@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" if filter_expression and filter_expression != "*": text += f"({filter_expression})" return text From ea5d08729e031121d41b01f439810e7ca02ba2de Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Wed, 2 Apr 2025 11:46:58 -0700 Subject: [PATCH 20/26] wip: debugging aggregations and filters --- redisvl/query/aggregate.py | 23 +++-- redisvl/query/query.py | 6 +- tests/integration/test_aggregation.py | 139 +++++++++++++++++++------- 3 files changed, 118 insertions(+), 50 deletions(-) diff --git a/redisvl/query/aggregate.py b/redisvl/query/aggregate.py index 025e8d54..8bcb9b4c 100644 --- a/redisvl/query/aggregate.py +++ b/redisvl/query/aggregate.py @@ -32,9 +32,9 @@ class HybridAggregationQuery(AggregationQuery): def __init__( self, text: str, - text_field: str, + text_field_name: str, vector: Union[bytes, List[float]], - 
vector_field: str, + vector_field_name: str, text_scorer: str = "BM25STD", filter_expression: Optional[Union[str, FilterExpression]] = None, alpha: float = 0.7, @@ -42,16 +42,16 @@ def __init__( num_results: int = 10, return_fields: Optional[List[str]] = None, stopwords: Optional[Union[str, Set[str]]] = "english", - dialect: int = 4, + dialect: int = 2, ): """ Instantiages a HybridAggregationQuery object. Args: text (str): The text to search for. - text_field (str): The text field name to search in. + text_field_name (str): The text field name to search in. vector (Union[bytes, List[float]]): The vector to perform vector similarity search. - vector_field (str): The vector field name to search in. + vector_field_name (str): The vector field name to search in. text_scorer (str, optional): The text scorer to use. Options are {TFIDF, TFIDF.DOCNORM, BM25, DISMAX, DOCSCORE, BM25STD}. Defaults to "BM25STD". filter_expression (Optional[FilterExpression], optional): The filter expression to use. @@ -67,7 +67,7 @@ def __init__( provided then a default set of stopwords for that language will be used. if a list, set, or tuple of strings is provided then those will be used as stopwords. Defaults to "english". if set to "None" then no stopwords will be removed. - dialect (int, optional): The Redis dialect version. Defaults to 4. + dialect (int, optional): The Redis dialect version. Defaults to 2. Raises: ValueError: If the text string is empty, or if the text string becomes empty after @@ -82,9 +82,9 @@ def __init__( query = HybridAggregationQuery( text="example text", - text_field="text_field", + text_field_name="text_field", vector=[0.1, 0.2, 0.3], - vector_field="vector_field", + vector_field_name="vector_field", text_scorer="BM25STD", filter_expression=None, alpha=0.7, @@ -102,9 +102,9 @@ def __init__( raise ValueError("text string cannot be empty") self._text = text - self._text_field = text_field + self._text_field = text_field_name self._vector = vector - self._vector_field = vector_field + self._vector_field = vector_field_name self._filter_expression = filter_expression self._alpha = alpha self._dtype = dtype @@ -112,6 +112,9 @@ def __init__( self.set_stopwords(stopwords) query_string = self._build_query_string() + #### + print('query_string: ', query_string) + #### super().__init__(query_string) self.scorer(text_scorer) # type: ignore[attr-defined] diff --git a/redisvl/query/query.py b/redisvl/query/query.py index bf3d59c0..244b893e 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -691,7 +691,7 @@ class TextQuery(FilterQuery): def __init__( self, text: str, - text_field: str, + text_field_name: str, text_scorer: str = "BM25STD", filter_expression: Optional[Union[str, FilterExpression]] = None, return_fields: Optional[List[str]] = None, @@ -708,7 +708,7 @@ def __init__( Args: text (str): The text string to perform the text search with. - text_field (str): The name of the document field to perform text search on. + text_field_name (str): The name of the document field to perform text search on. text_scorer (str, optional): The text scoring algorithm to use. Defaults to BM25STD. Options are {TFIDF, BM25STD, BM25, DOCNORM, DISMAX, DOCSCORE}. See https://redis.io/docs/latest/develop/interact/search-and-query/advanced-concepts/scoring/ @@ -740,7 +740,7 @@ def __init__( TypeError: If stopwords is not a valid iterable set of strings. 
""" self._text = text - self._text_field = text_field + self._text_field = text_field_name self._text_scorer = text_scorer self._num_results = num_results diff --git a/tests/integration/test_aggregation.py b/tests/integration/test_aggregation.py index 46c07474..50e42e73 100644 --- a/tests/integration/test_aggregation.py +++ b/tests/integration/test_aggregation.py @@ -1,9 +1,9 @@ -from datetime import timedelta - import pytest from redis.commands.search.aggregation import AggregateResult from redis.commands.search.result import Result +from redisvl.redis.connection import compare_versions + from redisvl.index import SearchIndex from redisvl.query import HybridAggregationQuery from redisvl.query.filter import ( @@ -13,31 +13,12 @@ Num, Tag, Text, - Timestamp, ) from redisvl.redis.utils import array_to_buffer -# TODO expand to multiple schema types and sync + async - -vector = ([0.1, 0.1, 0.5],) -vector_field_name = ("user_embedding",) -return_fields = ( - [ - "user", - "credit_score", - "age", - "job", - "location", - "last_updated", - ], -) -filter_expression = (Tag("credit_score") == "high",) -distance_threshold = (0.2,) - @pytest.fixture def index(sample_data, redis_url): - # construct a search index from the schema index = SearchIndex.from_dict( { "index": { @@ -70,7 +51,7 @@ def index(sample_data, redis_url): # create the index (no data yet) index.create(overwrite=True) - # Prepare and load the data + # prepare and load the data def hash_preprocess(item: dict) -> dict: return { **item, @@ -87,7 +68,10 @@ def hash_preprocess(item: dict) -> dict: def test_aggregation_query(index): - # *=>[KNN 7 @user_embedding $vector AS vector_distance] + redis_version = index.client.info()["redis_version"] + if not compare_versions(redis_version, "7.2.0"): + pytest.skip("Not using a late enough version of Redis") + text = "a medical professional with expertise in lung cancer" text_field = "description" vector = [0.1, 0.1, 0.5] @@ -96,9 +80,9 @@ def test_aggregation_query(index): hybrid_query = HybridAggregationQuery( text=text, - text_field=text_field, + text_field_name=text_field, vector=vector, - vector_field=vector_field, + vector_field_name=vector_field, return_fields=return_fields, ) @@ -106,7 +90,6 @@ def test_aggregation_query(index): assert isinstance(results, list) assert len(results) == 7 for doc in results: - # ensure all return fields present assert doc["user"] in [ "john", "derrick", @@ -121,12 +104,11 @@ def test_aggregation_query(index): assert doc["job"] in ["engineer", "doctor", "dermatologist", "CEO", "dentist"] assert doc["credit_score"] in ["high", "low", "medium"] - # test num_results hybrid_query = HybridAggregationQuery( text=text, - text_field=text_field, + text_field_name=text_field, vector=vector, - vector_field=vector_field, + vector_field_name=vector_field, num_results=3, ) @@ -139,7 +121,7 @@ def test_aggregation_query(index): ) -def test_empty_query_string(index): +def test_empty_query_string(): text = "" text_field = "description" vector = [0.1, 0.1, 0.5] @@ -149,30 +131,86 @@ def test_empty_query_string(index): # test if text is empty with pytest.raises(ValueError): hybrid_query = HybridAggregationQuery( - text=text, text_field=text_field, vector=vector, vector_field=vector_field + text=text, text_field_name=text_field, vector=vector, vector_field_name=vector_field ) # test if text becomes empty after stopwords are removed text = "with a for but and" # will all be removed as default stopwords with pytest.raises(ValueError): hybrid_query = HybridAggregationQuery( - 
text=text, text_field=text_field, vector=vector, vector_field=vector_field + text=text, text_field_name=text_field, vector=vector, vector_field_name=vector_field ) +def test_aggregation_query_filter(index): + redis_version = index.client.info()["redis_version"] + if not compare_versions(redis_version, "7.2.0"): + pytest.skip("Not using a late enough version of Redis") -def test_aggregate_query_stopwords(index): text = "a medical professional with expertise in lung cancer" text_field = "description" vector = [0.1, 0.1, 0.5] vector_field = "user_embedding" return_fields = ["user", "credit_score", "age", "job", "location", "description"] - return - # test num_results + filter_expression = (Tag("credit_score") == ("high")) & (Num("age") > 30) + hybrid_query = HybridAggregationQuery( text=text, - text_field=text_field, + text_field_name=text_field, vector=vector, - vector_field=vector_field, + vector_field_name=vector_field, + filter_expression=filter_expression, + return_fields=return_fields, + ) + + results = index.aggregate_query(hybrid_query) + assert len(results) == 3 + for result in results: + assert result["credit_score"] == "high" + assert int(result["age"]) > 30 + + +def test_aggregation_query_with_geo_filter(index): + redis_version = index.client.info()["redis_version"] + if not compare_versions(redis_version, "7.2.0"): + pytest.skip("Not using a late enough version of Redis") + + text = "a medical professional with expertise in lung cancer" + text_field = "description" + vector = [0.1, 0.1, 0.5] + vector_field = "user_embedding" + return_fields = ["user", "credit_score", "age", "job", "location", "description"] + filter_expression = Geo("location") == GeoRadius(37.7749, -122.4194, 1000) + + hybrid_query = HybridAggregationQuery( + text=text, + text_field_name=text_field, + vector=vector, + vector_field_name=vector_field, + filter_expression=filter_expression, + return_fields=return_fields, + ) + + results = index.aggregate_query(hybrid_query) + assert len(results) == 3 + for result in results: + assert result["location"] is not None + + +def test_aggregate_query_stopwords(index): + redis_version = index.client.info()["redis_version"] + if not compare_versions(redis_version, "7.2.0"): + pytest.skip("Not using a late enough version of Redis") + + text = "a medical professional with expertise in lung cancer" + text_field = "description" + vector = [0.1, 0.1, 0.5] + vector_field = "user_embedding" + + hybrid_query = HybridAggregationQuery( + text=text, + text_field_name=text_field, + vector=vector, + vector_field_name=vector_field, alpha=0.5, stopwords=["medical", "expertise"], ) @@ -182,3 +220,30 @@ def test_aggregate_query_stopwords(index): for r in results: assert r["text_score"] == 0 assert r["hybrid_score"] == 0.5 * r["vector_similarity"] + + +def test_aggregate_query_text_filter(index): + redis_version = index.client.info()["redis_version"] + if not compare_versions(redis_version, "7.2.0"): + pytest.skip("Not using a late enough version of Redis") + + text = "a medical professional with expertise in lung cancer" + text_field = "description" + vector = [0.1, 0.1, 0.5] + vector_field = "user_embedding" + filter_expression = (Text("description") == ("medical")) | (Text("job") % ("doct*")) + + hybrid_query = HybridAggregationQuery( + text=text, + text_field_name=text_field, + vector=vector, + vector_field_name=vector_field, + alpha=0.5, + filter_expression=filter_expression + ) + + results = index.aggregate_query(hybrid_query) + assert len(results) == 7 + for result in results: + 
assert result["text_score"] == 0 + assert result["hybrid_score"] == 0.5 * result["vector_similarity"] \ No newline at end of file From 1672ea3c9cbf7c610c0041963057291ac6d21cb8 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Wed, 2 Apr 2025 17:47:54 -0700 Subject: [PATCH 21/26] fixes query string parsing. adds more tests --- redisvl/query/aggregate.py | 10 +-- redisvl/query/query.py | 34 ++++---- tests/integration/test_aggregation.py | 111 ++++++++++++++++++++------ tests/integration/test_query.py | 84 ++++++++++++++++++- tests/unit/test_aggregation_types.py | 8 +- tests/unit/test_query_types.py | 27 ++++--- 6 files changed, 212 insertions(+), 62 deletions(-) diff --git a/redisvl/query/aggregate.py b/redisvl/query/aggregate.py index 8bcb9b4c..dfe7296c 100644 --- a/redisvl/query/aggregate.py +++ b/redisvl/query/aggregate.py @@ -112,9 +112,6 @@ def __init__( self.set_stopwords(stopwords) query_string = self._build_query_string() - #### - print('query_string: ', query_string) - #### super().__init__(query_string) self.scorer(text_scorer) # type: ignore[attr-defined] @@ -125,7 +122,6 @@ def __init__( self.apply(hybrid_score=f"{1-alpha}*@text_score + {alpha}*@vector_similarity") self.sort_by(Desc("@hybrid_score"), max=num_results) self.dialect(dialect) # type: ignore[attr-defined] - if return_fields: self.load(*return_fields) @@ -216,9 +212,9 @@ def _build_query_string(self) -> str: # base KNN query knn_query = f"KNN {self._num_results} @{self._vector_field} ${self.VECTOR_PARAM} AS {self.DISTANCE_ID}" - text = f"(~@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" + text = f"(~@{self._text_field}:({self.tokenize_and_escape_query(self._text)})" if filter_expression and filter_expression != "*": - text += f"({filter_expression})" + text += f" AND {filter_expression}" - return f"{text}=>[{knn_query}]" + return f"{text})=>[{knn_query}]" diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 244b893e..6735ae5c 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -687,7 +687,7 @@ class RangeQuery(VectorRangeQuery): pass -class TextQuery(FilterQuery): +class TextQuery(BaseQuery): def __init__( self, text: str, @@ -747,22 +747,26 @@ def __init__( self.set_stopwords(stopwords) self.set_filter(filter_expression) - query_string = self._build_query_string() + if params: + self._params = params - super().__init__( - query_string, - return_fields=return_fields, - num_results=num_results, - dialect=dialect, - sort_by=sort_by, - in_order=in_order, - params=params, - ) + self._num_results = num_results - # Handle query modifiers - self.scorer(self._text_scorer) + # initialize the base query with the full query string and filter expression + query_string = self._build_query_string() + super().__init__(query_string) + + # Handle query settings + if return_fields: + self.return_fields(*return_fields) self.paging(0, self._num_results).dialect(dialect) + if sort_by: + self.sort_by(sort_by) + + if in_order: + self.in_order() + if return_score: self.with_scores() @@ -812,7 +816,7 @@ def _build_query_string(self) -> str: else: filter_expression = "" - text = f"(@{self._text_field}:({self.tokenize_and_escape_query(self._text)}))" + text = f"@{self._text_field}:({self.tokenize_and_escape_query(self._text)})" if filter_expression and filter_expression != "*": - text += f"({filter_expression})" + text += f" AND {filter_expression}" return text diff --git a/tests/integration/test_aggregation.py b/tests/integration/test_aggregation.py index 50e42e73..384230c7 100644 --- 
a/tests/integration/test_aggregation.py +++ b/tests/integration/test_aggregation.py @@ -2,18 +2,10 @@ from redis.commands.search.aggregation import AggregateResult from redis.commands.search.result import Result -from redisvl.redis.connection import compare_versions - from redisvl.index import SearchIndex from redisvl.query import HybridAggregationQuery -from redisvl.query.filter import ( - FilterExpression, - Geo, - GeoRadius, - Num, - Tag, - Text, -) +from redisvl.query.filter import FilterExpression, Geo, GeoRadius, Num, Tag, Text +from redisvl.redis.connection import compare_versions from redisvl.redis.utils import array_to_buffer @@ -131,17 +123,24 @@ def test_empty_query_string(): # test if text is empty with pytest.raises(ValueError): hybrid_query = HybridAggregationQuery( - text=text, text_field_name=text_field, vector=vector, vector_field_name=vector_field + text=text, + text_field_name=text_field, + vector=vector, + vector_field_name=vector_field, ) # test if text becomes empty after stopwords are removed text = "with a for but and" # will all be removed as default stopwords with pytest.raises(ValueError): hybrid_query = HybridAggregationQuery( - text=text, text_field_name=text_field, vector=vector, vector_field_name=vector_field + text=text, + text_field_name=text_field, + vector=vector, + vector_field_name=vector_field, ) -def test_aggregation_query_filter(index): + +def test_aggregation_query_with_filter(index): redis_version = index.client.info()["redis_version"] if not compare_versions(redis_version, "7.2.0"): pytest.skip("Not using a late enough version of Redis") @@ -163,7 +162,7 @@ def test_aggregation_query_filter(index): ) results = index.aggregate_query(hybrid_query) - assert len(results) == 3 + assert len(results) == 2 for result in results: assert result["credit_score"] == "high" assert int(result["age"]) > 30 @@ -179,7 +178,7 @@ def test_aggregation_query_with_geo_filter(index): vector = [0.1, 0.1, 0.5] vector_field = "user_embedding" return_fields = ["user", "credit_score", "age", "job", "location", "description"] - filter_expression = Geo("location") == GeoRadius(37.7749, -122.4194, 1000) + filter_expression = Geo("location") == GeoRadius(-122.4194, 37.7749, 1000, "m") hybrid_query = HybridAggregationQuery( text=text, @@ -196,6 +195,36 @@ def test_aggregation_query_with_geo_filter(index): assert result["location"] is not None +@pytest.mark.parametrize("alpha", [0.1, 0.5, 0.9]) +def test_aggregate_query_alpha(index, alpha): + redis_version = index.client.info()["redis_version"] + if not compare_versions(redis_version, "7.2.0"): + pytest.skip("Not using a late enough version of Redis") + + text = "a medical professional with expertise in lung cancer" + text_field = "description" + vector = [0.1, 0.1, 0.5] + vector_field = "user_embedding" + + hybrid_query = HybridAggregationQuery( + text=text, + text_field_name=text_field, + vector=vector, + vector_field_name=vector_field, + alpha=alpha, + ) + + results = index.aggregate_query(hybrid_query) + assert len(results) == 7 + for result in results: + score = alpha * float(result["vector_similarity"]) + (1 - alpha) * float( + result["text_score"] + ) + assert ( + float(result["hybrid_score"]) - score <= 0.0001 + ) # allow for small floating point error + + def test_aggregate_query_stopwords(index): redis_version = index.client.info()["redis_version"] if not compare_versions(redis_version, "7.2.0"): @@ -205,24 +234,34 @@ def test_aggregate_query_stopwords(index): text_field = "description" vector = [0.1, 0.1, 0.5] 
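# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the diff): the stopword and alpha tests in
# this hunk exercise the apply() steps shown earlier in aggregate.py:
#   vector_similarity = (2 - vector_distance) / 2
#   hybrid_score      = (1 - alpha) * text_score + alpha * vector_similarity
# With hypothetical values alpha = 0.5, text_score = 0.0 (every query term
# removed as a stopword) and vector_distance = 0.2:
#   vector_similarity = (2 - 0.2) / 2 = 0.9
#   hybrid_score      = 0.5 * 0.0 + 0.5 * 0.9 = 0.45
# ---------------------------------------------------------------------------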
vector_field = "user_embedding" + alpha = 0.5 hybrid_query = HybridAggregationQuery( text=text, text_field_name=text_field, vector=vector, vector_field_name=vector_field, - alpha=0.5, + alpha=alpha, stopwords=["medical", "expertise"], ) + query_string = hybrid_query._build_query_string() + + assert "medical" not in query_string + assert "expertize" not in query_string + results = index.aggregate_query(hybrid_query) assert len(results) == 7 - for r in results: - assert r["text_score"] == 0 - assert r["hybrid_score"] == 0.5 * r["vector_similarity"] + for result in results: + score = alpha * float(result["vector_similarity"]) + (1 - alpha) * float( + result["text_score"] + ) + assert ( + float(result["hybrid_score"]) - score <= 0.0001 + ) # allow for small floating point error -def test_aggregate_query_text_filter(index): +def test_aggregate_query_with_text_filter(index): redis_version = index.client.info()["redis_version"] if not compare_versions(redis_version, "7.2.0"): pytest.skip("Not using a late enough version of Redis") @@ -231,19 +270,39 @@ def test_aggregate_query_text_filter(index): text_field = "description" vector = [0.1, 0.1, 0.5] vector_field = "user_embedding" - filter_expression = (Text("description") == ("medical")) | (Text("job") % ("doct*")) + filter_expression = Text(text_field) == ("medical") + # make sure we can still apply filters to the same text field we are querying hybrid_query = HybridAggregationQuery( text=text, text_field_name=text_field, vector=vector, vector_field_name=vector_field, alpha=0.5, - filter_expression=filter_expression - ) + filter_expression=filter_expression, + return_fields=["job", "description"], + ) results = index.aggregate_query(hybrid_query) - assert len(results) == 7 + assert len(results) == 2 + for result in results: + assert "medical" in result[text_field].lower() + + filter_expression = (Text(text_field) == ("medical")) & ( + (Text(text_field) != ("research")) + ) + hybrid_query = HybridAggregationQuery( + text=text, + text_field_name=text_field, + vector=vector, + vector_field_name=vector_field, + alpha=0.5, + filter_expression=filter_expression, + return_fields=["description"], + ) + + results = index.aggregate_query(hybrid_query) + assert len(results) == 2 for result in results: - assert result["text_score"] == 0 - assert result["hybrid_score"] == 0.5 * result["vector_similarity"] \ No newline at end of file + assert "medical" in result[text_field].lower() + assert "research" not in result[text_field].lower() diff --git a/tests/integration/test_query.py b/tests/integration/test_query.py index 9bee41f6..5591d7bb 100644 --- a/tests/integration/test_query.py +++ b/tests/integration/test_query.py @@ -4,7 +4,13 @@ from redis.commands.search.result import Result from redisvl.index import SearchIndex -from redisvl.query import CountQuery, FilterQuery, VectorQuery, VectorRangeQuery +from redisvl.query import ( + CountQuery, + FilterQuery, + TextQuery, + VectorQuery, + VectorRangeQuery, +) from redisvl.query.filter import ( FilterExpression, Geo, @@ -147,6 +153,7 @@ def index(sample_data, redis_url): "storage_type": "hash", }, "fields": [ + {"name": "description", "type": "text"}, {"name": "credit_score", "type": "tag"}, {"name": "job", "type": "text"}, {"name": "age", "type": "numeric"}, @@ -790,3 +797,78 @@ def test_range_query_normalize_bad_input(index): return_fields=["user", "credit_score", "age", "job", "location"], distance_threshold=1.2, ) + + +def test_text_query(index): + text = "a medical professional with expertise in lung cancer" 
+ text_field = "description" + return_fields = ["user", "credit_score", "age", "job", "location", "description"] + + text_query = TextQuery( + text=text, + text_field_name=text_field, + return_fields=return_fields, + ) + results = index.query(text_query) + + assert len(results) == 4 + + # make sure at least one word from the query is in the description + for result in results: + assert any(word in result[text_field] for word in text.split()) + + +# test that text queryies work with filter expressions +def test_text_query_with_filter(index): + text = "a medical professional with expertise in lung cancer" + text_field = "description" + return_fields = ["user", "credit_score", "age", "job", "location", "description"] + filter_expression = (Tag("credit_score") == ("high")) & (Num("age") > 30) + + text_query = TextQuery( + text=text, + text_field_name=text_field, + filter_expression=filter_expression, + return_fields=return_fields, + ) + results = index.query(text_query) + assert len(results) == 2 + for result in results: + assert any(word in result[text_field] for word in text.split()) + assert result["credit_score"] == "high" + assert int(result["age"]) > 30 + + +# test that text queryies workt with text filter expressions on the same text field +def test_text_query_with_text_filter(index): + text = "a medical professional with expertise in lung cancer" + text_field = "description" + return_fields = ["age", "job", "description"] + filter_expression = Text(text_field) == ("medical") + + text_query = TextQuery( + text=text, + text_field_name=text_field, + filter_expression=filter_expression, + return_fields=return_fields, + ) + results = index.query(text_query) + assert len(results) == 2 + for result in results: + assert any(word in result[text_field] for word in text.split()) + assert "medical" in result[text_field] + + filter_expression = Text(text_field) != ("research") + + text_query = TextQuery( + text=text, + text_field_name=text_field, + filter_expression=filter_expression, + return_fields=return_fields, + ) + + results = index.query(text_query) + assert len(results) == 3 + for result in results: + assert any(word in result[text_field] for word in text.split()) + assert "research" not in result[text_field] diff --git a/tests/unit/test_aggregation_types.py b/tests/unit/test_aggregation_types.py index 9e409d8e..50b75b10 100644 --- a/tests/unit/test_aggregation_types.py +++ b/tests/unit/test_aggregation_types.py @@ -19,9 +19,9 @@ def test_aggregate_hybrid_query(): hybrid_query = HybridAggregationQuery( text=sample_text, - text_field=text_field_name, + text_field_name=text_field_name, vector=sample_vector, - vector_field=vector_field_name, + vector_field_name=vector_field_name, ) assert isinstance(hybrid_query, AggregateRequest) @@ -49,9 +49,9 @@ def test_aggregate_hybrid_query(): hybrid_query = HybridAggregationQuery( text=sample_text, - text_field=text_field_name, + text_field_name=text_field_name, vector=sample_vector, - vector_field=vector_field_name, + vector_field_name=vector_field_name, text_scorer=scorer, filter_expression=filter_expression, alpha=alpha, diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index 385c03b4..c411c9fb 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -194,7 +194,7 @@ def test_text_query(): return_fields = ["title", "genre", "rating"] text_query = TextQuery( text=text_string, - text_field=text_field_name, + text_field_name=text_field_name, return_fields=return_fields, return_score=False, ) @@ 
-202,9 +202,10 @@ def test_text_query(): # Check properties assert text_query._return_fields == return_fields assert text_query._num_results == 10 + assert ( - text_query.filter - == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + text_query._build_query_string() + == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)})" ) assert isinstance(text_query, Query) assert isinstance(text_query.query, Query) @@ -250,15 +251,15 @@ def test_text_query(): text_query = TextQuery(text_string, text_field_name, stopwords=None) assert text_query.stopwords == set([]) assert ( - text_query.filter - == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + text_query._build_query_string() + == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)})" ) text_query = TextQuery(text_string, text_field_name, stopwords=["the", "a", "of"]) assert text_query.stopwords == set(["the", "a", "of"]) assert ( - text_query.filter - == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + text_query._build_query_string() + == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)})" ) text_query = TextQuery(text_string, text_field_name, stopwords="german") @@ -273,13 +274,21 @@ def test_text_query(): text_query = TextQuery(text_string, text_field_name, stopwords=["the", "a", "of"]) assert text_query.stopwords == set(["the", "a", "of"]) assert ( - text_query.filter - == f"(@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}))" + text_query._build_query_string() + == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)})" ) text_query = TextQuery(text_string, text_field_name, stopwords="german") assert text_query.stopwords != set([]) + # test that filter expression is set correctly + text_query.set_filter(filter_expression) + assert text_query.filter == filter_expression + assert ( + text_query._build_query_string() + == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}) AND {filter_expression}" + ) + with pytest.raises(ValueError): text_query = TextQuery(text_string, text_field_name, stopwords="gibberish") From f32067a51b91f778d0ef2ff8797d23752f64564f Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Wed, 2 Apr 2025 21:55:36 -0700 Subject: [PATCH 22/26] test now checks default dialect is 2 --- tests/unit/test_aggregation_types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_aggregation_types.py b/tests/unit/test_aggregation_types.py index 50b75b10..a2f3015c 100644 --- a/tests/unit/test_aggregation_types.py +++ b/tests/unit/test_aggregation_types.py @@ -36,7 +36,7 @@ def test_aggregate_hybrid_query(): assert hybrid_query._alpha == 0.7 assert hybrid_query._num_results == 10 assert hybrid_query._loadfields == [] - assert hybrid_query._dialect == 4 + assert hybrid_query._dialect == 2 # Check specifying properties scorer = "TFIDF" From 9b1dc18399a5339a426f73558098d752b87fea7b Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Thu, 3 Apr 2025 11:42:30 -0700 Subject: [PATCH 23/26] makes methods private --- redisvl/query/aggregate.py | 11 +++++------ redisvl/query/query.py | 31 ++++++++++++++++++------------- tests/unit/test_query_types.py | 21 --------------------- 3 files changed, 23 insertions(+), 40 deletions(-) diff --git a/redisvl/query/aggregate.py b/redisvl/query/aggregate.py index dfe7296c..d9b89f56 100644 --- a/redisvl/query/aggregate.py +++ b/redisvl/query/aggregate.py @@ 
-109,7 +109,7 @@ def __init__( self._alpha = alpha self._dtype = dtype self._num_results = num_results - self.set_stopwords(stopwords) + self._set_stopwords(stopwords) query_string = self._build_query_string() super().__init__(query_string) @@ -149,7 +149,7 @@ def stopwords(self) -> Set[str]: """ return self._stopwords.copy() if self._stopwords else set() - def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): + def _set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): """Set the stopwords to use in the query. Args: stopwords (Optional[Union[str, Set[str]]]): The stopwords to use. If a string @@ -164,7 +164,7 @@ def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): self._stopwords = set() elif isinstance(stopwords, str): try: - nltk.download("stopwords") + nltk.download("stopwords", quiet=True) self._stopwords = set(nltk_stopwords.words(stopwords)) except Exception as e: raise ValueError(f"Error trying to load {stopwords} from nltk. {e}") @@ -175,7 +175,7 @@ def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): else: raise TypeError("stopwords must be a set, list, or tuple of strings") - def tokenize_and_escape_query(self, user_query: str) -> str: + def _tokenize_and_escape_query(self, user_query: str) -> str: """Convert a raw user query to a redis full text query joined by ORs Args: user_query (str): The user query to tokenize and escape. @@ -185,7 +185,6 @@ def tokenize_and_escape_query(self, user_query: str) -> str: Raises: ValueError: If the text string becomes empty after stopwords are removed. """ - escaper = TokenEscaper() tokens = [ @@ -212,7 +211,7 @@ def _build_query_string(self) -> str: # base KNN query knn_query = f"KNN {self._num_results} @{self._vector_field} ${self.VECTOR_PARAM} AS {self.DISTANCE_ID}" - text = f"(~@{self._text_field}:({self.tokenize_and_escape_query(self._text)})" + text = f"(~@{self._text_field}:({self._tokenize_and_escape_query(self._text)})" if filter_expression and filter_expression != "*": text += f" AND {filter_expression}" diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 6735ae5c..7351561a 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -1,6 +1,8 @@ from enum import Enum from typing import Any, Dict, List, Optional, Set, Tuple, Union +import nltk +from nltk.corpus import stopwords as nltk_stopwords from redis.commands.search.query import Query as RedisQuery from redisvl.query.filter import FilterExpression @@ -741,22 +743,21 @@ def __init__( """ self._text = text self._text_field = text_field_name - self._text_scorer = text_scorer self._num_results = num_results - self.set_stopwords(stopwords) + self._set_stopwords(stopwords) self.set_filter(filter_expression) if params: self._params = params - self._num_results = num_results - # initialize the base query with the full query string and filter expression query_string = self._build_query_string() super().__init__(query_string) - # Handle query settings + # handle query settings + self.scorer(text_scorer) + if return_fields: self.return_fields(*return_fields) self.paging(0, self._num_results).dialect(dialect) @@ -774,15 +775,12 @@ def __init__( def stopwords(self): return self._stopwords - def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): + def _set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): if not stopwords: self._stopwords = set() elif isinstance(stopwords, str): try: - import nltk - from nltk.corpus import 
stopwords as nltk_stopwords - - nltk.download("stopwords") + nltk.download("stopwords", quiet=True) self._stopwords = set(nltk_stopwords.words(stopwords)) except Exception as e: raise ValueError(f"Error trying to load {stopwords} from nltk. {e}") @@ -793,9 +791,16 @@ def set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): else: raise TypeError("stopwords must be a set, list, or tuple of strings") - def tokenize_and_escape_query(self, user_query: str) -> str: - """Convert a raw user query to a redis full text query joined by ORs""" + def _tokenize_and_escape_query(self, user_query: str) -> str: + """Convert a raw user query to a redis full text query joined by ORs + Args: + user_query (str): The user query to tokenize and escape. + Returns: + str: The tokenized and escaped query string. + Raises: + ValueError: If the text string becomes empty after stopwords are removed. + """ escaper = TokenEscaper() tokens = [ @@ -816,7 +821,7 @@ def _build_query_string(self) -> str: else: filter_expression = "" - text = f"@{self._text_field}:({self.tokenize_and_escape_query(self._text)})" + text = f"@{self._text_field}:({self._tokenize_and_escape_query(self._text)})" if filter_expression and filter_expression != "*": text += f" AND {filter_expression}" return text diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index c411c9fb..ee5d7aec 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -203,14 +203,9 @@ def test_text_query(): assert text_query._return_fields == return_fields assert text_query._num_results == 10 - assert ( - text_query._build_query_string() - == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)})" - ) assert isinstance(text_query, Query) assert isinstance(text_query.query, Query) assert isinstance(text_query.params, dict) - assert text_query._text_scorer == "BM25STD" assert text_query.params == {} assert text_query._dialect == 2 assert text_query._in_order == False @@ -250,17 +245,9 @@ def test_text_query(): # Test stopwords are configurable text_query = TextQuery(text_string, text_field_name, stopwords=None) assert text_query.stopwords == set([]) - assert ( - text_query._build_query_string() - == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)})" - ) text_query = TextQuery(text_string, text_field_name, stopwords=["the", "a", "of"]) assert text_query.stopwords == set(["the", "a", "of"]) - assert ( - text_query._build_query_string() - == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)})" - ) text_query = TextQuery(text_string, text_field_name, stopwords="german") assert text_query.stopwords != set([]) @@ -273,10 +260,6 @@ def test_text_query(): text_query = TextQuery(text_string, text_field_name, stopwords=["the", "a", "of"]) assert text_query.stopwords == set(["the", "a", "of"]) - assert ( - text_query._build_query_string() - == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)})" - ) text_query = TextQuery(text_string, text_field_name, stopwords="german") assert text_query.stopwords != set([]) @@ -284,10 +267,6 @@ def test_text_query(): # test that filter expression is set correctly text_query.set_filter(filter_expression) assert text_query.filter == filter_expression - assert ( - text_query._build_query_string() - == f"@{text_field_name}:({text_query.tokenize_and_escape_query(text_string)}) AND {filter_expression}" - ) with pytest.raises(ValueError): text_query = TextQuery(text_string, text_field_name, 
stopwords="gibberish") From ff44041982c953e0a77e3a79b003517ae919c0b7 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Thu, 3 Apr 2025 13:12:38 -0700 Subject: [PATCH 24/26] abstracts AggregationQuery to follow BaseQuery calls in search index --- redisvl/index/index.py | 68 ++++++++++++--------------- redisvl/query/__init__.py | 4 +- redisvl/query/aggregate.py | 16 +++---- redisvl/query/query.py | 21 ++++++++- tests/integration/test_aggregation.py | 38 +++++++-------- tests/integration/test_query.py | 11 ++++- tests/unit/test_aggregation_types.py | 16 +++---- tests/unit/test_query_types.py | 1 + 8 files changed, 99 insertions(+), 76 deletions(-) diff --git a/redisvl/index/index.py b/redisvl/index/index.py index 4d5b1dbc..bbd9c628 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -46,7 +46,7 @@ BaseVectorQuery, CountQuery, FilterQuery, - HybridAggregationQuery, + HybridQuery, ) from redisvl.query.filter import FilterExpression from redisvl.redis.connection import ( @@ -686,35 +686,8 @@ def fetch(self, id: str) -> Optional[Dict[str, Any]]: return convert_bytes(obj[0]) return None - def aggregate_query( - self, aggregation_query: AggregationQuery - ) -> List[Dict[str, Any]]: - """Execute an aggretation query and processes the results. - - This method takes an AggregationHyridQuery object directly, runs the search, and - handles post-processing of the search. - - Args: - aggregation_query (AggregationQuery): The aggregation query to run. - - Returns: - List[Result]: A list of search results. - - .. code-block:: python - - from redisvl.query import HybridAggregationQuery - - aggregation = HybridAggregationQuery( - text="the text to search for", - text_field="description", - vector=[0.16, -0.34, 0.98, 0.23], - vector_field="embedding", - num_results=3 - ) - - results = index.aggregate_query(aggregation_query) - - """ + def _aggregate(self, aggregation_query: AggregationQuery) -> List[Dict[str, Any]]: + """Execute an aggretation query and processes the results.""" results = self.aggregate( aggregation_query, query_params=aggregation_query.params # type: ignore[attr-defined] ) @@ -846,7 +819,7 @@ def _query(self, query: BaseQuery) -> List[Dict[str, Any]]: results = self.search(query.query, query_params=query.params) return process_results(results, query=query, schema=self.schema) - def query(self, query: BaseQuery) -> List[Dict[str, Any]]: + def query(self, query: Union[BaseQuery, AggregationQuery]) -> List[Dict[str, Any]]: """Execute a query on the index. 
This method takes a BaseQuery object directly, runs the search, and @@ -871,7 +844,10 @@ def query(self, query: BaseQuery) -> List[Dict[str, Any]]: results = index.query(query) """ - return self._query(query) + if isinstance(query, AggregationQuery): + return self._aggregate(query) + else: + return self._query(query) def paginate(self, query: BaseQuery, page_size: int = 30) -> Generator: """Execute a given query against the index and return results in @@ -1377,6 +1353,19 @@ async def fetch(self, id: str) -> Optional[Dict[str, Any]]: return convert_bytes(obj[0]) return None + async def _aggregate( + self, aggregation_query: AggregationQuery + ) -> List[Dict[str, Any]]: + """Execute an aggregation query and process the results.""" + results = await self.aggregate( + aggregation_query, query_params=aggregation_query.params # type: ignore[attr-defined] + ) + return process_aggregate_results( + results, + query=aggregation_query, + storage_type=self.schema.index.storage_type, + ) + async def aggregate(self, *args, **kwargs) -> "AggregateResult": """Perform an aggregation operation against the index. @@ -1500,14 +1489,16 @@ async def _query(self, query: BaseQuery) -> List[Dict[str, Any]]: results = await self.search(query.query, query_params=query.params) return process_results(results, query=query, schema=self.schema) - async def query(self, query: BaseQuery) -> List[Dict[str, Any]]: + async def query( + self, query: Union[BaseQuery, AggregationQuery] + ) -> List[Dict[str, Any]]: """Asynchronously execute a query on the index. - This method takes a BaseQuery object directly, runs the search, and - handles post-processing of the search. + This method takes a BaseQuery or AggregationQuery object directly, runs + the search, and handles post-processing of the search. Args: - query (BaseQuery): The query to run. + query Union(BaseQuery, AggregationQuery): The query to run. Returns: List[Result]: A list of search results. @@ -1524,7 +1515,10 @@ async def query(self, query: BaseQuery) -> List[Dict[str, Any]]: results = await index.query(query) """ - return await self._query(query) + if isinstance(query, AggregationQuery): + return await self._aggregate(query) + else: + return await self._query(query) async def paginate(self, query: BaseQuery, page_size: int = 30) -> AsyncGenerator: """Execute a given query against the index and return results in diff --git a/redisvl/query/__init__.py b/redisvl/query/__init__.py index 6245315f..30d35562 100644 --- a/redisvl/query/__init__.py +++ b/redisvl/query/__init__.py @@ -1,4 +1,4 @@ -from redisvl.query.aggregate import AggregationQuery, HybridAggregationQuery +from redisvl.query.aggregate import AggregationQuery, HybridQuery from redisvl.query.query import ( BaseQuery, BaseVectorQuery, @@ -20,5 +20,5 @@ "CountQuery", "TextQuery", "AggregationQuery", - "HybridAggregationQuery", + "HybridQuery", ] diff --git a/redisvl/query/aggregate.py b/redisvl/query/aggregate.py index d9b89f56..c4d01c70 100644 --- a/redisvl/query/aggregate.py +++ b/redisvl/query/aggregate.py @@ -19,9 +19,9 @@ def __init__(self, query_string): super().__init__(query_string) -class HybridAggregationQuery(AggregationQuery): +class HybridQuery(AggregationQuery): """ - HybridAggregationQuery combines text and vector search in Redis. + HybridQuery combines text and vector search in Redis. It allows you to perform a hybrid search using both text and vector similarity. It scores documents based on a weighted combination of text and vector similarity.
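The weighted combination mentioned above is the blend the integration tests later check against the returned hybrid_score. A minimal sketch, assuming vector_similarity and text_score are the per-document values surfaced by the aggregation and alpha is the constructor argument (0.7 by default):

    def hybrid_score(vector_similarity: float, text_score: float, alpha: float = 0.7) -> float:
        # A larger alpha weights vector similarity more heavily than the full text score.
        return alpha * vector_similarity + (1 - alpha) * text_score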
""" @@ -45,7 +45,7 @@ def __init__( dialect: int = 2, ): """ - Instantiages a HybridAggregationQuery object. + Instantiages a HybridQuery object. Args: text (str): The text to search for. @@ -75,12 +75,12 @@ def __init__( TypeError: If the stopwords are not a set, list, or tuple of strings. .. code-block:: python - from redisvl.query.aggregate import HybridAggregationQuery + from redisvl.query import HybridQuery from redisvl.index import SearchIndex - index = SearchIndex("my_index") + index = SearchIndex.from_yaml(index.yaml) - query = HybridAggregationQuery( + query = HybridQuery( text="example text", text_field_name="text_field", vector=[0.1, 0.2, 0.3], @@ -92,10 +92,10 @@ def __init__( num_results=10, return_fields=["field1", "field2"], stopwords="english", - dialect=4, + dialect=2, ) - results = index.aggregate_query(query) + results = index.query(query) """ if not text.strip(): diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 7351561a..ab9a2657 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -712,7 +712,7 @@ def __init__( text (str): The text string to perform the text search with. text_field_name (str): The name of the document field to perform text search on. text_scorer (str, optional): The text scoring algorithm to use. - Defaults to BM25STD. Options are {TFIDF, BM25STD, BM25, DOCNORM, DISMAX, DOCSCORE}. + Defaults to BM25STD. Options are {TFIDF, BM25STD, BM25, TFIDF.DOCNORM, DISMAX, DOCSCORE}. See https://redis.io/docs/latest/develop/interact/search-and-query/advanced-concepts/scoring/ filter_expression (Union[str, FilterExpression], optional): A filter to apply along with the text search. Defaults to None. @@ -740,6 +740,25 @@ def __init__( Raises: ValueError: if stopwords language string cannot be loaded. TypeError: If stopwords is not a valid iterable set of strings. + + .. 
code-block:: python + from redisvl.query import TextQuery + from redisvl.index import SearchIndex + + index = SearchIndex.from_yaml(index.yaml) + + query = TextQuery( + text="example text", + text_field_name="text_field", + text_scorer="BM25STD", + filter_expression=None, + num_results=10, + return_fields=["field1", "field2"], + stopwords="english", + dialect=2, + ) + + results = index.query(query) """ self._text = text self._text_field = text_field_name diff --git a/tests/integration/test_aggregation.py b/tests/integration/test_aggregation.py index 384230c7..deab8afe 100644 --- a/tests/integration/test_aggregation.py +++ b/tests/integration/test_aggregation.py @@ -3,7 +3,7 @@ from redis.commands.search.result import Result from redisvl.index import SearchIndex -from redisvl.query import HybridAggregationQuery +from redisvl.query import HybridQuery from redisvl.query.filter import FilterExpression, Geo, GeoRadius, Num, Tag, Text from redisvl.redis.connection import compare_versions from redisvl.redis.utils import array_to_buffer @@ -70,7 +70,7 @@ def test_aggregation_query(index): vector_field = "user_embedding" return_fields = ["user", "credit_score", "age", "job", "location", "description"] - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -78,7 +78,7 @@ def test_aggregation_query(index): return_fields=return_fields, ) - results = index.aggregate_query(hybrid_query) + results = index.query(hybrid_query) assert isinstance(results, list) assert len(results) == 7 for doc in results: @@ -96,7 +96,7 @@ def test_aggregation_query(index): assert doc["job"] in ["engineer", "doctor", "dermatologist", "CEO", "dentist"] assert doc["credit_score"] in ["high", "low", "medium"] - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -104,7 +104,7 @@ def test_aggregation_query(index): num_results=3, ) - results = index.aggregate_query(hybrid_query) + results = index.query(hybrid_query) assert len(results) == 3 assert ( results[0]["hybrid_score"] @@ -122,7 +122,7 @@ def test_empty_query_string(): # test if text is empty with pytest.raises(ValueError): - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -132,7 +132,7 @@ def test_empty_query_string(): # test if text becomes empty after stopwords are removed text = "with a for but and" # will all be removed as default stopwords with pytest.raises(ValueError): - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -152,7 +152,7 @@ def test_aggregation_query_with_filter(index): return_fields = ["user", "credit_score", "age", "job", "location", "description"] filter_expression = (Tag("credit_score") == ("high")) & (Num("age") > 30) - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -161,7 +161,7 @@ def test_aggregation_query_with_filter(index): return_fields=return_fields, ) - results = index.aggregate_query(hybrid_query) + results = index.query(hybrid_query) assert len(results) == 2 for result in results: assert result["credit_score"] == "high" @@ -180,7 +180,7 @@ def test_aggregation_query_with_geo_filter(index): return_fields = ["user", "credit_score", "age", "job", "location", "description"] filter_expression = Geo("location") == GeoRadius(-122.4194, 37.7749, 1000, "m") - hybrid_query = 
HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -189,7 +189,7 @@ def test_aggregation_query_with_geo_filter(index): return_fields=return_fields, ) - results = index.aggregate_query(hybrid_query) + results = index.query(hybrid_query) assert len(results) == 3 for result in results: assert result["location"] is not None @@ -206,7 +206,7 @@ def test_aggregate_query_alpha(index, alpha): vector = [0.1, 0.1, 0.5] vector_field = "user_embedding" - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -214,7 +214,7 @@ def test_aggregate_query_alpha(index, alpha): alpha=alpha, ) - results = index.aggregate_query(hybrid_query) + results = index.query(hybrid_query) assert len(results) == 7 for result in results: score = alpha * float(result["vector_similarity"]) + (1 - alpha) * float( @@ -236,7 +236,7 @@ def test_aggregate_query_stopwords(index): vector_field = "user_embedding" alpha = 0.5 - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -250,7 +250,7 @@ def test_aggregate_query_stopwords(index): assert "medical" not in query_string assert "expertize" not in query_string - results = index.aggregate_query(hybrid_query) + results = index.query(hybrid_query) assert len(results) == 7 for result in results: score = alpha * float(result["vector_similarity"]) + (1 - alpha) * float( @@ -273,7 +273,7 @@ def test_aggregate_query_with_text_filter(index): filter_expression = Text(text_field) == ("medical") # make sure we can still apply filters to the same text field we are querying - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -283,7 +283,7 @@ def test_aggregate_query_with_text_filter(index): return_fields=["job", "description"], ) - results = index.aggregate_query(hybrid_query) + results = index.query(hybrid_query) assert len(results) == 2 for result in results: assert "medical" in result[text_field].lower() @@ -291,7 +291,7 @@ def test_aggregate_query_with_text_filter(index): filter_expression = (Text(text_field) == ("medical")) & ( (Text(text_field) != ("research")) ) - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=text, text_field_name=text_field, vector=vector, @@ -301,7 +301,7 @@ def test_aggregate_query_with_text_filter(index): return_fields=["description"], ) - results = index.aggregate_query(hybrid_query) + results = index.query(hybrid_query) assert len(results) == 2 for result in results: assert "medical" in result[text_field].lower() diff --git a/tests/integration/test_query.py b/tests/integration/test_query.py index 5591d7bb..7a0229fe 100644 --- a/tests/integration/test_query.py +++ b/tests/integration/test_query.py @@ -799,7 +799,10 @@ def test_range_query_normalize_bad_input(index): ) -def test_text_query(index): +@pytest.mark.parametrize( + "scorer", ["BM25", "TFIDF", "TFIDF.DOCNORM", "DISMAX", "DOCSCORE"] +) +def test_text_query(index, scorer): text = "a medical professional with expertise in lung cancer" text_field = "description" return_fields = ["user", "credit_score", "age", "job", "location", "description"] @@ -807,6 +810,7 @@ def test_text_query(index): text_query = TextQuery( text=text, text_field_name=text_field, + text_scorer=scorer, return_fields=return_fields, ) results = index.query(text_query) @@ -824,10 +828,12 @@ def test_text_query_with_filter(index): text_field 
= "description" return_fields = ["user", "credit_score", "age", "job", "location", "description"] filter_expression = (Tag("credit_score") == ("high")) & (Num("age") > 30) + scorer = "TFIDF" text_query = TextQuery( text=text, text_field_name=text_field, + text_scorer=scorer, filter_expression=filter_expression, return_fields=return_fields, ) @@ -843,12 +849,14 @@ def test_text_query_with_filter(index): def test_text_query_with_text_filter(index): text = "a medical professional with expertise in lung cancer" text_field = "description" + scorer = "TFIDF.DOCNORM" return_fields = ["age", "job", "description"] filter_expression = Text(text_field) == ("medical") text_query = TextQuery( text=text, text_field_name=text_field, + text_scorer=scorer, filter_expression=filter_expression, return_fields=return_fields, ) @@ -863,6 +871,7 @@ def test_text_query_with_text_filter(index): text_query = TextQuery( text=text, text_field_name=text_field, + text_scorer=scorer, filter_expression=filter_expression, return_fields=return_fields, ) diff --git a/tests/unit/test_aggregation_types.py b/tests/unit/test_aggregation_types.py index a2f3015c..aaf34d5d 100644 --- a/tests/unit/test_aggregation_types.py +++ b/tests/unit/test_aggregation_types.py @@ -4,7 +4,7 @@ from redis.commands.search.result import Result from redisvl.index.index import process_results -from redisvl.query.aggregate import HybridAggregationQuery +from redisvl.query.aggregate import HybridQuery from redisvl.query.filter import Tag # Sample data for testing @@ -17,7 +17,7 @@ def test_aggregate_hybrid_query(): text_field_name = "description" vector_field_name = "embedding" - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=sample_text, text_field_name=text_field_name, vector=sample_vector, @@ -47,7 +47,7 @@ def test_aggregate_hybrid_query(): stopwords = [] dialect = 2 - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( text=sample_text, text_field_name=text_field_name, vector=sample_vector, @@ -74,12 +74,12 @@ def test_aggregate_hybrid_query(): assert hybrid_query.stopwords == set() # Test stopwords are configurable - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( sample_text, text_field_name, sample_vector, vector_field_name, stopwords=None ) assert hybrid_query.stopwords == set([]) - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( sample_text, text_field_name, sample_vector, @@ -87,7 +87,7 @@ def test_aggregate_hybrid_query(): stopwords=["the", "a", "of"], ) assert hybrid_query.stopwords == set(["the", "a", "of"]) - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( sample_text, text_field_name, sample_vector, @@ -97,7 +97,7 @@ def test_aggregate_hybrid_query(): assert hybrid_query.stopwords != set([]) with pytest.raises(ValueError): - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( sample_text, text_field_name, sample_vector, @@ -106,7 +106,7 @@ def test_aggregate_hybrid_query(): ) with pytest.raises(TypeError): - hybrid_query = HybridAggregationQuery( + hybrid_query = HybridQuery( sample_text, text_field_name, sample_vector, diff --git a/tests/unit/test_query_types.py b/tests/unit/test_query_types.py index ee5d7aec..546e7675 100644 --- a/tests/unit/test_query_types.py +++ b/tests/unit/test_query_types.py @@ -196,6 +196,7 @@ def test_text_query(): text=text_string, text_field_name=text_field_name, return_fields=return_fields, + text_scorer="BM25", return_score=False, ) From c0be24fef30d32116f2c7206d418491cda121be1 Mon 
Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Thu, 3 Apr 2025 14:43:16 -0700 Subject: [PATCH 25/26] updates docstrings --- redisvl/index/index.py | 6 +++--- redisvl/query/aggregate.py | 2 +- redisvl/query/query.py | 10 ++++++++++ 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/redisvl/index/index.py b/redisvl/index/index.py index bbd9c628..99bf6b87 100644 --- a/redisvl/index/index.py +++ b/redisvl/index/index.py @@ -822,11 +822,11 @@ def _query(self, query: BaseQuery) -> List[Dict[str, Any]]: def query(self, query: Union[BaseQuery, AggregationQuery]) -> List[Dict[str, Any]]: """Execute a query on the index. - This method takes a BaseQuery object directly, runs the search, and + This method takes a BaseQuery or AggregationQuery object directly, and handles post-processing of the search. Args: - query (BaseQuery): The query to run. + query (Union[BaseQuery, AggregationQuery]): The query to run. Returns: List[Result]: A list of search results. @@ -1498,7 +1498,7 @@ async def query( the search, and handles post-processing of the search. Args: - query Union(BaseQuery, AggregationQuery): The query to run. + query (Union[BaseQuery, AggregationQuery]): The query to run. Returns: List[Result]: A list of search results. diff --git a/redisvl/query/aggregate.py b/redisvl/query/aggregate.py index c4d01c70..8a20f156 100644 --- a/redisvl/query/aggregate.py +++ b/redisvl/query/aggregate.py @@ -45,7 +45,7 @@ def __init__( dialect: int = 2, ): """ - Instantiages a HybridQuery object. + Instantiates a HybridQuery object. Args: text (str): The text to search for. diff --git a/redisvl/query/query.py b/redisvl/query/query.py index ab9a2657..44d5bac9 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -795,6 +795,16 @@ def stopwords(self): return self._stopwords def _set_stopwords(self, stopwords: Optional[Union[str, Set[str]]] = "english"): + """Set the stopwords to use in the query. + Args: + stopwords (Optional[Union[str, Set[str]]]): The stopwords to use. If a string + such as "english" or "german" is provided then a default set of stopwords for that + language will be used. If a list, set, or tuple of strings is provided then those + will be used as stopwords. Defaults to "english". If set to "None" then no stopwords + will be removed. + Raises: + TypeError: If the stopwords are not a set, list, or tuple of strings. + """ if not stopwords: self._stopwords = set() elif isinstance(stopwords, str): From aae39493a823fffd18c00567a829a216998ec5b9 Mon Sep 17 00:00:00 2001 From: Justin Cechmanek Date: Thu, 3 Apr 2025 18:08:14 -0700 Subject: [PATCH 26/26] fixes docstring --- redisvl/query/aggregate.py | 1 - redisvl/query/query.py | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/redisvl/query/aggregate.py b/redisvl/query/aggregate.py index 8a20f156..1e5526d1 100644 --- a/redisvl/query/aggregate.py +++ b/redisvl/query/aggregate.py @@ -9,7 +9,6 @@ from redisvl.utils.token_escaper import TokenEscaper -# base class class AggregationQuery(AggregateRequest): """ Base class for aggregation queries used to create aggregation queries for Redis. diff --git a/redisvl/query/query.py b/redisvl/query/query.py index 44d5bac9..cc4d26f0 100644 --- a/redisvl/query/query.py +++ b/redisvl/query/query.py @@ -705,8 +705,7 @@ def __init__( params: Optional[Dict[str, Any]] = None, stopwords: Optional[Union[str, Set[str]]] = "english", ): - """A query for running a full text and vector search, along with an optional - filter expression. 
+ """A query for running a full text search, along with an optional filter expression. Args: text (str): The text string to perform the text search with.