diff --git a/.github/workflows/python-notebook-tests.yml b/.github/workflows/python-notebook-tests.yml index bae0f715b4..9808d690f2 100644 --- a/.github/workflows/python-notebook-tests.yml +++ b/.github/workflows/python-notebook-tests.yml @@ -37,7 +37,8 @@ jobs: fail-fast: false # Continue running all jobs even if one fails env: DEBUG: 1 - GRAPHRAG_API_KEY: ${{ secrets.OPENAI_NOTEBOOK_KEY }} + GRAPHRAG_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GRAPHRAG_API_BASE: ${{ secrets.GRAPHRAG_API_BASE }} runs-on: ${{ matrix.os }} steps: diff --git a/.gitignore b/.gitignore index 92652fcb71..77ee0a6eb8 100644 --- a/.gitignore +++ b/.gitignore @@ -22,6 +22,8 @@ output/lancedb venv/ .conda .tmp +packages/graphrag-llm/notebooks/metrics +packages/graphrag-llm/notebooks/cache .env build.zip diff --git a/.vscode/launch.json b/.vscode/launch.json index 9f949cb12f..a8ae08553f 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -21,10 +21,10 @@ "module": "graphrag", "args": [ "query", + "${input:query}", "--root", "${input:root_folder}", - "--method", "${input:query_method}", - "--query", "${input:query}" + "--method", "${input:query_method}" ] }, { diff --git a/dictionary.txt b/dictionary.txt index 32c6f1ebe4..5d30e2e850 100644 --- a/dictionary.txt +++ b/dictionary.txt @@ -26,6 +26,7 @@ noqa dtypes ints genid +isinstance # Azure abfs diff --git a/packages/graphrag-cache/README.md b/packages/graphrag-cache/README.md index 9924ce09a9..83cba45176 100644 --- a/packages/graphrag-cache/README.md +++ b/packages/graphrag-cache/README.md @@ -5,12 +5,15 @@ ```python import asyncio from graphrag_storage import StorageConfig, create_storage, StorageType -from graphrag_cache import CacheConfig, create_cache, CacheType +from graphrag_cache import CacheConfig, create_cache, CacheType, create_cache_key async def run(): + cache = create_cache() + + # The above is equivalent to the following: cache = create_cache( CacheConfig( - type=CacheType.Json + type=CacheType.Json, storage=StorageConfig( type=StorageType.File, base_dir="cache" @@ -21,6 +24,14 @@ async def run(): await cache.set("my_key", {"some": "object to cache"}) print(await cache.get("my_key")) + # create cache key from data dict. 
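+    # create_cache_key hashes the dict deterministically (YAML-serialized,
+    # then SHA-256 via graphrag_common.hasher in this change), so identical
+    # arguments always yield the same key.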
+ cache_key = create_cache_key({ + "some_arg": "some_value", + "something_else": 5 + }) + await cache.set(cache_key, {"some": "object to cache"}) + print(await cache.get(cache_key)) + if __name__ == "__main__": asyncio.run(run()) ``` diff --git a/packages/graphrag-cache/graphrag_cache/__init__.py b/packages/graphrag-cache/graphrag_cache/__init__.py index 82ea4811d5..41caa0c72e 100644 --- a/packages/graphrag-cache/graphrag_cache/__init__.py +++ b/packages/graphrag-cache/graphrag_cache/__init__.py @@ -6,12 +6,15 @@ from graphrag_cache.cache import Cache from graphrag_cache.cache_config import CacheConfig from graphrag_cache.cache_factory import create_cache, register_cache +from graphrag_cache.cache_key import CacheKeyCreator, create_cache_key from graphrag_cache.cache_type import CacheType __all__ = [ "Cache", "CacheConfig", + "CacheKeyCreator", "CacheType", "create_cache", + "create_cache_key", "register_cache", ] diff --git a/packages/graphrag-cache/graphrag_cache/cache.py b/packages/graphrag-cache/graphrag_cache/cache.py index 26f1a96a0e..8395bb4b4c 100644 --- a/packages/graphrag-cache/graphrag_cache/cache.py +++ b/packages/graphrag-cache/graphrag_cache/cache.py @@ -6,14 +6,17 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_storage import Storage class Cache(ABC): """Provide a cache interface for the pipeline.""" @abstractmethod - def __init__(self, **kwargs: Any) -> None: + def __init__(self, *, storage: Storage | None, **kwargs: Any) -> None: """Create a cache instance.""" @abstractmethod diff --git a/packages/graphrag-cache/graphrag_cache/cache_config.py b/packages/graphrag-cache/graphrag_cache/cache_config.py index afa2f6340c..93bcabed6d 100644 --- a/packages/graphrag-cache/graphrag_cache/cache_config.py +++ b/packages/graphrag-cache/graphrag_cache/cache_config.py @@ -3,7 +3,7 @@ """Cache configuration model.""" -from graphrag_storage import StorageConfig +from graphrag_storage import StorageConfig, StorageType from pydantic import BaseModel, ConfigDict, Field from graphrag_cache.cache_type import CacheType @@ -22,5 +22,5 @@ class CacheConfig(BaseModel): storage: StorageConfig | None = Field( description="The storage configuration to use for file-based caches such as 'Json'.", - default=None, + default_factory=lambda: StorageConfig(type=StorageType.File, base_dir="cache"), ) diff --git a/packages/graphrag-cache/graphrag_cache/cache_factory.py b/packages/graphrag-cache/graphrag_cache/cache_factory.py index 6e4bc32813..6b1310754c 100644 --- a/packages/graphrag-cache/graphrag_cache/cache_factory.py +++ b/packages/graphrag-cache/graphrag_cache/cache_factory.py @@ -5,16 +5,22 @@ """Cache factory implementation.""" from collections.abc import Callable +from typing import TYPE_CHECKING -from graphrag_common.factory import Factory, ServiceScope -from graphrag_storage import Storage +from graphrag_common.factory import Factory +from graphrag_storage import create_storage -from graphrag_cache.cache import Cache from graphrag_cache.cache_config import CacheConfig from graphrag_cache.cache_type import CacheType +if TYPE_CHECKING: + from graphrag_common.factory import ServiceScope + from graphrag_storage import Storage -class CacheFactory(Factory[Cache]): + from graphrag_cache.cache import Cache + + +class CacheFactory(Factory["Cache"]): """A factory class for cache implementations.""" @@ -23,8 +29,8 @@ class CacheFactory(Factory[Cache]): def register_cache( cache_type: 
str, - cache_initializer: Callable[..., Cache], - scope: ServiceScope = "transient", + cache_initializer: Callable[..., "Cache"], + scope: "ServiceScope" = "transient", ) -> None: """Register a custom cache implementation. @@ -38,7 +44,9 @@ def register_cache( cache_factory.register(cache_type, cache_initializer, scope) -def create_cache(config: CacheConfig, storage: Storage | None = None) -> Cache: +def create_cache( + config: CacheConfig | None = None, storage: "Storage | None" = None +) -> "Cache": """Create a cache implementation based on the given configuration. Args @@ -53,9 +61,13 @@ def create_cache(config: CacheConfig, storage: Storage | None = None) -> Cache: Cache The created cache implementation. """ + config = config or CacheConfig() config_model = config.model_dump() cache_strategy = config.type + if not storage and config.storage: + storage = create_storage(config.storage) + if cache_strategy not in cache_factory: match cache_strategy: case CacheType.Json: diff --git a/packages/graphrag-cache/graphrag_cache/cache_key.py b/packages/graphrag-cache/graphrag_cache/cache_key.py new file mode 100644 index 0000000000..6bf4930b97 --- /dev/null +++ b/packages/graphrag-cache/graphrag_cache/cache_key.py @@ -0,0 +1,36 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Create cache key.""" + +from typing import Any, Protocol, runtime_checkable + +from graphrag_common.hasher import hash_data + + +@runtime_checkable +class CacheKeyCreator(Protocol): + """Create cache key function protocol. + + Args + ---- + input_args: dict[str, Any] + The input arguments for creating the cache key. + + Returns + ------- + str + The generated cache key. + """ + + def __call__( + self, + input_args: dict[str, Any], + ) -> str: + """Create cache key.""" + ... + + +def create_cache_key(input_args: dict[str, Any]) -> str: + """Create a cache key based on the input arguments.""" + return hash_data(input_args) diff --git a/packages/graphrag-common/graphrag_common/factory/factory.py b/packages/graphrag-common/graphrag_common/factory/factory.py index 8f57606aa3..9571b58da1 100644 --- a/packages/graphrag-common/graphrag_common/factory/factory.py +++ b/packages/graphrag-common/graphrag_common/factory/factory.py @@ -8,6 +8,8 @@ from dataclasses import dataclass from typing import Any, ClassVar, Generic, Literal, TypeVar +from graphrag_common.hasher import hash_data + T = TypeVar("T", covariant=True) ServiceScope = Literal["singleton", "transient"] @@ -57,9 +59,14 @@ def register( Args ---- - strategy: The name of the strategy. - initializer: A callable that creates an instance of T. - scope: The service scope, either 'singleton' or 'transient'. + strategy: str + The name of the strategy. + initializer: Callable[..., T] + A callable that creates an instance of T. + scope: ServiceScope (default: "transient") + The scope of the service ("singleton" or "transient"). + Singleton services are cached based on their init args + so that the same instance is returned for the same init args. """ self._service_initializers[strategy] = _ServiceDescriptor(scope, initializer) @@ -69,8 +76,10 @@ def create(self, strategy: str, init_args: dict[str, Any] | None = None) -> T: Args ---- - strategy: The name of the strategy. - init_args: Dict of keyword arguments to pass to the service initializer. + strategy: str + The name of the strategy. + init_args: dict[str, Any] | None + A dictionary of keyword arguments to pass to the service initializer. 
Returns ------- @@ -85,14 +94,20 @@ def create(self, strategy: str, init_args: dict[str, Any] | None = None) -> T: raise ValueError(msg) # Delete entries with value None + # That way services can have default values init_args = {k: v for k, v in (init_args or {}).items() if v is not None} service_descriptor = self._service_initializers[strategy] if service_descriptor.scope == "singleton": - if strategy not in self._initialized_services: - self._initialized_services[strategy] = service_descriptor.initializer( + cache_key = hash_data({ + "strategy": strategy, + "init_args": init_args, + }) + + if cache_key not in self._initialized_services: + self._initialized_services[cache_key] = service_descriptor.initializer( **init_args ) - return self._initialized_services[strategy] + return self._initialized_services[cache_key] return service_descriptor.initializer(**(init_args or {})) diff --git a/packages/graphrag-common/graphrag_common/hasher/__init__.py b/packages/graphrag-common/graphrag_common/hasher/__init__.py new file mode 100644 index 0000000000..cdf27f8f80 --- /dev/null +++ b/packages/graphrag-common/graphrag_common/hasher/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""The GraphRAG hasher module.""" + +from graphrag_common.hasher.hasher import ( + Hasher, + hash_data, + make_yaml_serializable, + sha256_hasher, +) + +__all__ = [ + "Hasher", + "hash_data", + "make_yaml_serializable", + "sha256_hasher", +] diff --git a/packages/graphrag-common/graphrag_common/hasher/hasher.py b/packages/graphrag-common/graphrag_common/hasher/hasher.py new file mode 100644 index 0000000000..c4af285377 --- /dev/null +++ b/packages/graphrag-common/graphrag_common/hasher/hasher.py @@ -0,0 +1,59 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""The GraphRAG hasher module.""" + +import hashlib +from collections.abc import Callable +from typing import Any + +import yaml + +Hasher = Callable[[str], str] +"""Type alias for a hasher function (data: str) -> str.""" + + +def sha256_hasher(data: str) -> str: + """Generate a SHA-256 hash for the input data.""" + return hashlib.sha256(data.encode("utf-8")).hexdigest() + + +def make_yaml_serializable(data: Any) -> Any: + """Convert data to a YAML-serializable format.""" + if isinstance(data, (list, tuple)): + return tuple(make_yaml_serializable(item) for item in data) + + if isinstance(data, set): + return tuple(sorted(make_yaml_serializable(item) for item in data)) + + if isinstance(data, dict): + return tuple( + sorted((key, make_yaml_serializable(value)) for key, value in data.items()) + ) + + return str(data) + + +def hash_data(data: Any, *, hasher: Hasher | None = None) -> str: + """Hash the input data dictionary using the specified hasher function. + + Args + ---- + data: dict[str, Any] + The input data to be hashed. + The input data is serialized using yaml + to support complex data structures such as classes and functions. + hasher: Hasher | None (default: sha256_hasher) + The hasher function to use. (data: str) -> str + + Returns + ------- + str + The resulting hash of the input data. 
+ + """ + hasher = hasher or sha256_hasher + try: + return hasher(yaml.dump(data, sort_keys=True)) + except TypeError: + return hasher(yaml.dump(make_yaml_serializable(data), sort_keys=True)) diff --git a/packages/graphrag-llm/README.md b/packages/graphrag-llm/README.md new file mode 100644 index 0000000000..30f84431fc --- /dev/null +++ b/packages/graphrag-llm/README.md @@ -0,0 +1,87 @@ +# GraphRAG LLM + +## Basic Completion + +```python +import os +from collections.abc import AsyncIterator, Iterator + +from dotenv import load_dotenv +from graphrag_llm.completion import LLMCompletion, create_completion +from graphrag_llm.config import AuthMethod, ModelConfig +from graphrag_llm.types import LLMCompletionChunk, LLMCompletionResponse +from graphrag_llm.utils import ( + gather_completion_response, + gather_completion_response_async, +) + +load_dotenv() + +api_key = os.getenv("GRAPHRAG_API_KEY") +model_config = ModelConfig( + model_provider="azure", + model=os.getenv("GRAPHRAG_MODEL", "gpt-4o"), + azure_deployment_name=os.getenv("GRAPHRAG_MODEL", "gpt-4o"), + api_base=os.getenv("GRAPHRAG_API_BASE"), + api_version=os.getenv("GRAPHRAG_API_VERSION", "2025-04-01-preview"), + api_key=api_key, + auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey, +) +llm_completion: LLMCompletion = create_completion(model_config) + +response: LLMCompletionResponse | Iterator[LLMCompletionChunk] = ( + llm_completion.completion( + messages="What is the capital of France?", + ) +) + +if isinstance(response, Iterator): + # Streaming response + for chunk in response: + print(chunk.choices[0].delta.content or "", end="", flush=True) +else: + # Non-streaming response + print(response.choices[0].message.content) + +# Alternatively, you can use the utility function to gather the full response +# The following is equivalent to the above logic. If all you care about is +# the first choice response then you can use the gather_completion_response +# utility function. +response_text = gather_completion_response(response) +print(response_text) +``` + +## Basic Embedding + +```python +from graphrag_llm.embedding import LLMEmbedding, create_embedding +from graphrag_llm.types import LLMEmbeddingResponse +from graphrag_llm.utils import gather_embeddings + +embedding_config = ModelConfig( + model_provider="azure", + model=os.getenv("GRAPHRAG_EMBEDDING_MODEL", "text-embedding-3-small"), + azure_deployment_name=os.getenv( + "GRAPHRAG_LLM_EMBEDDING_MODEL", "text-embedding-3-small" + ), + api_base=os.getenv("GRAPHRAG_API_BASE"), + api_version=os.getenv("GRAPHRAG_API_VERSION", "2025-04-01-preview"), + api_key=api_key, + auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey, +) + +llm_embedding: LLMEmbedding = create_embedding(embedding_config) + +embeddings_batch: LLMEmbeddingResponse = llm_embedding.embedding( + input=["Hello world", "How are you?"] +) +for data in embeddings_batch.data: + print(data.embedding[0:3]) + +# OR +batch = gather_embeddings(embeddings_batch) +for embedding in batch: + print(embedding[0:3]) +``` + +View the [notebooks](notebooks/README.md) for more examples. \ No newline at end of file diff --git a/packages/graphrag-llm/graphrag_llm/README.md b/packages/graphrag-llm/graphrag_llm/README.md new file mode 100644 index 0000000000..4ed411515f --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/README.md @@ -0,0 +1,83 @@ +# GraphRAG LLM + +View the [notebooks](notebooks) for detailed examples. 
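+
+As a rough sketch (not one of the package's shipped examples), a completion can
+reuse the cache support from `graphrag_cache`; `model_config` below stands in
+for any `ModelConfig` like the ones built in the following sections:
+
+```python
+from graphrag_cache import create_cache
+from graphrag_llm.completion import create_completion
+
+# Default JSON file cache stored under ./cache.
+cache = create_cache()
+
+# Identical requests made through this completion can be served from the cache.
+llm_completion = create_completion(model_config, cache=cache)
+```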
+ +## Basic Completion + +```python +import os +from collections.abc import AsyncIterator, Iterator + +from graphrag_llm.completion import LLMCompletion, create_completion +from graphrag_llm.config import ModelConfig +from graphrag_llm.types import LLMCompletionChunk, LLMCompletionResponse +from graphrag_llm.utils import ( + gather_completion_response, +) + +api_key = os.getenv("GRAPHRAG_API_KEY") +model_config = ModelConfig( + model_provider="azure", + model=os.getenv("GRAPHRAG_MODEL"), + azure_deployment_name=os.getenv("GRAPHRAG_MODEL"), + api_base=os.getenv("GRAPHRAG_API_BASE"), + api_version=os.getenv("GRAPHRAG_API_VERSION"), + api_key=api_key, + azure_managed_identity=not api_key, +) +llm_completion: LLMCompletion = create_completion(model_config) + +response: LLMCompletionResponse | Iterator[LLMCompletionChunk] = ( + llm_completion.completion( + messages="What is the capital of France?", + ) +) + +if isinstance(response, Iterator): + # Streaming response + for chunk in response: + print(chunk.choices[0].delta.content or "", end="", flush=True) +else: + # Non-streaming response + print(response.choices[0].message.content) + +# Alternatively, you can use the utility function to gather the full response +# The following is equivalent to the above logic. If all you care about is +# the first choice response then you can use the gather_completion_response +# utility function. +response_text = gather_completion_response(response) +print(response_text) +``` + +## Basic Embedding + +```python +import os +from collections.abc import AsyncIterator, Iterator + +from graphrag_llm.embedding import LLMEmbedding, create_embedding +from graphrag_llm.config import ModelConfig +from graphrag_llm.types import LLMEmbeddingResponse +from graphrag_llm.utils import ( + gather_completion_response, +) + +api_key = os.getenv("GRAPHRAG_API_KEY") +embedding_config = ModelConfig( + model_provider="azure", + model=os.getenv("GRAPHRAG_EMBEDDING_MODEL"), # type: ignore + azure_deployment_name=os.getenv("GRAPHRAG_EMBEDDING_MODEL"), + api_base=os.getenv("GRAPHRAG_API_BASE"), + api_version=os.getenv("GRAPHRAG_API_VERSION"), + api_key=api_key, + azure_managed_identity=not api_key, +) + +llm_embedding: LLMEmbedding = create_embedding(embedding_config) + +embeddings: LLMEmbeddingResponse = llm_embedding.embedding( + input=["Hello world", "How are you?"] +) +for data in embeddings.data: + print(data.embedding[0:3]) +``` \ No newline at end of file diff --git a/tests/unit/indexing/verbs/helpers/__init__.py b/packages/graphrag-llm/graphrag_llm/__init__.py similarity index 50% rename from tests/unit/indexing/verbs/helpers/__init__.py rename to packages/graphrag-llm/graphrag_llm/__init__.py index 0a3e38adfb..c126a1e373 100644 --- a/tests/unit/indexing/verbs/helpers/__init__.py +++ b/packages/graphrag-llm/graphrag_llm/__init__.py @@ -1,2 +1,8 @@ # Copyright (c) 2024 Microsoft Corporation. # Licensed under the MIT License + +"""GraphRAG LLM Package.""" + +import nest_asyncio2 + +nest_asyncio2.apply() diff --git a/packages/graphrag-llm/graphrag_llm/cache/__init__.py b/packages/graphrag-llm/graphrag_llm/cache/__init__.py new file mode 100644 index 0000000000..20d8b56f08 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/cache/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Cache module.""" + +from graphrag_llm.cache.create_cache_key import create_cache_key + +__all__ = [ + "create_cache_key", +] diff --git a/packages/graphrag-llm/graphrag_llm/cache/create_cache_key.py b/packages/graphrag-llm/graphrag_llm/cache/create_cache_key.py new file mode 100644 index 0000000000..7fa7ec26dd --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/cache/create_cache_key.py @@ -0,0 +1,71 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Create cache key.""" + +from typing import Any + +from graphrag_cache import create_cache_key as default_create_cache_key + +_CACHE_VERSION = 4 +""" +If there's a breaking change in what we cache, we should increment this version number to invalidate existing caches. + +fnllm was on cache version 2 and though we generate +similar cache keys, the objects stored in cache by fnllm and litellm are different. +Using litellm model providers will not be able to reuse caches generated by fnllm +thus we start with version 3 for litellm. + +graphrag-llm package is now on version 4. +This is to account for changes to the ModelConfig that affect the cache key and +occurred when pulling this package out of graphrag. +graphrag-llm, now that is supports metrics, also caches metrics which were not cached before. +""" + + +def create_cache_key( + input_args: dict[str, Any], +) -> str: + """Generate a cache key based on the model configuration and input arguments. + + Args + ____ + input_args: dict[str, Any] + The input arguments for the model call. + + Returns + ------- + str + The generated cache key in the format + `{prefix}_{data_hash}_v{version}` if prefix is provided. + """ + cache_key_parameters = _get_parameters( + input_args=input_args, + ) + return default_create_cache_key(cache_key_parameters) + + +def _get_parameters( + # model_config: "ModelConfig", + input_args: dict[str, Any], +) -> dict[str, Any]: + """Pluck out the parameters that define a cache key.""" + excluded_keys = [ + "metrics", + "stream", + "stream_options", + "mock_response", + "timeout", + "base_url", + "api_base", + "api_version", + "api_key", + "azure_ad_token_provider", + "drop_params", + ] + + parameters: dict[str, Any] = { + k: v for k, v in input_args.items() if k not in excluded_keys + } + + return parameters diff --git a/packages/graphrag-llm/graphrag_llm/completion/__init__.py b/packages/graphrag-llm/graphrag_llm/completion/__init__.py new file mode 100644 index 0000000000..ceb3a43bfb --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/completion/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Completion module for graphrag-llm.""" + +from graphrag_llm.completion.completion import LLMCompletion +from graphrag_llm.completion.completion_factory import ( + create_completion, + register_completion, +) + +__all__ = [ + "LLMCompletion", + "create_completion", + "register_completion", +] diff --git a/packages/graphrag-llm/graphrag_llm/completion/completion.py b/packages/graphrag-llm/graphrag_llm/completion/completion.py new file mode 100644 index 0000000000..0debab2731 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/completion/completion.py @@ -0,0 +1,276 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Completion Abstract Base Class.""" + +from abc import ABC, abstractmethod +from contextlib import contextmanager +from typing import TYPE_CHECKING, Any, Unpack + +from graphrag_llm.threading.completion_thread_runner import completion_thread_runner + +if TYPE_CHECKING: + from collections.abc import AsyncIterator, Iterator + + from graphrag_cache import Cache, CacheKeyCreator + + from graphrag_llm.config import ModelConfig + from graphrag_llm.metrics import MetricsProcessor, MetricsStore + from graphrag_llm.rate_limit import RateLimiter + from graphrag_llm.retry import Retry + from graphrag_llm.threading.completion_thread_runner import ( + ThreadedLLMCompletionFunction, + ThreadedLLMCompletionResponseHandler, + ) + from graphrag_llm.tokenizer import Tokenizer + from graphrag_llm.types import ( + LLMCompletionArgs, + LLMCompletionChunk, + LLMCompletionResponse, + ResponseFormat, + ) + + +class LLMCompletion(ABC): + """Abstract base class for language model completions.""" + + @abstractmethod + def __init__( + self, + *, + model_id: str, + model_config: "ModelConfig", + tokenizer: "Tokenizer", + metrics_store: "MetricsStore", + metrics_processor: "MetricsProcessor | None" = None, + rate_limiter: "RateLimiter | None" = None, + retrier: "Retry | None" = None, + cache: "Cache | None" = None, + cache_key_creator: "CacheKeyCreator", + **kwargs: Any, + ): + """Initialize the LLMCompletion. + + Args + ---- + model_id: str + The model ID, e.g., "openai/gpt-4o". + model_config: ModelConfig + The configuration for the language model. + tokenizer: Tokenizer + The tokenizer to use. + metrics_store: MetricsStore | None (default=None) + The metrics store to use. + metrics_processor: MetricsProcessor | None (default: None) + The metrics processor to use. + rate_limiter: RateLimiter | None (default=None) + The rate limiter to use. + retrier: Retry | None (default=None) + The retry strategy to use. + cache: Cache | None (default=None) + Optional cache for embeddings. + cache_key_creator: CacheKeyCreator | None (default=None) + Optional cache key creator function. + (dict[str, Any]) -> str + **kwargs: Any + Additional keyword arguments. + """ + raise NotImplementedError + + @abstractmethod + def supports_structured_response(self) -> bool: + """Whether the completion supports structured responses. + + Returns + ------- + bool: + True if structured responses are supported, False otherwise. + """ + raise NotImplementedError + + @abstractmethod + def completion( + self, + /, + **kwargs: Unpack["LLMCompletionArgs[ResponseFormat]"], + ) -> "LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk]": + """Sync completion method. + + Args + ---- + messages: LLMCompletionMessagesParam + The messages to send to the LLM. + Can be str | list[dict[str, str]] | list[ChatCompletionMessageParam]. + response_format: BaseModel | None (default=None) + The structured response format. + Must extend pydantic BaseModel. + stream: bool (default=False) + Whether to stream the response. + streaming is not supported when using response_format. + max_completion_tokens: int | None (default=None) + The maximum number of tokens to generate in the completion. + temperature: float | None (default=None) + The temperature to control how deterministic vs. creative the responses are. + top_p: float | None (default=None) + top_p for nucleus sampling, where the model considers tokens with + cumulative probabilities up to top_p. Values range from 0 to 1. 
+ n: int | None (default=None) + The number of completions to generate for each prompt. + tools: list[Tool] | None (default=None) + Optional tools to use during completion. + https://docs.litellm.ai/docs/completion/function_call + **kwargs: Any + Additional keyword arguments. + + Returns + ------- + LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk]: + The completion response or an iterator of completion chunks if streaming. + + """ + raise NotImplementedError + + @abstractmethod + async def completion_async( + self, + /, + **kwargs: Unpack["LLMCompletionArgs[ResponseFormat]"], + ) -> "LLMCompletionResponse[ResponseFormat] | AsyncIterator[LLMCompletionChunk]": + """Async completion method. + + Args + ---- + messages: LLMCompletionMessagesParam + The messages to send to the LLM. + Can be str | list[dict[str, str]] | list[ChatCompletionMessageParam]. + response_format: BaseModel | None (default=None) + The structured response format. + Must extend pydantic BaseModel. + stream: bool (default=False) + Whether to stream the response. + streaming is not supported when using response_format. + max_completion_tokens: int | None (default=None) + The maximum number of tokens to generate in the completion. + temperature: float | None (default=None) + The temperature to control how deterministic vs. creative the responses are. + top_p: float | None (default=None) + top_p for nucleus sampling, where the model considers tokens with + cumulative probabilities up to top_p. Values range from 0 to 1. + n: int | None (default=None) + The number of completions to generate for each prompt. + tools: list[Tool] | None (default=None) + Optional tools to use during completion. + https://docs.litellm.ai/docs/completion/function_call + **kwargs: Any + Additional keyword arguments. + + Returns + ------- + LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk]: + The completion response or an iterator of completion chunks if streaming. + """ + raise NotImplementedError + + @contextmanager + def completion_thread_pool( + self, + *, + response_handler: "ThreadedLLMCompletionResponseHandler", + concurrency: int, + queue_limit: int = 0, + ) -> "Iterator[ThreadedLLMCompletionFunction]": + """Run a completion thread pool. + + Args + ---- + response_handler: ThreadedLLMCompletionResponseHandler + The callback function to handle completion responses. + (request_id, response|exception) -> Awaitable[None] | None + concurrency: int + The number of threads to spin up in a thread pool. + queue_limit: int (default=0) + The maximum number of items allowed in the input queue. + 0 means unlimited. + Set this to a value to create backpressure on the caller. + + Yields + ------ + ThreadedLLMCompletionFunction: + A function that can be used to submit completion requests to the thread pool. + (messages, request_id, **kwargs) -> None + + The thread pool will process the requests and invoke the provided callback + with the responses. + + same signature as LLMCompletionFunction but requires a `request_id` parameter + to identify the request and does not return anything. 
+ """ + with completion_thread_runner( + completion=self.completion, + response_handler=response_handler, + concurrency=concurrency, + queue_limit=queue_limit, + metrics_store=self.metrics_store, + ) as completion: + yield completion + + def completion_batch( + self, + completion_requests: list["LLMCompletionArgs[ResponseFormat]"], + *, + concurrency: int, + queue_limit: int = 0, + ) -> list[ + "LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk] | Exception" + ]: + """Process a batch of completion requests using a thread pool. + + Args + ---- + completion_requests: list[LLMCompletionArgs] + A list of completion request arguments to process in parallel. + concurrency: int + The number of threads to spin up in a thread pool. + queue_limit: int (default=0) + The maximum number of items allowed in the input queue. + 0 means unlimited. + Set this to a value to create backpressure on the caller. + + Returns + ------- + list[LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk] | Exception]: + A list of completion responses or exceptions corresponding to all the requests. + """ + responses: list[ + LLMCompletionResponse[ResponseFormat] + | Iterator[LLMCompletionChunk] + | Exception + ] = [None] * len(completion_requests) # type: ignore + + def handle_response( + request_id: str, + resp: "LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk] | Exception", + ): + responses[int(request_id)] = resp + + with self.completion_thread_pool( + response_handler=handle_response, + concurrency=concurrency, + queue_limit=queue_limit, + ) as threaded_completion: + for idx, request in enumerate(completion_requests): + threaded_completion(request_id=str(idx), **request) + + return responses + + @property + @abstractmethod + def metrics_store(self) -> "MetricsStore": + """Metrics store.""" + raise NotImplementedError + + @property + @abstractmethod + def tokenizer(self) -> "Tokenizer": + """Tokenizer.""" + raise NotImplementedError diff --git a/packages/graphrag-llm/graphrag_llm/completion/completion_factory.py b/packages/graphrag-llm/graphrag_llm/completion/completion_factory.py new file mode 100644 index 0000000000..e1923b9c63 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/completion/completion_factory.py @@ -0,0 +1,150 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Completion factory.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING, Any + +from graphrag_common.factory import Factory + +from graphrag_llm.cache import create_cache_key +from graphrag_llm.config.tokenizer_config import TokenizerConfig +from graphrag_llm.config.types import LLMProviderType +from graphrag_llm.metrics.noop_metrics_store import NoopMetricsStore +from graphrag_llm.tokenizer.tokenizer_factory import create_tokenizer + +if TYPE_CHECKING: + from graphrag_cache import Cache, CacheKeyCreator + from graphrag_common.factory import ServiceScope + + from graphrag_llm.completion.completion import LLMCompletion + from graphrag_llm.config import ModelConfig + from graphrag_llm.metrics import MetricsProcessor, MetricsStore + from graphrag_llm.rate_limit import RateLimiter + from graphrag_llm.retry import Retry + from graphrag_llm.tokenizer import Tokenizer + + +class CompletionFactory(Factory["LLMCompletion"]): + """Factory for creating Completion instances.""" + + +completion_factory = CompletionFactory() + + +def register_completion( + completion_type: str, + completion_initializer: Callable[..., "LLMCompletion"], + scope: "ServiceScope" = "transient", +) -> None: + """Register a custom completion implementation. + + Args + ---- + completion_type: str + The completion id to register. + completion_initializer: Callable[..., LLMCompletion] + The completion initializer to register. + scope: ServiceScope (default: "transient") + The service scope for the completion. + """ + completion_factory.register(completion_type, completion_initializer, scope) + + +def create_completion( + model_config: "ModelConfig", + *, + cache: "Cache | None" = None, + cache_key_creator: "CacheKeyCreator | None" = None, + tokenizer: "Tokenizer | None" = None, +) -> "LLMCompletion": + """Create a Completion instance based on the model configuration. + + Args + ---- + model_config: ModelConfig + The configuration for the model. + cache: Cache | None (default: None) + An optional cache instance. + cache_key_creator: CacheKeyCreator | None (default: create_cache_key) + An optional cache key creator function. + (dict[str, Any]) -> str + tokenizer: Tokenizer | None (default: litellm) + An optional tokenizer instance. + + Returns + ------- + LLMCompletion: + An instance of a LLMCompletion subclass. + """ + cache_key_creator = cache_key_creator or create_cache_key + model_id = f"{model_config.model_provider}/{model_config.model}" + strategy = model_config.type + extra: dict[str, Any] = model_config.model_extra or {} + + if strategy not in completion_factory: + match strategy: + case LLMProviderType.LiteLLM: + from graphrag_llm.completion.lite_llm_completion import ( + LiteLLMCompletion, + ) + + register_completion( + completion_type=LLMProviderType.LiteLLM, + completion_initializer=LiteLLMCompletion, + scope="singleton", + ) + case LLMProviderType.MockLLM: + from graphrag_llm.completion.mock_llm_completion import ( + MockLLMCompletion, + ) + + register_completion( + completion_type=LLMProviderType.MockLLM, + completion_initializer=MockLLMCompletion, + ) + case _: + msg = f"ModelConfig.type '{strategy}' is not registered in the CompletionFactory. 
Registered strategies: {', '.join(completion_factory.keys())}" + raise ValueError(msg) + + tokenizer = tokenizer or create_tokenizer(TokenizerConfig(model_id=model_id)) + + rate_limiter: RateLimiter | None = None + if model_config.rate_limit: + from graphrag_llm.rate_limit.rate_limit_factory import create_rate_limiter + + rate_limiter = create_rate_limiter(rate_limit_config=model_config.rate_limit) + + retrier: Retry | None = None + if model_config.retry: + from graphrag_llm.retry.retry_factory import create_retry + + retrier = create_retry(retry_config=model_config.retry) + + metrics_store: MetricsStore = NoopMetricsStore() + metrics_processor: MetricsProcessor | None = None + if model_config.metrics: + from graphrag_llm.metrics import create_metrics_processor, create_metrics_store + + metrics_store = create_metrics_store( + config=model_config.metrics, + id=model_id, + ) + metrics_processor = create_metrics_processor(model_config.metrics) + + return completion_factory.create( + strategy=strategy, + init_args={ + **extra, + "model_id": model_id, + "model_config": model_config, + "tokenizer": tokenizer, + "metrics_store": metrics_store, + "metrics_processor": metrics_processor, + "rate_limiter": rate_limiter, + "retrier": retrier, + "cache": cache, + "cache_key_creator": cache_key_creator, + }, + ) diff --git a/packages/graphrag-llm/graphrag_llm/completion/lite_llm_completion.py b/packages/graphrag-llm/graphrag_llm/completion/lite_llm_completion.py new file mode 100644 index 0000000000..794296604c --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/completion/lite_llm_completion.py @@ -0,0 +1,314 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""LLMCompletion based on litellm.""" + +from collections.abc import AsyncIterator, Iterator +from typing import TYPE_CHECKING, Any, Unpack + +import litellm +from azure.identity import DefaultAzureCredential, get_bearer_token_provider +from litellm import ModelResponse, supports_response_schema # type: ignore + +from graphrag_llm.completion.completion import LLMCompletion +from graphrag_llm.config.types import AuthMethod +from graphrag_llm.middleware import ( + with_middleware_pipeline, +) +from graphrag_llm.types import LLMCompletionChunk, LLMCompletionResponse +from graphrag_llm.utils import ( + structure_completion_response, +) + +if TYPE_CHECKING: + from graphrag_cache import Cache, CacheKeyCreator + + from graphrag_llm.config import ModelConfig + from graphrag_llm.metrics import MetricsProcessor, MetricsStore + from graphrag_llm.rate_limit import RateLimiter + from graphrag_llm.retry import Retry + from graphrag_llm.tokenizer import Tokenizer + from graphrag_llm.types import ( + AsyncLLMCompletionFunction, + LLMCompletionArgs, + LLMCompletionFunction, + LLMCompletionMessagesParam, + Metrics, + ResponseFormat, + ) + + +litellm.suppress_debug_info = True + + +class LiteLLMCompletion(LLMCompletion): + """LLMCompletion based on litellm.""" + + _model_config: "ModelConfig" + _model_id: str + _track_metrics: bool = False + _metrics_store: "MetricsStore" + _metrics_processor: "MetricsProcessor | None" + _cache: "Cache | None" + _cache_key_creator: "CacheKeyCreator" + _tokenizer: "Tokenizer" + _rate_limiter: "RateLimiter | None" + _retrier: "Retry | None" + + def __init__( + self, + *, + model_id: str, + model_config: "ModelConfig", + tokenizer: "Tokenizer", + metrics_store: "MetricsStore", + metrics_processor: "MetricsProcessor | None" = None, + rate_limiter: "RateLimiter | None" = None, + retrier: "Retry | 
None" = None, + cache: "Cache | None" = None, + cache_key_creator: "CacheKeyCreator", + azure_cognitive_services_audience: str = "https://cognitiveservices.azure.com/.default", + drop_unsupported_params: bool = True, + **kwargs: Any, + ) -> None: + """Initialize LiteLLMCompletion. + + Args + ---- + model_id: str + The LiteLLM model ID, e.g., "openai/gpt-4o" + model_config: ModelConfig + The configuration for the model. + tokenizer: Tokenizer + The tokenizer to use. + metrics_store: MetricsStore | None (default: None) + The metrics store to use. + metrics_processor: MetricsProcessor | None (default: None) + The metrics processor to use. + cache: Cache | None (default: None) + An optional cache instance. + cache_key_prefix: str | None (default: "chat") + The cache key prefix. Required if cache is provided. + rate_limiter: RateLimiter | None (default: None) + The rate limiter to use. + retrier: Retry | None (default: None) + The retry strategy to use. + azure_cognitive_services_audience: str (default: "https://cognitiveservices.azure.com/.default") + The audience for Azure Cognitive Services when using Managed Identity. + drop_unsupported_params: bool (default: True) + Whether to drop unsupported parameters for the model provider. + """ + self._model_id = model_id + self._model_config = model_config + self._tokenizer = tokenizer + self._metrics_store = metrics_store + self._metrics_processor = metrics_processor + self._cache = cache + self._track_metrics = metrics_processor is not None + self._cache_key_creator = cache_key_creator + self._rate_limiter = rate_limiter + self._retrier = retrier + + self._completion, self._completion_async = _create_base_completions( + model_config=model_config, + drop_unsupported_params=drop_unsupported_params, + azure_cognitive_services_audience=azure_cognitive_services_audience, + ) + + self._completion, self._completion_async = with_middleware_pipeline( + model_config=self._model_config, + model_fn=self._completion, + async_model_fn=self._completion_async, + request_type="chat", + cache=self._cache, + cache_key_creator=self._cache_key_creator, + tokenizer=self._tokenizer, + metrics_processor=self._metrics_processor, + rate_limiter=self._rate_limiter, + retrier=self._retrier, + ) + + def supports_structured_response(self) -> bool: + """Check if the model supports structured response.""" + return supports_response_schema(self._model_id) + + def completion( + self, + /, + **kwargs: Unpack["LLMCompletionArgs[ResponseFormat]"], + ) -> "LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk]": + """Sync completion method.""" + messages: LLMCompletionMessagesParam = kwargs.pop("messages") + response_format = kwargs.pop("response_format", None) + if response_format and not self.supports_structured_response(): + msg = f"Model '{self._model_id}' does not support response schemas." + raise ValueError(msg) + + is_streaming = kwargs.get("stream") or False + + if response_format is not None and is_streaming: + msg = "response_format is not supported for streaming completions." 
+ raise ValueError(msg) + + request_metrics: Metrics | None = kwargs.pop("metrics", None) or {} + if not self._track_metrics: + request_metrics = None + + if isinstance(messages, str): + messages = [{"role": "user", "content": messages}] + + try: + response = self._completion( + messages=messages, + metrics=request_metrics, + response_format=response_format, + **kwargs, # type: ignore + ) + if response_format is not None: + structured_response = structure_completion_response( + response.content, response_format + ) + response.formatted_response = structured_response + return response + finally: + if request_metrics is not None: + self._metrics_store.update_metrics(metrics=request_metrics) + + async def completion_async( + self, + /, + **kwargs: Unpack["LLMCompletionArgs[ResponseFormat]"], + ) -> "LLMCompletionResponse[ResponseFormat] | AsyncIterator[LLMCompletionChunk]": + """Async completion method.""" + messages: LLMCompletionMessagesParam = kwargs.pop("messages") + response_format = kwargs.pop("response_format", None) + if response_format and not supports_response_schema( + self._model_id, + ): + msg = f"Model '{self._model_id}' does not support response schemas." + raise ValueError(msg) + + is_streaming = kwargs.get("stream") or False + + if response_format is not None and is_streaming: + msg = "response_format is not supported for streaming completions." + raise ValueError(msg) + + request_metrics: Metrics | None = kwargs.pop("metrics", None) or {} + if not self._track_metrics: + request_metrics = None + + if isinstance(messages, str): + messages = [{"role": "user", "content": messages}] + + try: + response = await self._completion_async( + messages=messages, + metrics=request_metrics, + response_format=response_format, + **kwargs, # type: ignore + ) + if response_format is not None: + structured_response = structure_completion_response( + response.content, response_format + ) + response.formatted_response = structured_response + return response + finally: + if request_metrics is not None: + self._metrics_store.update_metrics(metrics=request_metrics) + + @property + def metrics_store(self) -> "MetricsStore": + """Get metrics store.""" + return self._metrics_store + + @property + def tokenizer(self) -> "Tokenizer": + """Get tokenizer.""" + return self._tokenizer + + +def _create_base_completions( + *, + model_config: "ModelConfig", + drop_unsupported_params: bool, + azure_cognitive_services_audience: str, +) -> tuple["LLMCompletionFunction", "AsyncLLMCompletionFunction"]: + """Create base completions for LiteLLM. + + Convert litellm completion functions to graphrag_llm LLMCompletionFunction. + LLMCompletionFunction is close to the litellm completion function signature, + but uses a few extra params such as metrics. Remove graphrag_llm LLMCompletionFunction + specific params before calling litellm completion functions. 
+ """ + model_provider = model_config.model_provider + model = model_config.azure_deployment_name or model_config.model + + base_args: dict[str, Any] = { + "drop_params": drop_unsupported_params, + "model": f"{model_provider}/{model}", + "api_key": model_config.api_key, + "api_base": model_config.api_base, + "api_version": model_config.api_version, + **model_config.call_args, + } + + if model_config.auth_method == AuthMethod.AzureManagedIdentity: + base_args["azure_ad_token_provider"] = get_bearer_token_provider( + DefaultAzureCredential(), azure_cognitive_services_audience + ) + + def _base_completion( + **kwargs: Any, + ) -> LLMCompletionResponse | Iterator[LLMCompletionChunk]: + kwargs.pop("metrics", None) + mock_response: str | None = kwargs.pop("mock_response", None) + json_object: bool | None = kwargs.pop("response_format_json_object", None) + new_args: dict[str, Any] = {**base_args, **kwargs} + + if model_config.mock_responses and mock_response is not None: + new_args["mock_response"] = mock_response + + if json_object and "response_format" not in new_args: + new_args["response_format"] = {"type": "json_object"} + + response = litellm.completion( + **new_args, + ) + if isinstance(response, ModelResponse): + return LLMCompletionResponse(**response.model_dump()) + + def _run_iterator() -> Iterator[LLMCompletionChunk]: + for chunk in response: + yield LLMCompletionChunk(**chunk.model_dump()) + + return _run_iterator() + + async def _base_completion_async( + **kwargs: Any, + ) -> LLMCompletionResponse | AsyncIterator[LLMCompletionChunk]: + kwargs.pop("metrics", None) + mock_response: str | None = kwargs.pop("mock_response", None) + json_object: bool | None = kwargs.pop("response_format_json_object", None) + new_args: dict[str, Any] = {**base_args, **kwargs} + + if model_config.mock_responses and mock_response is not None: + new_args["mock_response"] = mock_response + + if json_object and "response_format" not in new_args: + new_args["response_format"] = {"type": "json_object"} + + response = await litellm.acompletion( + **new_args, + ) + if isinstance(response, ModelResponse): + return LLMCompletionResponse(**response.model_dump()) + + async def _run_iterator() -> AsyncIterator[LLMCompletionChunk]: + async for chunk in response: + yield LLMCompletionChunk(**chunk.model_dump()) # type: ignore + + return _run_iterator() + + return (_base_completion, _base_completion_async) diff --git a/packages/graphrag-llm/graphrag_llm/completion/mock_llm_completion.py b/packages/graphrag-llm/graphrag_llm/completion/mock_llm_completion.py new file mode 100644 index 0000000000..c1e29fcfc4 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/completion/mock_llm_completion.py @@ -0,0 +1,134 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Mock LLMCompletion.""" + +from typing import TYPE_CHECKING, Any, Unpack + +import litellm + +from graphrag_llm.completion.completion import LLMCompletion +from graphrag_llm.utils import ( + create_completion_response, + structure_completion_response, +) + +if TYPE_CHECKING: + from collections.abc import AsyncIterator, Iterator + + from graphrag_llm.config import ModelConfig + from graphrag_llm.metrics import MetricsStore + from graphrag_llm.tokenizer import Tokenizer + from graphrag_llm.types import ( + LLMCompletionArgs, + LLMCompletionChunk, + LLMCompletionResponse, + ResponseFormat, + ) + + +litellm.suppress_debug_info = True + + +class MockLLMCompletion(LLMCompletion): + """LLMCompletion based on litellm.""" + + _metrics_store: "MetricsStore" + _tokenizer: "Tokenizer" + _mock_responses: list[str] + _mock_index: int = 0 + + def __init__( + self, + *, + model_config: "ModelConfig", + tokenizer: "Tokenizer", + metrics_store: "MetricsStore", + **kwargs: Any, + ) -> None: + """Initialize LiteLLMCompletion. + + Args + ---- + model_id: str + The LiteLLM model ID, e.g., "openai/gpt-4o" + model_config: ModelConfig + The configuration for the model. + tokenizer: Tokenizer + The tokenizer to use. + metrics_store: MetricsStore | None (default: None) + The metrics store to use. + metrics_processor: MetricsProcessor | None (default: None) + The metrics processor to use. + cache: Cache | None (default: None) + An optional cache instance. + cache_key_prefix: str | None (default: "chat") + The cache key prefix. Required if cache is provided. + rate_limiter: RateLimiter | None (default: None) + The rate limiter to use. + retrier: Retry | None (default: None) + The retry strategy to use. + azure_cognitive_services_audience: str (default: "https://cognitiveservices.azure.com/.default") + The audience for Azure Cognitive Services when using Managed Identity. + drop_unsupported_params: bool (default: True) + Whether to drop unsupported parameters for the model provider. + """ + self._tokenizer = tokenizer + self._metrics_store = metrics_store + + mock_responses = model_config.mock_responses + if not isinstance(mock_responses, list) or len(mock_responses) == 0: + msg = "ModelConfig.mock_responses must be a non-empty list." + raise ValueError(msg) + + if not all(isinstance(resp, str) for resp in mock_responses): + msg = "Each item in ModelConfig.mock_responses must be a string." + raise ValueError(msg) + + self._mock_responses = mock_responses # type: ignore + + def supports_structured_response(self) -> bool: + """Check if the model supports structured response.""" + return True + + def completion( + self, + /, + **kwargs: Unpack["LLMCompletionArgs[ResponseFormat]"], + ) -> "LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk]": + """Sync completion method.""" + response_format = kwargs.pop("response_format", None) + + is_streaming = kwargs.get("stream", False) + if is_streaming: + msg = "MockLLMCompletion does not support streaming completions." 
+ raise ValueError(msg) + + response = create_completion_response( + self._mock_responses[self._mock_index % len(self._mock_responses)] + ) + self._mock_index += 1 + if response_format is not None: + structured_response = structure_completion_response( + response.content, response_format + ) + response.formatted_response = structured_response + return response + + async def completion_async( + self, + /, + **kwargs: Unpack["LLMCompletionArgs[ResponseFormat]"], + ) -> "LLMCompletionResponse[ResponseFormat] | AsyncIterator[LLMCompletionChunk]": + """Async completion method.""" + return self.completion(**kwargs) # type: ignore + + @property + def metrics_store(self) -> "MetricsStore": + """Get metrics store.""" + return self._metrics_store + + @property + def tokenizer(self) -> "Tokenizer": + """Get tokenizer.""" + return self._tokenizer diff --git a/packages/graphrag-llm/graphrag_llm/config/__init__.py b/packages/graphrag-llm/graphrag_llm/config/__init__.py new file mode 100644 index 0000000000..fc9023dc1b --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/config/__init__.py @@ -0,0 +1,42 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Config module for graphrag-llm.""" + +from graphrag_llm.config.metrics_config import MetricsConfig +from graphrag_llm.config.model_config import ModelConfig +from graphrag_llm.config.rate_limit_config import RateLimitConfig +from graphrag_llm.config.retry_config import RetryConfig +from graphrag_llm.config.template_engine_config import TemplateEngineConfig +from graphrag_llm.config.tokenizer_config import TokenizerConfig +from graphrag_llm.config.types import ( + AuthMethod, + LLMProviderType, + MetricsProcessorType, + MetricsStoreType, + MetricsWriterType, + RateLimitType, + RetryType, + TemplateEngineType, + TemplateManagerType, + TokenizerType, +) + +__all__ = [ + "AuthMethod", + "LLMProviderType", + "MetricsConfig", + "MetricsProcessorType", + "MetricsStoreType", + "MetricsWriterType", + "ModelConfig", + "RateLimitConfig", + "RateLimitType", + "RetryConfig", + "RetryType", + "TemplateEngineConfig", + "TemplateEngineType", + "TemplateManagerType", + "TokenizerConfig", + "TokenizerType", +] diff --git a/packages/graphrag-llm/graphrag_llm/config/metrics_config.py b/packages/graphrag-llm/graphrag_llm/config/metrics_config.py new file mode 100644 index 0000000000..9d8f88a047 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/config/metrics_config.py @@ -0,0 +1,57 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Metrics configuration.""" + +from pydantic import BaseModel, ConfigDict, Field, model_validator + +from graphrag_llm.config.types import ( + MetricsProcessorType, + MetricsStoreType, + MetricsWriterType, +) + + +class MetricsConfig(BaseModel): + """Configuration for metrics.""" + + model_config = ConfigDict(extra="allow") + """Allow extra fields to support custom metrics implementations.""" + + type: str = Field( + default=MetricsProcessorType.Default, + description="MetricsProcessor implementation to use.", + ) + + store: str = Field( + default=MetricsStoreType.Memory, + description="MetricsStore implementation to use. [memory] (default: memory).", + ) + + writer: str | None = Field( + default=MetricsWriterType.Log, + description="MetricsWriter implementation to use. [log, file] (default: log).", + ) + + log_level: int | None = Field( + default=None, + description="Log level to use when using the 'Log' metrics writer. 
(default: INFO)", + ) + + base_dir: str | None = Field( + default=None, + description="Base directory for file-based metrics writer. (default: ./metrics)", + ) + + def _validate_file_metrics_writer_config(self) -> None: + """Validate parameters for file-based metrics writer.""" + if self.base_dir is not None and self.base_dir.strip() == "": + msg = "base_dir must be specified for file-based metrics writer." + raise ValueError(msg) + + @model_validator(mode="after") + def _validate_model(self): + """Validate the metrics configuration based on its writer type.""" + if self.writer == MetricsWriterType.File: + self._validate_file_metrics_writer_config() + return self diff --git a/packages/graphrag-llm/graphrag_llm/config/model_config.py b/packages/graphrag-llm/graphrag_llm/config/model_config.py new file mode 100644 index 0000000000..1b70f61508 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/config/model_config.py @@ -0,0 +1,110 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Language model configuration.""" + +from typing import Any + +from pydantic import BaseModel, ConfigDict, Field, model_validator + +from graphrag_llm.config.metrics_config import MetricsConfig +from graphrag_llm.config.rate_limit_config import RateLimitConfig +from graphrag_llm.config.retry_config import RetryConfig +from graphrag_llm.config.types import AuthMethod, LLMProviderType + + +class ModelConfig(BaseModel): + """Configuration for a language model.""" + + model_config = ConfigDict(extra="allow") + """Allow extra fields to support custom LLM provider implementations.""" + + type: str = Field( + default=LLMProviderType.LiteLLM, + description="The type of LLM provider to use. (default: litellm)", + ) + + model_provider: str = Field( + description="The provider of the model, e.g., 'openai', 'azure', etc.", + ) + + model: str = Field( + description="The specific model to use, e.g., 'gpt-4o', 'gpt-3.5-turbo', etc.", + ) + + call_args: dict[str, Any] = Field( + default_factory=dict, + description="Base keyword arguments to pass to the model provider's API.", + ) + + api_base: str | None = Field( + default=None, + description="The base URL for the API, required for some providers like Azure.", + ) + + api_version: str | None = Field( + default=None, + description="The version of the API to use.", + ) + + api_key: str | None = Field( + default=None, + description="API key for authentication with the model provider.", + ) + + auth_method: AuthMethod = Field( + default=AuthMethod.ApiKey, + description="The authentication method to use. (default: api_key)", + ) + + azure_deployment_name: str | None = Field( + default=None, + description="The deployment name for Azure models.", + ) + + retry: RetryConfig | None = Field( + default=None, + description="Configuration for the retry strategy.", + ) + + rate_limit: RateLimitConfig | None = Field( + default=None, + description="Configuration for the rate limit behavior.", + ) + + metrics: MetricsConfig | None = Field( + default_factory=MetricsConfig, + description="Specify and configure the metric services.", + ) + + mock_responses: list[str] | list[float] = Field( + default_factory=list, + description="List of mock responses for testing.", + ) + + def _validate_lite_llm_config(self) -> None: + """Validate LiteLLM specific configuration.""" + if self.model_provider == "azure" and ( + not self.azure_deployment_name or not self.api_base + ): + msg = "azure_deployment_name and api_base must be specified with the 'azure' model provider." 
+ raise ValueError(msg) + + if self.model_provider != "azure" and self.azure_deployment_name: + msg = "azure_deployment_name should not be specified for non-Azure model providers." + raise ValueError(msg) + + if self.auth_method == AuthMethod.AzureManagedIdentity: + if self.api_key is not None: + msg = "api_key should not be set when using Azure Managed Identity." + raise ValueError(msg) + elif not self.api_key: + msg = "api_key must be set when auth_method=api_key." + raise ValueError(msg) + + @model_validator(mode="after") + def _validate_model(self): + """Validate model configuration after initialization.""" + if self.type == LLMProviderType.LiteLLM: + self._validate_lite_llm_config() + return self diff --git a/packages/graphrag-llm/graphrag_llm/config/rate_limit_config.py b/packages/graphrag-llm/graphrag_llm/config/rate_limit_config.py new file mode 100644 index 0000000000..df654b8e88 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/config/rate_limit_config.py @@ -0,0 +1,60 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""RateLimit configuration.""" + +from pydantic import BaseModel, ConfigDict, Field, model_validator + +from graphrag_llm.config.types import RateLimitType + + +class RateLimitConfig(BaseModel): + """Configuration for rate limit behavior.""" + + model_config = ConfigDict(extra="allow") + """Allow extra fields to support custom RateLimit implementations.""" + + type: str = Field( + default=RateLimitType.SlidingWindow, + description="The type of rate limit strategy to use. [sliding_window] (default: sliding_window).", + ) + + period_in_seconds: int | None = Field( + default=None, + description="The period in seconds for the rate limit window. (default: 60).", + ) + + requests_per_period: int | None = Field( + default=None, + description="The maximum number of requests allowed per period. (default: None, no limit).", + ) + + tokens_per_period: int | None = Field( + default=None, + description="The maximum number of tokens allowed per period. (default: None, no limit).", + ) + + def _validate_sliding_window_config(self) -> None: + """Validate Sliding Window rate limit configuration.""" + if self.period_in_seconds is not None and self.period_in_seconds <= 0: + msg = "period_in_seconds must be a positive integer for Sliding Window rate limit." + raise ValueError(msg) + + if not self.requests_per_period and not self.tokens_per_period: + msg = "At least one of requests_per_period or tokens_per_period must be specified for Sliding Window rate limit." + raise ValueError(msg) + + if self.requests_per_period is not None and self.requests_per_period <= 0: + msg = "requests_per_period must be a positive integer for Sliding Window rate limit." + raise ValueError(msg) + + if self.tokens_per_period is not None and self.tokens_per_period <= 0: + msg = "tokens_per_period must be a positive integer for Sliding Window rate limit." + raise ValueError(msg) + + @model_validator(mode="after") + def _validate_model(self): + """Validate the rate limit configuration based on its type.""" + if self.type == RateLimitType.SlidingWindow: + self._validate_sliding_window_config() + return self diff --git a/packages/graphrag-llm/graphrag_llm/config/retry_config.py b/packages/graphrag-llm/graphrag_llm/config/retry_config.py new file mode 100644 index 0000000000..01042da70b --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/config/retry_config.py @@ -0,0 +1,69 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Retry configuration.""" + +from pydantic import BaseModel, ConfigDict, Field, model_validator + +from graphrag_llm.config.types import RetryType + + +class RetryConfig(BaseModel): + """Configuration for retry behavior.""" + + model_config = ConfigDict(extra="allow") + """Allow extra fields to support custom Retry implementations.""" + + type: str = Field( + default=RetryType.ExponentialBackoff, + description="The type of retry strategy to use. [exponential_backoff, immediate] (default: exponential_backoff).", + ) + + max_retries: int | None = Field( + default=None, + description="The maximum number of retry attempts.", + ) + + base_delay: float | None = Field( + default=None, + description="The base delay in seconds for exponential backoff.", + ) + + jitter: bool | None = Field( + default=None, + description="Whether to apply jitter to the delay intervals in exponential backoff.", + ) + + max_delay: float | None = Field( + default=None, + description="The maximum delay in seconds between retries.", + ) + + def _validate_exponential_backoff_config(self) -> None: + """Validate Exponential Backoff retry configuration.""" + if self.max_retries is not None and self.max_retries <= 1: + msg = "max_retries must be greater than 1 for Exponential Backoff retry." + raise ValueError(msg) + + if self.base_delay is not None and self.base_delay <= 1.0: + msg = "base_delay must be greater than 1.0 for Exponential Backoff retry." + raise ValueError(msg) + + if self.max_delay is not None and self.max_delay <= 1: + msg = "max_delay must be greater than 1 for Exponential Backoff retry." + raise ValueError(msg) + + def _validate_immediate_config(self) -> None: + """Validate Immediate retry configuration.""" + if self.max_retries is not None and self.max_retries <= 1: + msg = "max_retries must be greater than 1 for Immediate retry." + raise ValueError(msg) + + @model_validator(mode="after") + def _validate_model(self): + """Validate the retry configuration based on its type.""" + if self.type == RetryType.ExponentialBackoff: + self._validate_exponential_backoff_config() + elif self.type == RetryType.Immediate: + self._validate_immediate_config() + return self diff --git a/packages/graphrag-llm/graphrag_llm/config/template_engine_config.py b/packages/graphrag-llm/graphrag_llm/config/template_engine_config.py new file mode 100644 index 0000000000..02d20acd33 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/config/template_engine_config.py @@ -0,0 +1,69 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Template engine configuration.""" + +from pydantic import BaseModel, ConfigDict, Field, model_validator + +from graphrag_llm.config.types import ( + TemplateEngineType, + TemplateManagerType, +) + + +class TemplateEngineConfig(BaseModel): + """Configuration for the template engine.""" + + model_config = ConfigDict(extra="allow") + """Allow extra fields to support custom metrics implementations.""" + + type: str = Field( + default=TemplateEngineType.Jinja, + description="The template engine to use. [jinja]", + ) + + template_manager: str = Field( + default=TemplateManagerType.File, + description="The template manager to use. 
[file, memory] (default: file)", + ) + + base_dir: str | None = Field( + default=None, + description="The base directory for file-based template managers.", + ) + + template_extension: str | None = Field( + default=None, + description="The file extension for locating templates in file-based template managers.", + ) + + encoding: str | None = Field( + default=None, + description="The file encoding for reading templates in file-based template managers.", + ) + + def _validate_file_template_manager_config(self) -> None: + """Validate parameters for file-based template managers.""" + if self.base_dir is not None and self.base_dir.strip() == "": + msg = "base_dir must be specified for file-based template managers." + raise ValueError(msg) + + if ( + self.template_extension is not None + and self.template_extension.strip() == "" + ): + msg = "template_extension cannot be an empty string for file-based template managers." + raise ValueError(msg) + + if ( + self.template_extension is not None + and not self.template_extension.startswith(".") + ): + self.template_extension = f".{self.template_extension}" + + @model_validator(mode="after") + def _validate_model(self): + """Validate the template engine configuration based on its type.""" + if self.template_manager == TemplateManagerType.File: + self._validate_file_template_manager_config() + return self diff --git a/packages/graphrag-llm/graphrag_llm/config/tokenizer_config.py b/packages/graphrag-llm/graphrag_llm/config/tokenizer_config.py new file mode 100644 index 0000000000..b7e6545755 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/config/tokenizer_config.py @@ -0,0 +1,51 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Tokenizer model configuration.""" + +from pydantic import BaseModel, ConfigDict, Field, model_validator + +from graphrag_llm.config.types import TokenizerType + + +class TokenizerConfig(BaseModel): + """Configuration for a tokenizer.""" + + model_config = ConfigDict(extra="allow") + """Allow extra fields to support custom LLM provider implementations.""" + + type: str = Field( + default=TokenizerType.LiteLLM, + description="The type of tokenizer to use. [litellm] (default: litellm).", + ) + + model_id: str | None = Field( + default=None, + description="The identifier for the tokenizer model. Example: openai/gpt-4o. Used by the litellm tokenizer.", + ) + + encoding_name: str | None = Field( + default=None, + description="The encoding name for the tokenizer. Example: gpt-4o.", + ) + + def _validate_litellm_config(self) -> None: + """Validate LiteLLM tokenizer configuration.""" + if self.model_id is None or self.model_id.strip() == "": + msg = "model_id must be specified for LiteLLM tokenizer." + raise ValueError(msg) + + def _validate_tiktoken_config(self) -> None: + """Validate TikToken tokenizer configuration.""" + if self.encoding_name is None or self.encoding_name.strip() == "": + msg = "encoding_name must be specified for TikToken tokenizer." 
+ raise ValueError(msg) + + @model_validator(mode="after") + def _validate_model(self): + """Validate the tokenizer configuration based on its type.""" + if self.type == TokenizerType.LiteLLM: + self._validate_litellm_config() + elif self.type == TokenizerType.Tiktoken: + self._validate_tiktoken_config() + return self diff --git a/packages/graphrag-llm/graphrag_llm/config/types.py b/packages/graphrag-llm/graphrag_llm/config/types.py new file mode 100644 index 0000000000..320e8765fb --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/config/types.py @@ -0,0 +1,72 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + + +"""GraphRAG LLM configuration types.""" + +from enum import StrEnum + + +class LLMProviderType(StrEnum): + """Enum for LLM provider types.""" + + LiteLLM = "litellm" + MockLLM = "mock" + + +class AuthMethod(StrEnum): + """Enum for authentication methods.""" + + ApiKey = "api_key" + AzureManagedIdentity = "azure_managed_identity" + + +class MetricsProcessorType(StrEnum): + """Enum for built-in MetricsProcessor types.""" + + Default = "default" + + +class MetricsWriterType(StrEnum): + """Enum for built-in MetricsWriter types.""" + + Log = "log" + File = "file" + + +class MetricsStoreType(StrEnum): + """Enum for built-in MetricsStore types.""" + + Memory = "memory" + + +class RateLimitType(StrEnum): + """Enum for built-in RateLimit types.""" + + SlidingWindow = "sliding_window" + + +class RetryType(StrEnum): + """Enum for built-in Retry types.""" + + ExponentialBackoff = "exponential_backoff" + Immediate = "immediate" + + +class TemplateEngineType(StrEnum): + """Enum for built-in TemplateEngine types.""" + + Jinja = "jinja" + + +class TemplateManagerType(StrEnum): + """Enum for built-in TemplateEngine types.""" + + File = "file" + + +class TokenizerType(StrEnum): + """Enum for tokenizer types.""" + + LiteLLM = "litellm" + Tiktoken = "tiktoken" diff --git a/packages/graphrag-llm/graphrag_llm/embedding/__init__.py b/packages/graphrag-llm/graphrag_llm/embedding/__init__.py new file mode 100644 index 0000000000..1fb7352ea1 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/embedding/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""LLMEmbedding module for graphrag_llm.""" + +from graphrag_llm.embedding.embedding import LLMEmbedding +from graphrag_llm.embedding.embedding_factory import ( + create_embedding, + register_embedding, +) + +__all__ = [ + "LLMEmbedding", + "create_embedding", + "register_embedding", +] diff --git a/packages/graphrag-llm/graphrag_llm/embedding/embedding.py b/packages/graphrag-llm/graphrag_llm/embedding/embedding.py new file mode 100644 index 0000000000..b97cc3e3cf --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/embedding/embedding.py @@ -0,0 +1,191 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License
+
+"""Embedding Abstract Base Class."""
+
+from abc import ABC, abstractmethod
+from contextlib import contextmanager
+from typing import TYPE_CHECKING, Any, Unpack
+
+from graphrag_llm.threading.embedding_thread_runner import embedding_thread_runner
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from graphrag_cache import Cache, CacheKeyCreator
+
+    from graphrag_llm.config import ModelConfig
+    from graphrag_llm.metrics import MetricsProcessor, MetricsStore
+    from graphrag_llm.rate_limit import RateLimiter
+    from graphrag_llm.retry import Retry
+    from graphrag_llm.threading.embedding_thread_runner import (
+        ThreadedLLMEmbeddingFunction,
+        ThreadedLLMEmbeddingResponseHandler,
+    )
+    from graphrag_llm.tokenizer import Tokenizer
+    from graphrag_llm.types import LLMEmbeddingArgs, LLMEmbeddingResponse
+
+
+class LLMEmbedding(ABC):
+    """Abstract base class for language model embeddings."""
+
+    @abstractmethod
+    def __init__(
+        self,
+        *,
+        model_id: str,
+        model_config: "ModelConfig",
+        tokenizer: "Tokenizer",
+        metrics_store: "MetricsStore",
+        metrics_processor: "MetricsProcessor | None" = None,
+        rate_limiter: "RateLimiter | None" = None,
+        retrier: "Retry | None" = None,
+        cache: "Cache | None" = None,
+        cache_key_creator: "CacheKeyCreator",
+        **kwargs: Any,
+    ):
+        """Initialize the LLMEmbedding.
+
+        Args
+        ----
+        model_id: str
+            The model ID, e.g., "openai/gpt-4o".
+        model_config: ModelConfig
+            The configuration for the language model.
+        tokenizer: Tokenizer
+            The tokenizer to use.
+        metrics_store: MetricsStore
+            The metrics store to use.
+        metrics_processor: MetricsProcessor | None (default: None)
+            The metrics processor to use.
+        rate_limiter: RateLimiter | None (default: None)
+            The rate limiter to use.
+        retrier: Retry | None (default: None)
+            The retry strategy to use.
+        cache: Cache | None (default: None)
+            Optional cache for embeddings.
+        cache_key_creator: CacheKeyCreator
+            The cache key creator function.
+            (dict[str, Any]) -> str
+        **kwargs: Any
+            Additional keyword arguments.
+        """
+        raise NotImplementedError
+
+    @abstractmethod
+    def embedding(
+        self, /, **kwargs: Unpack["LLMEmbeddingArgs"]
+    ) -> "LLMEmbeddingResponse":
+        """Sync embedding method."""
+        raise NotImplementedError
+
+    @abstractmethod
+    async def embedding_async(
+        self, /, **kwargs: Unpack["LLMEmbeddingArgs"]
+    ) -> "LLMEmbeddingResponse":
+        """Async embedding method."""
+        raise NotImplementedError
+
+    @contextmanager
+    def embedding_thread_pool(
+        self,
+        *,
+        response_handler: "ThreadedLLMEmbeddingResponseHandler",
+        concurrency: int,
+        queue_limit: int = 0,
+    ) -> "Iterator[ThreadedLLMEmbeddingFunction]":
+        """Run an embedding thread pool.
+
+        Args
+        ----
+        response_handler: ThreadedLLMEmbeddingResponseHandler
+            The callback function to handle embedding responses.
+            (request_id, response|exception) -> Awaitable[None] | None
+        concurrency: int
+            The number of threads to spin up in a thread pool.
+        queue_limit: int (default: 0)
+            The maximum number of items allowed in the input queue.
+            0 means unlimited.
+            Set this to a value to create backpressure on the caller.
+
+        Yields
+        ------
+        ThreadedLLMEmbeddingFunction:
+            A function that can be used to submit embedding requests to the thread pool.
+            (input, request_id, **kwargs) -> None
+
+            The thread pool will process the requests and invoke the provided callback
+            with the responses.
+
+            The yielded function has the same signature as LLMEmbeddingFunction, but it
+            requires a `request_id` parameter to identify the request and does not
+            return anything.
+
+        """
+        with embedding_thread_runner(
+            embedding=self.embedding,
+            response_handler=response_handler,
+            concurrency=concurrency,
+            queue_limit=queue_limit,
+            metrics_store=self.metrics_store,
+        ) as embedding:
+            yield embedding
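+
+    # NOTE: `embedding_batch` below is a thin convenience wrapper around
+    # `embedding_thread_pool`: it submits each request with its list index as
+    # the `request_id` and collects the responses (or raised exceptions) into
+    # an order-preserving results list.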
+
+    def embedding_batch(
+        self,
+        embedding_requests: list["LLMEmbeddingArgs"],
+        *,
+        concurrency: int,
+        queue_limit: int = 0,
+    ) -> list["LLMEmbeddingResponse | Exception"]:
+        """Process a batch of embedding requests using a thread pool.
+
+        Args
+        ----
+        embedding_requests: list[LLMEmbeddingArgs]
+            A list of embedding request arguments to process in parallel.
+        concurrency: int
+            The number of threads to spin up in a thread pool.
+        queue_limit: int (default: 0)
+            The maximum number of items allowed in the input queue.
+            0 means unlimited.
+            Set this to a value to create backpressure on the caller.
+
+        Returns
+        -------
+        list[LLMEmbeddingResponse | Exception]
+            A list of embedding responses or exceptions for each input.
+        """
+        results: list[LLMEmbeddingResponse | Exception] = [None] * len(
+            embedding_requests
+        )  # type: ignore
+
+        def handle_response(
+            request_id: str,
+            response: "LLMEmbeddingResponse | Exception",
+        ) -> None:
+            index = int(request_id)
+            results[index] = response
+
+        with self.embedding_thread_pool(
+            response_handler=handle_response,
+            concurrency=concurrency,
+            queue_limit=queue_limit,
+        ) as embedding:
+            for idx, embedding_request in enumerate(embedding_requests):
+                embedding(request_id=str(idx), **embedding_request)
+
+        return results
+
+    @property
+    @abstractmethod
+    def metrics_store(self) -> "MetricsStore":
+        """Metrics store."""
+        raise NotImplementedError
+
+    @property
+    @abstractmethod
+    def tokenizer(self) -> "Tokenizer":
+        """Tokenizer."""
+        raise NotImplementedError
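For orientation, a minimal sketch of driving the batch API above with the mock provider defined later in this package; the `input` field follows `LLMEmbeddingArgs` as consumed by `MockLLMEmbedding`, and the exact response fields depend on `LLMEmbeddingResponse`:

```python
from graphrag_llm.config import LLMProviderType, ModelConfig
from graphrag_llm.embedding import create_embedding

# The mock provider builds responses from the configured floats instead of
# calling an API, which keeps the sketch runnable without credentials.
model_config = ModelConfig(
    type=LLMProviderType.MockLLM,
    model_provider="openai",
    model="text-embedding-3-small",
    mock_responses=[0.1, 0.2, 0.3],
)
embedding = create_embedding(model_config)

# Each request is an LLMEmbeddingArgs dict; results preserve request order.
results = embedding.embedding_batch(
    [{"input": ["first text"]}, {"input": ["second text"]}],
    concurrency=2,
)
for result in results:
    if isinstance(result, Exception):
        print(f"request failed: {result}")
    else:
        print(result.usage)
```

Swapping `LLMProviderType.MockLLM` for `LLMProviderType.LiteLLM` (with an `api_key` or Azure Managed Identity configured on the `ModelConfig`) routes the same calls through litellm.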
diff --git a/packages/graphrag-llm/graphrag_llm/embedding/embedding_factory.py b/packages/graphrag-llm/graphrag_llm/embedding/embedding_factory.py
new file mode 100644
index 0000000000..44592991b3
--- /dev/null
+++ b/packages/graphrag-llm/graphrag_llm/embedding/embedding_factory.py
@@ -0,0 +1,150 @@
+# Copyright (c) 2024 Microsoft Corporation.
+# Licensed under the MIT License
+
+"""Embedding factory."""
+
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Any
+
+from graphrag_common.factory import Factory
+
+from graphrag_llm.cache import create_cache_key
+from graphrag_llm.config.tokenizer_config import TokenizerConfig
+from graphrag_llm.config.types import LLMProviderType
+from graphrag_llm.metrics.noop_metrics_store import NoopMetricsStore
+from graphrag_llm.tokenizer.tokenizer_factory import create_tokenizer
+
+if TYPE_CHECKING:
+    from graphrag_cache import Cache, CacheKeyCreator
+    from graphrag_common.factory import ServiceScope
+
+    from graphrag_llm.config.model_config import ModelConfig
+    from graphrag_llm.embedding.embedding import LLMEmbedding
+    from graphrag_llm.metrics import MetricsProcessor, MetricsStore
+    from graphrag_llm.rate_limit import RateLimiter
+    from graphrag_llm.retry import Retry
+    from graphrag_llm.tokenizer import Tokenizer
+
+
+class EmbeddingFactory(Factory["LLMEmbedding"]):
+    """Factory for creating Embedding instances."""
+
+
+embedding_factory = EmbeddingFactory()
+
+
+def register_embedding(
+    embedding_type: str,
+    embedding_initializer: Callable[..., "LLMEmbedding"],
+    scope: "ServiceScope" = "transient",
+) -> None:
+    """Register a custom embedding implementation.
+
+    Args
+    ----
+    embedding_type: str
+        The embedding id to register.
+    embedding_initializer: Callable[..., LLMEmbedding]
+        The embedding initializer to register.
+    scope: ServiceScope (default: "transient")
+        The service scope for the embedding.
+    """
+    embedding_factory.register(embedding_type, embedding_initializer, scope)
+
+
+def create_embedding(
+    model_config: "ModelConfig",
+    *,
+    cache: "Cache | None" = None,
+    cache_key_creator: "CacheKeyCreator | None" = None,
+    tokenizer: "Tokenizer | None" = None,
+) -> "LLMEmbedding":
+    """Create an Embedding instance based on the model configuration.
+
+    Args
+    ----
+    model_config: ModelConfig
+        The configuration for the model.
+    cache: Cache | None (default: None)
+        An optional cache instance.
+    cache_key_creator: CacheKeyCreator | None (default: create_cache_key)
+        An optional cache key creator function.
+    tokenizer: Tokenizer | None (default: None)
+        An optional tokenizer instance. If not provided, a LiteLLM tokenizer is
+        created from the model id.
+
+    Returns
+    -------
+    LLMEmbedding:
+        An instance of an LLMEmbedding subclass.
+    """
+    cache_key_creator = cache_key_creator or create_cache_key
+    model_id = f"{model_config.model_provider}/{model_config.model}"
+    strategy = model_config.type
+    extra: dict[str, Any] = model_config.model_extra or {}
+
+    if strategy not in embedding_factory:
+        match strategy:
+            case LLMProviderType.LiteLLM:
+                from graphrag_llm.embedding.lite_llm_embedding import (
+                    LiteLLMEmbedding,
+                )
+
+                register_embedding(
+                    embedding_type=LLMProviderType.LiteLLM,
+                    embedding_initializer=LiteLLMEmbedding,
+                    scope="singleton",
+                )
+            case LLMProviderType.MockLLM:
+                from graphrag_llm.embedding.mock_llm_embedding import MockLLMEmbedding
+
+                register_embedding(
+                    embedding_type=LLMProviderType.MockLLM,
+                    embedding_initializer=MockLLMEmbedding,
+                )
+            case _:
+                msg = f"ModelConfig.type '{strategy}' is not registered in the EmbeddingFactory.
Registered strategies: {', '.join(embedding_factory.keys())}" + raise ValueError(msg) + + tokenizer = tokenizer or create_tokenizer(TokenizerConfig(model_id=model_id)) + + rate_limiter: RateLimiter | None = None + if model_config.rate_limit: + from graphrag_llm.rate_limit.rate_limit_factory import create_rate_limiter + + rate_limiter = create_rate_limiter(rate_limit_config=model_config.rate_limit) + + retrier: Retry | None = None + if model_config.retry: + from graphrag_llm.retry.retry_factory import create_retry + + retrier = create_retry(retry_config=model_config.retry) + + metrics_store: MetricsStore = NoopMetricsStore() + metrics_processor: MetricsProcessor | None = None + if model_config.metrics: + from graphrag_llm.metrics import ( + create_metrics_processor, + create_metrics_store, + ) + + metrics_store = create_metrics_store( + config=model_config.metrics, + id=model_id, + ) + metrics_processor = create_metrics_processor(model_config.metrics) + + return embedding_factory.create( + strategy=strategy, + init_args={ + **extra, + "model_id": model_id, + "model_config": model_config, + "tokenizer": tokenizer, + "metrics_store": metrics_store, + "metrics_processor": metrics_processor, + "rate_limiter": rate_limiter, + "retrier": retrier, + "cache": cache, + "cache_key_creator": cache_key_creator, + }, + ) diff --git a/packages/graphrag-llm/graphrag_llm/embedding/lite_llm_embedding.py b/packages/graphrag-llm/graphrag_llm/embedding/lite_llm_embedding.py new file mode 100644 index 0000000000..18e39a4ea8 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/embedding/lite_llm_embedding.py @@ -0,0 +1,198 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""LLMEmbedding based on litellm.""" + +from typing import TYPE_CHECKING, Any, Unpack + +import litellm +from azure.identity import DefaultAzureCredential, get_bearer_token_provider + +from graphrag_llm.config.types import AuthMethod +from graphrag_llm.embedding.embedding import LLMEmbedding +from graphrag_llm.middleware import with_middleware_pipeline +from graphrag_llm.types import LLMEmbeddingResponse + +if TYPE_CHECKING: + from graphrag_cache import Cache, CacheKeyCreator + + from graphrag_llm.config import ModelConfig + from graphrag_llm.metrics import MetricsProcessor, MetricsStore + from graphrag_llm.rate_limit import RateLimiter + from graphrag_llm.retry import Retry + from graphrag_llm.tokenizer import Tokenizer + from graphrag_llm.types import ( + AsyncLLMEmbeddingFunction, + LLMEmbeddingArgs, + LLMEmbeddingFunction, + Metrics, + ) + +litellm.suppress_debug_info = True + + +class LiteLLMEmbedding(LLMEmbedding): + """LLMEmbedding based on litellm.""" + + _model_config: "ModelConfig" + _model_id: str + _track_metrics: bool = False + _metrics_store: "MetricsStore" + _metrics_processor: "MetricsProcessor | None" + _cache: "Cache | None" + _cache_key_creator: "CacheKeyCreator" + _tokenizer: "Tokenizer" + _rate_limiter: "RateLimiter | None" + _retrier: "Retry | None" + + def __init__( + self, + *, + model_id: str, + model_config: "ModelConfig", + tokenizer: "Tokenizer", + metrics_store: "MetricsStore", + metrics_processor: "MetricsProcessor | None" = None, + rate_limiter: "RateLimiter | None" = None, + retrier: "Retry | None" = None, + cache: "Cache | None" = None, + cache_key_creator: "CacheKeyCreator", + azure_cognitive_services_audience: str = "https://cognitiveservices.azure.com/.default", + drop_unsupported_params: bool = True, + **kwargs: Any, + ): + """Initialize LiteLLMEmbedding. 
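+
+        The base litellm embedding call is wrapped with the configured
+        middleware pipeline (cache, metrics, rate limiting, retries) via
+        `with_middleware_pipeline`.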
+
+        Args
+        ----
+        model_id: str
+            The LiteLLM model ID, e.g., "openai/gpt-4o".
+        model_config: ModelConfig
+            The configuration for the model.
+        tokenizer: Tokenizer
+            The tokenizer to use.
+        metrics_store: MetricsStore
+            The metrics store to use.
+        metrics_processor: MetricsProcessor | None (default: None)
+            The metrics processor to use.
+        cache: Cache | None (default: None)
+            An optional cache instance.
+        cache_key_creator: CacheKeyCreator
+            The cache key creator function used to build cache keys from the
+            request arguments.
+        rate_limiter: RateLimiter | None (default: None)
+            The rate limiter to use.
+        retrier: Retry | None (default: None)
+            The retry strategy to use.
+        azure_cognitive_services_audience: str (default: "https://cognitiveservices.azure.com/.default")
+            The audience for Azure Cognitive Services when using Managed Identity.
+        drop_unsupported_params: bool (default: True)
+            Whether to drop unsupported parameters for the model provider.
+        """
+        self._model_id = model_id
+        self._model_config = model_config
+        self._tokenizer = tokenizer
+        self._metrics_store = metrics_store
+        self._metrics_processor = metrics_processor
+        self._track_metrics = metrics_processor is not None
+        self._cache = cache
+        self._cache_key_creator = cache_key_creator
+        self._rate_limiter = rate_limiter
+        self._retrier = retrier
+
+        self._embedding, self._embedding_async = _create_base_embeddings(
+            model_config=model_config,
+            drop_unsupported_params=drop_unsupported_params,
+            azure_cognitive_services_audience=azure_cognitive_services_audience,
+        )
+
+        self._embedding, self._embedding_async = with_middleware_pipeline(
+            model_config=self._model_config,
+            model_fn=self._embedding,
+            async_model_fn=self._embedding_async,
+            request_type="embedding",
+            cache=self._cache,
+            cache_key_creator=self._cache_key_creator,
+            tokenizer=self._tokenizer,
+            metrics_processor=self._metrics_processor,
+            rate_limiter=self._rate_limiter,
+            retrier=self._retrier,
+        )
+
+    def embedding(
+        self, /, **kwargs: Unpack["LLMEmbeddingArgs"]
+    ) -> "LLMEmbeddingResponse":
+        """Sync embedding method."""
+        request_metrics: Metrics | None = kwargs.pop("metrics", None) or {}
+        if not self._track_metrics:
+            request_metrics = None
+
+        try:
+            return self._embedding(metrics=request_metrics, **kwargs)
+        finally:
+            if request_metrics:
+                self._metrics_store.update_metrics(metrics=request_metrics)
+
+    async def embedding_async(
+        self, /, **kwargs: Unpack["LLMEmbeddingArgs"]
+    ) -> "LLMEmbeddingResponse":
+        """Async embedding method."""
+        request_metrics: Metrics | None = kwargs.pop("metrics", None) or {}
+        if not self._track_metrics:
+            request_metrics = None
+
+        try:
+            return await self._embedding_async(metrics=request_metrics, **kwargs)
+        finally:
+            if request_metrics:
+                self._metrics_store.update_metrics(metrics=request_metrics)
+
+    @property
+    def metrics_store(self) -> "MetricsStore":
+        """Get metrics store."""
+        return self._metrics_store
+
+    @property
+    def tokenizer(self) -> "Tokenizer":
+        """Get tokenizer."""
+        return self._tokenizer
+
+
+def _create_base_embeddings(
+    *,
+    model_config: "ModelConfig",
+    drop_unsupported_params: bool,
+    azure_cognitive_services_audience: str,
+) -> tuple["LLMEmbeddingFunction", "AsyncLLMEmbeddingFunction"]:
+    """Create base embedding functions."""
+    model_provider = model_config.model_provider
+    model = model_config.azure_deployment_name or model_config.model
+
+    base_args: dict[str, Any] = {
+        "drop_params": drop_unsupported_params,
+        "model": f"{model_provider}/{model}",
+        "api_key":
model_config.api_key, + "api_base": model_config.api_base, + "api_version": model_config.api_version, + **model_config.call_args, + } + + if model_config.auth_method == AuthMethod.AzureManagedIdentity: + base_args["azure_ad_token_provider"] = get_bearer_token_provider( + DefaultAzureCredential(), azure_cognitive_services_audience + ) + + def _base_embedding(**kwargs: Any) -> LLMEmbeddingResponse: + kwargs.pop("metrics", None) # Remove metrics if present + new_args: dict[str, Any] = {**base_args, **kwargs} + + response = litellm.embedding(**new_args) + return LLMEmbeddingResponse(**response.model_dump()) + + async def _base_embedding_async(**kwargs: Any) -> LLMEmbeddingResponse: + kwargs.pop("metrics", None) # Remove metrics if present + new_args: dict[str, Any] = {**base_args, **kwargs} + + response = await litellm.aembedding(**new_args) + return LLMEmbeddingResponse(**response.model_dump()) + + return _base_embedding, _base_embedding_async diff --git a/packages/graphrag-llm/graphrag_llm/embedding/mock_llm_embedding.py b/packages/graphrag-llm/graphrag_llm/embedding/mock_llm_embedding.py new file mode 100644 index 0000000000..3b9649038c --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/embedding/mock_llm_embedding.py @@ -0,0 +1,81 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""MockLLMEmbedding.""" + +from typing import TYPE_CHECKING, Any, Unpack + +import litellm + +from graphrag_llm.embedding.embedding import LLMEmbedding +from graphrag_llm.utils import create_embedding_response + +if TYPE_CHECKING: + from graphrag_llm.config import ModelConfig + from graphrag_llm.metrics import MetricsStore + from graphrag_llm.tokenizer import Tokenizer + from graphrag_llm.types import ( + LLMEmbeddingArgs, + LLMEmbeddingResponse, + ) + +litellm.suppress_debug_info = True + + +class MockLLMEmbedding(LLMEmbedding): + """MockLLMEmbedding.""" + + _metrics_store: "MetricsStore" + _tokenizer: "Tokenizer" + _mock_responses: list[float] + _mock_index: int = 0 + + def __init__( + self, + *, + model_config: "ModelConfig", + tokenizer: "Tokenizer", + metrics_store: "MetricsStore", + **kwargs: Any, + ): + """Initialize MockLLMEmbedding.""" + self._tokenizer = tokenizer + self._metrics_store = metrics_store + + mock_responses = model_config.mock_responses + if not isinstance(mock_responses, list) or len(mock_responses) == 0: + msg = "ModelConfig.mock_responses must be a non-empty list of embedding responses." + raise ValueError(msg) + + if not all(isinstance(resp, float) for resp in mock_responses): + msg = "Each item in ModelConfig.mock_responses must be a float." 
+ raise ValueError(msg) + + self._mock_responses = mock_responses # type: ignore + + def embedding( + self, /, **kwargs: Unpack["LLMEmbeddingArgs"] + ) -> "LLMEmbeddingResponse": + """Sync embedding method.""" + input = kwargs.get("input") + response = create_embedding_response( + self._mock_responses, batch_size=len(input) + ) + self._mock_index += 1 + return response + + async def embedding_async( + self, /, **kwargs: Unpack["LLMEmbeddingArgs"] + ) -> "LLMEmbeddingResponse": + """Async embedding method.""" + return self.embedding(**kwargs) + + @property + def metrics_store(self) -> "MetricsStore": + """Get metrics store.""" + return self._metrics_store + + @property + def tokenizer(self) -> "Tokenizer": + """Get tokenizer.""" + return self._tokenizer diff --git a/packages/graphrag-llm/graphrag_llm/metrics/__init__.py b/packages/graphrag-llm/graphrag_llm/metrics/__init__.py new file mode 100644 index 0000000000..e039bf8acf --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/__init__.py @@ -0,0 +1,34 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Metrics module for graphrag-llm.""" + +from graphrag_llm.metrics.metrics_aggregator import metrics_aggregator +from graphrag_llm.metrics.metrics_processor import MetricsProcessor +from graphrag_llm.metrics.metrics_processor_factory import ( + create_metrics_processor, + register_metrics_processor, +) +from graphrag_llm.metrics.metrics_store import MetricsStore +from graphrag_llm.metrics.metrics_store_factory import ( + create_metrics_store, + register_metrics_store, +) +from graphrag_llm.metrics.metrics_writer import MetricsWriter +from graphrag_llm.metrics.metrics_writer_factory import ( + create_metrics_writer, + register_metrics_writer, +) + +__all__ = [ + "MetricsProcessor", + "MetricsStore", + "MetricsWriter", + "create_metrics_processor", + "create_metrics_store", + "create_metrics_writer", + "metrics_aggregator", + "register_metrics_processor", + "register_metrics_store", + "register_metrics_writer", +] diff --git a/packages/graphrag-llm/graphrag_llm/metrics/default_metrics_processor.py b/packages/graphrag-llm/graphrag_llm/metrics/default_metrics_processor.py new file mode 100644 index 0000000000..7249e701a1 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/default_metrics_processor.py @@ -0,0 +1,130 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License
+
+"""Default Metrics Processor."""
+
+from typing import TYPE_CHECKING, Any
+
+from graphrag_llm.metrics.metrics_processor import MetricsProcessor
+from graphrag_llm.model_cost_registry import model_cost_registry
+from graphrag_llm.types import LLMCompletionResponse, LLMEmbeddingResponse
+
+if TYPE_CHECKING:
+    from collections.abc import AsyncIterator, Iterator
+
+    from graphrag_llm.config import ModelConfig
+    from graphrag_llm.types import (
+        LLMCompletionChunk,
+        Metrics,
+    )
+
+
+class DefaultMetricsProcessor(MetricsProcessor):
+    """Default metrics processor that records token usage and cost metrics."""
+
+    def __init__(self, **kwargs: Any) -> None:
+        """Initialize DefaultMetricsProcessor."""
+
+    def process_metrics(
+        self,
+        *,
+        model_config: "ModelConfig",
+        metrics: "Metrics",
+        input_args: dict[str, Any],
+        response: "LLMCompletionResponse \
+            | Iterator[LLMCompletionChunk] \
+            | AsyncIterator[LLMCompletionChunk] \
+            | LLMEmbeddingResponse",
+    ) -> None:
+        """Process metrics."""
+        self._process_metrics_common(
+            model_config=model_config,
+            metrics=metrics,
+            input_args=input_args,
+            response=response,
+        )
+
+    def _process_metrics_common(
+        self,
+        *,
+        model_config: "ModelConfig",
+        metrics: "Metrics",
+        input_args: dict[str, Any],
+        response: "LLMCompletionResponse \
+            | Iterator[LLMCompletionChunk] \
+            | AsyncIterator[LLMCompletionChunk] \
+            | LLMEmbeddingResponse",
+    ) -> None:
+        if isinstance(response, LLMCompletionResponse):
+            self._process_lm_chat_completion(
+                model_config=model_config,
+                metrics=metrics,
+                input_args=input_args,
+                response=response,
+            )
+        elif isinstance(response, LLMEmbeddingResponse):
+            self._process_lm_embedding_response(
+                model_config=model_config,
+                metrics=metrics,
+                input_args=input_args,
+                response=response,
+            )
+
+    def _process_lm_chat_completion(
+        self,
+        model_config: "ModelConfig",
+        metrics: "Metrics",
+        input_args: dict[str, Any],
+        response: "LLMCompletionResponse",
+    ) -> None:
+        """Process LMChatCompletion metrics."""
+        prompt_tokens = response.usage.prompt_tokens if response.usage else 0
+        completion_tokens = response.usage.completion_tokens if response.usage else 0
+        total_tokens = prompt_tokens + completion_tokens
+
+        if total_tokens > 0:
+            metrics["responses_with_tokens"] = 1
+            metrics["prompt_tokens"] = prompt_tokens
+            metrics["completion_tokens"] = completion_tokens
+            metrics["total_tokens"] = total_tokens
+
+        model_id = f"{model_config.model_provider}/{model_config.model}"
+        model_costs = model_cost_registry.get_model_costs(model_id)
+
+        if not model_costs:
+            return
+
+        input_cost = prompt_tokens * model_costs["input_cost_per_token"]
+        output_cost = completion_tokens * model_costs["output_cost_per_token"]
+        total_cost = input_cost + output_cost
+
+        metrics["responses_with_cost"] = 1
+        metrics["input_cost"] = input_cost
+        metrics["output_cost"] = output_cost
+        metrics["total_cost"] = total_cost
+
+    def _process_lm_embedding_response(
+        self,
+        model_config: "ModelConfig",
+        metrics: "Metrics",
+        input_args: dict[str, Any],
+        response: "LLMEmbeddingResponse",
+    ) -> None:
+        """Process LLMEmbeddingResponse metrics."""
+        prompt_tokens = response.usage.prompt_tokens if response.usage else 0
+
+        if prompt_tokens > 0:
+            metrics["responses_with_tokens"] = 1
+            metrics["prompt_tokens"] = prompt_tokens
+            metrics["total_tokens"] = prompt_tokens
+
+        model_id = f"{model_config.model_provider}/{model_config.model}"
+        model_costs = model_cost_registry.get_model_costs(model_id)
+
+        if not model_costs:
+            return
+
+        input_cost =
prompt_tokens * model_costs["input_cost_per_token"] + metrics["responses_with_cost"] = 1 + metrics["input_cost"] = input_cost + metrics["total_cost"] = input_cost diff --git a/packages/graphrag-llm/graphrag_llm/metrics/file_metrics_writer.py b/packages/graphrag-llm/graphrag_llm/metrics/file_metrics_writer.py new file mode 100644 index 0000000000..c80f345279 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/file_metrics_writer.py @@ -0,0 +1,37 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""File metrics writer implementation.""" + +import json +from collections.abc import Callable +from datetime import datetime, timezone +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from graphrag_llm.metrics.metrics_writer import MetricsWriter + +if TYPE_CHECKING: + from graphrag_llm.types import Metrics + + +class FileMetricsWriter(MetricsWriter): + """File metrics writer implementation.""" + + _log_method: Callable[..., None] | None = None + _base_dir: Path + _file_path: Path + + def __init__(self, *, base_dir: str | None = None, **kwargs: Any) -> None: + """Initialize FileMetricsWriter.""" + self._base_dir = Path(base_dir or Path.cwd()).resolve() + now = datetime.now(timezone.utc).astimezone().strftime("%Y%m%d_%H%M%S") + self._file_path = self._base_dir / f"{now}.jsonl" + + self._base_dir.mkdir(parents=True, exist_ok=True) + + def write_metrics(self, *, id: str, metrics: "Metrics") -> None: + """Write the given metrics.""" + record = json.dumps({"id": id, "metrics": metrics}) + with self._file_path.open("a", encoding="utf-8") as f: + f.write(f"{record}\n") diff --git a/packages/graphrag-llm/graphrag_llm/metrics/log_metrics_writer.py b/packages/graphrag-llm/graphrag_llm/metrics/log_metrics_writer.py new file mode 100644 index 0000000000..f09b878a12 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/log_metrics_writer.py @@ -0,0 +1,39 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Log metrics writer implementation.""" + +import json +import logging +from collections.abc import Callable +from typing import TYPE_CHECKING, Any + +from graphrag_llm.metrics.metrics_writer import MetricsWriter + +if TYPE_CHECKING: + from graphrag_llm.types import Metrics + +logger = logging.getLogger(__name__) + +_log_methods = { + logging.DEBUG: logger.debug, + logging.INFO: logger.info, + logging.WARNING: logger.warning, + logging.ERROR: logger.error, + logging.CRITICAL: logger.critical, +} + + +class LogMetricsWriter(MetricsWriter): + """Log metrics writer implementation.""" + + _log_method: Callable[..., None] = _log_methods[logging.INFO] + + def __init__(self, *, log_level: int | None = None, **kwargs: Any) -> None: + """Initialize LogMetricsWriter.""" + if log_level and log_level in _log_methods: + self._log_method = _log_methods[log_level] + + def write_metrics(self, *, id: str, metrics: "Metrics") -> None: + """Write the given metrics.""" + self._log_method(f"Metrics for {id}: {json.dumps(metrics, indent=2)}") diff --git a/packages/graphrag-llm/graphrag_llm/metrics/memory_metrics_store.py b/packages/graphrag-llm/graphrag_llm/metrics/memory_metrics_store.py new file mode 100644 index 0000000000..c2456299e1 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/memory_metrics_store.py @@ -0,0 +1,112 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Default metrics store.""" + +import atexit +import threading +from typing import TYPE_CHECKING, Any + +from graphrag_llm.metrics.metrics_aggregator import metrics_aggregator +from graphrag_llm.metrics.metrics_store import MetricsStore + +if TYPE_CHECKING: + from graphrag_llm.metrics.metrics_writer import MetricsWriter + from graphrag_llm.types import Metrics + +_default_sort_order: list[str] = [ + "attempted_request_count", + "successful_response_count", + "failed_response_count", + "failure_rate", + "requests_with_retries", + "retries", + "retry_rate", + "compute_duration_seconds", + "compute_duration_per_response_seconds", + "runtime_duration_seconds", + "cached_responses", + "cache_hit_rate", + "streaming_responses", + "responses_with_tokens", + "prompt_tokens", + "completion_tokens", + "total_tokens", + "tokens_per_response", + "responses_with_cost", + "input_cost", + "output_cost", + "total_cost", + "cost_per_response", +] + + +class MemoryMetricsStore(MetricsStore): + """Store for metrics.""" + + _metrics_writer: "MetricsWriter | None" = None + _id: str + _sort_order: list[str] + _thread_lock: threading.Lock + _metrics: "Metrics" + + def __init__( + self, + *, + id: str, + metrics_writer: "MetricsWriter | None" = None, + sort_order: list[str] | None = None, + **kwargs: Any, + ) -> None: + """Initialize MemoryMetricsStore.""" + self._id = id + self._sort_order = sort_order or _default_sort_order + self._thread_lock = threading.Lock() + self._metrics = {} + + if metrics_writer: + self._metrics_writer = metrics_writer + atexit.register(self._on_exit_) + + def _on_exit_(self) -> None: + if self._metrics_writer: + self._metrics_writer.write_metrics(id=self._id, metrics=self.get_metrics()) + + @property + def id(self) -> str: + """Get the ID of the metrics store.""" + return self._id + + def update_metrics(self, *, metrics: "Metrics") -> None: + """Update the store with multiple metrics.""" + with self._thread_lock: + for name, value in metrics.items(): + if name in self._metrics: + self._metrics[name] += value + else: + self._metrics[name] = value + + def _sort_metrics(self) -> "Metrics": + """Sort metrics based on the predefined sort order.""" + sorted_metrics: Metrics = {} + for key in self._sort_order: + if key in self._metrics: + sorted_metrics[key] = self._metrics[key] + for key in self._metrics: + if key not in sorted_metrics: + sorted_metrics[key] = self._metrics[key] + return sorted_metrics + + def get_metrics(self) -> "Metrics": + """Get all metrics from the store.""" + metrics_aggregator.aggregate(self._metrics) + return self._sort_metrics() + + def clear_metrics(self) -> None: + """Clear all metrics from the store. + + Returns + ------- + None + """ + self._metrics = {} diff --git a/packages/graphrag-llm/graphrag_llm/metrics/metrics_aggregator.py b/packages/graphrag-llm/graphrag_llm/metrics/metrics_aggregator.py new file mode 100644 index 0000000000..88949e0ecb --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/metrics_aggregator.py @@ -0,0 +1,140 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Metrics aggregator module.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING, Any, ClassVar + +if TYPE_CHECKING: + from graphrag_llm.types.types import Metrics + + +class MetricsAggregator: + """Metrics Aggregator.""" + + _instance: ClassVar["MetricsAggregator | None"] = None + _aggregate_functions: dict[str, Callable[["Metrics"], None]] + + def __new__(cls, *args: Any, **kwargs: Any) -> "MetricsAggregator": + """Create a new instance of MetricsAggregator if it does not exist.""" + if cls._instance is None: + cls._instance = super().__new__(cls, *args, **kwargs) + return cls._instance + + def __init__(self): + if not hasattr(self, "_initialized"): + self._initialized = True + self._aggregate_functions = {} + + def register(self, name: str, func: Callable[["Metrics"], None]) -> None: + """Register an aggregate function. + + Args + ---- + name: str + The name of the aggregate function. + func: Callable[[Metrics], None] + The aggregate function to register. It should take a Metrics + dictionary as input and return None, modifying the Metrics in place. + """ + self._aggregate_functions[name] = func + + def clear(self, name: str | None = None) -> None: + """Clear registered aggregate functions. + + Args + ---- + name: str | None + The name of the aggregate function to clear. If None, clears all + registered aggregate functions. + + """ + if name: + self._aggregate_functions.pop(name, None) + else: + self._aggregate_functions.clear() + + def aggregate(self, metrics: "Metrics") -> None: + """Aggregate metrics using registered aggregate functions. + + Args + ---- + metrics: Metrics + The metrics dictionary to aggregate. + """ + for func in self._aggregate_functions.values(): + func(metrics) + + +def _failure_rate(metrics: "Metrics") -> None: + """Calculate failure rate metric.""" + attempted = metrics.get("attempted_request_count", 0) + failed = metrics.get("failed_response_count", 0) + if attempted > 0: + metrics["failure_rate"] = failed / attempted + else: + metrics["failure_rate"] = 0.0 + + +def _retry_rate(metrics: "Metrics") -> None: + """Calculate failure rate metric.""" + attempted = metrics.get("attempted_request_count", 0) + retries = metrics.get("retries", 0) + if attempted > 0 and "retries" in metrics: + metrics["retry_rate"] = retries / (retries + attempted) + elif "retries" in metrics: + metrics["retry_rate"] = 0.0 + + +def _tokens_per_response(metrics: "Metrics") -> None: + """Calculate tokens per response metric.""" + responses = metrics.get("responses_with_tokens", 0) + total_tokens = metrics.get("total_tokens", 0) + if responses > 0: + metrics["tokens_per_response"] = total_tokens / responses + else: + metrics["tokens_per_response"] = 0.0 + + +def _cost_per_response(metrics: "Metrics") -> None: + """Calculate cost per response metric.""" + responses = metrics.get("responses_with_cost", 0) + total_cost = metrics.get("total_cost", 0) + if responses > 0: + metrics["cost_per_response"] = total_cost / responses + else: + metrics["cost_per_response"] = 0.0 + + +def _compute_duration_per_response(metrics: "Metrics") -> None: + """Calculate compute duration per response metric.""" + responses = metrics.get("successful_response_count", 0) + streaming_responses = metrics.get("streaming_responses", 0) + responses = responses - streaming_responses + compute_duration = metrics.get("compute_duration_seconds", 0) + if responses > 0: + metrics["compute_duration_per_response_seconds"] = compute_duration / responses + else: + 
metrics["compute_duration_per_response_seconds"] = 0.0 + + +def _cache_hit_rate(metrics: "Metrics") -> None: + """Calculate cache hit rate metric.""" + responses = metrics.get("successful_response_count", 0) + cached = metrics.get("cached_responses", 0) + if responses > 0: + metrics["cache_hit_rate"] = cached / responses + else: + metrics["cache_hit_rate"] = 0.0 + + +metrics_aggregator = MetricsAggregator() +metrics_aggregator.register("failure_rate", _failure_rate) +metrics_aggregator.register("retry_rate", _retry_rate) +metrics_aggregator.register("tokens_per_response", _tokens_per_response) +metrics_aggregator.register("cost_per_response", _cost_per_response) +metrics_aggregator.register( + "compute_duration_per_response", _compute_duration_per_response +) +metrics_aggregator.register("cache_hit_rate", _cache_hit_rate) diff --git a/packages/graphrag-llm/graphrag_llm/metrics/metrics_processor.py b/packages/graphrag-llm/graphrag_llm/metrics/metrics_processor.py new file mode 100644 index 0000000000..61893742db --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/metrics_processor.py @@ -0,0 +1,59 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Metrics processor abstract base class.""" + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from collections.abc import AsyncIterator, Iterator + + from graphrag_llm.config import ModelConfig + from graphrag_llm.types import ( + LLMCompletionChunk, + LLMCompletionResponse, + LLMEmbeddingResponse, + Metrics, + ) + + +class MetricsProcessor(ABC): + """Abstract base class for metrics processors.""" + + @abstractmethod + def __init__(self, **kwargs: Any): + """Initialize MetricsProcessor.""" + raise NotImplementedError + + @abstractmethod + def process_metrics( + self, + *, + model_config: "ModelConfig", + metrics: "Metrics", + input_args: dict[str, Any], + response: "LLMCompletionResponse \ + | Iterator[LLMCompletionChunk] \ + | AsyncIterator[LLMCompletionChunk] \ + | LLMEmbeddingResponse", + ) -> None: + """Process metrics. + + Update the metrics dictionary in place. + + Args + ---- + metrics: Metrics + The metrics to process. + input_args: dict[str, Any] + The input arguments passed to completion or embedding + used to generate the response. + response: LLMCompletionResponse | Iterator[LLMCompletionChunk] | LLMEmbeddingResponse + Either a completion or embedding response from the LLM. + + Returns + ------- + None + """ + raise NotImplementedError diff --git a/packages/graphrag-llm/graphrag_llm/metrics/metrics_processor_factory.py b/packages/graphrag-llm/graphrag_llm/metrics/metrics_processor_factory.py new file mode 100644 index 0000000000..74f0111a9c --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/metrics_processor_factory.py @@ -0,0 +1,79 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Metrics processor factory.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING + +from graphrag_common.factory import Factory + +from graphrag_llm.config.types import MetricsProcessorType +from graphrag_llm.metrics.metrics_processor import MetricsProcessor + +if TYPE_CHECKING: + from graphrag_llm.config import MetricsConfig + + +class MetricsProcessorFactory(Factory[MetricsProcessor]): + """Factory for creating MetricsProcessor instances.""" + + +metrics_processor_factory = MetricsProcessorFactory() + + +def register_metrics_processor( + processor_type: str, + processor_initializer: Callable[..., MetricsProcessor], +) -> None: + """Register a custom metrics processor implementation. + + Args + ---- + processor_type: str + The metrics processor id to register. + processor_initializer: Callable[..., MetricsProcessor] + The metrics processor initializer to register. + """ + metrics_processor_factory.register(processor_type, processor_initializer) + + +def create_metrics_processor(metrics_config: "MetricsConfig") -> MetricsProcessor: + """Create a MetricsProcessor instance based on the configuration. + + Args + ---- + metrics_config: MetricsConfig + The configuration for the metrics processor. + + Returns + ------- + MetricsProcessor: + An instance of a MetricsProcessor subclass. + """ + strategy = metrics_config.type + init_args = metrics_config.model_dump() + + if strategy not in metrics_processor_factory: + match strategy: + case MetricsProcessorType.Default: + from graphrag_llm.metrics.default_metrics_processor import ( + DefaultMetricsProcessor, + ) + + metrics_processor_factory.register( + strategy=MetricsProcessorType.Default, + initializer=DefaultMetricsProcessor, + scope="singleton", + ) + case _: + msg = f"MetricsConfig.processor '{strategy}' is not registered in the MetricsProcessorFactory. Registered strategies: {', '.join(metrics_processor_factory.keys())}" + raise ValueError(msg) + + return metrics_processor_factory.create( + strategy=strategy, + init_args={ + **init_args, + "metrics_config": metrics_config, + }, + ) diff --git a/packages/graphrag-llm/graphrag_llm/metrics/metrics_store.py b/packages/graphrag-llm/graphrag_llm/metrics/metrics_store.py new file mode 100644 index 0000000000..a9933f203d --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/metrics_store.py @@ -0,0 +1,81 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Metrics Store.""" + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_llm.metrics.metrics_writer import MetricsWriter + from graphrag_llm.types import Metrics + + +class MetricsStore(ABC): + """Abstract base class for metrics stores.""" + + @abstractmethod + def __init__( + self, + *, + id: str, + metrics_writer: "MetricsWriter | None" = None, + **kwargs: Any, + ) -> None: + """Initialize MetricsStore. + + Args + ---- + id: str + The ID of the metrics store. + One metric store is created per ID so a good + candidate is the model id (e.g., openai/gpt-4o). + That way one store tracks and aggregates the metrics + per model. + metrics_writer: MetricsWriter + The metrics writer to use for writing metrics. + + """ + raise NotImplementedError + + @property + @abstractmethod + def id(self) -> str: + """Get the ID of the metrics store.""" + raise NotImplementedError + + @abstractmethod + def update_metrics(self, *, metrics: "Metrics") -> None: + """Update the store with multiple metrics. 
+ + Args + ---- + metrics: Metrics + The metrics to merge into the store. + + Returns + ------- + None + """ + raise NotImplementedError + + @abstractmethod + def get_metrics(self) -> "Metrics": + """Get all metrics from the store. + + Returns + ------- + Metrics: + All metrics stored in the store. + """ + raise NotImplementedError + + @abstractmethod + def clear_metrics(self) -> None: + """Clear all metrics from the store. + + Returns + ------- + None + """ + raise NotImplementedError diff --git a/packages/graphrag-llm/graphrag_llm/metrics/metrics_store_factory.py b/packages/graphrag-llm/graphrag_llm/metrics/metrics_store_factory.py new file mode 100644 index 0000000000..51e06fc0dc --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/metrics_store_factory.py @@ -0,0 +1,91 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Metrics store factory.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING, Any + +from graphrag_common.factory import Factory + +from graphrag_llm.config.types import MetricsStoreType +from graphrag_llm.metrics.metrics_store import MetricsStore + +if TYPE_CHECKING: + from graphrag_common.factory import ServiceScope + + from graphrag_llm.config import MetricsConfig + from graphrag_llm.metrics.metrics_writer import MetricsWriter + + +class MetricsStoreFactory(Factory[MetricsStore]): + """Factory for creating MetricsProcessor instances.""" + + +metrics_store_factory = MetricsStoreFactory() + + +def register_metrics_store( + store_type: str, + store_initializer: Callable[..., MetricsStore], + scope: "ServiceScope" = "transient", +) -> None: + """Register a custom metrics store implementation. + + Args + ---- + store_type: str + The metrics store id to register. + store_initializer: Callable[..., MetricsStore] + The metrics store initializer to register. + """ + metrics_store_factory.register(store_type, store_initializer, scope) + + +def create_metrics_store(config: "MetricsConfig", id: str) -> MetricsStore: + """Create a MetricsStore instance based on the configuration. + + Args + ---- + config: MetricsConfig + The configuration for the metrics store. + id: str + The identifier for the metrics store. + Example: openai/gpt-4o + + Returns + ------- + MetricsStore: + An instance of a MetricsStore subclass. + """ + strategy = config.store + metrics_writer: MetricsWriter | None = None + if config.writer: + from graphrag_llm.metrics.metrics_writer_factory import create_metrics_writer + + metrics_writer = create_metrics_writer(config) + init_args: dict[str, Any] = config.model_dump() + + if strategy not in metrics_store_factory: + match strategy: + case MetricsStoreType.Memory: + from graphrag_llm.metrics.memory_metrics_store import MemoryMetricsStore + + register_metrics_store( + store_type=strategy, + store_initializer=MemoryMetricsStore, + scope="singleton", + ) + case _: + msg = f"MetricsConfig.store '{strategy}' is not registered in the MetricsStoreFactory. 
Registered strategies: {', '.join(metrics_store_factory.keys())}" + raise ValueError(msg) + + return metrics_store_factory.create( + strategy=strategy, + init_args={ + **init_args, + "id": id, + "metrics_config": config, + "metrics_writer": metrics_writer, + }, + ) diff --git a/packages/graphrag-llm/graphrag_llm/metrics/metrics_writer.py b/packages/graphrag-llm/graphrag_llm/metrics/metrics_writer.py new file mode 100644 index 0000000000..4e5df0a980 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/metrics_writer.py @@ -0,0 +1,32 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Metrics writer abstract base class.""" + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_llm.types import Metrics + + +class MetricsWriter(ABC): + """Abstract base class for metrics writers.""" + + @abstractmethod + def __init__(self, **kwargs: Any) -> None: + """Initialize MetricsWriter.""" + raise NotImplementedError + + @abstractmethod + def write_metrics(self, *, id: str, metrics: "Metrics") -> None: + """Write the given metrics. + + Args + ---- + id : str + The identifier for the metrics. + metrics : Metrics + The metrics data to write. + """ + raise NotImplementedError diff --git a/packages/graphrag-llm/graphrag_llm/metrics/metrics_writer_factory.py b/packages/graphrag-llm/graphrag_llm/metrics/metrics_writer_factory.py new file mode 100644 index 0000000000..fac8c5957d --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/metrics_writer_factory.py @@ -0,0 +1,91 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + + +"""Metrics writer factory.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING + +from graphrag_common.factory import Factory + +from graphrag_llm.config.types import MetricsWriterType +from graphrag_llm.metrics.metrics_writer import MetricsWriter + +if TYPE_CHECKING: + from graphrag_common.factory import ServiceScope + + from graphrag_llm.config import MetricsConfig + + +class MetricsWriterFactory(Factory[MetricsWriter]): + """Metrics writer factory.""" + + +metrics_writer_factory = MetricsWriterFactory() + + +def register_metrics_writer( + metrics_writer_type: str, + metrics_writer_initializer: Callable[..., MetricsWriter], + scope: "ServiceScope" = "transient", +) -> None: + """Register a custom metrics writer implementation. + + Args + ---- + metrics_writer_type: str + The metrics writer id to register. + metrics_writer_initializer: Callable[..., MetricsWriter] + The metrics writer initializer to register. + scope: ServiceScope (default: "transient") + The service scope for the metrics writer. + """ + metrics_writer_factory.register( + metrics_writer_type, metrics_writer_initializer, scope + ) + + +def create_metrics_writer(metrics_config: "MetricsConfig") -> MetricsWriter: + """Create a MetricsWriter instance based on the configuration. + + Args + ---- + metrics_config: MetricsConfig + The configuration for the metrics writer. + + Returns + ------- + MetricsWriter: + An instance of a MetricsWriter subclass. + """ + strategy = metrics_config.writer + if not strategy: + msg = "MetricsConfig.writer needs to be set to create a MetricsWriter." 
+ raise ValueError(msg) + + init_args = metrics_config.model_dump() + + if strategy not in metrics_writer_factory: + match strategy: + case MetricsWriterType.Log: + from graphrag_llm.metrics.log_metrics_writer import LogMetricsWriter + + metrics_writer_factory.register( + strategy=MetricsWriterType.Log, + initializer=LogMetricsWriter, + scope="singleton", + ) + case MetricsWriterType.File: + from graphrag_llm.metrics.file_metrics_writer import FileMetricsWriter + + metrics_writer_factory.register( + strategy=MetricsWriterType.File, + initializer=FileMetricsWriter, + scope="singleton", + ) + case _: + msg = f"MetricsConfig.writer '{strategy}' is not registered in the MetricsWriterFactory. Registered strategies: {', '.join(metrics_writer_factory.keys())}" + raise ValueError(msg) + + return metrics_writer_factory.create(strategy=strategy, init_args=init_args) diff --git a/packages/graphrag-llm/graphrag_llm/metrics/noop_metrics_store.py b/packages/graphrag-llm/graphrag_llm/metrics/noop_metrics_store.py new file mode 100644 index 0000000000..dbd41e13f7 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/metrics/noop_metrics_store.py @@ -0,0 +1,41 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Noop metrics store.""" + +from typing import Any + +from graphrag_llm.metrics.metrics_store import MetricsStore +from graphrag_llm.types import Metrics + + +class NoopMetricsStore(MetricsStore): + """Noop store for metrics.""" + + def __init__( + self, + **kwargs: Any, + ) -> None: + """Initialize NoopMetricsStore.""" + + @property + def id(self) -> str: + """Get the ID of the metrics store.""" + return "" + + def update_metrics(self, *, metrics: Metrics) -> None: + """Noop update.""" + return + + def get_metrics(self) -> Metrics: + """Noop get all metrics from the store.""" + return {} + + def clear_metrics(self) -> None: + """Clear all metrics from the store. + + Returns + ------- + None + """ + return diff --git a/packages/graphrag-llm/graphrag_llm/middleware/__init__.py b/packages/graphrag-llm/graphrag_llm/middleware/__init__.py new file mode 100644 index 0000000000..d038b28594 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/__init__.py @@ -0,0 +1,24 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Middleware.""" + +from graphrag_llm.middleware.with_cache import with_cache +from graphrag_llm.middleware.with_errors_for_testing import with_errors_for_testing +from graphrag_llm.middleware.with_logging import with_logging +from graphrag_llm.middleware.with_metrics import with_metrics +from graphrag_llm.middleware.with_middleware_pipeline import with_middleware_pipeline +from graphrag_llm.middleware.with_rate_limiting import with_rate_limiting +from graphrag_llm.middleware.with_request_count import with_request_count +from graphrag_llm.middleware.with_retries import with_retries + +__all__ = [ + "with_cache", + "with_errors_for_testing", + "with_logging", + "with_metrics", + "with_middleware_pipeline", + "with_rate_limiting", + "with_request_count", + "with_retries", +] diff --git a/packages/graphrag-llm/graphrag_llm/middleware/with_cache.py b/packages/graphrag-llm/graphrag_llm/middleware/with_cache.py new file mode 100644 index 0000000000..2809538073 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/with_cache.py @@ -0,0 +1,153 @@ +# Copyright (c) 2024 Microsoft Corporation. 
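As a rough illustration of the writer factory above, a custom `MetricsWriter` can be registered alongside the built-in `Log` and `File` writers. This is a minimal sketch: `StdoutMetricsWriter` and the `"stdout"` id are hypothetical, and the keyword arguments the factory forwards are simply whatever `MetricsConfig.model_dump()` produces.

```python
from typing import Any

from graphrag_llm.metrics.metrics_writer import MetricsWriter
from graphrag_llm.metrics.metrics_writer_factory import register_metrics_writer
from graphrag_llm.types import Metrics


class StdoutMetricsWriter(MetricsWriter):
    """Hypothetical writer that prints metrics instead of logging or writing files."""

    def __init__(self, **kwargs: Any) -> None:
        # The factory forwards the dumped MetricsConfig fields as keyword arguments.
        pass

    def write_metrics(self, *, id: str, metrics: Metrics) -> None:
        print(f"[{id}] {metrics}")


# Resolved by create_metrics_writer(config) when MetricsConfig.writer is set to
# "stdout" (an assumed, non-built-in value).
register_metrics_writer("stdout", StdoutMetricsWriter, scope="singleton")
```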
+# Licensed under the MIT License + +"""Cache middleware.""" + +import asyncio +from typing import TYPE_CHECKING, Any, Literal + +from graphrag_llm.types import LLMCompletionResponse, LLMEmbeddingResponse + +if TYPE_CHECKING: + from graphrag_cache import Cache, CacheKeyCreator + + from graphrag_llm.types import ( + AsyncLLMFunction, + LLMFunction, + Metrics, + ) + + +def with_cache( + *, + sync_middleware: "LLMFunction", + async_middleware: "AsyncLLMFunction", + request_type: Literal["chat", "embedding"], + cache: "Cache", + cache_key_creator: "CacheKeyCreator", +) -> tuple[ + "LLMFunction", + "AsyncLLMFunction", +]: + """Wrap model functions with cache middleware. + + Args + ---- + sync_middleware: LLMFunction + The synchronous model function to wrap. + Either a completion function or an embedding function. + async_middleware: AsyncLLMFunction + The asynchronous model function to wrap. + Either a completion function or an embedding function. + cache: Cache + The cache instance to use. + request_type: Literal["chat", "embedding"] + The type of request, either "chat" or "embedding". + cache_key_creator: CacheKeyCreator + The cache key creator to use. + + Returns + ------- + tuple[LLMFunction, AsyncLLMFunction] + The synchronous and asynchronous model functions with caching. + + """ + + def _cache_middleware( + **kwargs: Any, + ): + is_streaming = kwargs.get("stream") or False + is_mocked = kwargs.get("mock_response") or False + metrics: Metrics | None = kwargs.get("metrics") + + if is_streaming or is_mocked: + # don't cache streaming or mocked responses + return sync_middleware(**kwargs) + + cache_key = cache_key_creator(kwargs) + + event_loop = asyncio.new_event_loop() + asyncio.set_event_loop(event_loop) + cached_response = event_loop.run_until_complete(cache.get(cache_key)) + if ( + cached_response is not None + and isinstance(cached_response, dict) + and "response" in cached_response + and cached_response["response"] is not None + and isinstance(cached_response["response"], dict) + ): + try: + if ( + metrics is not None + and "metrics" in cached_response + and cached_response["metrics"] is not None + and isinstance(cached_response["metrics"], dict) + ): + metrics.update(cached_response["metrics"]) + metrics["cached_responses"] = 1 + + if request_type == "chat": + return LLMCompletionResponse(**cached_response["response"]) + return LLMEmbeddingResponse(**cached_response["response"]) + except Exception: # noqa: BLE001 + # Try to retrieve value from cache but if it fails, continue + # to make the request. + ... 
+ + response = sync_middleware(**kwargs) + cache_value = { + "response": response.model_dump(), # type: ignore + "metrics": metrics if metrics is not None else {}, + } + event_loop.run_until_complete(cache.set(cache_key, cache_value)) + event_loop.close() + return response + + async def _cache_middleware_async( + **kwargs: Any, + ): + is_streaming = kwargs.get("stream") or False + is_mocked = kwargs.get("mock_response") or False + metrics: Metrics | None = kwargs.get("metrics") + + if is_streaming or is_mocked: + # don't cache streaming or mocked responses + return await async_middleware(**kwargs) + + cache_key = cache_key_creator(kwargs) + + cached_response = await cache.get(cache_key) + if ( + cached_response is not None + and isinstance(cached_response, dict) + and "response" in cached_response + and cached_response["response"] is not None + and isinstance(cached_response["response"], dict) + ): + try: + if ( + metrics is not None + and "metrics" in cached_response + and cached_response["metrics"] is not None + and isinstance(cached_response["metrics"], dict) + ): + metrics.update(cached_response["metrics"]) + metrics["cached_responses"] = 1 + + if request_type == "chat": + return LLMCompletionResponse(**cached_response["response"]) + return LLMEmbeddingResponse(**cached_response["response"]) + except Exception: # noqa: BLE001 + # Try to retrieve value from cache but if it fails, continue + # to make the request. + ... + + response = await async_middleware(**kwargs) + cache_value = { + "response": response.model_dump(), # type: ignore + "metrics": metrics if metrics is not None else {}, + } + await cache.set(cache_key, cache_value) + return response + + return (_cache_middleware, _cache_middleware_async) # type: ignore diff --git a/packages/graphrag-llm/graphrag_llm/middleware/with_errors_for_testing.py b/packages/graphrag-llm/graphrag_llm/middleware/with_errors_for_testing.py new file mode 100644 index 0000000000..cabdb1eaa4 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/with_errors_for_testing.py @@ -0,0 +1,83 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Error testing middleware.""" + +import asyncio +import random +import time +from typing import TYPE_CHECKING, Any + +import litellm.exceptions as exceptions + +if TYPE_CHECKING: + from graphrag_llm.types import ( + AsyncLLMFunction, + LLMFunction, + ) + + +def with_errors_for_testing( + *, + sync_middleware: "LLMFunction", + async_middleware: "AsyncLLMFunction", + failure_rate: float = 0.0, + exception_type: str = "ValueError", + exception_args: list[Any] | None = None, +) -> tuple[ + "LLMFunction", + "AsyncLLMFunction", +]: + """Wrap model functions with error testing middleware. + + Args + ---- + sync_middleware: LLMFunction + The synchronous model function to wrap. + Either a completion function or an embedding function. + async_middleware: AsyncLLMFunction + The asynchronous model function to wrap. + Either a completion function or an embedding function. + failure_rate: float + The failure rate for testing, between 0.0 and 1.0. + Defaults to 0.0 (no failures). + exception_type: str + The name of the exceptions class from litellm.exceptions to raise. + Defaults to "ValueError". + exception_args: list[Any] | None + The arguments to pass to the exception when raising it. Defaults to None, + which results in a default message. 
+ + Returns + ------- + tuple[LLMFunction, AsyncLLMFunction] + The synchronous and asynchronous model functions wrapped with error testing middleware. + """ + + def _errors_middleware( + **kwargs: Any, + ): + if failure_rate > 0.0 and random.random() <= failure_rate: # noqa: S311 + time.sleep(0.5) + + exception_cls = exceptions.__dict__.get(exception_type, ValueError) + raise exception_cls( + *(exception_args or ["Simulated failure for debugging purposes."]) + ) + + return sync_middleware(**kwargs) + + async def _errors_middleware_async( + **kwargs: Any, + ): + if failure_rate > 0.0 and random.random() <= failure_rate: # noqa: S311 + await asyncio.sleep(0.5) + + exception_cls = exceptions.__dict__.get(exception_type, ValueError) + raise exception_cls( + *(exception_args or ["Simulated failure for debugging purposes."]) + ) + + return await async_middleware(**kwargs) + + return (_errors_middleware, _errors_middleware_async) # type: ignore diff --git a/packages/graphrag-llm/graphrag_llm/middleware/with_logging.py b/packages/graphrag-llm/graphrag_llm/middleware/with_logging.py new file mode 100644 index 0000000000..121ffbdbb6 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/with_logging.py @@ -0,0 +1,73 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Logging middleware.""" + +import logging +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_llm.types import ( + AsyncLLMFunction, + LLMFunction, + Metrics, + ) + +logger = logging.getLogger(__name__) + + +def with_logging( + *, + sync_middleware: "LLMFunction", + async_middleware: "AsyncLLMFunction", +) -> tuple[ + "LLMFunction", + "AsyncLLMFunction", +]: + """Wrap model functions with logging middleware. + + Args + ---- + sync_middleware: LLMFunction + The synchronous model function to wrap. + Either a completion function or an embedding function. + async_middleware: AsyncLLMFunction + The asynchronous model function to wrap. + Either a completion function or an embedding function. + + Returns + ------- + tuple[LLMFunction, AsyncLLMFunction] + The synchronous and asynchronous model functions wrapped with logging middleware. + """ + + def _logging_middleware( + **kwargs: Any, + ): + metrics: Metrics | None = kwargs.get("metrics") + try: + return sync_middleware(**kwargs) + except Exception as e: + retries = metrics.get("retries", None) if metrics else None + retry_str = f" after {retries} retries" if retries else "" + logger.exception( + f"Request failed{retry_str} with exception={e}", # noqa: G004, TRY401 + ) + raise + + async def _logging_middleware_async( + **kwargs: Any, + ): + metrics: Metrics | None = kwargs.get("metrics") + + try: + return await async_middleware(**kwargs) + except Exception as e: + retries = metrics.get("retries", None) if metrics else None + retry_str = f" after {retries} retries" if retries else "" + logger.exception( + f"Async request failed{retry_str} with exception={e}", # noqa: G004, TRY401 + ) + raise + + return (_logging_middleware, _logging_middleware_async) # type: ignore diff --git a/packages/graphrag-llm/graphrag_llm/middleware/with_metrics.py b/packages/graphrag-llm/graphrag_llm/middleware/with_metrics.py new file mode 100644 index 0000000000..64ff7a41f7 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/with_metrics.py @@ -0,0 +1,98 @@ +# Copyright (c) 2024 Microsoft Corporation.
+# Licensed under the MIT License + +"""Metrics middleware to process metrics using a MetricsProcessor.""" + +import time +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_llm.config import ModelConfig + from graphrag_llm.metrics import MetricsProcessor + from graphrag_llm.types import ( + AsyncLLMFunction, + LLMFunction, + Metrics, + ) + + +def with_metrics( + *, + model_config: "ModelConfig", + sync_middleware: "LLMFunction", + async_middleware: "AsyncLLMFunction", + metrics_processor: "MetricsProcessor", +) -> tuple[ + "LLMFunction", + "AsyncLLMFunction", +]: + """Wrap model functions with metrics middleware. + + Args + ---- + model_config: ModelConfig + The model configuration. + sync_middleware: LLMFunction + The synchronous model function to wrap. + Either a completion function or an embedding function. + async_middleware: AsyncLLMFunction + The asynchronous model function to wrap. + Either a completion function or an embedding function. + metrics_processor: MetricsProcessor + The metrics processor to use. + + Returns + ------- + tuple[LLMFunction, AsyncLLMFunction] + The synchronous and asynchronous model functions wrapped with metrics middleware. + + """ + + def _metrics_middleware( + **kwargs: Any, + ): + metrics: Metrics | None = kwargs.get("metrics") + start_time = time.time() + response = sync_middleware(**kwargs) + end_time = time.time() + + if metrics is not None: + metrics_processor.process_metrics( + model_config=model_config, + metrics=metrics, + input_args=kwargs, + response=response, + ) + if kwargs.get("stream"): + metrics["compute_duration_seconds"] = 0 + metrics["streaming_responses"] = 1 + else: + metrics["compute_duration_seconds"] = end_time - start_time + metrics["streaming_responses"] = 0 + return response + + async def _metrics_middleware_async( + **kwargs: Any, + ): + metrics: Metrics | None = kwargs.get("metrics") + + start_time = time.time() + response = await async_middleware(**kwargs) + end_time = time.time() + + if metrics is not None: + metrics_processor.process_metrics( + model_config=model_config, + metrics=metrics, + input_args=kwargs, + response=response, + ) + if kwargs.get("stream"): + metrics["compute_duration_seconds"] = 0 + metrics["streaming_responses"] = 1 + else: + metrics["compute_duration_seconds"] = end_time - start_time + metrics["streaming_responses"] = 0 + return response + + return (_metrics_middleware, _metrics_middleware_async) # type: ignore diff --git a/packages/graphrag-llm/graphrag_llm/middleware/with_middleware_pipeline.py b/packages/graphrag-llm/graphrag_llm/middleware/with_middleware_pipeline.py new file mode 100644 index 0000000000..41860acfaf --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/with_middleware_pipeline.py @@ -0,0 +1,154 @@ +# Copyright (c) 2024 Microsoft Corporation. 
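To show how the `metrics` dict threads through `with_metrics` above, here is a self-contained sketch with stand-in callables; `StubProcessor` is not the real `MetricsProcessor`, and `model_config` is left as `None` purely to keep the snippet runnable.

```python
from typing import Any

from graphrag_llm.middleware import with_metrics


class StubProcessor:
    """Stand-in for MetricsProcessor; only the method the middleware calls is implemented."""

    def process_metrics(
        self, *, model_config: Any, metrics: dict, input_args: dict, response: Any
    ) -> None:
        metrics["processed"] = 1


def fake_completion(**kwargs: Any) -> str:
    return "a canned completion"


async def fake_completion_async(**kwargs: Any) -> str:
    return "a canned completion"


sync_fn, _async_fn = with_metrics(
    model_config=None,  # a real ModelConfig would normally go here
    sync_middleware=fake_completion,
    async_middleware=fake_completion_async,
    metrics_processor=StubProcessor(),
)

metrics: dict[str, Any] = {}
sync_fn(messages=[{"role": "user", "content": "hi"}], metrics=metrics)
print(metrics)  # includes processed, compute_duration_seconds, streaming_responses
```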
+# Licensed under the MIT License + +"""Wraps model functions in middleware pipeline.""" + +from typing import TYPE_CHECKING, Literal + +from graphrag_llm.middleware.with_cache import with_cache +from graphrag_llm.middleware.with_errors_for_testing import with_errors_for_testing +from graphrag_llm.middleware.with_logging import with_logging +from graphrag_llm.middleware.with_metrics import with_metrics +from graphrag_llm.middleware.with_rate_limiting import with_rate_limiting +from graphrag_llm.middleware.with_request_count import with_request_count +from graphrag_llm.middleware.with_retries import with_retries + +if TYPE_CHECKING: + from graphrag_cache import Cache, CacheKeyCreator + + from graphrag_llm.config import ModelConfig + from graphrag_llm.metrics import MetricsProcessor + from graphrag_llm.rate_limit import RateLimiter + from graphrag_llm.retry import Retry + from graphrag_llm.tokenizer import Tokenizer + from graphrag_llm.types import ( + AsyncLLMFunction, + LLMFunction, + ) + + +def with_middleware_pipeline( + *, + model_config: "ModelConfig", + model_fn: "LLMFunction", + async_model_fn: "AsyncLLMFunction", + metrics_processor: "MetricsProcessor | None", + cache: "Cache | None", + cache_key_creator: "CacheKeyCreator", + request_type: Literal["chat", "embedding"], + tokenizer: "Tokenizer", + rate_limiter: "RateLimiter | None", + retrier: "Retry | None", +) -> tuple[ + "LLMFunction", + "AsyncLLMFunction", +]: + """Wrap model functions in middleware pipeline. + + Full Pipeline Order: + - with_requests_counts: Counts incoming requests and + successes, and failures that bubble back up. + - with_cache: Returns cached responses when available + and caches new successful responses that bubble back up. + - with_retries: Retries failed requests. + Since the retry middleware occurs prior to rate limiting, + all retries get back in line for rate limiting. This is + to avoid threads that retry rapidly against an endpoint, + thus increasing the required cooldown. + - with_rate_limiting: Rate limits requests. + - with_metrics: Collects metrics about the request and responses. + - with_errors_for_testing: Raises errors for testing purposes. + Relies on ModelConfig.failure_rate_for_testing to determine + the failure rate. 'failure_rate_for_testing' is not an exposed + configuration option and is only intended for internal testing. + + Args + ---- + model_config: ModelConfig + The model configuration. + model_fn: LLMFunction + The synchronous model function to wrap. + Either a completion function or an embedding function. + async_model_fn: AsyncLLMFunction + The asynchronous model function to wrap. + Either a completion function or an embedding function. + metrics_processor: MetricsProcessor | None + The metrics processor to use. If None, metrics middleware is skipped. + cache: Cache | None + The cache instance to use. If None, caching middleware is skipped. + cache_key_creator: CacheKeyCreator + The cache key creator to use. + request_type: Literal["chat", "embedding"] + The type of request, either "chat" or "embedding". + The middleware pipeline is used for both completions and embeddings + and some of the steps need to know which type of request it is. + tokenizer: Tokenizer + The tokenizer to use for rate limiting. + rate_limiter: RateLimiter | None + The rate limiter to use. If None, rate limiting middleware is skipped. + retrier: Retry | None + The retrier to use. If None, retry middleware is skipped. 
+ + Returns + ------- + tuple[LLMFunction, AsyncLLMFunction] + The synchronous and asynchronous model functions wrapped in the middleware pipeline. + """ + extra_config = model_config.model_extra or {} + failure_rate_for_testing = extra_config.get("failure_rate_for_testing", 0.0) + + if failure_rate_for_testing > 0.0: + model_fn, async_model_fn = with_errors_for_testing( + sync_middleware=model_fn, + async_middleware=async_model_fn, + failure_rate=failure_rate_for_testing, + exception_type=extra_config.get( + "failure_rate_for_testing_exception_type", "ValueError" + ), + exception_args=extra_config.get("failure_rate_for_testing_exception_args"), + ) + + if metrics_processor: + model_fn, async_model_fn = with_metrics( + model_config=model_config, + sync_middleware=model_fn, + async_middleware=async_model_fn, + metrics_processor=metrics_processor, + ) + + if rate_limiter: + model_fn, async_model_fn = with_rate_limiting( + sync_middleware=model_fn, + async_middleware=async_model_fn, + tokenizer=tokenizer, + rate_limiter=rate_limiter, + ) + + if retrier: + model_fn, async_model_fn = with_retries( + sync_middleware=model_fn, + async_middleware=async_model_fn, + retrier=retrier, + ) + + if cache: + model_fn, async_model_fn = with_cache( + sync_middleware=model_fn, + async_middleware=async_model_fn, + request_type=request_type, + cache=cache, + cache_key_creator=cache_key_creator, + ) + + if metrics_processor: + model_fn, async_model_fn = with_request_count( + sync_middleware=model_fn, + async_middleware=async_model_fn, + ) + + model_fn, async_model_fn = with_logging( + sync_middleware=model_fn, + async_middleware=async_model_fn, + ) + + return (model_fn, async_model_fn) diff --git a/packages/graphrag-llm/graphrag_llm/middleware/with_rate_limiting.py b/packages/graphrag-llm/graphrag_llm/middleware/with_rate_limiting.py new file mode 100644 index 0000000000..8d1b09a393 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/with_rate_limiting.py @@ -0,0 +1,79 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Rate limit middleware.""" + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_llm.rate_limit import RateLimiter + from graphrag_llm.tokenizer import Tokenizer + from graphrag_llm.types import ( + AsyncLLMFunction, + LLMFunction, + ) + + +def with_rate_limiting( + *, + sync_middleware: "LLMFunction", + async_middleware: "AsyncLLMFunction", + rate_limiter: "RateLimiter", + tokenizer: "Tokenizer", +) -> tuple[ + "LLMFunction", + "AsyncLLMFunction", +]: + """Wrap model functions with rate limit middleware. + + Args + ---- + sync_middleware: LLMFunction + The synchronous model function to wrap. + Either a completion function or an embedding function. + async_middleware: AsyncLLMFunction + The asynchronous model function to wrap. + Either a completion function or an embedding function. + rate_limiter: RateLimiter + The rate limiter to use. + tokenizer: Tokenizer + The tokenizer to use for counting tokens. + + Returns + ------- + tuple[LLMFunction, AsyncLLMFunction] + The synchronous and asynchronous model functions wrapped with rate limit middleware. 
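The docstring above lists the pipeline from the outside in; because each `with_*` helper returns new callables that close over the previous pair, the wrapper applied last in the function body (logging) is the first to see a request. Here is a plain-Python sketch of that composition, independent of the package's types:

```python
from collections.abc import Callable
from typing import Any


def wrap(name: str, inner: Callable[..., Any]) -> Callable[..., Any]:
    def _wrapped(**kwargs: Any) -> Any:
        print(f"enter {name}")
        result = inner(**kwargs)
        print(f"exit {name}")
        return result

    return _wrapped


def model_fn(**kwargs: Any) -> str:
    return "response"


fn: Callable[..., Any] = model_fn
# Same application order as with_middleware_pipeline: innermost first, logging last.
for layer in [
    "errors_for_testing",
    "metrics",
    "rate_limiting",
    "retries",
    "cache",
    "request_count",
    "logging",
]:
    fn = wrap(layer, fn)

fn(messages=[])
# enter logging -> request_count -> cache -> retries -> rate_limiting -> metrics
# -> errors_for_testing, then the model call, then exits in reverse order.
```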
+ """ + + def _rate_limit_middleware( + **kwargs: Any, + ): + token_count = int( + kwargs.get("max_tokens") or kwargs.get("max_completion_tokens") or 0 + ) + messages = kwargs.get("messages") # completion call + input: list[str] | None = kwargs.get("input") # embedding call + if messages: + token_count += tokenizer.num_prompt_tokens(messages=messages) + elif input: + token_count += sum(tokenizer.num_tokens(text) for text in input) + + with rate_limiter.acquire(token_count): + return sync_middleware(**kwargs) + + async def _rate_limit_middleware_async( + **kwargs: Any, + ): + token_count = int( + kwargs.get("max_tokens") or kwargs.get("max_completion_tokens") or 0 + ) + messages = kwargs.get("messages") # completion call + input = kwargs.get("input") # embedding call + if messages: + token_count += tokenizer.num_prompt_tokens(messages=messages) + elif input: + token_count += sum(tokenizer.num_tokens(text) for text in input) + with rate_limiter.acquire(token_count): + return await async_middleware(**kwargs) + + return (_rate_limit_middleware, _rate_limit_middleware_async) # type: ignore diff --git a/packages/graphrag-llm/graphrag_llm/middleware/with_request_count.py b/packages/graphrag-llm/graphrag_llm/middleware/with_request_count.py new file mode 100644 index 0000000000..24f61d8f47 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/with_request_count.py @@ -0,0 +1,81 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Request count middleware.""" + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_llm.types import ( + AsyncLLMFunction, + LLMFunction, + Metrics, + ) + + +def with_request_count( + *, + sync_middleware: "LLMFunction", + async_middleware: "AsyncLLMFunction", +) -> tuple[ + "LLMFunction", + "AsyncLLMFunction", +]: + """Wrap model functions with request count middleware. + + This is the first step in the middleware pipeline. + It counts how many requests were made, how many succeeded, and how many failed + + Args + ---- + sync_middleware: LLMFunction + The synchronous model function to wrap. + Either a completion function or an embedding function. + async_middleware: AsyncLLMFunction + The asynchronous model function to wrap. + Either a completion function or an embedding function. + + Returns + ------- + tuple[LLMFunction, AsyncLLMFunction] + The synchronous and asynchronous model functions wrapped with request count middleware. 
+ """ + + def _request_count_middleware( + **kwargs: Any, + ): + metrics: Metrics | None = kwargs.get("metrics") + if metrics is not None: + metrics["attempted_request_count"] = 1 + metrics["successful_response_count"] = 0 + metrics["failed_response_count"] = 0 + try: + result = sync_middleware(**kwargs) + if metrics is not None: + metrics["successful_response_count"] = 1 + return result # noqa: TRY300 + except Exception: + if metrics is not None: + metrics["failed_response_count"] = 1 + raise + + async def _request_count_middleware_async( + **kwargs: Any, + ): + metrics: Metrics | None = kwargs.get("metrics") + + if metrics is not None: + metrics["attempted_request_count"] = 1 + metrics["successful_response_count"] = 0 + metrics["failed_response_count"] = 0 + try: + result = await async_middleware(**kwargs) + if metrics is not None: + metrics["successful_response_count"] = 1 + return result # noqa: TRY300 + except Exception: + if metrics is not None: + metrics["failed_response_count"] = 1 + raise + + return (_request_count_middleware, _request_count_middleware_async) # type: ignore diff --git a/packages/graphrag-llm/graphrag_llm/middleware/with_retries.py b/packages/graphrag-llm/graphrag_llm/middleware/with_retries.py new file mode 100644 index 0000000000..1e7e17b208 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/middleware/with_retries.py @@ -0,0 +1,60 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Retry middleware.""" + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_llm.retry import Retry + from graphrag_llm.types import ( + AsyncLLMFunction, + LLMFunction, + ) + + +def with_retries( + *, + sync_middleware: "LLMFunction", + async_middleware: "AsyncLLMFunction", + retrier: "Retry", +) -> tuple[ + "LLMFunction", + "AsyncLLMFunction", +]: + """Wrap model functions with retry middleware. + + Args + ---- + sync_middleware: LLMFunction + The synchronous model function to wrap. + Either a completion function or an embedding function. + async_middleware: AsyncLLMFunction + The asynchronous model function to wrap. + Either a completion function or an embedding function. + retrier: Retry + The retrier instance to use for retrying failed requests. + + Returns + ------- + tuple[LLMFunction, AsyncLLMFunction] + The synchronous and asynchronous model functions wrapped with retry middleware. + """ + + def _retry_middleware( + **kwargs: Any, + ): + return retrier.retry( + func=sync_middleware, + input_args=kwargs, + ) + + async def _retry_middleware_async( + **kwargs: Any, + ): + return await retrier.retry_async( + func=async_middleware, + input_args=kwargs, + ) + + return (_retry_middleware, _retry_middleware_async) # type: ignore diff --git a/packages/graphrag-llm/graphrag_llm/model_cost_registry/__init__.py b/packages/graphrag-llm/graphrag_llm/model_cost_registry/__init__.py new file mode 100644 index 0000000000..2742335b5d --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/model_cost_registry/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Model cost registry module.""" + +from graphrag_llm.model_cost_registry.model_cost_registry import ( + ModelCosts, + model_cost_registry, +) + +__all__ = ["ModelCosts", "model_cost_registry"] diff --git a/packages/graphrag-llm/graphrag_llm/model_cost_registry/model_cost_registry.py b/packages/graphrag-llm/graphrag_llm/model_cost_registry/model_cost_registry.py new file mode 100644 index 0000000000..28855b3055 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/model_cost_registry/model_cost_registry.py @@ -0,0 +1,64 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Model cost registry module.""" + +from typing import Any, ClassVar, TypedDict + +from litellm import model_cost + + +class ModelCosts(TypedDict): + """Model costs.""" + + input_cost_per_token: float + output_cost_per_token: float + + +class ModelCostRegistry: + """Registry for model costs.""" + + _instance: ClassVar["ModelCostRegistry | None"] = None + _model_costs: dict[str, ModelCosts] + + def __new__(cls, *args: Any, **kwargs: Any) -> "ModelCostRegistry": + """Create a new instance of ModelCostRegistry if it does not exist.""" + if cls._instance is None: + cls._instance = super().__new__(cls, *args, **kwargs) + return cls._instance + + def __init__(self): + if not hasattr(self, "_initialized"): + self._model_costs = model_cost + self._initialized = True + + def register_model_costs(self, model: str, costs: ModelCosts) -> None: + """Register the cost per unit for a given model. + + Args + ---- + model: str + The model id, e.g., "openai/gpt-4o". + costs: ModelCosts + The costs associated with the model. + """ + self._model_costs[model] = costs + + def get_model_costs(self, model: str) -> ModelCosts | None: + """Retrieve the cost per unit for a given model. + + Args + ---- + model: str + The model id, e.g., "openai/gpt-4o". + + Returns + ------- + ModelCosts | None + The costs associated with the model, or None if not found. + + """ + return self._model_costs.get(model) + + +model_cost_registry = ModelCostRegistry() diff --git a/packages/graphrag-llm/graphrag_llm/py.typed b/packages/graphrag-llm/graphrag_llm/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/packages/graphrag-llm/graphrag_llm/rate_limit/__init__.py b/packages/graphrag-llm/graphrag_llm/rate_limit/__init__.py new file mode 100644 index 0000000000..4c3316102d --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/rate_limit/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Rate limit module for graphrag-llm.""" + +from graphrag_llm.rate_limit.rate_limit_factory import ( + create_rate_limiter, + register_rate_limiter, +) +from graphrag_llm.rate_limit.rate_limiter import RateLimiter + +__all__ = [ + "RateLimiter", + "create_rate_limiter", + "register_rate_limiter", +] diff --git a/packages/graphrag-llm/graphrag_llm/rate_limit/rate_limit_factory.py b/packages/graphrag-llm/graphrag_llm/rate_limit/rate_limit_factory.py new file mode 100644 index 0000000000..c15cd65c67 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/rate_limit/rate_limit_factory.py @@ -0,0 +1,84 @@ +# Copyright (c) 2024 Microsoft Corporation. 
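For context on `ModelCostRegistry` above, a short usage sketch; the model id and per-token prices are made up.

```python
from graphrag_llm.model_cost_registry import ModelCosts, model_cost_registry

# Register costs for a hypothetical model id, then read them back.
model_cost_registry.register_model_costs(
    "my-org/my-model",
    ModelCosts(input_cost_per_token=2.5e-06, output_cost_per_token=1.0e-05),
)

costs = model_cost_registry.get_model_costs("my-org/my-model")
if costs is not None:
    print(costs["input_cost_per_token"], costs["output_cost_per_token"])
```

Because the registry is a process-wide singleton seeded from litellm's `model_cost` table, a registration like this is visible to any later lookup in the same process.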
+# Licensed under the MIT License + +"""Rate limit factory.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING + +from graphrag_common.factory import Factory + +from graphrag_llm.config import RateLimitType +from graphrag_llm.rate_limit.rate_limiter import RateLimiter + +if TYPE_CHECKING: + from graphrag_common.factory import ServiceScope + + from graphrag_llm.config import RateLimitConfig + + +class RateLimitFactory(Factory[RateLimiter]): + """Factory to create RateLimiter instances.""" + + +rate_limit_factory = RateLimitFactory() + + +def register_rate_limiter( + rate_limit_type: str, + rate_limiter_initializer: Callable[..., RateLimiter], + scope: "ServiceScope" = "transient", +) -> None: + """Register a custom RateLimiter implementation. + + Args + ---- + rate_limit_type: str + The rate limit id to register. + rate_limiter_initializer: Callable[..., RateLimiter] + The rate limiter initializer to register. + scope: ServiceScope (default: "transient") + The service scope for the rate limiter instance. + """ + rate_limit_factory.register( + strategy=rate_limit_type, + initializer=rate_limiter_initializer, + scope=scope, + ) + + +def create_rate_limiter( + rate_limit_config: "RateLimitConfig", +) -> RateLimiter: + """Create a RateLimiter instance. + + Args + ---- + rate_limit_config: RateLimitConfig + The configuration for the rate limit strategy. + + Returns + ------- + RateLimiter: + An instance of a RateLimiter subclass. + """ + strategy = rate_limit_config.type + init_args = rate_limit_config.model_dump() + + if strategy not in rate_limit_factory: + match strategy: + case RateLimitType.SlidingWindow: + from graphrag_llm.rate_limit.sliding_window_rate_limiter import ( + SlidingWindowRateLimiter, + ) + + register_rate_limiter( + rate_limit_type=RateLimitType.SlidingWindow, + rate_limiter_initializer=SlidingWindowRateLimiter, + ) + + case _: + msg = f"RateLimitConfig.type '{strategy}' is not registered in the RateLimitFactory. Registered strategies: {', '.join(rate_limit_factory.keys())}" + raise ValueError(msg) + + return rate_limit_factory.create(strategy=strategy, init_args=init_args) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/rate_limiter.py b/packages/graphrag-llm/graphrag_llm/rate_limit/rate_limiter.py similarity index 66% rename from packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/rate_limiter.py rename to packages/graphrag-llm/graphrag_llm/rate_limit/rate_limiter.py index 24a01a42a0..14c32402cf 100644 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/rate_limiter.py +++ b/packages/graphrag-llm/graphrag_llm/rate_limit/rate_limiter.py @@ -15,23 +15,24 @@ class RateLimiter(ABC): @abstractmethod def __init__( self, - /, **kwargs: Any, - ) -> None: ... + ) -> None: + """Initialize the Rate Limiter.""" + raise NotImplementedError @abstractmethod @contextmanager - def acquire(self, *, token_count: int) -> Iterator[None]: + def acquire(self, token_count: int) -> Iterator[None]: """ Acquire Rate Limiter. Args ---- - token_count: The estimated number of tokens for the current request. + token_count: int + The estimated number of prompt and response tokens for the current request. Yields ------ None: This context manager does not return any value. """ - msg = "RateLimiter subclasses must implement the acquire method." 
- raise NotImplementedError(msg) + raise NotImplementedError diff --git a/packages/graphrag-llm/graphrag_llm/rate_limit/sliding_window_rate_limiter.py b/packages/graphrag-llm/graphrag_llm/rate_limit/sliding_window_rate_limiter.py new file mode 100644 index 0000000000..c8eb89e5a7 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/rate_limit/sliding_window_rate_limiter.py @@ -0,0 +1,143 @@ +# Copyright (c) 2025 Microsoft Corporation. +# Licensed under the MIT License + +"""LiteLLM Static Rate Limiter.""" + +import threading +import time +from collections import deque +from collections.abc import Iterator +from contextlib import contextmanager +from typing import Any + +from graphrag_llm.rate_limit.rate_limiter import RateLimiter + + +class SlidingWindowRateLimiter(RateLimiter): + """Sliding Window Rate Limiter implementation.""" + + _rpp: int | None = None + _tpp: int | None = None + _lock: threading.Lock + _rate_queue: deque[float] + _token_queue: deque[int] + _period_in_seconds: int + _last_time: float | None = None + _stagger: float = 0.0 + + def __init__( + self, + *, + period_in_seconds: int = 60, + requests_per_period: int | None = None, + tokens_per_period: int | None = None, + **kwargs: Any, + ): + """Initialize the Sliding Window Rate Limiter. + + Args + ---- + period_in_seconds: int + The time period in seconds for rate limiting. + requests_per_period: int | None + The maximum number of requests allowed per time period. If None, request limiting is disabled. + tokens_per_period: int | None + The maximum number of tokens allowed per time period. If None, token limiting is disabled. + + Raises + ------ + ValueError + If period_in_seconds is not a positive integer. + If requests_per_period or tokens_per_period are not positive integers. + """ + self._rpp = requests_per_period + self._tpp = tokens_per_period + self._lock = threading.Lock() + self._rate_queue: deque[float] = deque() + self._token_queue: deque[int] = deque() + self._period_in_seconds = period_in_seconds + self._last_time: float | None = None + + if self._rpp is not None and self._rpp > 0: + self._stagger = self._period_in_seconds / self._rpp + + @contextmanager + def acquire(self, token_count: int) -> Iterator[None]: + """ + Acquire Rate Limiter. + + Args + ---- + token_count: The estimated number of tokens for the current request. + + Yields + ------ + None: This context manager does not return any value. + """ + while True: + with self._lock: + current_time = time.time() + + # Use two sliding windows to keep track of requests and tokens per period + # Drop old requests and tokens out of the sliding windows + while ( + len(self._rate_queue) > 0 + and self._rate_queue[0] < current_time - self._period_in_seconds + ): + self._rate_queue.popleft() + self._token_queue.popleft() + + # If sliding window still exceed request limit, wait again + # Waiting requires reacquiring the lock, allowing other threads + # to see if their request fits within the rate limiting windows + # Makes more sense for token limit than request limit + if ( + self._rpp is not None + and self._rpp > 0 + and len(self._rate_queue) >= self._rpp + ): + continue + + # Check if current token window exceeds token limit + # If it does, wait again + # This does not account for the tokens from the current request + # This is intentional, as we want to allow the current request + # to be processed if it is larger than the tpm but smaller than context window. + # tpm is a rate/soft limit and not the hard limit of context window limits. 
+ if ( + self._tpp is not None + and self._tpp > 0 + and sum(self._token_queue) >= self._tpp + ): + continue + + # This check accounts for the current request token usage + # is within the token limits bound. + # If the current requests tokens exceeds the token limit, + # Then let it be processed. + if ( + self._tpp is not None + and self._tpp > 0 + and token_count <= self._tpp + and sum(self._token_queue) + token_count > self._tpp + ): + continue + + # If there was a previous call, check if we need to stagger + if ( + self._stagger > 0 + and ( + self._last_time # is None if this is the first hit to the rate limiter + and current_time - self._last_time + < self._stagger # If more time has passed than the stagger time, we can proceed + ) + ): + time.sleep(self._stagger - (current_time - self._last_time)) + current_time = time.time() + + # Add the current request to the sliding window + self._rate_queue.append(current_time) + self._token_queue.append(token_count) + self._last_time = current_time + break + yield diff --git a/packages/graphrag-llm/graphrag_llm/retry/__init__.py b/packages/graphrag-llm/graphrag_llm/retry/__init__.py new file mode 100644 index 0000000000..638c958e37 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/retry/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Retry module for graphrag-llm.""" + +from graphrag_llm.retry.retry import Retry +from graphrag_llm.retry.retry_factory import create_retry, register_retry + +__all__ = [ + "Retry", + "create_retry", + "register_retry", +] diff --git a/packages/graphrag-llm/graphrag_llm/retry/exceptions_to_skip.py b/packages/graphrag-llm/graphrag_llm/retry/exceptions_to_skip.py new file mode 100644 index 0000000000..f751b2ad28 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/retry/exceptions_to_skip.py @@ -0,0 +1,22 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""List of exception names to skip for retries.""" + +_default_exceptions_to_skip = [ + "BadRequestError", + "UnsupportedParamsError", + "ContextWindowExceededError", + "ContentPolicyViolationError", + "ImageFetchError", + "InvalidRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "UnprocessableEntityError", + "APIConnectionError", + "APIError", + "ServiceUnavailableError", + "APIResponseValidationError", + "BudgetExceededError", +] diff --git a/packages/graphrag-llm/graphrag_llm/retry/exponential_retry.py b/packages/graphrag-llm/graphrag_llm/retry/exponential_retry.py new file mode 100644 index 0000000000..f7abc9b0a8 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/retry/exponential_retry.py @@ -0,0 +1,119 @@ +# Copyright (c) 2024 Microsoft Corporation. 
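A minimal sketch of driving `SlidingWindowRateLimiter` above directly from a few threads; the limits and token counts are arbitrary. With `requests_per_period=5` over a 10-second window, the stagger works out to 2 seconds, so the three calls below are admitted roughly 2 seconds apart.

```python
import threading

from graphrag_llm.rate_limit.sliding_window_rate_limiter import SlidingWindowRateLimiter

# At most 5 requests and 1,000 tokens per 10-second window.
limiter = SlidingWindowRateLimiter(
    period_in_seconds=10,
    requests_per_period=5,
    tokens_per_period=1000,
)


def call(i: int) -> None:
    # acquire() blocks until the request fits inside both sliding windows.
    with limiter.acquire(token_count=200):
        print(f"request {i} admitted")


threads = [threading.Thread(target=call, args=(i,)) for i in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()
```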
+# Licensed under the MIT License + +"""Exponential backoff retry implementation.""" + +import asyncio +import random +import time +from collections.abc import Awaitable, Callable +from typing import TYPE_CHECKING, Any + +from graphrag_llm.retry.exceptions_to_skip import _default_exceptions_to_skip +from graphrag_llm.retry.retry import Retry + +if TYPE_CHECKING: + from graphrag_llm.types import Metrics + + +class ExponentialRetry(Retry): + """Exponential backoff retry implementation.""" + + _base_delay: float + _jitter: bool + _max_retries: int + _max_delay: float + _exceptions_to_skip: list[str] + + def __init__( + self, + *, + max_retries: int = 7, # 2^7 = 128 second max delay with default settings + base_delay: float = 2.0, + jitter: bool = True, + max_delay: float | None = None, + exceptions_to_skip: list[str] | None = None, + **kwargs: dict, + ) -> None: + """Initialize ExponentialRetry. + + Args + ---- + max_retries: int (default=7, 2^7 = 128 second max delay with default settings) + The maximum number of retries to attempt. + base_delay: float + The base delay multiplier for exponential backoff. + jitter: bool + Whether to apply jitter to the delay intervals. + max_delay: float | None + The maximum delay between retries. If None, there is no limit. + + Raises + ------ + ValueError + If max_retries is less than or equal to 0. + If base_delay is less than or equal to 1.0. + """ + self._base_delay = base_delay + self._jitter = jitter + self._max_retries = max_retries + self._max_delay = max_delay or float("inf") + self._exceptions_to_skip = exceptions_to_skip or _default_exceptions_to_skip + + def retry(self, *, func: Callable[..., Any], input_args: dict[str, Any]) -> Any: + """Retry a synchronous function.""" + retries: int = 0 + delay = 1.0 + metrics: Metrics | None = input_args.get("metrics") + while True: + try: + return func(**input_args) + except Exception as e: + if e.__class__.__name__ in self._exceptions_to_skip: + raise + + if retries >= self._max_retries: + raise + retries += 1 + delay *= self._base_delay + sleep_delay = min( + self._max_delay, + delay + (self._jitter * random.uniform(0, 1)), # noqa: S311 + ) + + time.sleep(sleep_delay) + finally: + if metrics is not None: + metrics["retries"] = retries + metrics["requests_with_retries"] = 1 if retries > 0 else 0 + + async def retry_async( + self, + *, + func: Callable[..., Awaitable[Any]], + input_args: dict[str, Any], + ) -> Any: + """Retry an asynchronous function.""" + retries: int = 0 + delay = 1.0 + metrics: Metrics | None = input_args.get("metrics") + while True: + try: + return await func(**input_args) + except Exception as e: + if e.__class__.__name__ in self._exceptions_to_skip: + raise + if retries >= self._max_retries: + raise + retries += 1 + delay *= self._base_delay + sleep_delay = min( + self._max_delay, + delay + (self._jitter * random.uniform(0, 1)), # noqa: S311 + ) + + await asyncio.sleep(sleep_delay) + finally: + if metrics is not None: + metrics["retries"] = retries + metrics["requests_with_retries"] = 1 if retries > 0 else 0 diff --git a/packages/graphrag-llm/graphrag_llm/retry/immediate_retry.py b/packages/graphrag-llm/graphrag_llm/retry/immediate_retry.py new file mode 100644 index 0000000000..cb8150c089 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/retry/immediate_retry.py @@ -0,0 +1,85 @@ +# Copyright (c) 2024 Microsoft Corporation. 
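A quick sketch of `ExponentialRetry` above against a deliberately flaky function; `max_delay` is set low only so the example finishes quickly, and `RuntimeError` is used because it is not in the default skip list.

```python
from typing import Any

from graphrag_llm.retry.exponential_retry import ExponentialRetry

retrier = ExponentialRetry(max_retries=3, base_delay=2.0, jitter=True, max_delay=1.0)

attempts = {"count": 0}


def flaky(**kwargs: Any) -> str:
    attempts["count"] += 1
    if attempts["count"] < 3:
        msg = "transient failure"
        raise RuntimeError(msg)
    return "ok"


metrics: dict[str, Any] = {}
print(retrier.retry(func=flaky, input_args={"prompt": "hello", "metrics": metrics}))  # ok
print(metrics["retries"], metrics["requests_with_retries"])  # 2 1
```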
+# Licensed under the MIT License + +"""Native (immediate) retry implementation.""" + +from collections.abc import Awaitable, Callable +from typing import TYPE_CHECKING, Any + +from graphrag_llm.retry.exceptions_to_skip import _default_exceptions_to_skip +from graphrag_llm.retry.retry import Retry + +if TYPE_CHECKING: + from graphrag_llm.types import Metrics + + +class ImmediateRetry(Retry): + """Immediate retry implementation.""" + + _max_retries: int + _exceptions_to_skip: list[str] + + def __init__( + self, + *, + max_retries: int = 7, + exceptions_to_skip: list[str] | None = None, + **kwargs: dict, + ) -> None: + """Initialize ImmediateRetry. + + Args + ---- + max_retries: int (default=7) + The maximum number of retries to attempt. + + Raises + ------ + ValueError + If max_retries is less than or equal to 0. + """ + self._max_retries = max_retries + self._exceptions_to_skip = exceptions_to_skip or _default_exceptions_to_skip + + def retry(self, *, func: Callable[..., Any], input_args: dict[str, Any]) -> Any: + """Retry a synchronous function.""" + retries: int = 0 + metrics: Metrics | None = input_args.get("metrics") + while True: + try: + return func(**input_args) + except Exception as e: + if e.__class__.__name__ in self._exceptions_to_skip: + raise + + if retries >= self._max_retries: + raise + retries += 1 + finally: + if metrics is not None: + metrics["retries"] = retries + metrics["requests_with_retries"] = 1 if retries > 0 else 0 + + async def retry_async( + self, + *, + func: Callable[..., Awaitable[Any]], + input_args: dict[str, Any], + ) -> Any: + """Retry an asynchronous function.""" + retries: int = 0 + metrics: Metrics | None = input_args.get("metrics") + while True: + try: + return await func(**input_args) + except Exception as e: + if e.__class__.__name__ in self._exceptions_to_skip: + raise + + if retries >= self._max_retries: + raise + retries += 1 + finally: + if metrics is not None: + metrics["retries"] = retries + metrics["requests_with_retries"] = 1 if retries > 0 else 0 diff --git a/packages/graphrag-llm/graphrag_llm/retry/retry.py b/packages/graphrag-llm/graphrag_llm/retry/retry.py new file mode 100644 index 0000000000..5ed07070e8 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/retry/retry.py @@ -0,0 +1,32 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Retry abstract base class.""" + +from abc import ABC, abstractmethod +from collections.abc import Awaitable, Callable +from typing import Any + + +class Retry(ABC): + """Retry Abstract Base Class.""" + + @abstractmethod + def __init__(self, /, **kwargs: Any): + """Initialize Retry.""" + raise NotImplementedError + + @abstractmethod + def retry(self, *, func: Callable[..., Any], input_args: dict[str, Any]) -> Any: + """Retry a synchronous function.""" + raise NotImplementedError + + @abstractmethod + async def retry_async( + self, + *, + func: Callable[..., Awaitable[Any]], + input_args: dict[str, Any], + ) -> Any: + """Retry an asynchronous function.""" + raise NotImplementedError diff --git a/packages/graphrag-llm/graphrag_llm/retry/retry_factory.py b/packages/graphrag-llm/graphrag_llm/retry/retry_factory.py new file mode 100644 index 0000000000..e0d9cc8489 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/retry/retry_factory.py @@ -0,0 +1,86 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Retry factory.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING + +from graphrag_common.factory import Factory + +from graphrag_llm.config.types import RetryType +from graphrag_llm.retry.retry import Retry + +if TYPE_CHECKING: + from graphrag_common.factory import ServiceScope + + from graphrag_llm.config.retry_config import RetryConfig + + +class RetryFactory(Factory[Retry]): + """Factory to create Retry instances.""" + + +retry_factory = RetryFactory() + + +def register_retry( + retry_type: str, + retry_initializer: Callable[..., Retry], + scope: "ServiceScope" = "transient", +) -> None: + """Register a custom Retry implementation. + + Args + ---- + retry_type: str + The retry id to register. + retry_initializer: Callable[..., Retry] + The retry initializer to register. + """ + retry_factory.register( + strategy=retry_type, + initializer=retry_initializer, + scope=scope, + ) + + +def create_retry( + retry_config: "RetryConfig", +) -> Retry: + """Create a Retry instance. + + Args + ---- + retry_config: RetryConfig + The configuration for the retry strategy. + + Returns + ------- + Retry: + An instance of a Retry subclass. + """ + strategy = retry_config.type + init_args = retry_config.model_dump() + + if strategy not in retry_factory: + match strategy: + case RetryType.ExponentialBackoff: + from graphrag_llm.retry.exponential_retry import ExponentialRetry + + retry_factory.register( + strategy=RetryType.ExponentialBackoff, + initializer=ExponentialRetry, + ) + case RetryType.Immediate: + from graphrag_llm.retry.immediate_retry import ImmediateRetry + + retry_factory.register( + strategy=RetryType.Immediate, + initializer=ImmediateRetry, + ) + case _: + msg = f"RetryConfig.type '{strategy}' is not registered in the RetryFactory. Registered strategies: {', '.join(retry_factory.keys())}" + raise ValueError(msg) + + return retry_factory.create(strategy=strategy, init_args=init_args) diff --git a/packages/graphrag-llm/graphrag_llm/templating/__init__.py b/packages/graphrag-llm/graphrag_llm/templating/__init__.py new file mode 100644 index 0000000000..3dd39619ac --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/templating/__init__.py @@ -0,0 +1,24 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Templates module.""" + +from graphrag_llm.templating.template_engine import TemplateEngine +from graphrag_llm.templating.template_engine_factory import ( + create_template_engine, + register_template_engine, +) +from graphrag_llm.templating.template_manager import TemplateManager +from graphrag_llm.templating.template_manager_factory import ( + create_template_manager, + register_template_manager, +) + +__all__ = [ + "TemplateEngine", + "TemplateManager", + "create_template_engine", + "create_template_manager", + "register_template_engine", + "register_template_manager", +] diff --git a/packages/graphrag-llm/graphrag_llm/templating/file_template_manager.py b/packages/graphrag-llm/graphrag_llm/templating/file_template_manager.py new file mode 100644 index 0000000000..a4f9a679fc --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/templating/file_template_manager.py @@ -0,0 +1,76 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""In-memory template manager implementation.""" + +from pathlib import Path +from typing import Any + +from graphrag_llm.templating.template_manager import TemplateManager + + +class FileTemplateManager(TemplateManager): + """Abstract base class for template managers.""" + + _encoding: str + _templates_extension: str + _templates_dir: Path + + def __init__( + self, + base_dir: str = "templates", + template_extension: str = ".jinja", + encoding: str = "utf-8", + **kwargs: Any, + ) -> None: + """Initialize the template manager. + + Args + ---- + base_dir: str (default="./templates") + The base directory where templates are stored. + template_extension: str (default=".jinja") + The file extension for template files. + encoding: str (default="utf-8") + The encoding used to read template files. + + Raises + ------ + ValueError + If the base directory does not exist or is not a directory. + If the template_extension is an empty string. + """ + self._templates = {} + self._encoding = encoding + + self._templates_extension = template_extension + + self._templates_dir = Path(base_dir).resolve() + if not self._templates_dir.exists() or not self._templates_dir.is_dir(): + msg = f"Templates directory '{base_dir}' does not exist or is not a directory." + raise ValueError(msg) + + def get(self, template_name: str) -> str | None: + """Retrieve a template by its name.""" + template_file = ( + self._templates_dir / f"{template_name}{self._templates_extension}" + ) + if template_file.exists() and template_file.is_file(): + return template_file.read_text(encoding=self._encoding) + return None + + def register(self, template_name: str, template: str) -> None: + """Register a new template.""" + self._templates[template_name] = template + template_path = ( + self._templates_dir / f"{template_name}{self._templates_extension}" + ) + template_path.write_text(template, encoding=self._encoding) + + def keys(self) -> list[str]: + """List all registered template names.""" + return list(self._templates.keys()) + + def __contains__(self, template_name: str) -> bool: + """Check if a template is registered.""" + return template_name in self._templates diff --git a/packages/graphrag-llm/graphrag_llm/templating/jinja_template_engine.py b/packages/graphrag-llm/graphrag_llm/templating/jinja_template_engine.py new file mode 100644 index 0000000000..e520e4b0cc --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/templating/jinja_template_engine.py @@ -0,0 +1,55 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Jinja template engine.""" + +from typing import TYPE_CHECKING, Any + +from jinja2 import StrictUndefined, Template, UndefinedError + +from graphrag_llm.templating.template_engine import TemplateEngine + +if TYPE_CHECKING: + from graphrag_llm.templating.template_manager import TemplateManager + + +class JinjaTemplateEngine(TemplateEngine): + """Jinja template engine.""" + + _templates: dict[str, Template] + _template_manager: "TemplateManager" + + def __init__(self, *, template_manager: "TemplateManager", **kwargs: Any) -> None: + """Initialize the template engine. + + Args + ---- + template_manager: TemplateManager + The template manager to use for loading templates. 
+ """ + self._templates = {} + self._template_manager = template_manager + + def render(self, template_name: str, context: dict[str, Any]) -> str: + """Render a template with the given context.""" + jinja_template = self._templates.get(template_name) + if jinja_template is None: + template_contents = self._template_manager.get(template_name) + if template_contents is None: + msg = f"Template '{template_name}' not found." + raise KeyError(msg) + jinja_template = Template(template_contents, undefined=StrictUndefined) + self._templates[template_name] = jinja_template + try: + return jinja_template.render(**context) + except UndefinedError as e: + msg = f"Missing key in context for template '{template_name}': {e.message}" + raise KeyError(msg) from e + except Exception as e: + msg = f"Error rendering template '{template_name}': {e!s}" + raise RuntimeError(msg) from e + + @property + def template_manager(self) -> "TemplateManager": + """Template manager associated with this engine.""" + return self._template_manager diff --git a/packages/graphrag-llm/graphrag_llm/templating/template_engine.py b/packages/graphrag-llm/graphrag_llm/templating/template_engine.py new file mode 100644 index 0000000000..29b3250145 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/templating/template_engine.py @@ -0,0 +1,53 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Abstract base class for template engines.""" + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from graphrag_llm.templating.template_manager import TemplateManager + + +class TemplateEngine(ABC): + """Abstract base class for template engines.""" + + @abstractmethod + def __init__(self, *, template_manager: "TemplateManager", **kwargs: Any) -> None: + """Initialize the template engine. + + Args + ---- + template_manager: TemplateManager + The template manager to use for loading templates. + + """ + raise NotImplementedError + + @abstractmethod + def render(self, template_name: str, context: dict[str, Any]) -> str: + """Render a template with the given context. + + Args + ---- + template_name: str + The name of the template to render. + context: dict[str, str] + The context to use for rendering the template. + + Returns + ------- + str: The rendered template. + + Raises + ------ + KeyError: If the template is not found or a required key is missing in the context. + """ + raise NotImplementedError + + @property + @abstractmethod + def template_manager(self) -> "TemplateManager": + """Template manager associated with this engine.""" + raise NotImplementedError diff --git a/packages/graphrag-llm/graphrag_llm/templating/template_engine_factory.py b/packages/graphrag-llm/graphrag_llm/templating/template_engine_factory.py new file mode 100644 index 0000000000..fb7fbbbaf8 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/templating/template_engine_factory.py @@ -0,0 +1,95 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Template engine factory implementation.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING + +from graphrag_common.factory import Factory + +from graphrag_llm.config.template_engine_config import TemplateEngineConfig +from graphrag_llm.config.types import TemplateEngineType +from graphrag_llm.templating.template_engine import TemplateEngine +from graphrag_llm.templating.template_manager_factory import create_template_manager + +if TYPE_CHECKING: + from graphrag_common.factory import ServiceScope + + +class TemplateEngineFactory(Factory[TemplateEngine]): + """Factory for creating template engine instances.""" + + +template_engine_factory = TemplateEngineFactory() + + +def register_template_engine( + template_engine_type: str, + template_engine_initializer: Callable[..., TemplateEngine], + scope: "ServiceScope" = "transient", +) -> None: + """Register a custom template engine implementation. + + Args + ---- + template_engine_type: str + The template engine id to register. + template_engine_initializer: Callable[..., TemplateEngine] + The template engine initializer to register. + scope: ServiceScope (default: "transient") + The service scope for the template engine instance. + """ + template_engine_factory.register( + strategy=template_engine_type, + initializer=template_engine_initializer, + scope=scope, + ) + + +def create_template_engine( + template_engine_config: TemplateEngineConfig | None = None, +) -> TemplateEngine: + """Create a TemplateEngine instance. + + Args + ---- + template_engine_config: TemplateEngineConfig | None + The configuration for the template engine. If None, defaults will be used. + + Returns + ------- + TemplateEngine: + An instance of a TemplateEngine subclass. + """ + template_engine_config = template_engine_config or TemplateEngineConfig() + + strategy = template_engine_config.type + template_manager = create_template_manager( + template_engine_config=template_engine_config + ) + init_args = template_engine_config.model_dump() + + if strategy not in template_engine_factory: + match strategy: + case TemplateEngineType.Jinja: + from graphrag_llm.templating.jinja_template_engine import ( + JinjaTemplateEngine, + ) + + template_engine_factory.register( + strategy=TemplateEngineType.Jinja, + initializer=JinjaTemplateEngine, + scope="singleton", + ) + case _: + msg = f"TemplateEngineConfig.type '{strategy}' is not registered in the TemplateEngineFactory. Registered strategies: {', '.join(template_engine_factory.keys())}" + raise ValueError(msg) + + return template_engine_factory.create( + strategy=strategy, + init_args={ + **init_args, + "template_manager": template_manager, + }, + ) diff --git a/packages/graphrag-llm/graphrag_llm/templating/template_manager.py b/packages/graphrag-llm/graphrag_llm/templating/template_manager.py new file mode 100644 index 0000000000..6731176d96 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/templating/template_manager.py @@ -0,0 +1,65 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Abstract base class for template managers.""" + +from abc import ABC, abstractmethod +from typing import Any + + +class TemplateManager(ABC): + """Abstract base class for template managers.""" + + @abstractmethod + def __init__(self, **kwargs: Any) -> None: + """Initialize the template manager.""" + raise NotImplementedError + + @abstractmethod + def get(self, template_name: str) -> str | None: + """Retrieve a template by its name. 
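The factory path is sketched below for comparison. The defaults of `TemplateEngineConfig` are not part of this excerpt, so the assumption here is that they resolve to the Jinja engine backed by the file-based manager, and that `templates/summarize.jinja` already exists on disk.

```python
from graphrag_llm.templating.template_engine_factory import create_template_engine

# No config: the default TemplateEngineConfig is used (assumed Jinja engine plus a
# file-based template manager reading from a local "templates" directory).
engine = create_template_engine()

print(engine.render("summarize", {"text": "GraphRAG builds knowledge graphs from documents."}))
```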
+ + Args + ---- + template_name: str + The name of the template to retrieve. + + Returns + ------- + str | None: The content of the template, if found. + """ + raise NotImplementedError + + @abstractmethod + def register(self, template_name: str, template: str) -> None: + """Register a new template. + + Args + ---- + template_name: str + The name of the template. + template: str + The content of the template. + """ + raise NotImplementedError + + @abstractmethod + def keys(self) -> list[str]: + """List all registered template names. + + Returns + ------- + list[str]: A list of registered template names. + """ + raise NotImplementedError + + @abstractmethod + def __contains__(self, template_name: str) -> bool: + """Check if a template is registered. + + Args + ---- + template_name: str + The name of the template to check. + """ + raise NotImplementedError diff --git a/packages/graphrag-llm/graphrag_llm/templating/template_manager_factory.py b/packages/graphrag-llm/graphrag_llm/templating/template_manager_factory.py new file mode 100644 index 0000000000..9de35a3693 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/templating/template_manager_factory.py @@ -0,0 +1,82 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Template manager factory implementation.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING + +from graphrag_common.factory import Factory + +from graphrag_llm.config.template_engine_config import TemplateEngineConfig +from graphrag_llm.config.types import TemplateManagerType +from graphrag_llm.templating.template_manager import TemplateManager + +if TYPE_CHECKING: + from graphrag_common.factory import ServiceScope + + +class TemplateManagerFactory(Factory[TemplateManager]): + """Factory for creating template manager instances.""" + + +template_manager_factory = TemplateManagerFactory() + + +def register_template_manager( + template_manager_type: str, + template_manager_initializer: Callable[..., TemplateManager], + scope: "ServiceScope" = "transient", +) -> None: + """Register a custom template manager implementation. + + Args + ---- + - template_manager_type: str + The template manager id to register. + - template_manager_initializer: Callable[..., TemplateManager] + The template manager initializer to register. + """ + template_manager_factory.register( + strategy=template_manager_type, + initializer=template_manager_initializer, + scope=scope, + ) + + +def create_template_manager( + template_engine_config: TemplateEngineConfig | None = None, +) -> TemplateManager: + """Create a TemplateManager instance. + + Args + ---- + template_engine_config: TemplateEngineConfig + The configuration for the template engine. + + Returns + ------- + TemplateManager: + An instance of a TemplateManager subclass. + """ + template_engine_config = template_engine_config or TemplateEngineConfig() + strategy = template_engine_config.template_manager + init_args = template_engine_config.model_dump() + + if strategy not in template_manager_factory: + match strategy: + case TemplateManagerType.File: + from graphrag_llm.templating.file_template_manager import ( + FileTemplateManager, + ) + + template_manager_factory.register( + strategy=TemplateManagerType.File, + initializer=FileTemplateManager, + scope="singleton", + ) + case _: + msg = f"TemplateEngineConfig.template_manager '{strategy}' is not registered in the TemplateManagerFactory. 
Registered strategies: {', '.join(template_manager_factory.keys())}" + raise ValueError(msg) + + return template_manager_factory.create(strategy=strategy, init_args=init_args) diff --git a/packages/graphrag-llm/graphrag_llm/threading/__init__.py b/packages/graphrag-llm/graphrag_llm/threading/__init__.py new file mode 100644 index 0000000000..cc797aceed --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/threading/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Threading module.""" + +from graphrag_llm.threading.completion_thread_runner import completion_thread_runner + +__all__ = [ + "completion_thread_runner", +] diff --git a/packages/graphrag-llm/graphrag_llm/threading/completion_thread.py b/packages/graphrag-llm/graphrag_llm/threading/completion_thread.py new file mode 100644 index 0000000000..4b0dbd4f4b --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/threading/completion_thread.py @@ -0,0 +1,91 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Completion Thread.""" + +import threading +from queue import Empty, Queue +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Iterator + + from graphrag_llm.types import ( + LLMCompletionArgs, + LLMCompletionChunk, + LLMCompletionFunction, + LLMCompletionResponse, + ) + + +LLMCompletionRequestQueue = Queue[tuple[str, "LLMCompletionArgs"] | None] +"""Input queue for LLM completions. + +A queue for tracking requests to be made to a completion endpoint. +Each item in the queue is a tuple containing a request ID and a dictionary of +completion arguments. A `None` value indicates that the thread should terminate. + +Queue Item Type: + tuple[request_id, completion_args_dict] | None + +Items in the queue are processed by a thread pool in which the results are placed +into an output queue to be handled by a response handler. +""" + + +LLMCompletionResponseQueue = Queue[ + tuple[ + str, + "LLMCompletionResponse | Iterator[LLMCompletionChunk] | Exception", + ] + | None +] +"""Output queue for LLM completion responses. + +A queue for tracking responses from completion requests. Each item in the queue is a tuple +containing the request ID and the corresponding response, which can be a full response, +a stream of chunks, or an exception if the request failed. A `None` value indicates that the +thread should terminate. + +Queue Item Type: + tuple[request_id, response | exception] | None + +Items in the queue are produced by a thread pool that processes completion requests +from an input queue. This output queue is then consumed by a response handler provided +by the user. 
+""" + + +class CompletionThread(threading.Thread): + """Thread for handling LLM completions.""" + + def __init__( + self, + *, + quit_process_event: threading.Event, + input_queue: LLMCompletionRequestQueue, + output_queue: LLMCompletionResponseQueue, + completion: "LLMCompletionFunction", + ) -> None: + super().__init__() + self._quit_process_event = quit_process_event + self._input_queue = input_queue + self._output_queue = output_queue + self._completion = completion + + def run(self): + """Run the completion thread.""" + while True and not self._quit_process_event.is_set(): + try: + input_data = self._input_queue.get(timeout=1) + except Empty: + continue + if input_data is None: + break + request_id, data = input_data + try: + response = self._completion(**data) + + self._output_queue.put((request_id, response)) + except Exception as e: # noqa: BLE001 + self._output_queue.put((request_id, e)) diff --git a/packages/graphrag-llm/graphrag_llm/threading/completion_thread_runner.py b/packages/graphrag-llm/graphrag_llm/threading/completion_thread_runner.py new file mode 100644 index 0000000000..bb34cf4c54 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/threading/completion_thread_runner.py @@ -0,0 +1,243 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Completion Thread Runner.""" + +import asyncio +import sys +import threading +import time +from collections.abc import Awaitable, Iterator +from contextlib import contextmanager +from queue import Empty, Queue +from typing import TYPE_CHECKING, Protocol, Unpack, runtime_checkable + +from graphrag_llm.threading.completion_thread import CompletionThread + +if TYPE_CHECKING: + from graphrag_llm.metrics import MetricsStore + from graphrag_llm.threading.completion_thread import ( + LLMCompletionRequestQueue, + LLMCompletionResponseQueue, + ) + from graphrag_llm.types import ( + LLMCompletionArgs, + LLMCompletionChunk, + LLMCompletionFunction, + LLMCompletionResponse, + ) + + +@runtime_checkable +class ThreadedLLMCompletionResponseHandler(Protocol): + """Threaded completion response handler. + + This function is used to handle responses from the threaded completion runner. + + Args + ---- + request_id: str + The request ID associated with the completion request. + resp: LLMCompletionResponse | Iterator[LLMCompletionChunk] | Exception + The completion response, which can be a full response, a stream of chunks, + or an exception if the request failed. + + Returns + ------- + Awaitable[None] | None + The callback can be asynchronous or synchronous. + """ + + def __call__( + self, + request_id: str, + response: "LLMCompletionResponse | Iterator[LLMCompletionChunk] | Exception", + /, + ) -> Awaitable[None] | None: + """Threaded completion response handler.""" + ... + + +@runtime_checkable +class ThreadedLLMCompletionFunction(Protocol): + """Threaded completion function. + + This function is used to submit requests to a thread pool for processing. + The thread pool will process the requests and invoke the provided callback + with the responses. + + same signature as LLMCompletionFunction but requires a `request_id` parameter + to identify the request and does not return anything. + + Args + ---- + messages: LLMCompletionMessagesParam + The messages to send to the LLM. + Can be str | list[dict[str, str]] | list[ChatCompletionMessageParam]. + request_id: str + The request ID to associate with the completion request. + response_format: BaseModel | None (default=None) + The structured response format. 
+ Must extend pydantic BaseModel. + stream: bool (default=False) + Whether to stream the response. + streaming is not supported when using response_format. + max_completion_tokens: int | None (default=None) + The maximum number of tokens to generate in the completion. + temperature: float | None (default=None) + The temperature to control how deterministic vs. creative the responses are. + top_p: float | None (default=None) + top_p for nucleus sampling, where the model considers tokens with + cumulative probabilities up to top_p. Values range from 0 to 1. + n: int | None (default=None) + The number of completions to generate for each prompt. + tools: list[Tool] | None (default=None) + Optional tools to use during completion. + https://docs.litellm.ai/docs/completion/function_call + **kwargs: Any + Additional keyword arguments. + + Returns + ------- + None + """ + + def __call__( + self, + /, + request_id: str, + **kwargs: Unpack["LLMCompletionArgs"], + ) -> None: + """Threaded Chat completion function.""" + ... + + +def _start_completion_thread_pool( + *, + completion: "LLMCompletionFunction", + quit_process_event: threading.Event, + concurrency: int, + queue_limit: int, +) -> tuple[ + list[CompletionThread], + "LLMCompletionRequestQueue", + "LLMCompletionResponseQueue", +]: + threads: list[CompletionThread] = [] + input_queue: LLMCompletionRequestQueue = Queue(queue_limit) + output_queue: LLMCompletionResponseQueue = Queue() + for _ in range(concurrency): + thread = CompletionThread( + quit_process_event=quit_process_event, + input_queue=input_queue, + output_queue=output_queue, + completion=completion, + ) + thread.start() + threads.append(thread) + + return threads, input_queue, output_queue + + +@contextmanager +def completion_thread_runner( + *, + completion: "LLMCompletionFunction", + response_handler: ThreadedLLMCompletionResponseHandler, + concurrency: int, + queue_limit: int = 0, + metrics_store: "MetricsStore | None" = None, +) -> Iterator[ThreadedLLMCompletionFunction]: + """Run a completion thread pool. + + Args + ---- + completion: LLMCompletion + The LLMCompletion instance to use for processing requests. + response_handler: ThreadedLLMCompletionResponseHandler + The callback function to handle completion responses. + (request_id, response|exception) -> Awaitable[None] | None + concurrency: int + The number of threads to spin up in a thread pool. + queue_limit: int (default=0) + The maximum number of items allowed in the input queue. + 0 means unlimited. + Set this to a value to create backpressure on the caller. + metrics_store: MetricsStore | None (default=None) + Optional metrics store to record runtime duration. + + Yields + ------ + ThreadedLLMCompletionFunction: + A function that can be used to submit completion requests to the thread pool. + (messages, request_id, **kwargs) -> None + + The thread pool will process the requests and invoke the provided callback + with the responses. + + same signature as LLMCompletionFunction but requires a `request_id` parameter + to identify the request and does not return anything. 
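An illustrative sketch of how `completion_thread_runner` is meant to be driven, using the mock-response helpers (`create_completion_response`, `gather_completion_response`) that appear later in this diff in place of a real LiteLLM-backed completion function.

```python
from graphrag_llm.threading import completion_thread_runner
from graphrag_llm.utils import create_completion_response, gather_completion_response


def mock_completion(**kwargs):
    # Stand-in for a real LLMCompletionFunction.
    return create_completion_response(f"echo: {kwargs['messages']}")


def handle_response(request_id, response):
    # Called on the response-handler thread; may also be an async callable.
    if isinstance(response, Exception):
        print(f"{request_id} failed: {response}")
    else:
        print(f"{request_id}: {gather_completion_response(response)}")


with completion_thread_runner(
    completion=mock_completion,
    response_handler=handle_response,
    concurrency=4,
) as submit:
    for i in range(10):
        # Each request needs a unique, non-empty request_id.
        submit(request_id=f"req-{i}", messages=f"Request number {i}")
# Leaving the context drains the queues and joins the worker threads.
```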
+ """ + quit_process_event = threading.Event() + threads, input_queue, output_queue = _start_completion_thread_pool( + completion=completion, + quit_process_event=quit_process_event, + concurrency=concurrency, + queue_limit=queue_limit, + ) + + def _process_output( + quit_process_event: threading.Event, + output_queue: "LLMCompletionResponseQueue", + callback: ThreadedLLMCompletionResponseHandler, + ): + while True and not quit_process_event.is_set(): + try: + data = output_queue.get(timeout=1) + except Empty: + continue + if data is None: + break + request_id, response = data + response = callback(request_id, response) + + if asyncio.iscoroutine(response): + response = asyncio.run(response) + + def _process_input(request_id: str, **kwargs: Unpack["LLMCompletionArgs"]): + if not request_id: + msg = "request_id needs to be passed as a keyword argument" + raise ValueError(msg) + input_queue.put((request_id, kwargs)) + + handle_response_thread = threading.Thread( + target=_process_output, + args=(quit_process_event, output_queue, response_handler), + ) + handle_response_thread.start() + + def _cleanup(): + for _ in threads: + input_queue.put(None) + + for thread in threads: + while thread.is_alive(): + thread.join(timeout=1) + + output_queue.put(None) + + while handle_response_thread.is_alive(): + handle_response_thread.join(timeout=1) + + start_time = time.time() + try: + yield _process_input + _cleanup() + except KeyboardInterrupt: + quit_process_event.set() + sys.exit(1) + finally: + end_time = time.time() + runtime = end_time - start_time + if metrics_store: + metrics_store.update_metrics(metrics={"runtime_duration_seconds": runtime}) diff --git a/packages/graphrag-llm/graphrag_llm/threading/embedding_thread.py b/packages/graphrag-llm/graphrag_llm/threading/embedding_thread.py new file mode 100644 index 0000000000..7980d08fc1 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/threading/embedding_thread.py @@ -0,0 +1,88 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Embedding Thread.""" + +import threading +from queue import Empty, Queue +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from graphrag_llm.types import ( + LLMEmbeddingArgs, + LLMEmbeddingFunction, + LLMEmbeddingResponse, + ) + + +LLMEmbeddingRequestQueue = Queue[tuple[str, "LLMEmbeddingArgs"] | None] +"""Input queue for LLM embeddings. + +A queue for tracking requests to be made to an embedding endpoint. +Each item in the queue is a tuple containing a request ID and a dictionary of +embedding arguments. A `None` value indicates that the thread should terminate. + +Queue Item Type: + tuple[request_id, embedding_args_dict] | None + +Items in the queue are processed by a thread pool in which the results are placed +into an output queue to be handled by a response handler. +""" + +LLMEmbeddingResponseQueue = Queue[ + tuple[ + str, + "LLMEmbeddingResponse | Exception", + ] + | None +] +"""Output queue for LLM embedding responses. + +A queue for tracking responses from embedding requests. Each item in the queue is a tuple +containing the request ID and the corresponding response, which can be a full response +or an exception if the request failed. A `None` value indicates that the +thread should terminate. + +Queue Item Type: + tuple[request_id, response | exception] | None + +Items in the queue are produced by a thread pool that processes embedding requests +from an input queue. This output queue is then consumed by a response handler provided +by the user. 
+""" + + +class EmbeddingThread(threading.Thread): + """Thread for handling LLM embeddings.""" + + def __init__( + self, + *, + quit_process_event: threading.Event, + input_queue: LLMEmbeddingRequestQueue, + output_queue: LLMEmbeddingResponseQueue, + embedding: "LLMEmbeddingFunction", + ) -> None: + super().__init__() + self._quit_process_event = quit_process_event + self._input_queue = input_queue + self._output_queue = output_queue + self._embedding = embedding + + def run(self) -> None: + """Run the embedding thread.""" + while not self._quit_process_event.is_set(): + try: + input_data = self._input_queue.get(timeout=0.1) + except Empty: + continue + + if input_data is None: + break + request_id, data = input_data + try: + response = self._embedding(**data) + + self._output_queue.put((request_id, response)) + except Exception as e: # noqa: BLE001 + self._output_queue.put((request_id, e)) diff --git a/packages/graphrag-llm/graphrag_llm/threading/embedding_thread_runner.py b/packages/graphrag-llm/graphrag_llm/threading/embedding_thread_runner.py new file mode 100644 index 0000000000..995e69ffda --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/threading/embedding_thread_runner.py @@ -0,0 +1,216 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Embedding Thread Runner.""" + +import asyncio +import sys +import threading +import time +from collections.abc import Awaitable, Iterator +from contextlib import contextmanager +from queue import Empty, Queue +from typing import TYPE_CHECKING, Protocol, Unpack, runtime_checkable + +from graphrag_llm.threading.embedding_thread import EmbeddingThread + +if TYPE_CHECKING: + from graphrag_llm.metrics import MetricsStore + from graphrag_llm.threading.embedding_thread import ( + LLMEmbeddingRequestQueue, + LLMEmbeddingResponseQueue, + ) + from graphrag_llm.types import ( + LLMEmbeddingArgs, + LLMEmbeddingFunction, + LLMEmbeddingResponse, + ) + + +@runtime_checkable +class ThreadedLLMEmbeddingResponseHandler(Protocol): + """Threaded embedding response handler. + + This function is used to handle responses from the threaded embedding runner. + + Args + ---- + request_id: str + The request ID associated with the embedding request. + resp: LLMEmbeddingResponse | Exception + The embedding response, which can be a full response or + an exception if the request failed. + + Returns + ------- + Awaitable[None] | None + The callback can be asynchronous or synchronous. + """ + + def __call__( + self, + request_id: str, + response: "LLMEmbeddingResponse | Exception", + /, + ) -> Awaitable[None] | None: + """Threaded embedding response handler.""" + ... + + +@runtime_checkable +class ThreadedLLMEmbeddingFunction(Protocol): + """Threaded embedding function. + + This function is used to make embedding requests in a threaded context. + + Args + ---- + request_id: str + The request ID associated with the embedding request. + input: list[str] + The input texts to be embedded. + **kwargs: Any + Additional keyword arguments. + + Returns + ------- + LLMEmbeddingResponse + The embedding response. + """ + + def __call__( + self, /, request_id: str, **kwargs: Unpack["LLMEmbeddingArgs"] + ) -> None: + """Threaded embedding function.""" + ... 
+ + +def _start_embedding_thread_pool( + *, + embedding: "LLMEmbeddingFunction", + quit_process_event: threading.Event, + concurrency: int, + queue_limit: int, +) -> tuple[ + list["EmbeddingThread"], + "LLMEmbeddingRequestQueue", + "LLMEmbeddingResponseQueue", +]: + threads: list[EmbeddingThread] = [] + input_queue: LLMEmbeddingRequestQueue = Queue(queue_limit) + output_queue: LLMEmbeddingResponseQueue = Queue() + for _ in range(concurrency): + thread = EmbeddingThread( + quit_process_event=quit_process_event, + input_queue=input_queue, + output_queue=output_queue, + embedding=embedding, + ) + thread.start() + threads.append(thread) + + return threads, input_queue, output_queue + + +@contextmanager +def embedding_thread_runner( + *, + embedding: "LLMEmbeddingFunction", + response_handler: ThreadedLLMEmbeddingResponseHandler, + concurrency: int, + queue_limit: int = 0, + metrics_store: "MetricsStore | None" = None, +) -> Iterator[ThreadedLLMEmbeddingFunction]: + """Run an embedding thread pool. + + Args + ---- + embedding: LLMEmbeddingFunction + The LLMEmbeddingFunction instance to use for processing requests. + response_handler: ThreadedLLMEmbeddingResponseHandler + The callback function to handle embedding responses. + (request_id, response|exception) -> Awaitable[None] | None + concurrency: int + The number of threads to spin up in a thread pool. + queue_limit: int (default=0) + The maximum number of items allowed in the input queue. + 0 means unlimited. + Set this to a value to create backpressure on the caller. + metrics_store: MetricsStore | None (default=None) + Optional metrics store to record runtime duration. + + Yields + ------ + ThreadedLLMEmbeddingFunction: + A function that can be used to submit embedding requests to the thread pool. + (input, request_id, **kwargs) -> None + + The thread pool will process the requests and invoke the provided callback + with the responses. + + same signature as LLMEmbeddingFunction but requires a `request_id` parameter + to identify the request and does not return anything. 
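The embedding runner mirrors the completion runner; a sketch under the same assumptions, with the mock `create_embedding_response` helper standing in for a real embedding function.

```python
from graphrag_llm.threading.embedding_thread_runner import embedding_thread_runner
from graphrag_llm.utils import create_embedding_response


def mock_embedding(**kwargs):
    # Stand-in for a real LLMEmbeddingFunction: one vector per input string.
    return create_embedding_response([0.1, 0.2, 0.3], batch_size=len(kwargs["input"]))


def handle_response(request_id, response):
    if isinstance(response, Exception):
        print(f"{request_id} failed: {response}")
    else:
        print(f"{request_id}: {len(response.embeddings)} embedding(s)")


with embedding_thread_runner(
    embedding=mock_embedding,
    response_handler=handle_response,
    concurrency=2,
    queue_limit=8,  # a bounded input queue applies backpressure to the submitting loop
) as submit:
    for i, text in enumerate(["alpha", "beta", "gamma"]):
        submit(request_id=f"emb-{i}", input=[text])
```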
+ """ + quit_process_event = threading.Event() + threads, input_queue, output_queue = _start_embedding_thread_pool( + embedding=embedding, + quit_process_event=quit_process_event, + concurrency=concurrency, + queue_limit=queue_limit, + ) + + def _process_output( + quit_process_event: threading.Event, + output_queue: "LLMEmbeddingResponseQueue", + callback: ThreadedLLMEmbeddingResponseHandler, + ): + while True and not quit_process_event.is_set(): + try: + data = output_queue.get(timeout=1) + except Empty: + continue + if data is None: + break + request_id, response = data + response = callback(request_id, response) + + if asyncio.iscoroutine(response): + response = asyncio.run(response) + + def _process_input(request_id: str, **kwargs: Unpack["LLMEmbeddingArgs"]): + if not request_id: + msg = "request_id needs to be passed as a keyword argument" + raise ValueError(msg) + input_queue.put((request_id, kwargs)) + + handle_response_thread = threading.Thread( + target=_process_output, + args=(quit_process_event, output_queue, response_handler), + ) + handle_response_thread.start() + + def _cleanup(): + for _ in threads: + input_queue.put(None) + + for thread in threads: + while thread.is_alive(): + thread.join(timeout=1) + + output_queue.put(None) + + while handle_response_thread.is_alive(): + handle_response_thread.join(timeout=1) + + start_time = time.time() + try: + yield _process_input + _cleanup() + except KeyboardInterrupt: + quit_process_event.set() + sys.exit(1) + finally: + end_time = time.time() + runtime = end_time - start_time + if metrics_store: + metrics_store.update_metrics(metrics={"runtime_duration_seconds": runtime}) diff --git a/packages/graphrag-llm/graphrag_llm/tokenizer/__init__.py b/packages/graphrag-llm/graphrag_llm/tokenizer/__init__.py new file mode 100644 index 0000000000..0010fd2b5f --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/tokenizer/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Tokenizer module.""" + +from graphrag_llm.tokenizer.tokenizer import Tokenizer +from graphrag_llm.tokenizer.tokenizer_factory import ( + create_tokenizer, + register_tokenizer, +) + +__all__ = [ + "Tokenizer", + "create_tokenizer", + "register_tokenizer", +] diff --git a/packages/graphrag/graphrag/tokenizer/litellm_tokenizer.py b/packages/graphrag-llm/graphrag_llm/tokenizer/lite_llm_tokenizer.py similarity index 56% rename from packages/graphrag/graphrag/tokenizer/litellm_tokenizer.py rename to packages/graphrag-llm/graphrag_llm/tokenizer/lite_llm_tokenizer.py index 1a85f56086..392f113eba 100644 --- a/packages/graphrag/graphrag/tokenizer/litellm_tokenizer.py +++ b/packages/graphrag-llm/graphrag_llm/tokenizer/lite_llm_tokenizer.py @@ -3,45 +3,52 @@ """LiteLLM Tokenizer.""" +from typing import Any + from litellm import decode, encode # type: ignore -from graphrag.tokenizer.tokenizer import Tokenizer +from graphrag_llm.tokenizer.tokenizer import Tokenizer -class LitellmTokenizer(Tokenizer): +class LiteLLMTokenizer(Tokenizer): """LiteLLM Tokenizer.""" - def __init__(self, model_name: str) -> None: + _model_id: str + + def __init__(self, *, model_id: str, **kwargs: Any) -> None: """Initialize the LiteLLM Tokenizer. Args ---- - model_name (str): The name of the LiteLLM model to use for tokenization. + model_id: str + The LiteLLM model ID, e.g., "openai/gpt-4o". """ - self.model_name = model_name + self._model_id = model_id def encode(self, text: str) -> list[int]: """Encode the given text into a list of tokens. 
        Args
        ----
-            text (str): The input text to encode.
+        text: str
+            The input text to encode.

        Returns
        -------
            list[int]: A list of tokens representing the encoded text.
        """
-        return encode(model=self.model_name, text=text)
+        return encode(model=self._model_id, text=text)

    def decode(self, tokens: list[int]) -> str:
        """Decode a list of tokens back into a string.

        Args
        ----
-            tokens (list[int]): A list of tokens to decode.
+        tokens: list[int]
+            A list of tokens to decode.

        Returns
        -------
            str: The decoded string from the list of tokens.
        """
-        return decode(model=self.model_name, tokens=tokens)
+        return decode(model=self._model_id, tokens=tokens)
diff --git a/packages/graphrag/graphrag/tokenizer/tiktoken_tokenizer.py b/packages/graphrag-llm/graphrag_llm/tokenizer/tiktoken_tokenizer.py
similarity index 52%
rename from packages/graphrag/graphrag/tokenizer/tiktoken_tokenizer.py
rename to packages/graphrag-llm/graphrag_llm/tokenizer/tiktoken_tokenizer.py
index fa6c6e9b43..9388c13b51 100644
--- a/packages/graphrag/graphrag/tokenizer/tiktoken_tokenizer.py
+++ b/packages/graphrag-llm/graphrag_llm/tokenizer/tiktoken_tokenizer.py
@@ -1,47 +1,55 @@
 # Copyright (c) 2024 Microsoft Corporation.
 # Licensed under the MIT License

 """Tiktoken Tokenizer."""
+
+from typing import Any

 import tiktoken

-from graphrag.tokenizer.tokenizer import Tokenizer
+from graphrag_llm.tokenizer.tokenizer import Tokenizer


 class TiktokenTokenizer(Tokenizer):
     """Tiktoken Tokenizer."""
+
+    _encoding_name: str

-    def __init__(self, encoding_name: str) -> None:
+    def __init__(self, *, encoding_name: str, **kwargs: Any) -> None:
         """Initialize the Tiktoken Tokenizer.

         Args
         ----
-            encoding_name (str): The name of the Tiktoken encoding to use for tokenization.
+        encoding_name: str
+            The tiktoken encoding name, e.g., "o200k_base".
         """
-        self.encoding = tiktoken.get_encoding(encoding_name)
+        self._encoding_name = encoding_name
+        self._encoding = tiktoken.get_encoding(encoding_name)

     def encode(self, text: str) -> list[int]:
         """Encode the given text into a list of tokens.

         Args
         ----
-            text (str): The input text to encode.
+        text: str
+            The input text to encode.

         Returns
         -------
            list[int]: A list of tokens representing the encoded text.
         """
-        return self.encoding.encode(text)
+        return self._encoding.encode(text)

     def decode(self, tokens: list[int]) -> str:
         """Decode a list of tokens back into a string.

         Args
         ----
-            tokens (list[int]): A list of tokens to decode.
+        tokens: list[int]
+            A list of tokens to decode.

         Returns
         -------
            str: The decoded string from the list of tokens.
         """
-        return self.encoding.decode(tokens)
+        return self._encoding.decode(tokens)
diff --git a/packages/graphrag-llm/graphrag_llm/tokenizer/tokenizer.py b/packages/graphrag-llm/graphrag_llm/tokenizer/tokenizer.py
new file mode 100644
index 0000000000..37bc3de721
--- /dev/null
+++ b/packages/graphrag-llm/graphrag_llm/tokenizer/tokenizer.py
@@ -0,0 +1,111 @@
+# Copyright (c) 2024 Microsoft Corporation.
+# Licensed under the MIT License
+
+"""Tokenizer Abstract Base Class."""
+
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from graphrag_llm.types import LLMCompletionMessagesParam
+
+
+class Tokenizer(ABC):
+    """Tokenizer Abstract Base Class."""
+
+    @abstractmethod
+    def __init__(self, **kwargs: Any) -> None:
+        """Initialize the tokenizer."""
+
+    @abstractmethod
+    def encode(self, text: str) -> list[int]:
+        """Encode the given text into a list of tokens.
+ + Args + ---- + text: str + The input text to encode. + + Returns + ------- + list[int]: A list of tokens representing the encoded text. + """ + raise NotImplementedError + + @abstractmethod + def decode(self, tokens: list[int]) -> str: + """Decode a list of tokens back into a string. + + Args + ---- + tokens: list[int] + A list of tokens to decode. + + Returns + ------- + str: The decoded string from the list of tokens. + """ + raise NotImplementedError + + def num_prompt_tokens( + self, + messages: "LLMCompletionMessagesParam", + ) -> int: + """Count the number of tokens in a prompt for a given model. + + Counts the number of tokens used for roles, names, and content in the messages. + + modeled after: https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb + + Args + ---- + messages: LLMCompletionMessagesParam + The messages comprising the prompt. Can either be a string or a list of message dicts. + + Returns + ------- + int: The number of tokens in the prompt. + """ + total_tokens = 3 # overhead for reply + tokens_per_message = 3 # fixed overhead per message + tokens_per_name = 1 # fixed overhead per name field + + if isinstance(messages, str): + return ( + self.num_tokens(messages) + + total_tokens + + tokens_per_message + + tokens_per_name + ) + + for message in messages: + total_tokens += tokens_per_message + if not isinstance(message, dict): + message = message.model_dump() + for key, value in message.items(): + if key == "content": + if isinstance(value, str): + total_tokens += self.num_tokens(value) + elif isinstance(value, list): + for part in value: + if isinstance(part, dict) and "text" in part: + total_tokens += self.num_tokens(part["text"]) + elif key == "role": + total_tokens += self.num_tokens(str(value)) + elif key == "name": + total_tokens += self.num_tokens(str(value)) + tokens_per_name + return total_tokens + + def num_tokens(self, text: str) -> int: + """Return the number of tokens in the given text. + + Args + ---- + text: str + The input text to analyze. + + Returns + ------- + int: The number of tokens in the input text. + """ + return len(self.encode(text)) diff --git a/packages/graphrag-llm/graphrag_llm/tokenizer/tokenizer_factory.py b/packages/graphrag-llm/graphrag_llm/tokenizer/tokenizer_factory.py new file mode 100644 index 0000000000..929991c616 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/tokenizer/tokenizer_factory.py @@ -0,0 +1,89 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Tokenizer factory.""" + +from collections.abc import Callable +from typing import TYPE_CHECKING + +from graphrag_common.factory import Factory + +from graphrag_llm.config.types import TokenizerType +from graphrag_llm.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_common.factory import ServiceScope + + from graphrag_llm.config.tokenizer_config import TokenizerConfig + + +class TokenizerFactory(Factory[Tokenizer]): + """Factory for creating Tokenizer instances.""" + + +tokenizer_factory = TokenizerFactory() + + +def register_tokenizer( + tokenizer_type: str, + tokenizer_initializer: Callable[..., Tokenizer], + scope: "ServiceScope" = "transient", +) -> None: + """Register a custom tokenizer implementation. + + Args + ---- + tokenizer_type: str + The tokenizer id to register. + tokenizer_initializer: Callable[..., Tokenizer] + The tokenizer initializer to register. 
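A short sketch of the tokenizer API above. The encoding name `"o200k_base"` (used by the gpt-4o model family) is just one example of a valid `tiktoken` encoding; any name accepted by `tiktoken.get_encoding()` works.

```python
from graphrag_llm.tokenizer.tiktoken_tokenizer import TiktokenTokenizer

tokenizer = TiktokenTokenizer(encoding_name="o200k_base")

print(tokenizer.num_tokens("hello world"))

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize GraphRAG in one sentence."},
]
# num_prompt_tokens adds the fixed per-message and reply overhead on top of the content
# tokens, following the OpenAI token-counting cookbook referenced above.
print(tokenizer.num_prompt_tokens(messages))
```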
+ """ + tokenizer_factory.register(tokenizer_type, tokenizer_initializer, scope) + + +def create_tokenizer(tokenizer_config: "TokenizerConfig") -> Tokenizer: + """Create a Tokenizer instance based on the configuration. + + Args + ---- + tokenizer_config: TokenizerConfig + The configuration for the tokenizer. + + Returns + ------- + Tokenizer: + An instance of a Tokenizer subclass. + """ + strategy = tokenizer_config.type + init_args = tokenizer_config.model_dump() + + if strategy not in tokenizer_factory: + match strategy: + case TokenizerType.LiteLLM: + from graphrag_llm.tokenizer.lite_llm_tokenizer import ( + LiteLLMTokenizer, + ) + + register_tokenizer( + TokenizerType.LiteLLM, + LiteLLMTokenizer, + scope="singleton", + ) + case TokenizerType.Tiktoken: + from graphrag_llm.tokenizer.tiktoken_tokenizer import ( + TiktokenTokenizer, + ) + + register_tokenizer( + TokenizerType.Tiktoken, + TiktokenTokenizer, + scope="singleton", + ) + case _: + msg = f"TokenizerConfig.type '{strategy}' is not registered in the TokenizerFactory. Registered strategies: {', '.join(tokenizer_factory.keys())}" + raise ValueError(msg) + + return tokenizer_factory.create( + strategy=strategy, + init_args=init_args, + ) diff --git a/packages/graphrag-llm/graphrag_llm/types/__init__.py b/packages/graphrag-llm/graphrag_llm/types/__init__.py new file mode 100644 index 0000000000..05c9b26687 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/types/__init__.py @@ -0,0 +1,59 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Types module for graphrag-llm.""" + +from graphrag_llm.types.types import ( + AsyncLLMCompletionFunction, + AsyncLLMEmbeddingFunction, + AsyncLLMFunction, + LLMChoice, + LLMChoiceChunk, + LLMChoiceDelta, + LLMCompletionArgs, + LLMCompletionChunk, + LLMCompletionFunction, + LLMCompletionFunctionToolParam, + LLMCompletionMessage, + LLMCompletionMessagesParam, + LLMCompletionResponse, + LLMCompletionTokensDetails, + LLMCompletionUsage, + LLMEmbedding, + LLMEmbeddingArgs, + LLMEmbeddingFunction, + LLMEmbeddingResponse, + LLMEmbeddingUsage, + LLMFunction, + LLMPromptTokensDetails, + Metrics, + ResponseFormat, +) + +__all__ = [ + "AsyncLLMCompletionFunction", + "AsyncLLMEmbeddingFunction", + "AsyncLLMFunction", + "LLMChoice", + "LLMChoiceChunk", + "LLMChoiceDelta", + "LLMCompletionArgs", + "LLMCompletionChunk", + "LLMCompletionFunction", + "LLMCompletionFunctionToolParam", + "LLMCompletionMessage", + "LLMCompletionMessagesParam", + "LLMCompletionMessagesParam", + "LLMCompletionResponse", + "LLMCompletionTokensDetails", + "LLMCompletionUsage", + "LLMEmbedding", + "LLMEmbeddingArgs", + "LLMEmbeddingFunction", + "LLMEmbeddingResponse", + "LLMEmbeddingUsage", + "LLMFunction", + "LLMPromptTokensDetails", + "Metrics", + "ResponseFormat", +] diff --git a/packages/graphrag-llm/graphrag_llm/types/types.py b/packages/graphrag-llm/graphrag_llm/types/types.py new file mode 100644 index 0000000000..0980cba3aa --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/types/types.py @@ -0,0 +1,265 @@ +# Copyright (c) 2024 Microsoft Corporation. 
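The factory path is sketched below. `TokenizerConfig` lives in `graphrag_llm.config` and is not part of this excerpt, so the field names used here (`type`, `encoding_name`) are assumptions inferred from how `create_tokenizer` forwards `model_dump()` to the tokenizer initializer.

```python
from graphrag_llm.config.tokenizer_config import TokenizerConfig
from graphrag_llm.config.types import TokenizerType
from graphrag_llm.tokenizer import create_tokenizer

# TokenizerConfig field names are assumed; see the note above.
tokenizer = create_tokenizer(
    TokenizerConfig(type=TokenizerType.Tiktoken, encoding_name="o200k_base")
)
print(tokenizer.num_tokens("hello world"))
```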
+# Licensed under the MIT License + +"""Types for graphrag-llm.""" + +from collections.abc import AsyncIterator, Awaitable, Iterator, Sequence +from typing import ( + Any, + Generic, + Literal, + Protocol, + Required, + TypeVar, + Unpack, + runtime_checkable, +) + +from litellm import ( + AnthropicThinkingParam, + ChatCompletionAudioParam, + ChatCompletionModality, + ChatCompletionPredictionContentParam, + OpenAIWebSearchOptions, +) +from openai.types.chat.chat_completion import ( + ChatCompletion, + Choice, +) +from openai.types.chat.chat_completion_chunk import ChatCompletionChunk, ChoiceDelta +from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice +from openai.types.chat.chat_completion_function_tool_param import ( + ChatCompletionFunctionToolParam, +) +from openai.types.chat.chat_completion_message import ChatCompletionMessage +from openai.types.chat.chat_completion_message_param import ChatCompletionMessageParam +from openai.types.completion_usage import ( + CompletionTokensDetails, + CompletionUsage, + PromptTokensDetails, +) +from openai.types.create_embedding_response import CreateEmbeddingResponse, Usage +from openai.types.embedding import Embedding +from pydantic import BaseModel, computed_field +from typing_extensions import TypedDict + +LLMCompletionMessagesParam = str | Sequence[ChatCompletionMessageParam | dict[str, Any]] + +LLMChoice = Choice +LLMCompletionMessage = ChatCompletionMessage + +LLMCompletionChunk = ChatCompletionChunk +LLMChoiceChunk = ChunkChoice +LLMChoiceDelta = ChoiceDelta + +LLMCompletionUsage = CompletionUsage +LLMPromptTokensDetails = PromptTokensDetails +LLMCompletionTokensDetails = CompletionTokensDetails + + +LLMEmbedding = Embedding +LLMEmbeddingUsage = Usage + +LLMCompletionFunctionToolParam = ChatCompletionFunctionToolParam + + +Metrics = dict[str, float] +"""Represents single request metrics and aggregated metrics for an entire model. + +example: { + "duration_ms": 123.45, + "successful_requests": 1, +} + +On the individual request level, successful_requests will be either 0 or 1. +On the aggregated model level, successful_requests will be the sum of all +successful requests. +""" + +ResponseFormat = TypeVar( + "ResponseFormat", + bound=BaseModel, +) +"""Generic type variable for structured response format.""" + + +class LLMCompletionResponse(ChatCompletion, Generic[ResponseFormat]): + """LLM Completion Response extending OpenAI ChatCompletion. + + The response type returned by graphrag-llm LLMCompletionFunction. + graphrag-llm automatically handles structured response parsing based on the + provided ResponseFormat model. + """ + + formatted_response: ResponseFormat | None = None # type: ignore + """Formatted response according to the specified response_format json schema.""" + + @computed_field + @property + def content(self) -> str: + """Get the content of the first choice message.""" + return self.choices[0].message.content or "" + + +class LLMCompletionArgs( + TypedDict, Generic[ResponseFormat], total=False, extra_items=Any +): + """Arguments for LLMCompletionFunction. + + Same signature as litellm.completion but without the `model` parameter + as this is already set in the model configuration. 
+ """ + + messages: Required[LLMCompletionMessagesParam] + response_format: type[ResponseFormat] | None + timeout: float | None + temperature: float | None + top_p: float | None + n: int | None + stream: bool | None + stream_options: dict | None + stop: None + max_completion_tokens: int | None + max_tokens: int | None + modalities: list[ChatCompletionModality] | None + prediction: ChatCompletionPredictionContentParam | None + audio: ChatCompletionAudioParam | None + presence_penalty: float | None + frequency_penalty: float | None + logit_bias: dict | None + user: str | None + reasoning_effort: ( + Literal["none", "minimal", "low", "medium", "high", "default"] | None + ) + seed: int | None + tools: list | None + tool_choice: str | dict | None + logprobs: bool | None + top_logprobs: int | None + parallel_tool_calls: bool | None + web_search_options: OpenAIWebSearchOptions | None + deployment_id: Any + extra_headers: dict | None + safety_identifier: str | None + functions: list | None + function_call: str | None + thinking: AnthropicThinkingParam | None + + +@runtime_checkable +class LLMCompletionFunction(Protocol): + """Synchronous completion function. + + Same signature as litellm.completion but without the `model` parameter + as this is already set in the model configuration. + """ + + def __call__( + self, /, **kwargs: Unpack[LLMCompletionArgs[ResponseFormat]] + ) -> LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk]: + """Completion function.""" + ... + + +@runtime_checkable +class AsyncLLMCompletionFunction(Protocol): + """Asynchronous completion function. + + Same signature as litellm.completion but without the `model` parameter + as this is already set in the model configuration. + """ + + def __call__( + self, /, **kwargs: Unpack[LLMCompletionArgs[ResponseFormat]] + ) -> Awaitable[ + LLMCompletionResponse[ResponseFormat] | AsyncIterator[LLMCompletionChunk] + ]: + """Completion function.""" + ... + + +class LLMEmbeddingResponse(CreateEmbeddingResponse): + """LLM Embedding Response extending OpenAI CreateEmbeddingResponse. + + The response type returned by graphrag-llm LLMEmbeddingFunction. + Adds utilities for accessing embeddings. + """ + + @computed_field + @property + def embeddings(self) -> list[list[float]]: + """Get the embeddings as a list of lists of floats.""" + return [data.embedding for data in self.data] + + @computed_field + @property + def first_embedding(self) -> list[float]: + """Get the first embedding.""" + return self.embeddings[0] if self.embeddings else [] + + +class LLMEmbeddingArgs(TypedDict, total=False, extra_items=Any): + """Arguments for embedding functions. + + Same signature as litellm.embedding but without the `model` parameter + as this is already set in the model configuration. + """ + + input: Required[list[str]] + dimensions: int | None + encoding_format: str | None + timeout: int + user: str | None + + +@runtime_checkable +class LLMEmbeddingFunction(Protocol): + """Synchronous embedding function. + + Same signature as litellm.embedding but without the `model` parameter + as this is already set in the model configuration. + """ + + def __call__( + self, + /, + **kwargs: Unpack[LLMEmbeddingArgs], + ) -> LLMEmbeddingResponse: + """Embedding function.""" + ... + + +@runtime_checkable +class AsyncLLMEmbeddingFunction(Protocol): + """Asynchronous embedding function. + + Same signature as litellm.aembedding but without the `model` parameter + as this is already set in the model configuration. 
+ """ + + async def __call__( + self, + /, + **kwargs: Unpack[LLMEmbeddingArgs], + ) -> LLMEmbeddingResponse: + """Embedding function.""" + ... + + +LLMFunction = TypeVar("LLMFunction", LLMCompletionFunction, LLMEmbeddingFunction) +"""Generic representation of completion and embedding functions. + +This type is used in the middleware pipeline as the pipeline can handle both +completion and embedding functions. That way services such as retries, caching, +and rate limiting can be reused for both completions and embeddings. +""" + +AsyncLLMFunction = TypeVar( + "AsyncLLMFunction", AsyncLLMCompletionFunction, AsyncLLMEmbeddingFunction +) +"""Generic representation of asynchronous completion and embedding functions. + +This type is used in the middleware pipeline as the pipeline can handle both +completion and embedding functions. That way services such as retries, caching, +and rate limiting can be reused for both completions and embeddings. +""" diff --git a/packages/graphrag-llm/graphrag_llm/utils/__init__.py b/packages/graphrag-llm/graphrag_llm/utils/__init__.py new file mode 100644 index 0000000000..8ae722bfbc --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/utils/__init__.py @@ -0,0 +1,40 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Utils module.""" + +from graphrag_llm.utils.completion_messages_builder import ( + CompletionContentPartBuilder, + CompletionMessagesBuilder, +) +from graphrag_llm.utils.create_completion_response import ( + create_completion_response, +) +from graphrag_llm.utils.create_embedding_response import create_embedding_response +from graphrag_llm.utils.function_tool_manager import ( + FunctionArgumentModel, + FunctionDefinition, + FunctionToolManager, + ToolMessage, +) +from graphrag_llm.utils.gather_completion_response import ( + gather_completion_response, + gather_completion_response_async, +) +from graphrag_llm.utils.structure_response import ( + structure_completion_response, +) + +__all__ = [ + "CompletionContentPartBuilder", + "CompletionMessagesBuilder", + "FunctionArgumentModel", + "FunctionDefinition", + "FunctionToolManager", + "ToolMessage", + "create_completion_response", + "create_embedding_response", + "gather_completion_response", + "gather_completion_response_async", + "structure_completion_response", +] diff --git a/packages/graphrag-llm/graphrag_llm/utils/completion_messages_builder.py b/packages/graphrag-llm/graphrag_llm/utils/completion_messages_builder.py new file mode 100644 index 0000000000..f61e60e6c4 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/utils/completion_messages_builder.py @@ -0,0 +1,328 @@ +# Copyright (c) 2024 Microsoft Corporation. 
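The computed fields on the response models are easiest to see with the mock constructors that follow in `graphrag_llm.utils`; a small illustrative sketch:

```python
from graphrag_llm.utils import create_completion_response, create_embedding_response

completion = create_completion_response("The answer is 42.")
print(completion.content)          # "The answer is 42."

embedding = create_embedding_response([0.1, 0.2, 0.3])
print(embedding.first_embedding)   # [0.1, 0.2, 0.3]
print(len(embedding.embeddings))   # 1
```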
+# Licensed under the MIT License + +"""ChatCompletionMessageParamBuilder class.""" + +from collections.abc import Iterable +from typing import TYPE_CHECKING, Literal + +from openai.types.chat.chat_completion_assistant_message_param import ( + ChatCompletionAssistantMessageParam, +) +from openai.types.chat.chat_completion_content_part_image_param import ( + ChatCompletionContentPartImageParam, + ImageURL, +) +from openai.types.chat.chat_completion_content_part_input_audio_param import ( + ChatCompletionContentPartInputAudioParam, + InputAudio, +) +from openai.types.chat.chat_completion_content_part_param import ( + ChatCompletionContentPartParam, +) +from openai.types.chat.chat_completion_content_part_text_param import ( + ChatCompletionContentPartTextParam, +) +from openai.types.chat.chat_completion_developer_message_param import ( + ChatCompletionDeveloperMessageParam, +) +from openai.types.chat.chat_completion_function_message_param import ( + ChatCompletionFunctionMessageParam, +) +from openai.types.chat.chat_completion_message import ChatCompletionMessage +from openai.types.chat.chat_completion_system_message_param import ( + ChatCompletionSystemMessageParam, +) +from openai.types.chat.chat_completion_tool_message_param import ( + ChatCompletionToolMessageParam, +) +from openai.types.chat.chat_completion_user_message_param import ( + ChatCompletionUserMessageParam, +) + +if TYPE_CHECKING: + from openai.types.chat.chat_completion_message_param import ( + ChatCompletionMessageParam, + ) + + from graphrag_llm.types import LLMCompletionMessagesParam + + +class CompletionMessagesBuilder: + """CompletionMessagesBuilder class.""" + + def __init__(self) -> None: + """Initialize CompletionMessagesBuilder.""" + self._messages: list[ChatCompletionMessageParam] = [] + + def add_system_message( + self, + content: str | Iterable[ChatCompletionContentPartTextParam], + name: str | None = None, + ) -> "CompletionMessagesBuilder": + """Add system message. + + Parameters + ---------- + content : str | Iterable[ChatCompletionContentPartTextParam] + Content of the system message. + If passing in Iterable[ChatCompletionContentPartTextParam], may use + `CompletionContentPartBuilder` to build the content. + name : str | None + Optional name for the participant. + + Returns + ------- + None + """ + if name: + self._messages.append( + ChatCompletionSystemMessageParam( + role="system", content=content, name=name + ) + ) + else: + self._messages.append( + ChatCompletionSystemMessageParam(role="system", content=content) + ) + return self + + def add_developer_message( + self, + content: str | Iterable[ChatCompletionContentPartTextParam], + name: str | None = None, + ) -> "CompletionMessagesBuilder": + """Add developer message. + + Parameters + ---------- + content : str | Iterable[ChatCompletionContentPartTextParam] + Content of the developer message. + If passing in Iterable[ChatCompletionContentPartTextParam], may use + `CompletionContentPartBuilder` to build the content. + name : str | None + Optional name for the participant. + + Returns + ------- + None + """ + if name: + self._messages.append( + ChatCompletionDeveloperMessageParam( + role="developer", content=content, name=name + ) + ) + else: + self._messages.append( + ChatCompletionDeveloperMessageParam(role="developer", content=content) + ) + + return self + + def add_tool_message( + self, + content: str | Iterable[ChatCompletionContentPartTextParam], + tool_call_id: str, + ) -> "CompletionMessagesBuilder": + """Add developer message. 
+ + Parameters + ---------- + content : str | Iterable[ChatCompletionContentPartTextParam] + Content of the developer message. + If passing in Iterable[ChatCompletionContentPartTextParam], may use + `CompletionContentPartBuilder` to build the content. + tool_call_id : str + ID of the tool call that this message is responding to. + + Returns + ------- + None + """ + self._messages.append( + ChatCompletionToolMessageParam( + role="tool", content=content, tool_call_id=tool_call_id + ) + ) + + return self + + def add_function_message( + self, + function_name: str, + content: str | None = None, + ) -> "CompletionMessagesBuilder": + """Add function message. + + Parameters + ---------- + function_name : str + Name of the function to call. + content : str | None + Content of the function message. + + Returns + ------- + None + """ + self._messages.append( + ChatCompletionFunctionMessageParam( + role="function", content=content, name=function_name + ) + ) + + return self + + def add_user_message( + self, + content: str | Iterable[ChatCompletionContentPartParam], + name: str | None = None, + ) -> "CompletionMessagesBuilder": + """Add user message. + + Parameters + ---------- + content : str | Iterable[ChatCompletionContentPartParam] + Content of the user message. + If passing in Iterable[ChatCompletionContentPartParam], may use + `CompletionContentPartBuilder` to build the content. + name : str | None + Optional name for the participant. + + Returns + ------- + None + """ + if name: + self._messages.append( + ChatCompletionUserMessageParam(role="user", content=content, name=name) + ) + else: + self._messages.append( + ChatCompletionUserMessageParam(role="user", content=content) + ) + + return self + + def add_assistant_message( + self, + message: str | ChatCompletionMessage, + name: str | None = None, + ) -> "CompletionMessagesBuilder": + """Add assistant message. + + Parameters + ---------- + message : ChatCompletionMessage + Previous response message. + name : str | None + Optional name for the participant. + + Returns + ------- + None + """ + args = { + "role": "assistant", + "content": message if isinstance(message, str) else message.content, + "refusal": None if isinstance(message, str) else message.refusal, + } + if name: + args["name"] = name + if not isinstance(message, str): + if message.function_call: + args["function_call"] = message.function_call + if message.tool_calls: + args["tool_calls"] = message.tool_calls + if message.audio: + args["audio"] = message.audio + + self._messages.append(ChatCompletionAssistantMessageParam(**args)) + + return self + + def build(self) -> "LLMCompletionMessagesParam": + """Get messages.""" + return self._messages + + +class CompletionContentPartBuilder: + """CompletionContentPartBuilder class.""" + + def __init__(self) -> None: + """Initialize CompletionContentPartBuilder.""" + self._content_parts: list[ChatCompletionContentPartParam] = [] + + def add_text_part(self, text: str) -> "CompletionContentPartBuilder": + """Add text part. + + Parameters + ---------- + text : str + Text content. + + Returns + ------- + None + """ + self._content_parts.append( + ChatCompletionContentPartTextParam(text=text, type="text") + ) + return self + + def add_image_part( + self, url: str, detail: Literal["auto", "low", "high"] + ) -> "CompletionContentPartBuilder": + """Add image part. + + Parameters + ---------- + url : str + Either an URL of the image or the base64 encoded image data. + detail : Literal["auto", "low", "high"] + Specifies the detail level of the image. 
+ + Returns + ------- + None + """ + self._content_parts.append( + ChatCompletionContentPartImageParam( + image_url=ImageURL(url=url, detail=detail), type="image_url" + ) + ) + return self + + def add_audio_part( + self, data: str, _format: Literal["wav", "mp3"] + ) -> "CompletionContentPartBuilder": + """Add audio part. + + Parameters + ---------- + data : str + Base64 encoded audio data. + _format : Literal["wav", "mp3"] + The format of the encoded audio data. Currently supports "wav" and "mp3". + + Returns + ------- + None + """ + self._content_parts.append( + ChatCompletionContentPartInputAudioParam( + input_audio=InputAudio(data=data, format=_format), type="input_audio" + ) + ) + return self + + def build(self) -> list[ChatCompletionContentPartParam]: + """Get content parts. + + Returns + ------- + list[ChatCompletionContentPartParam] + List of content parts. + """ + return self._content_parts diff --git a/packages/graphrag-llm/graphrag_llm/utils/create_completion_response.py b/packages/graphrag-llm/graphrag_llm/utils/create_completion_response.py new file mode 100644 index 0000000000..7f7ffd9091 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/utils/create_completion_response.py @@ -0,0 +1,45 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Create completion response.""" + +from graphrag_llm.types import ( + LLMChoice, + LLMCompletionMessage, + LLMCompletionResponse, + LLMCompletionUsage, +) + + +def create_completion_response(response: str) -> LLMCompletionResponse: + """Create a completion response object. + + Args: + response: The completion response string. + + Returns + ------- + LLMCompletionResponse: The completion response object. + """ + return LLMCompletionResponse( + id="completion-id", + object="chat.completion", + created=0, + model="mock-model", + choices=[ + LLMChoice( + index=0, + message=LLMCompletionMessage( + role="assistant", + content=response, + ), + finish_reason="stop", + ) + ], + usage=LLMCompletionUsage( + prompt_tokens=0, + completion_tokens=0, + total_tokens=0, + ), + formatted_response=None, + ) diff --git a/packages/graphrag-llm/graphrag_llm/utils/create_embedding_response.py b/packages/graphrag-llm/graphrag_llm/utils/create_embedding_response.py new file mode 100644 index 0000000000..b651a32816 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/utils/create_embedding_response.py @@ -0,0 +1,39 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Create embedding response utilities.""" + +from graphrag_llm.types import LLMEmbedding, LLMEmbeddingResponse, LLMEmbeddingUsage + + +def create_embedding_response( + embeddings: list[float], batch_size: int = 1 +) -> LLMEmbeddingResponse: + """Create a CreateEmbeddingResponse object. + + Args: + embeddings: List of embedding vectors. + model: The model used to create the embeddings. + + Returns + ------- + An LLMEmbeddingResponse object. 
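A sketch of the two builders working together; the message text and image URL are placeholders.

```python
from graphrag_llm.utils import CompletionContentPartBuilder, CompletionMessagesBuilder

user_content = (
    CompletionContentPartBuilder()
    .add_text_part("What is shown in this image?")
    .add_image_part("https://example.com/chart.png", detail="auto")
    .build()
)

messages = (
    CompletionMessagesBuilder()
    .add_system_message("You are a helpful assistant.")
    .add_user_message(user_content)
    .build()
)
# `messages` can be passed directly as the messages argument of an LLMCompletionFunction.
```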
+ """ + embeddings_objects = [ + LLMEmbedding( + object="embedding", + embedding=embeddings, + index=index, + ) + for index in range(batch_size) + ] + + return LLMEmbeddingResponse( + object="list", + data=embeddings_objects, + model="mock-model", + usage=LLMEmbeddingUsage( + prompt_tokens=0, + total_tokens=0, + ), + ) diff --git a/packages/graphrag-llm/graphrag_llm/utils/function_tool_manager.py b/packages/graphrag-llm/graphrag_llm/utils/function_tool_manager.py new file mode 100644 index 0000000000..029008c6b0 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/utils/function_tool_manager.py @@ -0,0 +1,138 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Function tool manager.""" + +import json +from collections.abc import Callable +from typing import TYPE_CHECKING, Any, Generic, TypeVar + +from openai import pydantic_function_tool +from pydantic import BaseModel +from typing_extensions import TypedDict + +if TYPE_CHECKING: + from graphrag_llm.types import LLMCompletionFunctionToolParam, LLMCompletionResponse + +FunctionArgumentModel = TypeVar( + "FunctionArgumentModel", bound=BaseModel, covariant=True +) + + +class FunctionDefinition(TypedDict, Generic[FunctionArgumentModel]): + """Function definition.""" + + name: str + description: str + input_model: type[FunctionArgumentModel] + function: Callable[[FunctionArgumentModel], str] + + +class ToolMessage(TypedDict): + """Function tool response message to be added to message history.""" + + content: str + tool_call_id: str + + +class FunctionToolManager: + """Function tool manager.""" + + _tools: dict[str, FunctionDefinition[Any]] + + def __init__(self) -> None: + """Initialize FunctionToolManager.""" + self._tools = {} + + def register_function_tool( + self, + *, + name: str, + description: str, + input_model: type[FunctionArgumentModel], + function: Callable[[FunctionArgumentModel], str], + ) -> None: + """Register function tool. + + Args + ---- + name: str + The name of the function tool. + description: str + The description of the function tool. + input_model: type[T] + The pydantic model type for the function tool input. + function: Callable[[T], str] + The function to call for the function tool. + """ + self._tools[name] = { + "name": name, + "description": description, + "input_model": input_model, + "function": function, + } + + def definitions(self) -> list["LLMCompletionFunctionToolParam"]: + """Get function tool definitions. + + Returns + ------- + list[LLMCompletionFunctionToolParam] + List of function tool definitions. + """ + return [ + pydantic_function_tool( + tool_def["input_model"], + name=tool_def["name"], + description=tool_def["description"], + ) + for tool_def in self._tools.values() + ] + + def call_functions(self, response: "LLMCompletionResponse") -> list[ToolMessage]: + """Call functions based on the response. + + Args + ---- + response: LLMCompletionResponse + The LLM completion response. + + Returns + ------- + list[ToolMessage] + The list of tool response messages to be added to the message history. + """ + if not response.choices[0].message.tool_calls: + return [] + + tool_messages: list[ToolMessage] = [] + + for tool_call in response.choices[0].message.tool_calls: + if tool_call.type != "function": + continue + tool_id = tool_call.id + function_name = tool_call.function.name + function_args = tool_call.function.arguments + + if function_name not in self._tools: + msg = f"Function '{function_name}' not registered." 
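`create_completion_response` and `create_embedding_response` above are small factories that fabricate well-formed response objects without any network call, which makes them handy in unit tests and offline examples. A short sketch; the module paths mirror the new files in this diff (direct submodule imports), not a documented public re-export.

```python
# Sketch: using the mock-response factories above in a test.
from graphrag_llm.utils.create_completion_response import create_completion_response
from graphrag_llm.utils.create_embedding_response import create_embedding_response

completion = create_completion_response("Paris is the capital of France.")
assert completion.choices[0].message.content == "Paris is the capital of France."
assert completion.model == "mock-model"

# batch_size controls how many LLMEmbedding entries are produced,
# each carrying the same vector.
embedding_response = create_embedding_response([0.1, 0.2, 0.3], batch_size=2)
assert len(embedding_response.data) == 2
assert embedding_response.data[0].embedding == [0.1, 0.2, 0.3]
```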
+ raise ValueError(msg) + + tool_def = self._tools[function_name] + input_model = tool_def["input_model"] + function = tool_def["function"] + + try: + parsed_args_dict = json.loads(function_args) + input_model_instance = input_model(**parsed_args_dict) + except Exception as e: + msg = f"Failed to parse arguments for function '{function_name}': {e}" + raise ValueError(msg) from e + + result = function(input_model_instance) + tool_messages.append({ + "content": result, + "tool_call_id": tool_id, + }) + + return tool_messages diff --git a/packages/graphrag-llm/graphrag_llm/utils/gather_completion_response.py b/packages/graphrag-llm/graphrag_llm/utils/gather_completion_response.py new file mode 100644 index 0000000000..0722e95efd --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/utils/gather_completion_response.py @@ -0,0 +1,57 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Gather Completion Response Utility.""" + +from collections.abc import AsyncIterator, Iterator +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from graphrag_llm.types import ( + LLMCompletionChunk, + LLMCompletionResponse, + ) + + +def gather_completion_response( + response: "LLMCompletionResponse | Iterator[LLMCompletionChunk]", +) -> str: + """Gather completion response from an iterator of response chunks. + + Args + ---- + response: LMChatCompletion | Iterator[LLMChatCompletionChunk] + The completion response or an iterator of response chunks. + + Returns + ------- + The gathered response as a single string. + """ + if isinstance(response, Iterator): + return "".join(chunk.choices[0].delta.content or "" for chunk in response) + + return response.choices[0].message.content or "" + + +async def gather_completion_response_async( + response: "LLMCompletionResponse | AsyncIterator[LLMCompletionChunk]", +) -> str: + """Gather completion response from an iterator of response chunks. + + Args + ---- + response: LMChatCompletion | AsyncIterator[LLMChatCompletionChunk] + The completion response or an iterator of response chunks. + + Returns + ------- + The gathered response as a single string. + """ + if isinstance(response, AsyncIterator): + gathered_content = "" + async for chunk in response: + gathered_content += chunk.choices[0].delta.content or "" + + return gathered_content + + return response.choices[0].message.content or "" diff --git a/packages/graphrag-llm/graphrag_llm/utils/structure_response.py b/packages/graphrag-llm/graphrag_llm/utils/structure_response.py new file mode 100644 index 0000000000..dfc261bc87 --- /dev/null +++ b/packages/graphrag-llm/graphrag_llm/utils/structure_response.py @@ -0,0 +1,29 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Structure response as pydantic base model.""" + +import json +from typing import Any, TypeVar + +from pydantic import BaseModel + +T = TypeVar("T", bound=BaseModel, covariant=True) + + +def structure_completion_response(response: str, model: type[T]) -> T: + """Structure completion response as pydantic base model. + + Args + ---- + response: str + The completion response as a JSON string. + model: type[T] + The pydantic base model type to structure the response into. + + Returns + ------- + The structured response as a pydantic base model. 
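`FunctionToolManager` pairs a pydantic input model with a callable, exposes OpenAI-style tool definitions via `definitions()`, and converts tool calls in a response back into tool messages via `call_functions`. A hedged sketch of that round trip: it assumes `llm_completion` is an `LLMCompletion` configured as in the notebooks below and that a `tools=` argument is forwarded to the underlying completion API (as with the OpenAI/litellm SDKs); the weather function itself is hypothetical.

```python
# Hedged sketch of the tool-call round trip with FunctionToolManager.
# Assumptions: `llm_completion` is an LLMCompletion created as in the notebooks,
# and `tools=` is passed through to the underlying completion API.
from pydantic import BaseModel, Field

from graphrag_llm.utils.function_tool_manager import FunctionToolManager


class WeatherArgs(BaseModel):
    """Arguments for the hypothetical weather lookup tool."""

    city: str = Field(description="City to look up")


def get_weather(args: WeatherArgs) -> str:
    # Stand-in for a real lookup.
    return f"It is sunny and 15 C in {args.city}."


manager = FunctionToolManager()
manager.register_function_tool(
    name="get_weather",
    description="Get the current weather for a city",
    input_model=WeatherArgs,
    function=get_weather,
)

response = llm_completion.completion(
    messages="What is the weather in Seattle?",
    tools=manager.definitions(),
)
# Execute any requested tool calls; each returned entry has `content` and
# `tool_call_id` and can be appended to the message history for a follow-up call.
tool_messages = manager.call_functions(response)  # type: ignore
```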
+ """ + parsed_dict: dict[str, Any] = json.loads(response) + return model(**parsed_dict) diff --git a/packages/graphrag-llm/notebooks/01_basic.ipynb b/packages/graphrag-llm/notebooks/01_basic.ipynb new file mode 100644 index 0000000000..d015231e35 --- /dev/null +++ b/packages/graphrag-llm/notebooks/01_basic.ipynb @@ -0,0 +1,415 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "6e35563a", + "metadata": {}, + "source": [ + "# Basic Completion and Embedding Examples\n", + "\n", + "## Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "aa03e40d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The capital of France is Paris.\n", + "The capital of France is Paris.\n", + "Full Response:\n", + "{\n", + " \"id\": \"chatcmpl-CyPuxOjKPmvuCvJwTJiLRH1lwO77J\",\n", + " \"choices\": [\n", + " {\n", + " \"finish_reason\": \"stop\",\n", + " \"index\": 0,\n", + " \"logprobs\": null,\n", + " \"message\": {\n", + " \"content\": \"The capital of France is Paris.\",\n", + " \"refusal\": null,\n", + " \"role\": \"assistant\",\n", + " \"annotations\": [],\n", + " \"audio\": null,\n", + " \"function_call\": null,\n", + " \"tool_calls\": null\n", + " },\n", + " \"provider_specific_fields\": {}\n", + " }\n", + " ],\n", + " \"created\": 1768515343,\n", + " \"model\": \"gpt-4o-2024-05-13\",\n", + " \"object\": \"chat.completion\",\n", + " \"service_tier\": null,\n", + " \"system_fingerprint\": \"fp_3eed281ddb\",\n", + " \"usage\": {\n", + " \"completion_tokens\": 8,\n", + " \"prompt_tokens\": 14,\n", + " \"total_tokens\": 22,\n", + " \"completion_tokens_details\": {\n", + " \"accepted_prediction_tokens\": 0,\n", + " \"audio_tokens\": 0,\n", + " \"reasoning_tokens\": 0,\n", + " \"rejected_prediction_tokens\": 0,\n", + " \"text_tokens\": null\n", + " },\n", + " \"prompt_tokens_details\": {\n", + " \"audio_tokens\": 0,\n", + " \"cached_tokens\": 0,\n", + " \"text_tokens\": null,\n", + " \"image_tokens\": null\n", + " }\n", + " },\n", + " \"formatted_response\": null,\n", + " \"content\": \"The capital of France is Paris.\"\n", + "}\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import os\n", + "from collections.abc import AsyncIterator, Iterator\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "from graphrag_llm.types import LLMCompletionChunk, LLMCompletionResponse\n", + "\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "response: LLMCompletionResponse | Iterator[LLMCompletionChunk] = (\n", + " llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + " )\n", + ")\n", + "\n", + "if isinstance(response, Iterator):\n", + " # Streaming response\n", + " for chunk in response:\n", + " print(chunk.choices[0].delta.content or 
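The two helpers above are small pieces of glue: `gather_completion_response` collapses either a plain response or a stream of chunks into a single string, and `structure_completion_response` parses a JSON string into a pydantic model. A brief sketch, assuming `llm_completion` is an `LLMCompletion` configured as in the notebook that follows; the module paths are the new files in this diff.

```python
# Sketch of the helpers above; `llm_completion` is assumed to be an
# LLMCompletion configured as in the notebooks.
from pydantic import BaseModel

from graphrag_llm.utils.gather_completion_response import gather_completion_response
from graphrag_llm.utils.structure_response import structure_completion_response

# Collapse a streaming completion into one string.
stream = llm_completion.completion(
    messages="What is the capital of France?",
    stream=True,
)
answer = gather_completion_response(stream)
print(answer)


class Capital(BaseModel):
    country: str
    city: str


# Parse a JSON payload (e.g. a response produced with a JSON-only prompt) into a model.
parsed = structure_completion_response('{"country": "France", "city": "Paris"}', Capital)
print(parsed.city)  # Paris
```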
\"\", end=\"\", flush=True)\n", + "else:\n", + " # Non-streaming response\n", + " print(response.choices[0].message.content)\n", + " # Or alternatively, access via the content property\n", + " # This is equivalent to the above line, getting the content of the first choice\n", + " print(response.content)\n", + "\n", + "print(\"Full Response:\")\n", + "print(response.model_dump_json(indent=2)) # type: ignore" + ] + }, + { + "cell_type": "markdown", + "id": "558392ce", + "metadata": {}, + "source": [ + "## Async Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "8405fcb7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The capital of France is Paris.\n" + ] + } + ], + "source": [ + "response: LLMCompletionResponse = await llm_completion.completion_async(\n", + " messages=\"What is the capital of France?\",\n", + ") # type: ignore\n", + "print(response.content)" + ] + }, + { + "cell_type": "markdown", + "id": "e70fc49a", + "metadata": {}, + "source": [ + "## Streaming Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "9f60c4e7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The capital of France is Paris." + ] + } + ], + "source": [ + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + " stream=True,\n", + ")\n", + "\n", + "if isinstance(response, Iterator):\n", + " # Streaming response\n", + " for chunk in response:\n", + " print(chunk.choices[0].delta.content or \"\", end=\"\", flush=True)" + ] + }, + { + "cell_type": "markdown", + "id": "fe8c2e35", + "metadata": {}, + "source": [ + "## Async Streaming Completion\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "0be849ce", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The capital of France is Paris." + ] + } + ], + "source": [ + "response = await llm_completion.completion_async(\n", + " messages=\"What is the capital of France?\",\n", + " stream=True,\n", + ")\n", + "\n", + "if isinstance(response, AsyncIterator):\n", + " # Streaming response\n", + " async for chunk in response:\n", + " print(chunk.choices[0].delta.content or \"\", end=\"\", flush=True)" + ] + }, + { + "cell_type": "markdown", + "id": "c32070ad", + "metadata": {}, + "source": [ + "## Completion Arguments\n", + "\n", + "The completion API adheres to litellm completion API and thus the OpanAI SDK API. The `messages` parameter can be one of the following:\n", + "\n", + "- `str`: Raw string for the prompt.\n", + "- `list[dict[str, Any]]`: A list of dicts in the form `{\"role\": \"user|system|...\", \"content\": \"...\"}`\n", + "- `list[ChatCompletionMessageParam]`: A list of OpenAI `ChatCompletionMessageParam`. `graphrag_llm.utils` provides a `ChatCompletionMessageParamBuilder` to help construct these objects. See the message builder notebook for more details on using `ChatCompletionMessageParamBuilder`.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "8fe480cb", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The capital of France is Paris.\n", + "The capital of France is Paris.\n", + "Arrr, ye got me there, matey! Truth be, back in 2006, them fancy scallywags at the International Astronomical Union be sayin' Pluto ain't a full-fledged planet no more. They be callin' it a \"dwarf planet\" now. 
So, officially, she be a dwarf planet, savvy?\n" + ] + } + ], + "source": [ + "from graphrag_llm.utils import (\n", + " CompletionMessagesBuilder,\n", + ")\n", + "\n", + "# raw string input\n", + "response1: LLMCompletionResponse = llm_completion.completion(\n", + " messages=\"What is the capital of France?\"\n", + ") # type: ignore\n", + "print(response1.content)\n", + "\n", + "# list of message dicts input\n", + "response2: LLMCompletionResponse = llm_completion.completion(\n", + " messages=[{\"role\": \"user\", \"content\": \"What is the capital of France?\"}]\n", + ") # type: ignore\n", + "print(response2.content)\n", + "\n", + "# using the builder to create complex message\n", + "messages = (\n", + " CompletionMessagesBuilder()\n", + " .add_system_message(\n", + " \"You are a helpful assistant that likes to talk like a pirate. Respond as if you are a pirate using pirate speak.\"\n", + " )\n", + " .add_user_message(\"Is pluto a planet? Respond with a yes or no.\")\n", + " .add_assistant_message(\"Aye, matey! Pluto be a planet in me book.\")\n", + " .add_user_message(\"Are you sure? I want the truth. Can you elaborate?\")\n", + " .build()\n", + ")\n", + "\n", + "response3: LLMCompletionResponse = llm_completion.completion(messages=messages) # type: ignore\n", + "print(response3.content)" + ] + }, + { + "cell_type": "markdown", + "id": "dda66594", + "metadata": {}, + "source": [ + "## Embedding\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "51fe336b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-0.002078542485833168, -0.04908587411046028, 0.020946789532899857]\n", + "[0.027567066252231598, -0.026544300839304924, -0.027091361582279205]\n" + ] + } + ], + "source": [ + "from graphrag_llm.embedding import LLMEmbedding, create_embedding\n", + "from graphrag_llm.types import LLMEmbeddingResponse\n", + "\n", + "embedding_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_EMBEDDING_MODEL\", \"text-embedding-3-small\"),\n", + " azure_deployment_name=os.getenv(\n", + " \"GRAPHRAG_LLM_EMBEDDING_MODEL\", \"text-embedding-3-small\"\n", + " ),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "\n", + "llm_embedding: LLMEmbedding = create_embedding(embedding_config)\n", + "\n", + "embeddings_batch: LLMEmbeddingResponse = llm_embedding.embedding(\n", + " input=[\"Hello world\", \"How are you?\"]\n", + ")\n", + "for embedding in embeddings_batch.embeddings:\n", + " print(embedding[0:3])" + ] + }, + { + "cell_type": "markdown", + "id": "e3b7bedf", + "metadata": {}, + "source": [ + "### First Embedding\n", + "\n", + "`.embedding` batches by default, it takes a list of strings to embed. 
If embedding a single string then you can use `.first_embedding` on the response to obtain the first embedding.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "e428c64a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[0.05073608458042145, 0.003799507161602378, 0.019212841987609863]\n" + ] + } + ], + "source": [ + "embedding_response = llm_embedding.embedding(\n", + " input=[\"This is a single input string for embedding.\"]\n", + ")\n", + "\n", + "print(embedding_response.first_embedding[0:3])" + ] + }, + { + "cell_type": "markdown", + "id": "6b4cf0fa", + "metadata": {}, + "source": [ + "## Async Embedding\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "c9519657", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[-0.002078542485833168, -0.04908587411046028, 0.020946789532899857]\n", + "[0.027567066252231598, -0.026544300839304924, -0.027091361582279205]\n" + ] + } + ], + "source": [ + "embeddings_batch = await llm_embedding.embedding_async(\n", + " input=[\"Hello world\", \"How are you?\"]\n", + ")\n", + "\n", + "for embedding in embeddings_batch.embeddings:\n", + " print(embedding[0:3])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/02_encoding_decoding.ipynb b/packages/graphrag-llm/notebooks/02_encoding_decoding.ipynb new file mode 100644 index 0000000000..3a43503fdd --- /dev/null +++ b/packages/graphrag-llm/notebooks/02_encoding_decoding.ipynb @@ -0,0 +1,169 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "578551ee", + "metadata": {}, + "source": [ + "# Encoding/Decoding\n", + "\n", + "`LLMCompletion` and `LLMEmbedding` expose a `Tokenizer` property corresponding to the underlying model.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "986a0bad", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Encoded tokens: [9906, 11, 1917, 0]\n", + "Number of tokens: 4\n", + "Number of tokens: 4\n", + "Decoded text: Hello, world!\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "encoded = llm_completion.tokenizer.encode(\"Hello, world!\")\n", + "print(f\"Encoded 
tokens: {encoded}\")\n", + "print(f\"Number of tokens: {len(encoded)}\")\n", + "# OR\n", + "print(f\"Number of tokens: {llm_completion.tokenizer.num_tokens('Hello, world!')}\")\n", + "decoded = llm_completion.tokenizer.decode(encoded)\n", + "print(f\"Decoded text: {decoded}\")" + ] + }, + { + "cell_type": "markdown", + "id": "4e4a7515", + "metadata": {}, + "source": [ + "## Standalone Tokenizer\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "5920cf74", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Encoded tokens: [9906, 11, 1917, 0]\n", + "Number of tokens: 4\n", + "Decoded text: Hello, world!\n" + ] + } + ], + "source": [ + "from graphrag_llm.config import TokenizerConfig, TokenizerType\n", + "from graphrag_llm.tokenizer import create_tokenizer\n", + "\n", + "tokenizer = create_tokenizer(\n", + " TokenizerConfig(\n", + " type=TokenizerType.LiteLLM,\n", + " model_id=\"openai/text-embedding-3-small\",\n", + " )\n", + ")\n", + "\n", + "encoded = tokenizer.encode(\"Hello, world!\")\n", + "print(f\"Encoded tokens: {encoded}\")\n", + "print(f\"Number of tokens: {len(encoded)}\")\n", + "decoded = tokenizer.decode(encoded)\n", + "print(f\"Decoded text: {decoded}\")" + ] + }, + { + "cell_type": "markdown", + "id": "115f63b9", + "metadata": {}, + "source": [ + "## Tiktoken\n", + "\n", + "By default, `LLMCompletion` and `LLMEmbedding` use a litellm based tokenizer that supports the 100+ models that litellm supports but you may use a tiktoken based tokenizer by specifying a tokenizer type of `TokenizerType.Tiktoken` and providing an `encoding_name` to the config.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "abeb9753", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Encoded tokens: [13225, 11, 2375, 0]\n", + "Encoded tokens: [13225, 11, 2375, 0]\n" + ] + } + ], + "source": [ + "tokenizer = create_tokenizer(\n", + " TokenizerConfig(\n", + " type=TokenizerType.Tiktoken,\n", + " encoding_name=\"o200k_base\",\n", + " )\n", + ")\n", + "encoded = tokenizer.encode(\"Hello, world!\")\n", + "print(f\"Encoded tokens: {encoded}\")\n", + "\n", + "# Using with LLMCompletion\n", + "llm_completion: LLMCompletion = create_completion(model_config, tokenizer=tokenizer)\n", + "\n", + "encoded = llm_completion.tokenizer.encode(\"Hello, world!\")\n", + "print(f\"Encoded tokens: {encoded}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/03_structured_responses.ipynb b/packages/graphrag-llm/notebooks/03_structured_responses.ipynb new file mode 100644 index 0000000000..f01499d2d2 --- /dev/null +++ b/packages/graphrag-llm/notebooks/03_structured_responses.ipynb @@ -0,0 +1,170 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "5b094500", + "metadata": {}, + "source": [ + "# Structured Response\n", + "\n", + "`LLMCompletion.completion` accepts a `response_format` parameter that is a pydantic model for parsing and returning structured responses.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "a79c242b", + "metadata": {}, + 
"outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "City: Seattle\n", + " Temperature: 11.1 °C\n", + " Condition: sunny\n", + "City: San Francisco\n", + " Temperature: 23.9 °C\n", + " Condition: cloudy\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "from graphrag_llm.types import LLMCompletionResponse\n", + "from pydantic import BaseModel, Field\n", + "\n", + "load_dotenv()\n", + "\n", + "\n", + "class LocalWeather(BaseModel):\n", + " \"\"\"City weather information model.\"\"\"\n", + "\n", + " city: str = Field(description=\"The name of the city\")\n", + " temperature: float = Field(description=\"The temperature in Celsius\")\n", + " condition: str = Field(description=\"The weather condition description\")\n", + "\n", + "\n", + "class WeatherReports(BaseModel):\n", + " \"\"\"Weather information model.\"\"\"\n", + "\n", + " reports: list[LocalWeather] = Field(\n", + " description=\"The weather reports for multiple cities\"\n", + " )\n", + "\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "response: LLMCompletionResponse[WeatherReports] = llm_completion.completion(\n", + " messages=\"It is sunny and 52 degrees fahrenheit in Seattle. It is cloudy and 75 degrees fahrenheit in San Francisco.\",\n", + " response_format=WeatherReports,\n", + ") # type: ignore\n", + "\n", + "local_weather_reports: WeatherReports = response.formatted_response # type: ignore\n", + "for report in local_weather_reports.reports:\n", + " print(f\"City: {report.city}\")\n", + " print(f\" Temperature: {report.temperature} °C\")\n", + " print(f\" Condition: {report.condition}\")" + ] + }, + { + "cell_type": "markdown", + "id": "6dcfa20c", + "metadata": {}, + "source": [ + "## Checking for support\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "aa1edadb", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Supports structured responses: True\n" + ] + } + ], + "source": [ + "print(f\"Supports structured responses: {llm_completion.supports_structured_response()}\")" + ] + }, + { + "cell_type": "markdown", + "id": "6360f512", + "metadata": {}, + "source": [ + "## Streaming\n", + "\n", + "Streaming is not supported when using `response_format`.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "e08b9ba6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Error during streaming completion: response_format is not supported for streaming completions.\n" + ] + } + ], + "source": [ + "try:\n", + " response = llm_completion.completion(\n", + " messages=\"It is sunny and 52 degrees fahrenheit in Seattle. 
It is cloudy and 75 degrees fahrenheit in San Francisco.\",\n", + " response_format=WeatherReports,\n", + " stream=True,\n", + " )\n", + "except Exception as e: # noqa: BLE001\n", + " print(f\"Error during streaming completion: {e}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/04_metrics.ipynb b/packages/graphrag-llm/notebooks/04_metrics.ipynb new file mode 100644 index 0000000000..e8649f0a93 --- /dev/null +++ b/packages/graphrag-llm/notebooks/04_metrics.ipynb @@ -0,0 +1,595 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "4dc68732", + "metadata": {}, + "source": [ + "# Metrics\n", + "\n", + "Metrics are automatically tracked for completion and embedding calls.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "868deb65", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Metrics for: azure/gpt-4o\n", + "{\n", + " \"attempted_request_count\": 1,\n", + " \"successful_response_count\": 1,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 3.4281113147735596,\n", + " \"compute_duration_per_response_seconds\": 3.4281113147735596,\n", + " \"cache_hit_rate\": 0.0,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 1,\n", + " \"prompt_tokens\": 14,\n", + " \"completion_tokens\": 8,\n", + " \"total_tokens\": 22,\n", + " \"tokens_per_response\": 22.0,\n", + " \"responses_with_cost\": 1,\n", + " \"input_cost\": 3.5000000000000004e-05,\n", + " \"output_cost\": 8e-05,\n", + " \"total_cost\": 0.000115,\n", + " \"cost_per_response\": 0.000115\n", + "}\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import json\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "dd9e7e19", + "metadata": {}, + "source": [ + "## Disable Metrics\n", + "\n", + "Set `metrics` to `None` in the `ModelConfig` to disable 
metrics.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "44ab5fcd", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Metrics for: \n", + "{}\n" + ] + } + ], + "source": [ + "model_config.metrics = None\n", + "llm_completion_no_metrics: LLMCompletion = create_completion(model_config)\n", + "\n", + "response = llm_completion_no_metrics.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "\n", + "# Now .metrics_store should be a NoOpMetricsStore\n", + "print(f\"Metrics for: {llm_completion_no_metrics.metrics_store.id}\")\n", + "print(json.dumps(llm_completion_no_metrics.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "f38a5a44", + "metadata": {}, + "source": [ + "## Automatic Metrics Logging\n", + "\n", + "Metrics foreach instantiated model are automatically logged on process exit. To see this, update the log level to info.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "16b71da8", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[92m22:45:27 - LiteLLM:INFO\u001b[0m: utils.py:3373 - \n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "INFO:LiteLLM:\n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "\u001b[92m22:45:27 - LiteLLM:INFO\u001b[0m: utils.py:1286 - Wrapper: Completed Call, calling success_handler\n", + "INFO:LiteLLM:Wrapper: Completed Call, calling success_handler\n", + "INFO:graphrag_llm.metrics.log_metrics_writer:Metrics for azure/gpt-4o: {\n", + " \"attempted_request_count\": 1,\n", + " \"successful_response_count\": 1,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 0.3004579544067383,\n", + " \"compute_duration_per_response_seconds\": 0.3004579544067383,\n", + " \"cache_hit_rate\": 0.0,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 1,\n", + " \"prompt_tokens\": 14,\n", + " \"completion_tokens\": 8,\n", + " \"total_tokens\": 22,\n", + " \"tokens_per_response\": 22.0,\n", + " \"responses_with_cost\": 1,\n", + " \"input_cost\": 3.5000000000000004e-05,\n", + " \"output_cost\": 8e-05,\n", + " \"total_cost\": 0.000115,\n", + " \"cost_per_response\": 0.000115\n", + "}\n" + ] + } + ], + "source": [ + "import logging\n", + "\n", + "logging.basicConfig(level=logging.INFO)\n", + "\n", + "llm_completion.metrics_store.clear_metrics()\n", + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "\n", + "# NOTE: Call _on_exit_ to simulate application exit since\n", + "# the notebook process does not exit and the llm_completion\n", + "# object is not garbage collected.\n", + "# This should not be called in normal python scripts.\n", + "llm_completion.metrics_store._on_exit_() # type: ignore" + ] + }, + { + "cell_type": "markdown", + "id": "7d97bd8c", + "metadata": {}, + "source": [ + "## Save Metrics to a File\n", + "\n", + "Instead of logging on exit, metrics can automatically be saved to a file on exit by using a `MetricsWriter.File` metrics writer.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "4c16806a", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:azure.identity._credentials.environment:No environment configuration found.\n", + 
"INFO:azure.identity._credentials.managed_identity:ManagedIdentityCredential will use IMDS\n", + "\u001b[92m22:45:27 - LiteLLM:INFO\u001b[0m: utils.py:3373 - \n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "INFO:LiteLLM:\n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "\u001b[92m22:45:28 - LiteLLM:INFO\u001b[0m: utils.py:1286 - Wrapper: Completed Call, calling success_handler\n", + "INFO:LiteLLM:Wrapper: Completed Call, calling success_handler\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Contents of metrics\\20260111_211007.jsonl:\n", + "{\"id\": \"azure/gpt-4o\", \"metrics\": {\"attempted_request_count\": 1, \"successful_response_count\": 1, \"failed_response_count\": 0, \"failure_rate\": 0.0, \"requests_with_retries\": 0, \"retries\": 0, \"retry_rate\": 0.0, \"compute_duration_seconds\": 0.6868698596954346, \"compute_duration_per_response_seconds\": 0.6868698596954346, \"streaming_responses\": 0, \"responses_with_tokens\": 1, \"prompt_tokens\": 14, \"completion_tokens\": 8, \"total_tokens\": 22, \"tokens_per_response\": 22.0, \"responses_with_cost\": 1, \"input_cost\": 3.5000000000000004e-05, \"output_cost\": 8e-05, \"total_cost\": 0.000115, \"cost_per_response\": 0.000115}}\n", + "\n" + ] + } + ], + "source": [ + "from pathlib import Path\n", + "\n", + "from graphrag_llm.config import MetricsConfig, MetricsWriterType\n", + "\n", + "model_config.metrics = MetricsConfig(\n", + " writer=MetricsWriterType.File,\n", + " base_dir=\"./metrics\", # Default\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "\n", + "# NOTE: Call _on_exit_ to simulate application exit since\n", + "# the notebook process does not exit and the llm_completion\n", + "# object is not garbage collected.\n", + "# This should not be called in normal python scripts.\n", + "llm_completion.metrics_store._on_exit_() # type: ignore\n", + "\n", + "metrics_dir = Path(\"./metrics\")\n", + "for metric_file in metrics_dir.glob(\"*.jsonl\"):\n", + " print(f\"Contents of {metric_file}:\")\n", + " print(metric_file.read_text())\n", + " break # Just print one file for brevity" + ] + }, + { + "cell_type": "markdown", + "id": "9076af04", + "metadata": {}, + "source": [ + "## Default Metrics\n", + "\n", + "- `attempted_request_count`: Number of network requests made, not including retries.\n", + "- `successful_response_count`: Number of successful responses.\n", + "- `failed_response_count`: Number of network requests that threw errors and could not be resolved even after retries. `successful_response_count + failed_response_count` should equal `attempted_request_count` unless the job or process was killed early.\n", + "- `failure_rate`: `failed_response_count / attempted_request_count`.\n", + "- `requests_with_retries`: Number of original requests that had to go through a retry loop.\n", + "- `retries`: Number of network requests that were retries.\n", + "- `retry_rate`: `retries / (retries + attempted_request_count)`\n", + "- `compute_duration_seconds`: Total number of seconds to complete all non-streaming network requests.\n", + "- `compute_duration_per_response_seconds`: `compute_duration_seconds / successful non-streaming responses`\n", + "- `runtime_duration_seconds`: Only present if using the batching utilities. 
The batching utilities run multiple completions/embeddings in parallel, so `runtime_duration_seconds` is the actual runtime duration. Comparing this with `compute_duration_seconds` indicates how much time was saved by the batching utilities versus running all network requests in series.\n",
+    "- `cached_responses`: Number of cached responses. Only present if using a cache. When a response is cached, so are the corresponding metrics. When a response is retrieved from the cache, its metrics are also retrieved and included in the overall metrics, so metrics like `compute_duration_seconds`, `input_cost`, `output_cost`, etc. include the cached responses' metrics. This is helpful when resuming stopped jobs or rerunning failed jobs: at the end of the job the metrics indicate how long and costly the job would have been running off a fresh cache or no cache. `cached_responses` only indicates how many network requests were skipped and served from the cache.\n",
+    "- `streaming_responses`: Number of requests using the `stream=True` parameter. Many metrics such as token counts and costs are not tracked for streaming requests, as that would require analyzing the stream to completion within the middleware stack, which would prevent building true streaming interfaces with `graphrag-llm`.\n",
+    "- `responses_with_tokens`: Number of responses in which token counts were obtained. Typically this should equal `successful_response_count - streaming_responses`.\n",
+    "- `prompt_tokens`: Total number of prompt tokens used across all successful non-streaming network requests.\n",
+    "- `completion_tokens`: Total number of completion tokens across all successful non-streaming network requests.\n",
+    "- `total_tokens`: `prompt_tokens + completion_tokens`\n",
+    "- `tokens_per_response`: `total_tokens / responses_with_tokens`\n",
+    "- `responses_with_cost`: Number of responses in which costs were calculated. Typically this should equal `successful_response_count - streaming_responses`.\n",
+    "- `input_cost`: Cost of the input tokens across all successful non-streaming network requests.\n",
+    "- `output_cost`: Cost of the output tokens across all successful non-streaming network requests.\n",
+    "- `total_cost`: `input_cost + output_cost`\n",
+    "- `cost_per_response`: `total_cost / responses_with_cost`.\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "2749473e",
+   "metadata": {},
+   "source": [
+    "## Custom Model Costs\n",
+    "\n",
+    "The default metrics include costs for prompt tokens and completion tokens. 
These are calculated using a registry of known models and associated costs managed by litellm: https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json\n", + "\n", + "One can register custom model costs if using a custom model that is not in the registry or one that differs from the known/default cost.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7a47f496", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\u001b[92m22:45:28 - LiteLLM:INFO\u001b[0m: utils.py:3373 - \n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "INFO:LiteLLM:\n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "\u001b[92m22:45:28 - LiteLLM:INFO\u001b[0m: utils.py:1286 - Wrapper: Completed Call, calling success_handler\n", + "INFO:LiteLLM:Wrapper: Completed Call, calling success_handler\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + " \"attempted_request_count\": 1,\n", + " \"successful_response_count\": 1,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 0.3090023994445801,\n", + " \"compute_duration_per_response_seconds\": 0.3090023994445801,\n", + " \"cache_hit_rate\": 0.0,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 1,\n", + " \"prompt_tokens\": 14,\n", + " \"completion_tokens\": 8,\n", + " \"total_tokens\": 22,\n", + " \"tokens_per_response\": 22.0,\n", + " \"responses_with_cost\": 1,\n", + " \"input_cost\": 14000,\n", + " \"output_cost\": 40000,\n", + " \"total_cost\": 54000,\n", + " \"cost_per_response\": 54000.0\n", + "}\n" + ] + } + ], + "source": [ + "from graphrag_llm.model_cost_registry import model_cost_registry\n", + "\n", + "model_cost_registry.register_model_costs(\n", + " model=\"azure/gpt-4o\", # This should use format \"{model_provider}/{model_name}\" and not the azure deployment name\n", + " costs={\n", + " # Expensive model\n", + " \"input_cost_per_token\": 1000,\n", + " \"output_cost_per_token\": 5000,\n", + " },\n", + ")\n", + "\n", + "llm_completion.metrics_store.clear_metrics()\n", + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "4132a718", + "metadata": {}, + "source": [ + "## Custom Metrics Processor\n", + "\n", + "It is possible to register a custom metrics processor if one needs to track metrics not already tracked.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "f68ed4bb", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "INFO:azure.identity._credentials.environment:No environment configuration found.\n", + "INFO:azure.identity._credentials.managed_identity:ManagedIdentityCredential will use IMDS\n", + "\u001b[92m22:45:28 - LiteLLM:INFO\u001b[0m: utils.py:3373 - \n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "INFO:LiteLLM:\n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "\u001b[92m22:45:28 - LiteLLM:INFO\u001b[0m: utils.py:1286 - Wrapper: Completed Call, calling success_handler\n", + "INFO:LiteLLM:Wrapper: Completed Call, calling success_handler\n", + "\u001b[92m22:45:28 - LiteLLM:INFO\u001b[0m: utils.py:3373 - \n", + "LiteLLM completion() model= 
gpt-4o; provider = azure\n", + "INFO:LiteLLM:\n", + "LiteLLM completion() model= gpt-4o; provider = azure\n", + "\u001b[92m22:45:29 - LiteLLM:INFO\u001b[0m: utils.py:1286 - Wrapper: Completed Call, calling success_handler\n", + "INFO:LiteLLM:Wrapper: Completed Call, calling success_handler\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Metrics for: azure/gpt-4o\n", + "{\n", + " \"attempted_request_count\": 2,\n", + " \"successful_response_count\": 2,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 0.6117346286773682,\n", + " \"compute_duration_per_response_seconds\": 0.3058673143386841,\n", + " \"cache_hit_rate\": 0.0,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 2,\n", + " \"prompt_tokens\": 28,\n", + " \"completion_tokens\": 16,\n", + " \"total_tokens\": 44,\n", + " \"tokens_per_response\": 22.0,\n", + " \"responses_with_cost\": 2,\n", + " \"input_cost\": 28000,\n", + " \"output_cost\": 80000,\n", + " \"total_cost\": 108000,\n", + " \"cost_per_response\": 54000.0,\n", + " \"responses_with_temperature\": 1,\n", + " \"temperature_rate\": 0.5\n", + "}\n" + ] + } + ], + "source": [ + "import json\n", + "import os\n", + "from collections.abc import AsyncIterator, Iterator\n", + "from typing import Any\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import MetricsConfig, MetricsWriterType, ModelConfig\n", + "from graphrag_llm.metrics import metrics_aggregator, register_metrics_processor\n", + "from graphrag_llm.metrics.default_metrics_processor import DefaultMetricsProcessor\n", + "from graphrag_llm.types import (\n", + " LLMCompletionChunk,\n", + " LLMCompletionResponse,\n", + " LLMEmbeddingResponse,\n", + " Metrics,\n", + ")\n", + "\n", + "load_dotenv()\n", + "\n", + "\n", + "class MyCustomMetricsProcessor(DefaultMetricsProcessor):\n", + " \"\"\"Custom metrics processor.\n", + "\n", + " Inheriting from DefaultMetricsProcessor to add to the default metrics being\n", + " tracked instead of implementing the interface from scratch.\n", + "\n", + " Metrics = dict[str, float]. The metrics passed to process_metrics method\n", + " represent the metrics for a single request. 
Typically, you will count/flag\n", + " metrics of interest per request and then aggregate them in the metrics_aggregator.\n", + " \"\"\"\n", + "\n", + " def __init__(self, some_custom_option: str, **kwargs: Any) -> None:\n", + " \"\"\"Initialize the custom metrics processor.\"\"\"\n", + " super().__init__(**kwargs)\n", + " self._some_custom_option = some_custom_option # Not actually used\n", + "\n", + " def process_metrics(\n", + " self,\n", + " *,\n", + " model_config: ModelConfig,\n", + " metrics: Metrics,\n", + " input_args: dict[str, Any],\n", + " response: LLMCompletionResponse\n", + " | Iterator[LLMCompletionChunk]\n", + " | AsyncIterator[LLMCompletionChunk]\n", + " | LLMEmbeddingResponse,\n", + " ) -> None:\n", + " \"\"\"On top of the default metrics, track if temperature argument was used.\n", + "\n", + " Expected to mutate the metrics dict in place with metrics you want to track.\n", + "\n", + " process_metrics is only called for successful requests and will be passed in the response\n", + " from either a completion or embedding call.\n", + "\n", + " Args\n", + " ----\n", + " model_config: ModelConfig\n", + " The model config used for the request.\n", + " metrics: Metrics\n", + " The metrics dict to be mutated in place.\n", + " input_args: dict[str, Any]\n", + " The input arguments passed to completion or embedding.\n", + " response: LLMChatCompletion | Iterator[LLMChatCompletionChunk] | LLMEmbeddingResponse\n", + " Either a completion or embedding response from the LLM.\n", + " \"\"\"\n", + " # Track default metrics first\n", + " super().process_metrics(\n", + " model_config=model_config,\n", + " metrics=metrics,\n", + " input_args=input_args,\n", + " response=response,\n", + " )\n", + "\n", + " metrics[\"responses_with_temperature\"] = 1 if \"temperature\" in input_args else 0\n", + "\n", + "\n", + "# Register custom metrics processor\n", + "register_metrics_processor(\n", + " processor_type=\"custom_with_temperature\",\n", + " processor_initializer=MyCustomMetricsProcessor,\n", + ")\n", + "\n", + "\n", + "# Custom aggregator to calculate temperature usage rate\n", + "def _temperature_rate(metrics: \"Metrics\") -> None:\n", + " \"\"\"Calculate temperature usage rate.\n", + "\n", + " Custom aggregate function to track the usage rate of temperature parameter.\n", + "\n", + " Here, metrics represents the aggregated metrics for the current model.\n", + " \"\"\"\n", + " responses = metrics.get(\"successful_response_count\", 0)\n", + " temperature_responses = metrics.get(\"responses_with_temperature\", 0)\n", + " if responses > 0:\n", + " metrics[\"temperature_rate\"] = temperature_responses / responses\n", + " else:\n", + " metrics[\"temperature_rate\"] = 0.0\n", + "\n", + "\n", + "# Register custom aggregator\n", + "metrics_aggregator.register(\"temperature_rate\", _temperature_rate)\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + " metrics=MetricsConfig(\n", + " # Use the custom metrics processor registered above\n", + " type=\"custom_with_temperature\",\n", + " some_custom_option=\"example_option_value\", # type: ignore\n", + " 
writer=MetricsWriterType.File,\n", + " base_dir=\"./metrics\", # Default\n", + " ),\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "\n", + "response_with_temperature = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + " temperature=0.7,\n", + ")\n", + "\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/05_caching.ipynb b/packages/graphrag-llm/notebooks/05_caching.ipynb new file mode 100644 index 0000000000..75067a54e6 --- /dev/null +++ b/packages/graphrag-llm/notebooks/05_caching.ipynb @@ -0,0 +1,264 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "347b0fc9", + "metadata": {}, + "source": [ + "# Caching\n", + "\n", + "To enabling caching, pass in a `Cache` instance to the `create_completion` or `create_embedding` functions.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "96b0c42f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Metrics for: azure/gpt-4o\n", + "{\n", + " \"attempted_request_count\": 3,\n", + " \"successful_response_count\": 3,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 2.8864328861236572,\n", + " \"compute_duration_per_response_seconds\": 0.9621442953745524,\n", + " \"cached_responses\": 1,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 3,\n", + " \"prompt_tokens\": 191,\n", + " \"completion_tokens\": 59,\n", + " \"total_tokens\": 250,\n", + " \"tokens_per_response\": 83.33333333333333,\n", + " \"responses_with_cost\": 3,\n", + " \"input_cost\": 0.0004775,\n", + " \"output_cost\": 0.00059,\n", + " \"total_cost\": 0.0010675,\n", + " \"cost_per_response\": 0.0003558333333333334\n", + "}\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import json\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_cache import CacheConfig, CacheType, create_cache\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "from graphrag_storage import StorageConfig, StorageType\n", + "\n", + "load_dotenv()\n", + "\n", + "cache = create_cache()\n", + "# The above default is equivalent to:\n", + "cache = create_cache(\n", + " CacheConfig(\n", + " type=CacheType.Json,\n", + " storage=StorageConfig(type=StorageType.File, base_dir=\"cache\"),\n", + " )\n", + ")\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " 
azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config, cache=cache)\n", + "\n", + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "metrics = llm_completion.metrics_store.get_metrics()\n", + "print(json.dumps(metrics, indent=2))\n", + "assert metrics[\"cached_responses\"] == 1" + ] + }, + { + "cell_type": "markdown", + "id": "c70a72fc", + "metadata": {}, + "source": [ + "## Note on the above metrics\n", + "\n", + "`cached_responses == 1` since the request was cached by the time the second call was made.\n", + "\n", + "The `cached_responses` indicates how many cache hits occurred but the rest of the metrics exist as if a cache was not used. For example, `compute_duration_seconds` and all the token counts and cost counts are as if cache was not used. This is because both the response and metrics are cached and retrieved from the cache when a cache hit occurs. Metrics were designed to give an idea of how long and costly a job would be if there were no cache.\n" + ] + }, + { + "cell_type": "markdown", + "id": "27b026d7", + "metadata": {}, + "source": [ + "## Tests\n", + "\n", + "This is in here because notebooks are being used as integration tests. This ensures objects are being loaded and deserialized from cache properly and the cache is bypassing the rate limiting.\n" + ] + }, + { + "cell_type": "markdown", + "id": "22cc179e", + "metadata": {}, + "source": [ + "### Test Timing\n" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "efb228ce", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Total time for 100 requests: 0.3867683410644531 seconds\n" + ] + } + ], + "source": [ + "import time\n", + "\n", + "from graphrag_llm.config import RateLimitConfig, RateLimitType\n", + "\n", + "model_config.rate_limit = RateLimitConfig(\n", + " type=RateLimitType.SlidingWindow,\n", + " period_in_seconds=60, # limit requests per minute\n", + " requests_per_period=1, # max 1 request per minute. 
Without cache this would take forever\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config, cache=cache)\n", + "\n", + "start_time = time.time()\n", + "for _ in range(100):\n", + " response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + " )\n", + "end_time = time.time()\n", + "total_time = end_time - start_time\n", + "print(f\"Total time for 100 requests: {total_time} seconds\")\n", + "assert total_time < 5.0 # Ensure that caching is effective" + ] + }, + { + "cell_type": "markdown", + "id": "dcf4bf16", + "metadata": {}, + "source": [ + "### Test Structured Responses\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "21e0e1e4", + "metadata": {}, + "outputs": [], + "source": [ + "from graphrag_llm.types import LLMCompletionResponse\n", + "from pydantic import BaseModel, Field\n", + "\n", + "\n", + "class LocalWeather(BaseModel):\n", + " \"\"\"City weather information model.\"\"\"\n", + "\n", + " city: str = Field(description=\"The name of the city\")\n", + " temperature: float = Field(description=\"The temperature in Celsius\")\n", + " condition: str = Field(description=\"The weather condition description\")\n", + "\n", + "\n", + "class WeatherReports(BaseModel):\n", + " \"\"\"Weather information model.\"\"\"\n", + "\n", + " reports: list[LocalWeather] = Field(\n", + " description=\"The weather reports for multiple cities\"\n", + " )\n", + "\n", + "\n", + "llm_completion.metrics_store.clear_metrics()\n", + "response: LLMCompletionResponse[WeatherReports] = llm_completion.completion( # type: ignore\n", + " messages=\"It is sunny and 52 degrees fahrenheit in Seattle. It is cloudy and 75 degrees fahrenheit in San Francisco.\",\n", + " response_format=WeatherReports,\n", + ") # type: ignore\n", + "response: LLMCompletionResponse[WeatherReports] = llm_completion.completion( # type: ignore\n", + " messages=\"It is sunny and 52 degrees fahrenheit in Seattle. It is cloudy and 75 degrees fahrenheit in San Francisco.\",\n", + " response_format=WeatherReports,\n", + ") # type: ignore\n", + "\n", + "metrics = llm_completion.metrics_store.get_metrics()\n", + "assert metrics[\"cached_responses\"] == 1, (\n", + " f\"Expected 1 cached response, got {metrics['cached_responses']}\"\n", + ")\n", + "\n", + "\n", + "# Changing the response format should not hit the cache and\n", + "# instead be a new request and store a new response in the cache.\n", + "\n", + "\n", + "class WeatherReports2(BaseModel):\n", + " \"\"\"Weather information model.\"\"\"\n", + "\n", + " local_reports: list[LocalWeather] = Field(\n", + " description=\"The weather reports for multiple cities\"\n", + " )\n", + "\n", + "\n", + "llm_completion.metrics_store.clear_metrics()\n", + "# Same request but different response format. Should not hit cache.\n", + "response: LLMCompletionResponse[WeatherReports2] = llm_completion.completion(\n", + " messages=\"It is sunny and 52 degrees fahrenheit in Seattle. 
It is cloudy and 75 degrees fahrenheit in San Francisco.\",\n", + " response_format=WeatherReports2,\n", + ") # type: ignore\n", + "\n", + "metrics = llm_completion.metrics_store.get_metrics()\n", + "assert metrics.get(\"cached_responses\", 0) == 0, (\n", + " f\"Expected 0 cached responses, got {metrics['cached_responses']}\"\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/06_retries.ipynb b/packages/graphrag-llm/notebooks/06_retries.ipynb new file mode 100644 index 0000000000..a49dd1d2d5 --- /dev/null +++ b/packages/graphrag-llm/notebooks/06_retries.ipynb @@ -0,0 +1,115 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d75247ed", + "metadata": {}, + "source": [ + "# Retries\n", + "\n", + "Retries are disabled by default. Retries can be enabled with the following example.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "299065b7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Metrics for: azure/gpt-4o\n", + "{\n", + " \"attempted_request_count\": 1,\n", + " \"successful_response_count\": 1,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 1,\n", + " \"retries\": 2,\n", + " \"retry_rate\": 0.6666666666666666,\n", + " \"compute_duration_seconds\": 2.6571085453033447,\n", + " \"compute_duration_per_response_seconds\": 2.6571085453033447,\n", + " \"cache_hit_rate\": 0.0,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 1,\n", + " \"prompt_tokens\": 14,\n", + " \"completion_tokens\": 8,\n", + " \"total_tokens\": 22,\n", + " \"tokens_per_response\": 22.0,\n", + " \"responses_with_cost\": 1,\n", + " \"input_cost\": 3.5000000000000004e-05,\n", + " \"output_cost\": 8e-05,\n", + " \"total_cost\": 0.000115,\n", + " \"cost_per_response\": 0.000115\n", + "}\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import json\n", + "import logging\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig, RetryConfig, RetryType\n", + "\n", + "load_dotenv()\n", + "\n", + "logging.basicConfig(level=logging.CRITICAL)\n", + "\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + " retry=RetryConfig(\n", + " type=RetryType.ExponentialBackoff, max_retries=7, base_delay=2.0, jitter=True\n", + " ),\n", + " # Internal option to test error handling and retries\n", + " failure_rate_for_testing=0.5, # type: ignore\n", + ")\n", + "\n", + "llm_completion: 
LLMCompletion = create_completion(model_config)\n", + "\n", + "response = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ")\n", + "\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/07_rate_limiting.ipynb b/packages/graphrag-llm/notebooks/07_rate_limiting.ipynb new file mode 100644 index 0000000000..081382eac6 --- /dev/null +++ b/packages/graphrag-llm/notebooks/07_rate_limiting.ipynb @@ -0,0 +1,129 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0a68b531", + "metadata": {}, + "source": [ + "# Rate Limiting\n", + "\n", + "Rate limiting is disabled by default. Requests can be limited by either requests per period or tokens per period or both.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "df4fa775", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Time taken for two requests: 20.87 seconds\n", + "Metrics for: azure/gpt-4o\n", + "{\n", + " \"attempted_request_count\": 2,\n", + " \"successful_response_count\": 2,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 3.534508228302002,\n", + " \"compute_duration_per_response_seconds\": 1.767254114151001,\n", + " \"cache_hit_rate\": 0.0,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 2,\n", + " \"prompt_tokens\": 28,\n", + " \"completion_tokens\": 16,\n", + " \"total_tokens\": 44,\n", + " \"tokens_per_response\": 22.0,\n", + " \"responses_with_cost\": 2,\n", + " \"input_cost\": 7.000000000000001e-05,\n", + " \"output_cost\": 0.00016,\n", + " \"total_cost\": 0.00023,\n", + " \"cost_per_response\": 0.000115\n", + "}\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import json\n", + "import os\n", + "import time\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig, RateLimitConfig, RateLimitType\n", + "\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + " rate_limit=RateLimitConfig(\n", + " type=RateLimitType.SlidingWindow,\n", + " period_in_seconds=60, # limit requests per minute\n", + " requests_per_period=3, # max 3 requests per minute. 
Fire one off every 20 seconds\n", + "    ),\n", + ")\n", + "\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "start_time = time.time()\n", + "response = llm_completion.completion(\n", + "    messages=\"What is the capital of France?\",\n", + ")\n", + "response = llm_completion.completion(\n", + "    messages=\"What is the capital of France?\",\n", + ")\n", + "end_time = time.time()\n", + "total_time = end_time - start_time\n", + "assert total_time >= 20, \"Rate limiting did not work as expected.\"\n", + "\n", + "print(f\"Time taken for two requests: {total_time:.2f} seconds\")\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "59f92d3f", + "metadata": {}, + "source": [ + "Notice that the `compute_duration_seconds` metric only tracks how long a network request actually takes; it does not include the paused periods that occur due to rate limits.\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/08_batching.ipynb b/packages/graphrag-llm/notebooks/08_batching.ipynb new file mode 100644 index 0000000000..fccd984ddd --- /dev/null +++ b/packages/graphrag-llm/notebooks/08_batching.ipynb @@ -0,0 +1,536 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "91a0ee2b", + "metadata": {}, + "source": [ + "# Batching\n" + ] + }, + { + "cell_type": "markdown", + "id": "422fcc73", + "metadata": {}, + "source": [ + "## Completion Batching\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "88e715fe", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "In the velvet silence of the night,\n", + "A canvas vast and infinite unfolds,\n", + "Where stories of the cosmos,\n", + "Metrics for: azure/gpt-4o\n", + "{\n", + "  \"attempted_request_count\": 10,\n", + "  \"successful_response_count\": 10,\n", + "  \"failed_response_count\": 0,\n", + "  \"failure_rate\": 0.0,\n", + "  \"requests_with_retries\": 0,\n", + "  \"retries\": 0,\n", + "  \"retry_rate\": 0.0,\n", + "  \"compute_duration_seconds\": 157.70280289649963,\n", + "  \"compute_duration_per_response_seconds\": 15.770280289649964,\n", + "  \"runtime_duration_seconds\": 18.660003900527954,\n", + "  \"streaming_responses\": 0,\n", + "  \"responses_with_tokens\": 10,\n", + "  \"prompt_tokens\": 280,\n", + "  \"completion_tokens\": 9234,\n", + "  \"total_tokens\": 9514,\n", + "  \"tokens_per_response\": 951.4,\n", + "  \"responses_with_cost\": 10,\n", + "  \"input_cost\": 0.0007,\n", + "  \"output_cost\": 0.09234,\n", + "  \"total_cost\": 0.09304000000000001,\n", + "  \"cost_per_response\": 0.009304000000000002\n", + "}\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import json\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "from graphrag_llm.types import LLMCompletionArgs\n", + 
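As a quick sanity check on the sliding-window settings in the rate limiting notebook above, the observed ~20.87 seconds for two requests is consistent with requests being spaced evenly at `period_in_seconds / requests_per_period`. A minimal sketch of that arithmetic (the even-spacing behavior is inferred from the notebook output, not from the implementation):

```python
def min_wall_clock_seconds(
    num_requests: int, requests_per_period: int, period_in_seconds: float
) -> float:
    """Lower bound on wall-clock time if requests are spaced evenly across the window."""
    spacing = period_in_seconds / requests_per_period
    return max(0, num_requests - 1) * spacing


# Two requests at 3 requests per 60 seconds -> at least 20 seconds,
# matching the ~20.87 seconds observed in the rate limiting notebook output.
print(min_wall_clock_seconds(2, 3, 60))
```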
"\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "\n", + "completion_requests: list[LLMCompletionArgs] = [\n", + " {\n", + " \"messages\": \"Write a 1000 word poem about the night sky and all the wonders and mysteries of the universe.\"\n", + " },\n", + "] * 10\n", + "\n", + "# Spins up to 25 concurrent requests\n", + "# Which is more than the number of requests being made\n", + "# and since rate limiting is not enabled, all the requests fire off immediately\n", + "# and complete as fast as the LLM provider allows\n", + "responses = llm_completion.completion_batch(completion_requests, concurrency=25)\n", + "for response in responses:\n", + " if isinstance(response, Exception):\n", + " print(f\"Error: {response}\")\n", + " else:\n", + " # Print the first 100 characters of the first successful response\n", + " print(response.content[0:100]) # type: ignore\n", + " break\n", + "\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "668e1f94", + "metadata": {}, + "source": [ + "Notice the difference between `compute_duration_seconds` and `runtime_duration_seconds`. The former indicates how long all the network requests took to complete and would be how long the whole process took to complete if running the requests in series. 
The latter indicates how long the batch as a whole took to complete when running with concurrency.\n" + ] + }, + { + "cell_type": "markdown", + "id": "49ec7716", + "metadata": {}, + "source": [ + "### With Rate Limiting\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "eb73f940", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Metrics for: azure/gpt-4o\n", + "{\n", + " \"attempted_request_count\": 10,\n", + " \"successful_response_count\": 10,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 108.16670417785645,\n", + " \"compute_duration_per_response_seconds\": 10.816670417785645,\n", + " \"runtime_duration_seconds\": 38.489975929260254,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 10,\n", + " \"prompt_tokens\": 280,\n", + " \"completion_tokens\": 8965,\n", + " \"total_tokens\": 9245,\n", + " \"tokens_per_response\": 924.5,\n", + " \"responses_with_cost\": 10,\n", + " \"input_cost\": 0.0007,\n", + " \"output_cost\": 0.08965000000000002,\n", + " \"total_cost\": 0.09035000000000001,\n", + " \"cost_per_response\": 0.009035000000000001\n", + "}\n" + ] + } + ], + "source": [ + "from graphrag_llm.config import RateLimitConfig, RateLimitType\n", + "\n", + "model_config.rate_limit = RateLimitConfig(\n", + " type=RateLimitType.SlidingWindow,\n", + " period_in_seconds=60, # limit requests per minute\n", + " requests_per_period=20, # max 20 requests per minute. Fire one off every 3 seconds\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "llm_completion.metrics_store.clear_metrics()\n", + "\n", + "responses = llm_completion.completion_batch(completion_requests, concurrency=25)\n", + "\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "ceb93f24", + "metadata": {}, + "source": [ + "Notice the `runtime_duration_seconds` is now much slower as the requests are being throttled by the rate limit.\n" + ] + }, + { + "cell_type": "markdown", + "id": "05bd00e6", + "metadata": {}, + "source": [ + "### With Cache\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "3cb345ec", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Metrics for: azure/gpt-4o\n", + "{\n", + " \"attempted_request_count\": 10,\n", + " \"successful_response_count\": 10,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 93.54697012901306,\n", + " \"compute_duration_per_response_seconds\": 9.354697012901307,\n", + " \"runtime_duration_seconds\": 10.748144149780273,\n", + " \"cached_responses\": 6,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 10,\n", + " \"prompt_tokens\": 280,\n", + " \"completion_tokens\": 7869,\n", + " \"total_tokens\": 8149,\n", + " \"tokens_per_response\": 814.9,\n", + " \"responses_with_cost\": 10,\n", + " \"input_cost\": 0.0007,\n", + " \"output_cost\": 0.07869000000000001,\n", + " \"total_cost\": 0.07939000000000002,\n", + " \"cost_per_response\": 0.007939000000000002\n", + "}\n" + ] + } + ], + "source": [ + "from graphrag_cache import create_cache\n", + "\n", + 
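One way to read the two duration metrics together is as an effective concurrency factor; a small helper, sketched against only the metric keys shown above:

```python
def effective_concurrency(metrics: dict) -> float:
    """Rough speedup: serial compute time divided by actual wall-clock time."""
    compute = metrics.get("compute_duration_seconds", 0.0)
    runtime = metrics.get("runtime_duration_seconds", 0.0)
    return compute / runtime if runtime else 0.0


# Unthrottled batch above: ~157.7 / ~18.7 gives roughly 8.4x.
# Rate-limited batch: ~108.2 / ~38.5 gives roughly 2.8x, showing the throttling cost.
print(effective_concurrency(llm_completion.metrics_store.get_metrics()))
```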
"cache = create_cache()\n", + "\n", + "# Redisable rate limiting\n", + "model_config.rate_limit = None\n", + "\n", + "llm_completion: LLMCompletion = create_completion(model_config, cache=cache)\n", + "llm_completion.metrics_store.clear_metrics()\n", + "\n", + "responses = llm_completion.completion_batch(completion_requests, concurrency=4)\n", + "\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "6de2c4cf", + "metadata": {}, + "source": [ + "Notice the `cached_responses == 6` since we are spinning up `4` threads. The first 4 requests are fired off immediately prior to any data in the cache. This means when identical requests are fired in the same thread cycle they will all hit the model since the cache is not yet populated.\n", + "\n", + "The `cached_responses` indicates how many cache hits occurred but the rest of the metrics exist as if a cache was not used. For example, `compute_duration_seconds` and all the tokens and cost counts are as if cache was not used so `compute_duration_seconds` includes network timings for the cached responses. This is because both the response and metrics are cached and retrieved from the cache when a cache hit occurs. This means the above metrics should closely match the metrics from the first example in this notebook other than the `runtime_duration_seconds` which gives the true idea of how long a job takes to run. Rerunning a job with a fully hydrated cache should result in a quick `runtime_duration_seconds`. Metrics were designed to give an idea of how long and costly a job would be if there were no cache.\n" + ] + }, + { + "cell_type": "markdown", + "id": "3e6d20d6", + "metadata": {}, + "source": [ + "## Embedding Batching\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2e95c4e6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Embedding vector length: 1536\n", + "[0.012382184155285358, -0.0487498939037323, 0.02962493523955345, 0.0321056991815567, -0.030259549617767334]\n", + "Embedding vector length: 1536\n", + "[-0.01842353865504265, -0.00725775770843029, 0.0036669441033154726, -0.0542047917842865, -0.022724902257323265]\n", + "Embedding vector length: 1536\n", + "[-0.055969491600990295, 0.023217301815748215, -0.007630861829966307, 0.002210293198004365, 0.01284848153591156]\n", + "Metrics for: azure/text-embedding-3-small\n", + "{\n", + " \"attempted_request_count\": 2,\n", + " \"successful_response_count\": 2,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 2.0372798442840576,\n", + " \"compute_duration_per_response_seconds\": 1.0186399221420288,\n", + " \"runtime_duration_seconds\": 1.02105712890625,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 2,\n", + " \"prompt_tokens\": 23,\n", + " \"total_tokens\": 23,\n", + " \"tokens_per_response\": 11.5,\n", + " \"responses_with_cost\": 2,\n", + " \"input_cost\": 4.6e-07,\n", + " \"total_cost\": 4.6e-07,\n", + " \"cost_per_response\": 2.3e-07\n", + "}\n" + ] + } + ], + "source": [ + "from graphrag_llm.embedding import LLMEmbedding, create_embedding\n", + "from graphrag_llm.types import LLMEmbeddingArgs\n", + "\n", + "embedding_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " 
model=os.getenv(\"GRAPHRAG_EMBEDDING_MODEL\", \"text-embedding-3-small\"),\n", + " azure_deployment_name=os.getenv(\n", + " \"GRAPHRAG_LLM_EMBEDDING_MODEL\", \"text-embedding-3-small\"\n", + " ),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "\n", + "llm_embedding: LLMEmbedding = create_embedding(embedding_config)\n", + "\n", + "# A single embedding request already accepts a list of inputs to embed\n", + "# Here we demonstrate batching multiple embedding requests concurrently\n", + "# The first request has two inputs to embed and the second has one input\n", + "embedding_requests: list[LLMEmbeddingArgs] = [\n", + " {\"input\": [\"Hello World.\", \"The quick brown fox jumps over the lazy dog.\"]},\n", + " {\"input\": [\"GraphRag is an amazing LLM framework.\"]},\n", + "]\n", + "\n", + "responses = llm_embedding.embedding_batch(embedding_requests, concurrency=4)\n", + "for response in responses:\n", + " if isinstance(response, Exception):\n", + " print(f\"Error: {response}\")\n", + " else:\n", + " for embedding in response.embeddings:\n", + " print(f\"Embedding vector length: {len(embedding)}\")\n", + " print(embedding[0:5]) # Print first 5 dimensions of the embedding vector\n", + "\n", + "print(f\"Metrics for: {llm_embedding.metrics_store.id}\")\n", + "print(json.dumps(llm_embedding.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "0ab62eca", + "metadata": {}, + "source": [ + "## Details\n", + "\n", + "The batch utils start up `concurrency` number of threads in a thread pool and then push all requests into an input queue where free threads pick up the next request to process. The threads will process requests within any defined rate limits and retry any failed request according to the retry settings. If a request fails after all the retries the thread will capture the exception and return it. Thus the batch result may contain exceptions.\n" + ] + }, + { + "cell_type": "markdown", + "id": "005ee408", + "metadata": {}, + "source": [ + "## Thread Pool\n", + "\n", + "The batch utils are convenient if all your requests are loaded in memory. 
If you wish to stream over an input source then you can use the lower level thread pool utils.\n" + ] + }, + { + "cell_type": "markdown", + "id": "b4a6553c", + "metadata": {}, + "source": [ + "### Completion Thread Pool\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "05643c93", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "request_number_1: Succeeded\n", + "request_number_2: Succeeded\n", + "request_number_6: Succeeded\n", + "request_number_9: Succeeded\n", + "request_number_0: Succeeded\n", + "request_number_5: Succeeded\n", + "request_number_7: Succeeded\n", + "request_number_4: Succeeded\n", + "request_number_3: Succeeded\n", + "request_number_8: Succeeded\n", + "Metrics for: azure/gpt-4o\n", + "{\n", + " \"attempted_request_count\": 10,\n", + " \"successful_response_count\": 10,\n", + " \"failed_response_count\": 0,\n", + " \"failure_rate\": 0.0,\n", + " \"requests_with_retries\": 0,\n", + " \"retries\": 0,\n", + " \"retry_rate\": 0.0,\n", + " \"compute_duration_seconds\": 107.33663082122803,\n", + " \"compute_duration_per_response_seconds\": 10.733663082122803,\n", + " \"runtime_duration_seconds\": 0.04277801513671875,\n", + " \"cached_responses\": 10,\n", + " \"streaming_responses\": 0,\n", + " \"responses_with_tokens\": 10,\n", + " \"prompt_tokens\": 280,\n", + " \"completion_tokens\": 9240,\n", + " \"total_tokens\": 9520,\n", + " \"tokens_per_response\": 952.0,\n", + " \"responses_with_cost\": 10,\n", + " \"input_cost\": 0.0007,\n", + " \"output_cost\": 0.0924,\n", + " \"total_cost\": 0.0931,\n", + " \"cost_per_response\": 0.00931\n", + "}\n" + ] + } + ], + "source": [ + "from collections.abc import Iterator\n", + "\n", + "from graphrag_llm.types import LLMCompletionChunk, LLMCompletionResponse\n", + "\n", + "llm_completion.metrics_store.clear_metrics()\n", + "\n", + "\n", + "# The response handler may also be asynchronous if needed\n", + "def _handle_response(\n", + " request_id: str,\n", + " resp: LLMCompletionResponse | Iterator[LLMCompletionChunk] | Exception,\n", + "):\n", + " # Imagine streaming responses to disk or elsewhere\n", + " if isinstance(resp, Exception):\n", + " print(f\"{request_id}: Failed\")\n", + " else:\n", + " print(f\"{request_id}: Succeeded\")\n", + "\n", + "\n", + "with llm_completion.completion_thread_pool(\n", + " response_handler=_handle_response,\n", + " concurrency=25,\n", + " # set queue_limit to create backpressure on reading the requests\n", + " queue_limit=10,\n", + ") as completion:\n", + " # Iterating over a list of completion requests already in memory\n", + " # but can imagine reading them from disk or another source\n", + " # The completion function returned from the context manager\n", + " # will block if the queue_limit is reached until some requests complete\n", + " # and also requires a request_id for tracking the requests\n", + " # and allowing you to identify them in the response handler\n", + " for index, request in enumerate(completion_requests):\n", + " completion(request_id=f\"request_number_{index}\", **request)\n", + "\n", + "# Using the same request that was used in the caching example so\n", + "# this should complete instantly from cache\n", + "print(f\"Metrics for: {llm_completion.metrics_store.id}\")\n", + "print(json.dumps(llm_completion.metrics_store.get_metrics(), indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "6e254d56", + "metadata": {}, + "source": [ + "### Embedding Thread Pool\n" + ] + }, + { + "cell_type": "code", + 
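The same streaming pattern applies to embeddings. Below is a sketch that feeds text chunks from a hypothetical `chunks.txt` file (one chunk per line) through the embedding thread pool, collecting vectors as they complete; it assumes the `llm_embedding` client created earlier in this notebook:

```python
from pathlib import Path

from graphrag_llm.types import LLMEmbeddingResponse

chunks_path = Path("chunks.txt")  # hypothetical input file, one text chunk per line
vectors: dict[str, list[float]] = {}


def _collect(request_id: str, resp: LLMEmbeddingResponse | Exception):
    if isinstance(resp, Exception):
        print(f"{request_id}: failed ({resp})")
    else:
        vectors[request_id] = resp.embeddings[0]


with llm_embedding.embedding_thread_pool(
    response_handler=_collect,
    concurrency=8,
    queue_limit=16,  # backpressure: reading pauses while the queue is full
) as embedding:
    with chunks_path.open(encoding="utf-8") as f:
        for line_number, line in enumerate(f):
            text = line.strip()
            if text:
                embedding(request_id=f"chunk_{line_number}", input=[text])
```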
"execution_count": 7, + "id": "7eed1a15", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "embedding_request_number_1: Succeeded\n", + "embedding_request_number_0: Succeeded\n" + ] + } + ], + "source": [ + "from graphrag_llm.types import LLMEmbeddingResponse\n", + "\n", + "llm_embedding.metrics_store.clear_metrics()\n", + "\n", + "\n", + "# The response handler may also be asynchronous if needed\n", + "def _handle_response(\n", + " request_id: str,\n", + " resp: LLMEmbeddingResponse | Exception,\n", + "):\n", + " if isinstance(resp, Exception):\n", + " print(f\"{request_id}: Failed\")\n", + " else:\n", + " print(f\"{request_id}: Succeeded\")\n", + "\n", + "\n", + "with llm_embedding.embedding_thread_pool(\n", + " response_handler=_handle_response,\n", + " concurrency=25,\n", + " queue_limit=10,\n", + ") as embedding:\n", + " for index, request in enumerate(embedding_requests):\n", + " embedding(request_id=f\"embedding_request_number_{index}\", **request)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/09_message_builder_and_history.ipynb b/packages/graphrag-llm/notebooks/09_message_builder_and_history.ipynb new file mode 100644 index 0000000000..acae5c7f2d --- /dev/null +++ b/packages/graphrag-llm/notebooks/09_message_builder_and_history.ipynb @@ -0,0 +1,199 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "b22254c3", + "metadata": {}, + "source": [ + "# Message Builder\n", + "\n", + "The completion API adheres to litellm completion API and thus the OpanAI SDK API. The `messages` parameter can be one of the following:\n", + "\n", + "- `str`: Raw string for the prompt.\n", + "- `list[dict[str, Any]]`: A list of dicts in the form `{\"role\": \"user|system|...\", \"content\": \"...\"}`\n", + "- `list[ChatCompletionMessageParam]`: A list of OpenAI `ChatCompletionMessageParam`.\n", + "\n", + "`graphrag_llm.utils` provides a `ChatCompletionMessageParamBuilder` to help construct these objects. Below are examples of using the builder.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "553f83d9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Arrr, ye got me there, matey. Truth be, Pluto ain't considered a full-fledged planet no more. 
Back in 2006, them scallywags at the International Astronomical Union demoted it to a “dwarf planet.” So in the eyes of modern astronomers, 'tis a no.\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "from graphrag_llm.types import LLMCompletionResponse\n", + "from graphrag_llm.utils import (\n", + "    CompletionMessagesBuilder,\n", + ")\n", + "\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + "    model_provider=\"azure\",\n", + "    model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + "    azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + "    api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + "    api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + "    api_key=api_key,\n", + "    auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "\n", + "messages = (\n", + "    CompletionMessagesBuilder()\n", + "    .add_system_message(\n", + "        \"You are a helpful assistant that likes to talk like a pirate. Respond as if you are a pirate using pirate speak.\"\n", + "    )\n", + "    .add_user_message(\"Is pluto a planet? Respond with a yes or no.\")\n", + "    .add_assistant_message(\"Aye, matey! Pluto be a planet in me book.\")\n", + "    .add_user_message(\"Are you sure? I want the truth. Can you elaborate?\")\n", + "    .build()\n", + ")\n", + "\n", + "response: LLMCompletionResponse = llm_completion.completion(messages=messages) # type: ignore\n", + "\n", + "print(response.content)" + ] + }, + { + "cell_type": "markdown", + "id": "acb265fe", + "metadata": {}, + "source": [ + "## Other Message Types\n", + "\n", + "You can use the `CompletionMessagesBuilder` along with the `CompletionContentPartBuilder` to build more complicated messages, such as those using images.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c7b094a6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The image features a capybara floating in space. The backdrop displays a colorful and vibrant interstellar scene filled with nebulae and stars, showcasing various shades of blue, purple, pink, and green. 
The capybara is slightly tilted with its face foregrounded, giving a whimsical and surreal feel as if it is soaring through the cosmos.\n" + ] + } + ], + "source": [ + "from graphrag_llm.utils import CompletionContentPartBuilder\n", + "\n", + "messages = (\n", + "    CompletionMessagesBuilder()\n", + "    .add_user_message(\n", + "        # Instead of providing a string we are providing content parts\n", + "        # By using the CompletionContentPartBuilder\n", + "        CompletionContentPartBuilder()\n", + "        .add_text_part(\"Describe this image\")\n", + "        .add_image_part(\n", + "            # Can also be a base64 encoded image string\n", + "            url=\"https://th.bing.com/th/id/OUG.0A10DBFCEB3A9A7C6707FCF6F0D96BFD?cb=ucfimg2&ucfimg=1&rs=1&pid=ImgDetMain&o=7&rm=3\",\n", + "            detail=\"high\",\n", + "        )\n", + "        .build()\n", + "    )\n", + "    .build()\n", + ")\n", + "\n", + "response: LLMCompletionResponse = llm_completion.completion(messages=messages) # type: ignore\n", + "print(response.content)" + ] + }, + { + "cell_type": "markdown", + "id": "e8b9be3e", + "metadata": {}, + "source": [ + "## History\n", + "\n", + "The first example alluded to how the `CompletionMessagesBuilder` can be used to track history.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "92abc427", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "User: Is Pluto a planet? Answer with a yes or no.\n", + "Assistant: No.\n", + "User: Can you elaborate?\n", + "Assistant: In 2006, the International Astronomical Union (IAU) redefined the criteria for classifying planets. According to this new definition, for an object to be considered a planet, it must:\n", + "\n", + "1. Orbit the Sun.\n", + "2. Be spherical in shape (have sufficient mass for its gravity to overcome rigid body forces so that it assumes a nearly round shape).\n", + "3. Have cleared its orbit of other debris.\n", + "\n", + "Pluto meets the first two criteria but does not meet the third criterion because it shares its orbit with other objects in the Kuiper Belt. Therefore, Pluto was reclassified as a \"dwarf planet.\"\n" + ] + } + ], + "source": [ + "user_messages = [\"Is Pluto a planet? 
Answer with a yes or no.\", \"Can you elaborate?\"]\n", + "\n", + "messages_builder = CompletionMessagesBuilder()\n", + "\n", + "for msg in user_messages:\n", + " print(f\"User: {msg}\")\n", + "\n", + " messages_builder.add_user_message(msg)\n", + "\n", + " response: LLMCompletionResponse = llm_completion.completion(\n", + " messages=messages_builder.build()\n", + " ) # type: ignore\n", + " print(f\"Assistant: {response.content}\")\n", + "\n", + " messages_builder.add_assistant_message(response.content)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/10_tool_calling.ipynb b/packages/graphrag-llm/notebooks/10_tool_calling.ipynb new file mode 100644 index 0000000000..8ebec94eb1 --- /dev/null +++ b/packages/graphrag-llm/notebooks/10_tool_calling.ipynb @@ -0,0 +1,387 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a56845b0", + "metadata": {}, + "source": [ + "# Function Tool Calling\n", + "\n", + "In order to use function tools, the completion endpoint needs a json schema of the function(s). This notebook uses `pydantic` to describe a function and its parameters and the `OpenAI` built-in `pydantic_function_tool` to create the necessary json schema. Other techniques may be used to create a definition for your functions.\n" + ] + }, + { + "cell_type": "markdown", + "id": "daf62482", + "metadata": {}, + "source": [ + "## Manual Function Tool Calling\n", + "\n", + "This example demonstrates function tool calling by manually using `pydantic` and `pydantic_function_tool`. 
See the next example for a simplified approach.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "53437ac4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Adding 5 and 7 gives you 12.\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import json\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "from graphrag_llm.types import LLMCompletionResponse\n", + "from graphrag_llm.utils import (\n", + " CompletionMessagesBuilder,\n", + ")\n", + "from openai import pydantic_function_tool\n", + "from pydantic import BaseModel, ConfigDict, Field\n", + "\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "\n", + "class AddTwoNumbers(BaseModel):\n", + " \"\"\"Input Argument for add two numbers.\"\"\"\n", + "\n", + " model_config = ConfigDict(\n", + " extra=\"forbid\",\n", + " )\n", + "\n", + " a: int = Field(description=\"The first number to add.\")\n", + " b: int = Field(description=\"The second number to add.\")\n", + "\n", + "\n", + "# The actual function\n", + "def add_two_numbers(options: AddTwoNumbers) -> int:\n", + " \"\"\"Add two numbers.\"\"\"\n", + " return options.a + options.b\n", + "\n", + "\n", + "add_definition = pydantic_function_tool(\n", + " AddTwoNumbers,\n", + " # Function name and description\n", + " name=\"my_add_two_numbers_function\",\n", + " description=\"Add two numbers.\",\n", + ")\n", + "\n", + "# Mapping of available functions\n", + "available_functions = {\n", + " \"my_add_two_numbers_function\": {\n", + " \"function\": add_two_numbers,\n", + " \"input_model\": AddTwoNumbers,\n", + " },\n", + "}\n", + "\n", + "messages_builder = CompletionMessagesBuilder().add_user_message(\n", + " \"Add 5 and 7 using a function call.\"\n", + ")\n", + "\n", + "response: LLMCompletionResponse = llm_completion.completion(\n", + " messages=messages_builder.build(),\n", + " tools=[add_definition],\n", + ") # type: ignore\n", + "\n", + "if not response.choices[0].message.tool_calls:\n", + " msg = \"No function call found in response.\"\n", + " raise ValueError(msg)\n", + "\n", + "# Add the assistant message with the function call to the message history\n", + "messages_builder.add_assistant_message(\n", + " message=response.choices[0].message,\n", + ")\n", + "\n", + "for tool_call in response.choices[0].message.tool_calls:\n", + " tool_id = tool_call.id\n", + " if tool_call.type != \"function\":\n", + " continue\n", + " function_name = tool_call.function.name\n", + " function_args = tool_call.function.arguments\n", + "\n", + " args_dict = json.loads(function_args)\n", + "\n", + " InputModel = available_functions[function_name][\"input_model\"]\n", + " function = available_functions[function_name][\"function\"]\n", + " 
input_options = InputModel(**args_dict)\n", + "\n", + "    result = function(input_options)\n", + "\n", + "    messages_builder.add_tool_message(\n", + "        content=str(result),\n", + "        tool_call_id=tool_id,\n", + "    )\n", + "\n", + "final_response: LLMCompletionResponse = llm_completion.completion(\n", + "    messages=messages_builder.build(),\n", + ") # type: ignore\n", + "print(final_response.content)" + ] + }, + { + "cell_type": "markdown", + "id": "b31c7a9c", + "metadata": {}, + "source": [ + "### Function Tool Definition\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "eb6950e8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{\n", + "  \"type\": \"function\",\n", + "  \"function\": {\n", + "    \"name\": \"my_add_two_numbers_function\",\n", + "    \"strict\": true,\n", + "    \"parameters\": {\n", + "      \"additionalProperties\": false,\n", + "      \"description\": \"Input Argument for add two numbers.\",\n", + "      \"properties\": {\n", + "        \"a\": {\n", + "          \"description\": \"The first number to add.\",\n", + "          \"title\": \"A\",\n", + "          \"type\": \"integer\"\n", + "        },\n", + "        \"b\": {\n", + "          \"description\": \"The second number to add.\",\n", + "          \"title\": \"B\",\n", + "          \"type\": \"integer\"\n", + "        }\n", + "      },\n", + "      \"required\": [\n", + "        \"a\",\n", + "        \"b\"\n", + "      ],\n", + "      \"title\": \"AddTwoNumbers\",\n", + "      \"type\": \"object\"\n", + "    },\n", + "    \"description\": \"Add two numbers.\"\n", + "  }\n", + "}\n" + ] + } + ], + "source": [ + "# View the output schema\n", + "# This is what is passed to the completion tools param\n", + "# Created using pydantic and pydantic_function_tool\n", + "# but may be created manually as well\n", + "print(json.dumps(add_definition, indent=2))" + ] + }, + { + "cell_type": "markdown", + "id": "660de4c9", + "metadata": {}, + "source": [ + "## Tool Calling with FunctionToolManager\n", + "\n", + "If using `pydantic` to describe function arguments, you can use the `FunctionToolManager` to register functions, produce definitions, and call functions in response to the LLM. 
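The examples here make a single round trip, but the model may request further tool calls after seeing tool results. A sketch of a bounded loop built from the same calls used in the `FunctionToolManager` example below (the round limit is illustrative):

```python
MAX_ROUNDS = 5  # illustrative safety limit

response = llm_completion.completion(
    messages=messages_builder.build(),
    tools=function_tool_manager.definitions(),
)
for _ in range(MAX_ROUNDS):
    if not response.choices[0].message.tool_calls:
        break  # the model answered directly; no further tools requested
    messages_builder.add_assistant_message(message=response.choices[0].message)
    for tool_message in function_tool_manager.call_functions(response):
        messages_builder.add_tool_message(**tool_message)
    response = llm_completion.completion(
        messages=messages_builder.build(),
        tools=function_tool_manager.definitions(),
    )

print(response.content)
```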
This helps automate some of the above work.\n", + "\n", + "The following example demonstrates calling multiple functions in one LLM call.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4fae701e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Adding numbers: 3 8\n", + "Multiplying numbers: 9 5\n", + "Reversing text: GraphRAG\n", + "3 + 8 is 11, 9 * 5 is 45, and the reversed string 'GraphRAG' is 'GARhparG'.\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import AuthMethod, ModelConfig\n", + "from graphrag_llm.types import LLMCompletionResponse\n", + "from graphrag_llm.utils import (\n", + " CompletionMessagesBuilder,\n", + " FunctionToolManager,\n", + ")\n", + "from pydantic import BaseModel, ConfigDict, Field\n", + "\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "\n", + "class NumbersInput(BaseModel):\n", + " \"\"\"Numbers input.\"\"\"\n", + "\n", + " model_config = ConfigDict(\n", + " extra=\"forbid\",\n", + " )\n", + "\n", + " a: int = Field(description=\"The first number.\")\n", + " b: int = Field(description=\"The second number.\")\n", + "\n", + "\n", + "def add(options: NumbersInput) -> str:\n", + " \"\"\"Add two numbers.\"\"\"\n", + " # Print something to ensure function is called for verification\n", + " print(\"Adding numbers:\", options.a, options.b)\n", + " return str(options.a + options.b)\n", + "\n", + "\n", + "def multiply(options: NumbersInput) -> str:\n", + " \"\"\"Multiply two numbers.\"\"\"\n", + " # Print something to ensure function is called for verification\n", + " print(\"Multiplying numbers:\", options.a, options.b)\n", + " return str(options.a * options.b)\n", + "\n", + "\n", + "class TextInput(BaseModel):\n", + " \"\"\"Text input.\"\"\"\n", + "\n", + " model_config = ConfigDict(\n", + " extra=\"forbid\",\n", + " )\n", + "\n", + " test: str = Field(description=\"The string to reverse.\")\n", + "\n", + "\n", + "def reverse_text(options: TextInput) -> str:\n", + " \"\"\"Reverse a string.\"\"\"\n", + " # Print something to ensure function is called for verification\n", + " print(\"Reversing text:\", options.test)\n", + " return options.test[::-1]\n", + "\n", + "\n", + "function_tool_manager = FunctionToolManager()\n", + "\n", + "function_tool_manager.register_function_tool(\n", + " name=\"add\",\n", + " description=\"Add two numbers.\",\n", + " function=add,\n", + " input_model=NumbersInput,\n", + ")\n", + "function_tool_manager.register_function_tool(\n", + " name=\"multiply\",\n", + " description=\"Multiply two numbers.\",\n", + " function=multiply,\n", + " input_model=NumbersInput,\n", + ")\n", + "function_tool_manager.register_function_tool(\n", + " name=\"reverse_text\",\n", + " 
description=\"Reverse a string.\",\n", + " function=reverse_text,\n", + " input_model=TextInput,\n", + ")\n", + "\n", + "\n", + "messages_builder = CompletionMessagesBuilder().add_user_message(\n", + " \"What is 3 + 8 and 9 * 5? Also, reverse the string 'GraphRAG'.\"\n", + ")\n", + "\n", + "# Multiple tool calls in parallel\n", + "response: LLMCompletionResponse = llm_completion.completion(\n", + " messages=messages_builder.build(),\n", + " tools=function_tool_manager.definitions(),\n", + " parallel_tool_calls=True,\n", + ") # type: ignore\n", + "\n", + "# Add the assistant message with the function call to the message history\n", + "messages_builder.add_assistant_message(\n", + " message=response.choices[0].message,\n", + ")\n", + "\n", + "tool_results = function_tool_manager.call_functions(response)\n", + "\n", + "for tool_message in tool_results:\n", + " messages_builder.add_tool_message(**tool_message)\n", + "\n", + "final_response: LLMCompletionResponse = llm_completion.completion(\n", + " messages=messages_builder.build(),\n", + ") # type: ignore\n", + "print(final_response.content)" + ] + }, + { + "cell_type": "markdown", + "id": "b2d36f7a", + "metadata": {}, + "source": [ + "## MCP Tools\n", + "\n", + "**Not currently supported**. `graphrag_llm` currently only implements the `completion` endpoints which do not support MCP tools.\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/11_templating.ipynb b/packages/graphrag-llm/notebooks/11_templating.ipynb new file mode 100644 index 0000000000..bfee940c53 --- /dev/null +++ b/packages/graphrag-llm/notebooks/11_templating.ipynb @@ -0,0 +1,149 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "6945138b", + "metadata": {}, + "source": [ + "# Templating\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3322e6a7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The rendered message to parse: \n", + "It is sunny and 52 degrees fahrenheit in Seattle.\n", + "\n", + "It is cloudy and 75 degrees fahrenheit in San Francisco.\n", + "\n", + "City: Seattle\n", + " Temperature: 11.1 °C\n", + " Condition: Sunny\n", + "City: San Francisco\n", + " Temperature: 23.9 °C\n", + " Condition: Cloudy\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import os\n", + "\n", + "from dotenv import load_dotenv\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import (\n", + " AuthMethod,\n", + " ModelConfig,\n", + " TemplateEngineConfig,\n", + " TemplateEngineType,\n", + " TemplateManagerType,\n", + ")\n", + "from graphrag_llm.templating import create_template_engine\n", + "from graphrag_llm.types import LLMCompletionResponse\n", + "from pydantic import BaseModel, Field\n", + "\n", + "load_dotenv()\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " model_provider=\"azure\",\n", + " model=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " 
azure_deployment_name=os.getenv(\"GRAPHRAG_MODEL\", \"gpt-4o\"),\n", + " api_base=os.getenv(\"GRAPHRAG_API_BASE\"),\n", + " api_version=os.getenv(\"GRAPHRAG_API_VERSION\", \"2025-04-01-preview\"),\n", + " api_key=api_key,\n", + " auth_method=AuthMethod.AzureManagedIdentity if not api_key else AuthMethod.ApiKey,\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "\n", + "template_engine = create_template_engine()\n", + "\n", + "# The above default is the same as the following configuration:\n", + "template_engine = create_template_engine(\n", + " TemplateEngineConfig(\n", + " type=TemplateEngineType.Jinja,\n", + " template_manager=TemplateManagerType.File,\n", + " base_dir=\"templates\",\n", + " template_extension=\".jinja\",\n", + " encoding=\"utf-8\",\n", + " )\n", + ")\n", + "\n", + "msg = template_engine.render(\n", + " # Name of the template file without extension\n", + " template_name=\"weather_listings\",\n", + " # Values to fill in the template\n", + " context={\n", + " \"weather_reports\": [\n", + " {\"city\": \"Seattle\", \"temperature_f\": 52, \"condition\": \"sunny\"},\n", + " {\"city\": \"San Francisco\", \"temperature_f\": 75, \"condition\": \"cloudy\"},\n", + " ]\n", + " },\n", + ")\n", + "\n", + "\n", + "print(f\"The rendered message to parse: {msg}\")\n", + "\n", + "\n", + "# Structured response parsing using pydantic\n", + "class LocalWeather(BaseModel):\n", + " \"\"\"City weather information model.\"\"\"\n", + "\n", + " city: str = Field(description=\"The name of the city\")\n", + " temperature: float = Field(description=\"The temperature in Celsius\")\n", + " condition: str = Field(description=\"The weather condition description\")\n", + "\n", + "\n", + "class WeatherReports(BaseModel):\n", + " \"\"\"Weather information model.\"\"\"\n", + "\n", + " reports: list[LocalWeather] = Field(\n", + " description=\"The weather reports for multiple cities\"\n", + " )\n", + "\n", + "\n", + "response: LLMCompletionResponse[WeatherReports] = llm_completion.completion(\n", + " messages=msg,\n", + " response_format=WeatherReports,\n", + ") # type: ignore\n", + "\n", + "local_weather_reports: WeatherReports = response.formatted_response # type: ignore\n", + "for report in local_weather_reports.reports:\n", + " print(f\"City: {report.city}\")\n", + " print(f\" Temperature: {report.temperature} °C\")\n", + " print(f\" Condition: {report.condition}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/12_mocking.ipynb b/packages/graphrag-llm/notebooks/12_mocking.ipynb new file mode 100644 index 0000000000..6cee72f293 --- /dev/null +++ b/packages/graphrag-llm/notebooks/12_mocking.ipynb @@ -0,0 +1,164 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "9418b981", + "metadata": {}, + "source": [ + "# Mocking\n" + ] + }, + { + "cell_type": "markdown", + "id": "1d000d70", + "metadata": {}, + "source": [ + "## Completions\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "792c4fa3", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Who cares?\n", + "You tell 
me!\n", + "{\"reports\":[{\"city\":\"New York\",\"temperature\":22.5,\"condition\":\"Sunny\"}]}\n", + "Who cares?\n" + ] + } + ], + "source": [ + "# Copyright (c) 2024 Microsoft Corporation.\n", + "# Licensed under the MIT License\n", + "\n", + "import os\n", + "\n", + "from graphrag_llm.completion import LLMCompletion, create_completion\n", + "from graphrag_llm.config import LLMProviderType, ModelConfig\n", + "from graphrag_llm.types import LLMCompletionResponse\n", + "from pydantic import BaseModel, Field\n", + "\n", + "\n", + "class LocalWeather(BaseModel):\n", + " \"\"\"City weather information model.\"\"\"\n", + "\n", + " city: str = Field(description=\"The name of the city\")\n", + " temperature: float = Field(description=\"The temperature in Celsius\")\n", + " condition: str = Field(description=\"The weather condition description\")\n", + "\n", + "\n", + "class WeatherReports(BaseModel):\n", + " \"\"\"Weather information model.\"\"\"\n", + "\n", + " reports: list[LocalWeather] = Field(\n", + " description=\"The weather reports for multiple cities\"\n", + " )\n", + "\n", + "\n", + "weather_reports = WeatherReports(\n", + " reports=[\n", + " LocalWeather(city=\"New York\", temperature=22.5, condition=\"Sunny\"),\n", + " ]\n", + ")\n", + "\n", + "api_key = os.getenv(\"GRAPHRAG_API_KEY\")\n", + "model_config = ModelConfig(\n", + " type=LLMProviderType.MockLLM,\n", + " model_provider=\"openai\",\n", + " model=\"gpt-4o\",\n", + " mock_responses=[\"Who cares?\", \"You tell me!\", weather_reports.model_dump_json()],\n", + ")\n", + "llm_completion: LLMCompletion = create_completion(model_config)\n", + "\n", + "response: LLMCompletionResponse = llm_completion.completion(\n", + " messages=\"What is the capital of France?\",\n", + ") # type: ignore\n", + "\n", + "print(response.content)\n", + "\n", + "response: LLMCompletionResponse = llm_completion.completion(\n", + " messages=\"Should be second response\",\n", + ") # type: ignore\n", + "print(response.content)\n", + "\n", + "response_formatted: LLMCompletionResponse[WeatherReports] = llm_completion.completion(\n", + " messages=\"Structured response.\",\n", + " response_format=WeatherReports,\n", + ") # type: ignore\n", + "print(response_formatted.formatted_response.model_dump_json()) # type: ignore\n", + "\n", + "response: LLMCompletionResponse = llm_completion.completion(\n", + " messages=\"Should cycle back to first response\",\n", + ") # type: ignore\n", + "print(response.content)" + ] + }, + { + "cell_type": "markdown", + "id": "2c8f1b7a", + "metadata": {}, + "source": [ + "## Embeddings\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6eec6dc3", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[1.0, 2.0, 3.0]\n", + "[1.0, 2.0, 3.0]\n" + ] + } + ], + "source": [ + "from graphrag_llm.embedding import LLMEmbedding, create_embedding\n", + "\n", + "embedding_config = ModelConfig(\n", + " type=LLMProviderType.MockLLM,\n", + " model_provider=\"openai\",\n", + " model=\"text-embedding-3-small\",\n", + " mock_responses=[1.0, 2.0, 3.0],\n", + ")\n", + "\n", + "llm_embedding: LLMEmbedding = create_embedding(embedding_config)\n", + "\n", + "embeddings_response = llm_embedding.embedding(input=[\"Hello world\", \"How are you?\"])\n", + "for embedding in embeddings_response.embeddings:\n", + " print(embedding[0:3])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + 
"codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/packages/graphrag-llm/notebooks/README.md b/packages/graphrag-llm/notebooks/README.md new file mode 100644 index 0000000000..0496ddc6aa --- /dev/null +++ b/packages/graphrag-llm/notebooks/README.md @@ -0,0 +1,11 @@ +To run the notebooks you need to add a `.env` file to the `notebooks` directory with the following information + +``` +GRAPHRAG_MODEL="..." +GRAPHRAG_EMBEDDING_MODEL="..." +GRAPHRAG_API_BASE="..." +# API Key and version are optional +# If not provided, Azure managed identity will be used +GRAPHRAG_API_KEY="..." +GRAPHRAG_API_VERSION="..." +``` \ No newline at end of file diff --git a/packages/graphrag-llm/notebooks/templates/weather_listings.jinja b/packages/graphrag-llm/notebooks/templates/weather_listings.jinja new file mode 100644 index 0000000000..b3f083dea8 --- /dev/null +++ b/packages/graphrag-llm/notebooks/templates/weather_listings.jinja @@ -0,0 +1,3 @@ +{% for report in weather_reports %} +It is {{report.condition}} and {{report.temperature_f}} degrees fahrenheit in {{report.city}}. +{% endfor %} \ No newline at end of file diff --git a/packages/graphrag-llm/pyproject.toml b/packages/graphrag-llm/pyproject.toml new file mode 100644 index 0000000000..c52e6b40c8 --- /dev/null +++ b/packages/graphrag-llm/pyproject.toml @@ -0,0 +1,46 @@ +[project] +name = "graphrag-llm" +version = "2.7.0" +description = "GraphRAG LLM package." +authors = [ + {name = "Alonso Guevara Fernández", email = "alonsog@microsoft.com"}, + {name = "Andrés Morales Esquivel", email = "andresmor@microsoft.com"}, + {name = "Chris Trevino", email = "chtrevin@microsoft.com"}, + {name = "David Tittsworth", email = "datittsw@microsoft.com"}, + {name = "Dayenne de Souza", email = "ddesouza@microsoft.com"}, + {name = "Derek Worthen", email = "deworthe@microsoft.com"}, + {name = "Gaudy Blanco Meneses", email = "gaudyb@microsoft.com"}, + {name = "Ha Trinh", email = "trinhha@microsoft.com"}, + {name = "Jonathan Larson", email = "jolarso@microsoft.com"}, + {name = "Josh Bradley", email = "joshbradley@microsoft.com"}, + {name = "Kate Lytvynets", email = "kalytv@microsoft.com"}, + {name = "Kenny Zhang", email = "zhangken@microsoft.com"}, + {name = "Mónica Carvajal"}, + {name = "Nathan Evans", email = "naevans@microsoft.com"}, + {name = "Rodrigo Racanicci", email = "rracanicci@microsoft.com"}, + {name = "Sarah Smith", email = "smithsarah@microsoft.com"}, +] +license = "MIT" +readme = "README.md" +license-files = ["LICENSE"] +requires-python = ">=3.10,<3.13" +classifiers = [ + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] +dependencies = [ + "azure-identity~=1.19.0", + "graphrag-cache==2.7.0", + "graphrag-common==2.7.0", + "jinja2~=3.1", + "litellm~=1.80", + "nest-asyncio2~=1.7", + "pydantic~=2.10", + "typing-extensions~=4.12" +] + +[build-system] +requires = ["hatchling>=1.27.0,<2.0.0"] +build-backend = "hatchling.build" diff --git a/packages/graphrag/graphrag/api/prompt_tune.py b/packages/graphrag/graphrag/api/prompt_tune.py index e051102377..e1741f014b 100644 --- a/packages/graphrag/graphrag/api/prompt_tune.py +++ b/packages/graphrag/graphrag/api/prompt_tune.py @@ -13,11 +13,10 @@ import logging +from 
graphrag_llm.completion import create_completion from pydantic import PositiveInt, validate_call -from graphrag.callbacks.noop_workflow_callbacks import NoopWorkflowCallbacks from graphrag.config.models.graph_rag_config import GraphRagConfig -from graphrag.language_model.manager import ModelManager from graphrag.logger.standard_logging import init_loggers from graphrag.prompt_tune.defaults import MAX_TOKEN_COUNT, PROMPT_TUNING_MODEL_ID from graphrag.prompt_tune.generator.community_report_rating import ( @@ -100,16 +99,10 @@ async def generate_indexing_prompts( # Create LLM from config # TODO: Expose a way to specify Prompt Tuning model ID through config logger.info("Retrieving language model configuration...") - default_llm_settings = config.get_language_model_config(PROMPT_TUNING_MODEL_ID) + default_llm_settings = config.get_completion_model_config(PROMPT_TUNING_MODEL_ID) logger.info("Creating language model...") - llm = ModelManager().register_chat( - name="prompt_tuning", - model_type=default_llm_settings.type, - config=default_llm_settings, - callbacks=NoopWorkflowCallbacks(), - cache=None, - ) + llm = create_completion(default_llm_settings) if not domain: logger.info("Generating domain...") @@ -128,8 +121,8 @@ async def generate_indexing_prompts( ) entity_types = None - extract_graph_llm_settings = config.get_language_model_config( - config.extract_graph.model_id + extract_graph_llm_settings = config.get_completion_model_config( + config.extract_graph.completion_model_id ) if discover_entity_types: logger.info("Generating entity types...") @@ -138,7 +131,7 @@ async def generate_indexing_prompts( domain=domain, persona=persona, docs=doc_list, - json_mode=extract_graph_llm_settings.model_supports_json or False, + json_mode=True, ) logger.info("Generating entity relationship examples...") diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/__init__.py b/packages/graphrag/graphrag/cache/__init__.py similarity index 76% rename from packages/graphrag/graphrag/language_model/providers/litellm/services/__init__.py rename to packages/graphrag/graphrag/cache/__init__.py index dd0cf9fe2a..60ffdf7a1f 100644 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/__init__.py +++ b/packages/graphrag/graphrag/cache/__init__.py @@ -1,4 +1,4 @@ # Copyright (c) 2025 Microsoft Corporation. # Licensed under the MIT License -"""LiteLLM Services.""" +"""Cache module.""" diff --git a/packages/graphrag/graphrag/cache/cache_key_creator.py b/packages/graphrag/graphrag/cache/cache_key_creator.py new file mode 100644 index 0000000000..704035f253 --- /dev/null +++ b/packages/graphrag/graphrag/cache/cache_key_creator.py @@ -0,0 +1,44 @@ +# Copyright (c) 2025 Microsoft Corporation. +# Licensed under the MIT License + +"""Cache key creation for GraphRAG.""" + +from typing import Any + +from graphrag_llm.cache import create_cache_key + +_CACHE_VERSION = 4 +""" +If there's a breaking change in what we cache, we should increment this version number to invalidate existing caches. + +fnllm was on cache version 2, and though we generate +similar cache keys, the objects stored in cache by fnllm and litellm are different. +Caches generated by fnllm cannot be reused by the litellm model providers, +so we start with version 3 for litellm. + +The graphrag-llm package is now on version 4. +This accounts for changes to the ModelConfig that affect the cache key and +that occurred when this package was pulled out of graphrag. 
+graphrag-llm, now that it supports metrics, also caches metrics, which were not cached before. +""" + + +def cache_key_creator( + input_args: dict[str, Any], +) -> str: + """Generate a cache key based on input arguments. + + Args + ---- + input_args: dict[str, Any] + The input arguments for the model call. + + Returns + ------- + str + The generated cache key in the format + `{data_hash}_v{version}`. + """ + base_key = create_cache_key(input_args) + + return f"{base_key}_v{_CACHE_VERSION}" diff --git a/packages/graphrag/graphrag/cli/initialize.py b/packages/graphrag/graphrag/cli/initialize.py index 2eb5ef785e..3bf0c69c76 100644 --- a/packages/graphrag/graphrag/cli/initialize.py +++ b/packages/graphrag/graphrag/cli/initialize.py @@ -67,7 +67,7 @@ def initialize_project_at( ).resolve() input_path.mkdir(parents=True, exist_ok=True) # using replace with custom tokens instead of format here because we have a placeholder for GRAPHRAG_API_KEY that is used later for .env overlay - formatted = INIT_YAML.replace("", model).replace( + formatted = INIT_YAML.replace("", model).replace( "", embedding_model ) settings_yaml.write_text(formatted, encoding="utf-8", errors="strict") diff --git a/packages/graphrag/graphrag/cli/main.py b/packages/graphrag/graphrag/cli/main.py index f79517f575..823b31b4e6 100644 --- a/packages/graphrag/graphrag/cli/main.py +++ b/packages/graphrag/graphrag/cli/main.py @@ -11,7 +11,7 @@ import typer from graphrag.config.defaults import ( - DEFAULT_CHAT_MODEL, + DEFAULT_COMPLETION_MODEL, DEFAULT_EMBEDDING_MODEL, graphrag_config_defaults, ) @@ -109,7 +109,7 @@ def _initialize_cli( autocompletion=ROOT_AUTOCOMPLETE, ), model: str = typer.Option( - DEFAULT_CHAT_MODEL, + DEFAULT_COMPLETION_MODEL, "--model", "-m", prompt="Specify the default chat model to use", diff --git a/packages/graphrag/graphrag/config/defaults.py b/packages/graphrag/graphrag/config/defaults.py index 7f7fd3f9e8..a76caf1126 100644 --- a/packages/graphrag/graphrag/config/defaults.py +++ b/packages/graphrag/graphrag/config/defaults.py @@ -10,14 +10,13 @@ from graphrag_cache import CacheType from graphrag_chunking.chunk_strategy_type import ChunkerType from graphrag_input import InputType +from graphrag_llm.config import AuthMethod from graphrag_storage import StorageType from graphrag_vectors import VectorStoreType from graphrag.config.embeddings import default_embeddings from graphrag.config.enums import ( AsyncType, - AuthType, - ModelType, NounPhraseExtractorType, ReportingType, ) @@ -29,13 +28,11 @@ DEFAULT_OUTPUT_BASE_DIR = "output" DEFAULT_CACHE_BASE_DIR = "cache" DEFAULT_UPDATE_OUTPUT_BASE_DIR = "update_output" -DEFAULT_CHAT_MODEL_ID = "default_chat_model" -DEFAULT_CHAT_MODEL_TYPE = ModelType.Chat -DEFAULT_CHAT_MODEL_AUTH_TYPE = AuthType.APIKey -DEFAULT_CHAT_MODEL = "gpt-4.1" +DEFAULT_COMPLETION_MODEL_ID = "default_completion_model" +DEFAULT_COMPLETION_MODEL_AUTH_TYPE = AuthMethod.ApiKey +DEFAULT_COMPLETION_MODEL = "gpt-4.1" DEFAULT_EMBEDDING_MODEL_ID = "default_embedding_model" -DEFAULT_EMBEDDING_MODEL_TYPE = ModelType.Embedding -DEFAULT_EMBEDDING_MODEL_AUTH_TYPE = AuthType.APIKey +DEFAULT_EMBEDDING_MODEL_AUTH_TYPE = AuthMethod.ApiKey DEFAULT_EMBEDDING_MODEL = "text-embedding-3-large" DEFAULT_MODEL_PROVIDER = "openai" @@ -52,7 +49,7 @@ class BasicSearchDefaults: prompt: None = None k: int = 10 max_context_tokens: int = 12_000 - chat_model_id: str = DEFAULT_CHAT_MODEL_ID + completion_model_id: str = DEFAULT_COMPLETION_MODEL_ID embedding_model_id: str = DEFAULT_EMBEDDING_MODEL_ID 
@@ -84,7 +81,7 @@ class CommunityReportDefaults: text_prompt: None = None max_length: int = 2000 max_input_length: int = 8000 - model_id: str = DEFAULT_CHAT_MODEL_ID + completion_model_id: str = DEFAULT_COMPLETION_MODEL_ID model_instance_name: str = "community_reporting" @@ -113,7 +110,7 @@ class DriftSearchDefaults: local_search_n: int = 1 local_search_llm_max_gen_tokens: int | None = None local_search_llm_max_gen_completion_tokens: int | None = None - chat_model_id: str = DEFAULT_CHAT_MODEL_ID + completion_model_id: str = DEFAULT_COMPLETION_MODEL_ID embedding_model_id: str = DEFAULT_EMBEDDING_MODEL_ID @@ -121,7 +118,7 @@ class DriftSearchDefaults: class EmbedTextDefaults: """Default values for embedding text.""" - model_id: str = DEFAULT_EMBEDDING_MODEL_ID + embedding_model_id: str = DEFAULT_EMBEDDING_MODEL_ID model_instance_name: str = "text_embedding" batch_size: int = 16 batch_max_tokens: int = 8191 @@ -138,7 +135,7 @@ class ExtractClaimsDefaults: "Any claims or facts that could be relevant to information discovery." ) max_gleanings: int = 1 - model_id: str = DEFAULT_CHAT_MODEL_ID + completion_model_id: str = DEFAULT_COMPLETION_MODEL_ID model_instance_name: str = "extract_claims" @@ -151,7 +148,7 @@ class ExtractGraphDefaults: default_factory=lambda: ["organization", "person", "geo", "event"] ) max_gleanings: int = 1 - model_id: str = DEFAULT_CHAT_MODEL_ID + completion_model_id: str = DEFAULT_COMPLETION_MODEL_ID model_instance_name: str = "extract_graph" @@ -209,7 +206,7 @@ class GlobalSearchDefaults: dynamic_search_num_repeats: int = 1 dynamic_search_use_summary: bool = False dynamic_search_max_level: int = 2 - chat_model_id: str = DEFAULT_CHAT_MODEL_ID + completion_model_id: str = DEFAULT_COMPLETION_MODEL_ID @dataclass @@ -259,41 +256,6 @@ class CacheDefaults: storage: CacheStorageDefaults = field(default_factory=CacheStorageDefaults) -@dataclass -class LanguageModelDefaults: - """Default values for language model.""" - - api_key: None = None - auth_type: ClassVar[AuthType] = AuthType.APIKey - model_provider: str | None = None - encoding_model: str = "" - max_tokens: int | None = None - temperature: float = 0 - max_completion_tokens: int | None = None - reasoning_effort: str | None = None - top_p: float = 1 - n: int = 1 - frequency_penalty: float = 0.0 - presence_penalty: float = 0.0 - request_timeout: float = 600.0 - api_base: None = None - api_version: None = None - deployment_name: None = None - organization: None = None - proxy: None = None - audience: None = None - model_supports_json: None = None - tokens_per_minute: None = None - requests_per_minute: None = None - rate_limit_strategy: str | None = "static" - retry_strategy: str = "exponential_backoff" - max_retries: int = 10 - max_retry_wait: float = 10.0 - concurrent_requests: int = 25 - responses: None = None - async_mode: AsyncType = AsyncType.Threaded - - @dataclass class LocalSearchDefaults: """Default values for local search.""" @@ -305,7 +267,7 @@ class LocalSearchDefaults: top_k_entities: int = 10 top_k_relationships: int = 10 max_context_tokens: int = 12_000 - chat_model_id: str = DEFAULT_CHAT_MODEL_ID + completion_model_id: str = DEFAULT_COMPLETION_MODEL_ID embedding_model_id: str = DEFAULT_EMBEDDING_MODEL_ID @@ -356,7 +318,7 @@ class SummarizeDescriptionsDefaults: prompt: None = None max_length: int = 500 max_input_tokens: int = 4_000 - model_id: str = DEFAULT_CHAT_MODEL_ID + completion_model_id: str = DEFAULT_COMPLETION_MODEL_ID model_instance_name: str = "summarize_descriptions" @@ -380,6 +342,10 @@ class 
GraphRagConfigDefaults: """Default values for GraphRAG.""" models: dict = field(default_factory=dict) + completion_models: dict = field(default_factory=dict) + embedding_models: dict = field(default_factory=dict) + concurrent_requests: int = 25 + async_mode: AsyncType = AsyncType.Threaded reporting: ReportingDefaults = field(default_factory=ReportingDefaults) input_storage: InputStorageDefaults = field(default_factory=InputStorageDefaults) output_storage: OutputStorageDefaults = field(default_factory=OutputStorageDefaults) @@ -415,6 +381,5 @@ class GraphRagConfigDefaults: workflows: None = None -language_model_defaults = LanguageModelDefaults() vector_store_defaults = VectorStoreDefaults() graphrag_config_defaults = GraphRagConfigDefaults() diff --git a/packages/graphrag/graphrag/config/enums.py b/packages/graphrag/graphrag/config/enums.py index 8389a724cd..5084f2154b 100644 --- a/packages/graphrag/graphrag/config/enums.py +++ b/packages/graphrag/graphrag/config/enums.py @@ -21,31 +21,6 @@ def __repr__(self): return f'"{self.value}"' -class ModelType(str, Enum): - """LLMType enum class definition.""" - - # Embeddings - Embedding = "embedding" - - # Chat Completion - Chat = "chat" - - # Debug - MockChat = "mock_chat" - MockEmbedding = "mock_embedding" - - def __repr__(self): - """Get a string representation.""" - return f'"{self.value}"' - - -class AuthType(str, Enum): - """AuthType enum class definition.""" - - APIKey = "api_key" - AzureManagedIdentity = "azure_managed_identity" - - class AsyncType(str, Enum): """Enum for the type of async to use.""" diff --git a/packages/graphrag/graphrag/config/init_content.py b/packages/graphrag/graphrag/config/init_content.py index df075ada0e..fd0f5aa70e 100644 --- a/packages/graphrag/graphrag/config/init_content.py +++ b/packages/graphrag/graphrag/config/init_content.py @@ -6,7 +6,6 @@ import graphrag.config.defaults as defs from graphrag.config.defaults import ( graphrag_config_defaults, - language_model_defaults, vector_store_defaults, ) @@ -17,34 +16,29 @@ ### LLM settings ### ## There are a number of settings to tune the threading and token limits for LLM calls - check the docs. -models: - {defs.DEFAULT_CHAT_MODEL_ID}: - type: {defs.DEFAULT_CHAT_MODEL_TYPE.value} +completion_models: + {defs.DEFAULT_COMPLETION_MODEL_ID}: model_provider: {defs.DEFAULT_MODEL_PROVIDER} - auth_type: {defs.DEFAULT_CHAT_MODEL_AUTH_TYPE.value} # or azure_managed_identity + model: + auth_method: {defs.DEFAULT_COMPLETION_MODEL_AUTH_TYPE} # or azure_managed_identity api_key: ${{GRAPHRAG_API_KEY}} # set this in the generated .env file, or remove if managed identity - model: - # api_base: https://.openai.azure.com - # api_version: 2024-05-01-preview - model_supports_json: true # recommended if this is available for your model. 
- concurrent_requests: {language_model_defaults.concurrent_requests} - retry_strategy: {language_model_defaults.retry_strategy} - max_retries: {language_model_defaults.max_retries} - tokens_per_minute: null - requests_per_minute: null + retry: + type: exponential_backoff + base_delay: 2.0 + max_retries: 7 + jitter: true + +embedding_models: {defs.DEFAULT_EMBEDDING_MODEL_ID}: - type: {defs.DEFAULT_EMBEDDING_MODEL_TYPE.value} model_provider: {defs.DEFAULT_MODEL_PROVIDER} - auth_type: {defs.DEFAULT_EMBEDDING_MODEL_AUTH_TYPE.value} - api_key: ${{GRAPHRAG_API_KEY}} model: - # api_base: https://.openai.azure.com - # api_version: 2024-05-01-preview - concurrent_requests: {language_model_defaults.concurrent_requests} - retry_strategy: {language_model_defaults.retry_strategy} - max_retries: {language_model_defaults.max_retries} - tokens_per_minute: null - requests_per_minute: null + auth_method: {defs.DEFAULT_EMBEDDING_MODEL_AUTH_TYPE} + api_key: ${{GRAPHRAG_API_KEY}} + retry: + type: exponential_backoff + base_delay: 2.0 + max_retries: 7 + jitter: true ### Document processing settings ### @@ -62,8 +56,8 @@ ## connection_string and container_name must be provided input_storage: - type: {graphrag_config_defaults.input_storage.type} # [file, blob, cosmosdb] - base_dir: "{graphrag_config_defaults.input_storage.base_dir}" + type: {graphrag_config_defaults.input_storage.type} # [file, blob, cosmosdb] + base_dir: "{graphrag_config_defaults.input_storage.base_dir}" output_storage: type: {graphrag_config_defaults.output_storage.type} # [file, blob, cosmosdb] @@ -86,16 +80,16 @@ ### Workflow settings ### embed_text: - model_id: {graphrag_config_defaults.embed_text.model_id} + embedding_model_id: {graphrag_config_defaults.embed_text.embedding_model_id} extract_graph: - model_id: {graphrag_config_defaults.extract_graph.model_id} + completion_model_id: {graphrag_config_defaults.extract_graph.completion_model_id} prompt: "prompts/extract_graph.txt" entity_types: [{",".join(graphrag_config_defaults.extract_graph.entity_types)}] max_gleanings: {graphrag_config_defaults.extract_graph.max_gleanings} summarize_descriptions: - model_id: {graphrag_config_defaults.summarize_descriptions.model_id} + completion_model_id: {graphrag_config_defaults.summarize_descriptions.completion_model_id} prompt: "prompts/summarize_descriptions.txt" max_length: {graphrag_config_defaults.summarize_descriptions.max_length} @@ -108,13 +102,13 @@ extract_claims: enabled: false - model_id: {graphrag_config_defaults.extract_claims.model_id} + completion_model_id: {graphrag_config_defaults.extract_claims.completion_model_id} prompt: "prompts/extract_claims.txt" description: "{graphrag_config_defaults.extract_claims.description}" max_gleanings: {graphrag_config_defaults.extract_claims.max_gleanings} community_reports: - model_id: {graphrag_config_defaults.community_reports.model_id} + completion_model_id: {graphrag_config_defaults.community_reports.completion_model_id} graph_prompt: "prompts/community_report_graph.txt" text_prompt: "prompts/community_report_text.txt" max_length: {graphrag_config_defaults.community_reports.max_length} @@ -129,24 +123,24 @@ ## See the config docs: https://microsoft.github.io/graphrag/config/yaml/#query local_search: - chat_model_id: {graphrag_config_defaults.local_search.chat_model_id} + completion_model_id: {graphrag_config_defaults.local_search.completion_model_id} embedding_model_id: {graphrag_config_defaults.local_search.embedding_model_id} prompt: "prompts/local_search_system_prompt.txt" global_search: - 
chat_model_id: {graphrag_config_defaults.global_search.chat_model_id} + completion_model_id: {graphrag_config_defaults.global_search.completion_model_id} map_prompt: "prompts/global_search_map_system_prompt.txt" reduce_prompt: "prompts/global_search_reduce_system_prompt.txt" knowledge_prompt: "prompts/global_search_knowledge_system_prompt.txt" drift_search: - chat_model_id: {graphrag_config_defaults.drift_search.chat_model_id} + completion_model_id: {graphrag_config_defaults.drift_search.completion_model_id} embedding_model_id: {graphrag_config_defaults.drift_search.embedding_model_id} prompt: "prompts/drift_search_system_prompt.txt" reduce_prompt: "prompts/drift_search_reduce_prompt.txt" basic_search: - chat_model_id: {graphrag_config_defaults.basic_search.chat_model_id} + completion_model_id: {graphrag_config_defaults.basic_search.completion_model_id} embedding_model_id: {graphrag_config_defaults.basic_search.embedding_model_id} prompt: "prompts/basic_search_system_prompt.txt" """ diff --git a/packages/graphrag/graphrag/config/models/basic_search_config.py b/packages/graphrag/graphrag/config/models/basic_search_config.py index 66a1e68577..5b48b74d82 100644 --- a/packages/graphrag/graphrag/config/models/basic_search_config.py +++ b/packages/graphrag/graphrag/config/models/basic_search_config.py @@ -15,9 +15,9 @@ class BasicSearchConfig(BaseModel): description="The basic search prompt to use.", default=graphrag_config_defaults.basic_search.prompt, ) - chat_model_id: str = Field( + completion_model_id: str = Field( description="The model ID to use for basic search.", - default=graphrag_config_defaults.basic_search.chat_model_id, + default=graphrag_config_defaults.basic_search.completion_model_id, ) embedding_model_id: str = Field( description="The model ID to use for text embeddings.", diff --git a/packages/graphrag/graphrag/config/models/community_reports_config.py b/packages/graphrag/graphrag/config/models/community_reports_config.py index c4f920cefe..604eafe484 100644 --- a/packages/graphrag/graphrag/config/models/community_reports_config.py +++ b/packages/graphrag/graphrag/config/models/community_reports_config.py @@ -26,9 +26,9 @@ class CommunityReportPrompts: class CommunityReportsConfig(BaseModel): """Configuration section for community reports.""" - model_id: str = Field( + completion_model_id: str = Field( description="The model ID to use for community reports.", - default=graphrag_config_defaults.community_reports.model_id, + default=graphrag_config_defaults.community_reports.completion_model_id, ) model_instance_name: str = Field( description="The model singleton instance name. 
This primarily affects the cache storage partitioning.", diff --git a/packages/graphrag/graphrag/config/models/drift_search_config.py b/packages/graphrag/graphrag/config/models/drift_search_config.py index a6edf66474..ce77fc6b1f 100644 --- a/packages/graphrag/graphrag/config/models/drift_search_config.py +++ b/packages/graphrag/graphrag/config/models/drift_search_config.py @@ -19,9 +19,9 @@ class DRIFTSearchConfig(BaseModel): description="The drift search reduce prompt to use.", default=graphrag_config_defaults.drift_search.reduce_prompt, ) - chat_model_id: str = Field( + completion_model_id: str = Field( description="The model ID to use for drift search.", - default=graphrag_config_defaults.drift_search.chat_model_id, + default=graphrag_config_defaults.drift_search.completion_model_id, ) embedding_model_id: str = Field( description="The model ID to use for drift search.", diff --git a/packages/graphrag/graphrag/config/models/embed_text_config.py b/packages/graphrag/graphrag/config/models/embed_text_config.py index c33409d2f5..9720c5aec4 100644 --- a/packages/graphrag/graphrag/config/models/embed_text_config.py +++ b/packages/graphrag/graphrag/config/models/embed_text_config.py @@ -11,9 +11,9 @@ class EmbedTextConfig(BaseModel): """Configuration section for text embeddings.""" - model_id: str = Field( + embedding_model_id: str = Field( description="The model ID to use for text embeddings.", - default=graphrag_config_defaults.embed_text.model_id, + default=graphrag_config_defaults.embed_text.embedding_model_id, ) model_instance_name: str = Field( description="The model singleton instance name. This primarily affects the cache storage partitioning.", diff --git a/packages/graphrag/graphrag/config/models/extract_claims_config.py b/packages/graphrag/graphrag/config/models/extract_claims_config.py index 63fec7ac5a..db8e525a70 100644 --- a/packages/graphrag/graphrag/config/models/extract_claims_config.py +++ b/packages/graphrag/graphrag/config/models/extract_claims_config.py @@ -26,9 +26,9 @@ class ExtractClaimsConfig(BaseModel): description="Whether claim extraction is enabled.", default=graphrag_config_defaults.extract_claims.enabled, ) - model_id: str = Field( + completion_model_id: str = Field( description="The model ID to use for claim extraction.", - default=graphrag_config_defaults.extract_claims.model_id, + default=graphrag_config_defaults.extract_claims.completion_model_id, ) model_instance_name: str = Field( description="The model singleton instance name. This primarily affects the cache storage partitioning.", diff --git a/packages/graphrag/graphrag/config/models/extract_graph_config.py b/packages/graphrag/graphrag/config/models/extract_graph_config.py index 81c8df4235..22323a998b 100644 --- a/packages/graphrag/graphrag/config/models/extract_graph_config.py +++ b/packages/graphrag/graphrag/config/models/extract_graph_config.py @@ -22,9 +22,9 @@ class ExtractGraphPrompts: class ExtractGraphConfig(BaseModel): """Configuration section for entity extraction.""" - model_id: str = Field( + completion_model_id: str = Field( description="The model ID to use for text embeddings.", - default=graphrag_config_defaults.extract_graph.model_id, + default=graphrag_config_defaults.extract_graph.completion_model_id, ) model_instance_name: str = Field( description="The model singleton instance name. 
This primarily affects the cache storage partitioning.", diff --git a/packages/graphrag/graphrag/config/models/global_search_config.py b/packages/graphrag/graphrag/config/models/global_search_config.py index c350efcea6..1b48500ed2 100644 --- a/packages/graphrag/graphrag/config/models/global_search_config.py +++ b/packages/graphrag/graphrag/config/models/global_search_config.py @@ -19,9 +19,9 @@ class GlobalSearchConfig(BaseModel): description="The global search reducer to use.", default=graphrag_config_defaults.global_search.reduce_prompt, ) - chat_model_id: str = Field( + completion_model_id: str = Field( description="The model ID to use for global search.", - default=graphrag_config_defaults.global_search.chat_model_id, + default=graphrag_config_defaults.global_search.completion_model_id, ) knowledge_prompt: str | None = Field( description="The global search general prompt to use.", diff --git a/packages/graphrag/graphrag/config/models/graph_rag_config.py b/packages/graphrag/graphrag/config/models/graph_rag_config.py index 0bb74b2385..07a7eba074 100644 --- a/packages/graphrag/graphrag/config/models/graph_rag_config.py +++ b/packages/graphrag/graphrag/config/models/graph_rag_config.py @@ -10,13 +10,14 @@ from graphrag_cache import CacheConfig from graphrag_chunking.chunking_config import ChunkingConfig from graphrag_input import InputConfig +from graphrag_llm.config import ModelConfig from graphrag_storage import StorageConfig, StorageType from graphrag_vectors import IndexSchema, VectorStoreConfig, VectorStoreType from pydantic import BaseModel, Field, model_validator from graphrag.config.defaults import graphrag_config_defaults from graphrag.config.embeddings import all_embeddings -from graphrag.config.enums import ReportingType +from graphrag.config.enums import AsyncType, ReportingType from graphrag.config.models.basic_search_config import BasicSearchConfig from graphrag.config.models.cluster_graph_config import ClusterGraphConfig from graphrag.config.models.community_reports_config import CommunityReportsConfig @@ -26,7 +27,6 @@ from graphrag.config.models.extract_graph_config import ExtractGraphConfig from graphrag.config.models.extract_graph_nlp_config import ExtractGraphNLPConfig from graphrag.config.models.global_search_config import GlobalSearchConfig -from graphrag.config.models.language_model_config import LanguageModelConfig from graphrag.config.models.local_search_config import LocalSearchConfig from graphrag.config.models.prune_graph_config import PruneGraphConfig from graphrag.config.models.reporting_config import ReportingConfig @@ -34,12 +34,6 @@ from graphrag.config.models.summarize_descriptions_config import ( SummarizeDescriptionsConfig, ) -from graphrag.language_model.providers.litellm.services.rate_limiter.rate_limiter_factory import ( - RateLimiterFactory, -) -from graphrag.language_model.providers.litellm.services.retry.retry_factory import ( - RetryFactory, -) class GraphRagConfig(BaseModel): @@ -53,54 +47,25 @@ def __str__(self): """Get a string representation.""" return self.model_dump_json(indent=4) - models: dict[str, LanguageModelConfig] = Field( - description="Available language model configurations.", - default=graphrag_config_defaults.models, + completion_models: dict[str, ModelConfig] = Field( + description="Available completion model configurations.", + default=graphrag_config_defaults.completion_models, ) - def _validate_retry_services(self) -> None: - """Validate the retry services configuration.""" - retry_factory = RetryFactory() - - for model_id, 
model in self.models.items(): - if model.retry_strategy != "none": - if model.retry_strategy not in retry_factory: - msg = f"Retry strategy '{model.retry_strategy}' for model '{model_id}' is not registered. Available strategies: {', '.join(retry_factory.keys())}" - raise ValueError(msg) - - _ = retry_factory.create( - strategy=model.retry_strategy, - init_args={ - "max_retries": model.max_retries, - "max_retry_wait": model.max_retry_wait, - }, - ) - - def _validate_rate_limiter_services(self) -> None: - """Validate the rate limiter services configuration.""" - rate_limiter_factory = RateLimiterFactory() + embedding_models: dict[str, ModelConfig] = Field( + description="Available embedding model configurations.", + default=graphrag_config_defaults.embedding_models, + ) - for model_id, model in self.models.items(): - if model.rate_limit_strategy is not None: - if model.rate_limit_strategy not in rate_limiter_factory: - msg = f"Rate Limiter strategy '{model.rate_limit_strategy}' for model '{model_id}' is not registered. Available strategies: {', '.join(rate_limiter_factory.keys())}" - raise ValueError(msg) + concurrent_requests: int = Field( + description="The default number of concurrent requests to make to language models.", + default=graphrag_config_defaults.concurrent_requests, + ) - rpm = ( - model.requests_per_minute - if type(model.requests_per_minute) is int - else None - ) - tpm = ( - model.tokens_per_minute - if type(model.tokens_per_minute) is int - else None - ) - if rpm is not None or tpm is not None: - _ = rate_limiter_factory.create( - strategy=model.rate_limit_strategy, - init_args={"rpm": rpm, "tpm": tpm}, - ) + async_mode: AsyncType = Field( + description="The default asynchronous mode to use for language model requests.", + default=graphrag_config_defaults.async_mode, + ) input: InputConfig = Field( description="The input configuration.", default=InputConfig() @@ -299,22 +264,41 @@ def _validate_vector_store_db_uri(self) -> None: store.db_uri = graphrag_config_defaults.vector_store.db_uri store.db_uri = str(Path(store.db_uri).resolve()) - def _validate_factories(self) -> None: - """Validate the factories used in the configuration.""" - self._validate_retry_services() - self._validate_rate_limiter_services() + def get_completion_model_config(self, model_id: str) -> ModelConfig: + """Get a completion model configuration by ID. + + Parameters + ---------- + model_id : str + The ID of the model to get. Should match an ID in the completion_models list. + + Returns + ------- + ModelConfig + The model configuration if found. + + Raises + ------ + ValueError + If the model ID is not found in the configuration. + """ + if model_id not in self.completion_models: + err_msg = f"Model ID {model_id} not found in completion_models. Please rerun `graphrag init` and set the completion_models configuration." + raise ValueError(err_msg) + + return self.completion_models[model_id] - def get_language_model_config(self, model_id: str) -> LanguageModelConfig: - """Get a model configuration by ID. + def get_embedding_model_config(self, model_id: str) -> ModelConfig: + """Get an embedding model configuration by ID. Parameters ---------- model_id : str - The ID of the model to get. Should match an ID in the models list. + The ID of the model to get. Should match an ID in the embedding_models list. Returns ------- - LanguageModelConfig + ModelConfig The model configuration if found. 
Raises @@ -322,11 +306,11 @@ def get_language_model_config(self, model_id: str) -> LanguageModelConfig: ValueError If the model ID is not found in the configuration. """ - if model_id not in self.models: - err_msg = f"Model ID {model_id} not found in configuration. Please rerun `graphrag init` and set the model configuration." + if model_id not in self.embedding_models: + err_msg = f"Model ID {model_id} not found in embedding_models. Please rerun `graphrag init` and set the embedding_models configuration." raise ValueError(err_msg) - return self.models[model_id] + return self.embedding_models[model_id] @model_validator(mode="after") def _validate_model(self): @@ -336,5 +320,4 @@ def _validate_model(self): self._validate_output_base_dir() self._validate_update_output_storage_base_dir() self._validate_vector_store() - self._validate_factories() return self diff --git a/packages/graphrag/graphrag/config/models/language_model_config.py b/packages/graphrag/graphrag/config/models/language_model_config.py deleted file mode 100644 index 11c46d76c7..0000000000 --- a/packages/graphrag/graphrag/config/models/language_model_config.py +++ /dev/null @@ -1,345 +0,0 @@ -# Copyright (c) 2024 Microsoft Corporation. -# Licensed under the MIT License - -"""Language model configuration.""" - -import logging - -from pydantic import BaseModel, Field, model_validator - -from graphrag.config.defaults import language_model_defaults -from graphrag.config.enums import AsyncType, AuthType, ModelType -from graphrag.config.errors import ( - ApiKeyMissingError, - AzureApiBaseMissingError, - AzureApiVersionMissingError, - ConflictingSettingsError, -) -from graphrag.language_model.factory import ChatModelFactory, EmbeddingModelFactory - -logger = logging.getLogger(__name__) - - -class LanguageModelConfig(BaseModel): - """Language model configuration.""" - - api_key: str | None = Field( - description="The API key to use for the LLM service.", - default=language_model_defaults.api_key, - ) - - def _validate_api_key(self) -> None: - """Validate the API key. - - API Key is required when using OpenAI API - or when using Azure API with API Key authentication. - For the time being, this check is extra verbose for clarity. - It will also raise an exception if an API Key is provided - when one is not expected such as the case of using Azure - Managed Identity. - - Raises - ------ - ApiKeyMissingError - If the API key is missing and is required. - """ - if self.auth_type == AuthType.APIKey and ( - self.api_key is None or self.api_key.strip() == "" - ): - raise ApiKeyMissingError( - self.type, - self.auth_type.value, - ) - - if (self.auth_type == AuthType.AzureManagedIdentity) and ( - self.api_key is not None and self.api_key.strip() != "" - ): - msg = "API Key should not be provided when using Azure Managed Identity. Please rerun `graphrag init` and remove the api_key when using Azure Managed Identity." - raise ConflictingSettingsError(msg) - - auth_type: AuthType = Field( - description="The authentication type.", - default=language_model_defaults.auth_type, - ) - - def _validate_auth_type(self) -> None: - """Validate the authentication type. - - auth_type must be api_key when using OpenAI and - can be either api_key or azure_managed_identity when using AOI. - - Raises - ------ - ConflictingSettingsError - If the Azure authentication type conflicts with the model being used. 
- """ - if ( - self.auth_type == AuthType.AzureManagedIdentity - and self.model_provider != "azure" - ): - msg = f"auth_type of azure_managed_identity is not supported for model type {self.type}. Please rerun `graphrag init` and set the auth_type to api_key." - raise ConflictingSettingsError(msg) - - type: ModelType | str = Field(description="The type of LLM model to use.") - - def _validate_type(self) -> None: - """Validate the model type. - - Raises - ------ - KeyError - If the model name is not recognized. - """ - # Type should be contained by the registered models - if ( - self.type not in ChatModelFactory() - and self.type not in EmbeddingModelFactory() - ): - msg = f"Model type {self.type} is not recognized, must be one of {ChatModelFactory().keys() + EmbeddingModelFactory().keys()}." - raise KeyError(msg) - - model_provider: str | None = Field( - description="The model provider to use.", - default=language_model_defaults.model_provider, - ) - - def _validate_model_provider(self) -> None: - """Validate the model provider. - - Required when using Litellm. - - Raises - ------ - KeyError - If the model provider is not recognized. - """ - if (self.type == ModelType.Chat or self.type == ModelType.Embedding) and ( - self.model_provider is None or self.model_provider.strip() == "" - ): - msg = f"Model provider must be specified when using type == {self.type}." - raise KeyError(msg) - - model: str = Field(description="The LLM model to use.") - encoding_model: str = Field( - description="The encoding model to use", - default=language_model_defaults.encoding_model, - ) - - api_base: str | None = Field( - description="The base URL for the LLM API.", - default=language_model_defaults.api_base, - ) - - def _validate_api_base(self) -> None: - """Validate the API base. - - Required when using AOI. - - Raises - ------ - AzureApiBaseMissingError - If the API base is missing and is required. - """ - if (self.model_provider == "azure") and ( - self.api_base is None or self.api_base.strip() == "" - ): - raise AzureApiBaseMissingError(self.type) - - api_version: str | None = Field( - description="The version of the LLM API to use.", - default=language_model_defaults.api_version, - ) - - def _validate_api_version(self) -> None: - """Validate the API version. - - Required when using AOI. - - Raises - ------ - AzureApiBaseMissingError - If the API base is missing and is required. - """ - if (self.model_provider == "azure") and ( - self.api_version is None or self.api_version.strip() == "" - ): - raise AzureApiVersionMissingError(self.type) - - deployment_name: str | None = Field( - description="The deployment name to use for the LLM service.", - default=language_model_defaults.deployment_name, - ) - - def _validate_deployment_name(self) -> None: - """Validate the deployment name. - - Required when using AOI. - - Raises - ------ - AzureDeploymentNameMissingError - If the deployment name is missing and is required. - """ - if (self.model_provider == "azure") and ( - self.deployment_name is None or self.deployment_name.strip() == "" - ): - msg = f"deployment_name is not set for Azure-hosted model. This will default to your model name ({self.model}). If different, this should be set." 
- logger.debug(msg) - - organization: str | None = Field( - description="The organization to use for the LLM service.", - default=language_model_defaults.organization, - ) - proxy: str | None = Field( - description="The proxy to use for the LLM service.", - default=language_model_defaults.proxy, - ) - audience: str | None = Field( - description="Azure resource URI to use with managed identity for the llm connection.", - default=language_model_defaults.audience, - ) - model_supports_json: bool | None = Field( - description="Whether the model supports JSON output mode.", - default=language_model_defaults.model_supports_json, - ) - request_timeout: float = Field( - description="The request timeout to use.", - default=language_model_defaults.request_timeout, - ) - tokens_per_minute: int | None = Field( - description="The number of tokens per minute to use for the LLM service.", - default=language_model_defaults.tokens_per_minute, - ) - - def _validate_tokens_per_minute(self) -> None: - """Validate the tokens per minute. - - Raises - ------ - ValueError - If the tokens per minute is less than 0. - """ - # If the value is a number, check if it is less than 1 - if isinstance(self.tokens_per_minute, int) and self.tokens_per_minute < 1: - msg = f"Tokens per minute must be a non zero positive number or null. Suggested value: {language_model_defaults.tokens_per_minute}." - raise ValueError(msg) - - requests_per_minute: int | None = Field( - description="The number of requests per minute to use for the LLM service.", - default=language_model_defaults.requests_per_minute, - ) - - def _validate_requests_per_minute(self) -> None: - """Validate the requests per minute. - - Raises - ------ - ValueError - If the requests per minute is less than 0. - """ - # If the value is a number, check if it is less than 1 - if isinstance(self.requests_per_minute, int) and self.requests_per_minute < 1: - msg = f"Requests per minute must be a non zero positive number or null. Suggested value: {language_model_defaults.requests_per_minute}." - raise ValueError(msg) - - rate_limit_strategy: str | None = Field( - description="The rate limit strategy to use for the LLM service.", - default=language_model_defaults.rate_limit_strategy, - ) - - retry_strategy: str = Field( - description="The retry strategy to use for the LLM service.", - default=language_model_defaults.retry_strategy, - ) - max_retries: int = Field( - description="The maximum number of retries to use for the LLM service.", - default=language_model_defaults.max_retries, - ) - - def _validate_max_retries(self) -> None: - """Validate the maximum retries. - - Raises - ------ - ValueError - If the maximum retries is less than 0. - """ - if self.max_retries < 1: - msg = f"Maximum retries must be greater than or equal to 1. Suggested value: {language_model_defaults.max_retries}." 
- raise ValueError(msg) - - max_retry_wait: float = Field( - description="The maximum retry wait to use for the LLM service.", - default=language_model_defaults.max_retry_wait, - ) - concurrent_requests: int = Field( - description="Whether to use concurrent requests for the LLM service.", - default=language_model_defaults.concurrent_requests, - ) - async_mode: AsyncType = Field( - description="The async mode to use.", default=language_model_defaults.async_mode - ) - responses: list[str | BaseModel] | None = Field( - default=language_model_defaults.responses, - description="Static responses to use in mock mode.", - ) - max_tokens: int | None = Field( - description="The maximum number of tokens to generate.", - default=language_model_defaults.max_tokens, - ) - temperature: float = Field( - description="The temperature to use for token generation.", - default=language_model_defaults.temperature, - ) - max_completion_tokens: int | None = Field( - description="The maximum number of tokens to consume. This includes reasoning tokens for the o* reasoning models.", - default=language_model_defaults.max_completion_tokens, - ) - reasoning_effort: str | None = Field( - description="Level of effort OpenAI reasoning models should expend. Supported options are 'low', 'medium', 'high'; and OAI defaults to 'medium'.", - default=language_model_defaults.reasoning_effort, - ) - top_p: float = Field( - description="The top-p value to use for token generation.", - default=language_model_defaults.top_p, - ) - n: int = Field( - description="The number of completions to generate.", - default=language_model_defaults.n, - ) - frequency_penalty: float = Field( - description="The frequency penalty to use for token generation.", - default=language_model_defaults.frequency_penalty, - ) - presence_penalty: float = Field( - description="The presence penalty to use for token generation.", - default=language_model_defaults.presence_penalty, - ) - - def _validate_azure_settings(self) -> None: - """Validate the Azure settings. - - Raises - ------ - AzureApiBaseMissingError - If the API base is missing and is required. - AzureApiVersionMissingError - If the API version is missing and is required. - AzureDeploymentNameMissingError - If the deployment name is missing and is required. 
- """ - self._validate_api_base() - self._validate_api_version() - self._validate_deployment_name() - - @model_validator(mode="after") - def _validate_model(self): - self._validate_type() - self._validate_model_provider() - self._validate_auth_type() - self._validate_api_key() - self._validate_tokens_per_minute() - self._validate_requests_per_minute() - self._validate_max_retries() - self._validate_azure_settings() - return self diff --git a/packages/graphrag/graphrag/config/models/local_search_config.py b/packages/graphrag/graphrag/config/models/local_search_config.py index 4cf31ffe0e..c9adbe9098 100644 --- a/packages/graphrag/graphrag/config/models/local_search_config.py +++ b/packages/graphrag/graphrag/config/models/local_search_config.py @@ -15,9 +15,9 @@ class LocalSearchConfig(BaseModel): description="The local search prompt to use.", default=graphrag_config_defaults.local_search.prompt, ) - chat_model_id: str = Field( + completion_model_id: str = Field( description="The model ID to use for local search.", - default=graphrag_config_defaults.local_search.chat_model_id, + default=graphrag_config_defaults.local_search.completion_model_id, ) embedding_model_id: str = Field( description="The model ID to use for text embeddings.", diff --git a/packages/graphrag/graphrag/config/models/summarize_descriptions_config.py b/packages/graphrag/graphrag/config/models/summarize_descriptions_config.py index 024d2d964b..ac8f1dbca3 100644 --- a/packages/graphrag/graphrag/config/models/summarize_descriptions_config.py +++ b/packages/graphrag/graphrag/config/models/summarize_descriptions_config.py @@ -22,9 +22,9 @@ class SummarizeDescriptionsPrompts: class SummarizeDescriptionsConfig(BaseModel): """Configuration section for description summarization.""" - model_id: str = Field( + completion_model_id: str = Field( description="The model ID to use for summarization.", - default=graphrag_config_defaults.summarize_descriptions.model_id, + default=graphrag_config_defaults.summarize_descriptions.completion_model_id, ) model_instance_name: str = Field( description="The model singleton instance name. 
This primarily affects the cache storage partitioning.", diff --git a/packages/graphrag/graphrag/index/operations/embed_text/embed_text.py b/packages/graphrag/graphrag/index/operations/embed_text/embed_text.py index 2e075ac655..af0b79bbb5 100644 --- a/packages/graphrag/graphrag/index/operations/embed_text/embed_text.py +++ b/packages/graphrag/graphrag/index/operations/embed_text/embed_text.py @@ -4,15 +4,18 @@ """A module containing embed_text method definition.""" import logging +from typing import TYPE_CHECKING import numpy as np import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag_vectors import VectorStore, VectorStoreDocument from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.index.operations.embed_text.run_embed_text import run_embed_text -from graphrag.language_model.protocol.base import EmbeddingModel -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.embedding import LLMEmbedding logger = logging.getLogger(__name__) @@ -20,7 +23,7 @@ async def embed_text( input: pd.DataFrame, callbacks: WorkflowCallbacks, - model: EmbeddingModel, + model: "LLMEmbedding", tokenizer: Tokenizer, embed_column: str, batch_size: int, diff --git a/packages/graphrag/graphrag/index/operations/embed_text/run_embed_text.py b/packages/graphrag/graphrag/index/operations/embed_text/run_embed_text.py index 43ed669567..6755c146bb 100644 --- a/packages/graphrag/graphrag/index/operations/embed_text/run_embed_text.py +++ b/packages/graphrag/graphrag/index/operations/embed_text/run_embed_text.py @@ -6,15 +6,18 @@ import asyncio import logging from dataclasses import dataclass +from typing import TYPE_CHECKING import numpy as np from graphrag_chunking.token_chunker import split_text_on_tokens +from graphrag_llm.tokenizer import Tokenizer from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.index.utils.is_null import is_null -from graphrag.language_model.protocol.base import EmbeddingModel from graphrag.logger.progress import ProgressTicker, progress_ticker -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.embedding import LLMEmbedding logger = logging.getLogger(__name__) @@ -29,7 +32,7 @@ class TextEmbeddingResult: async def run_embed_text( input: list[str], callbacks: WorkflowCallbacks, - model: EmbeddingModel, + model: "LLMEmbedding", tokenizer: Tokenizer, batch_size: int, batch_max_tokens: int, @@ -71,15 +74,15 @@ async def run_embed_text( async def _execute( - model: EmbeddingModel, + model: "LLMEmbedding", chunks: list[list[str]], tick: ProgressTicker, semaphore: asyncio.Semaphore, ) -> list[list[float]]: async def embed(chunk: list[str]): async with semaphore: - chunk_embeddings = await model.aembed_batch(chunk) - result = np.array(chunk_embeddings) + embeddings_response = await model.embedding_async(input=chunk) + result = np.array(embeddings_response.embeddings) tick(1) return result diff --git a/packages/graphrag/graphrag/index/operations/extract_covariates/claim_extractor.py b/packages/graphrag/graphrag/index/operations/extract_covariates/claim_extractor.py index 99db5d7e2b..a334f360c2 100644 --- a/packages/graphrag/graphrag/index/operations/extract_covariates/claim_extractor.py +++ b/packages/graphrag/graphrag/index/operations/extract_covariates/claim_extractor.py @@ -6,16 +6,23 @@ import logging import traceback from dataclasses import dataclass -from typing import Any +from typing import TYPE_CHECKING, Any + +from graphrag_llm.utils 
import ( + CompletionMessagesBuilder, +) from graphrag.config.defaults import graphrag_config_defaults from graphrag.index.typing.error_handler import ErrorHandlerFn -from graphrag.language_model.protocol.base import ChatModel from graphrag.prompts.index.extract_claims import ( CONTINUE_PROMPT, LOOP_PROMPT, ) +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + INPUT_TEXT_KEY = "input_text" INPUT_ENTITY_SPEC_KEY = "entity_specs" INPUT_CLAIM_DESCRIPTION_KEY = "claim_description" @@ -39,14 +46,14 @@ class ClaimExtractorResult: class ClaimExtractor: """Claim extractor class definition.""" - _model: ChatModel + _model: "LLMCompletion" _extraction_prompt: str _max_gleanings: int _on_error: ErrorHandlerFn def __init__( self, - model: ChatModel, + model: "LLMCompletion", extraction_prompt: str, max_gleanings: int | None = None, on_error: ErrorHandlerFn | None = None, @@ -112,26 +119,31 @@ def _clean_claim( async def _process_document( self, text: str, claim_description: str, entity_spec: dict ) -> list[dict]: - response = await self._model.achat( + messages_builder = CompletionMessagesBuilder().add_user_message( self._extraction_prompt.format(**{ INPUT_TEXT_KEY: text, INPUT_CLAIM_DESCRIPTION_KEY: claim_description, INPUT_ENTITY_SPEC_KEY: entity_spec, }) ) - results = response.output.content or "" + + response: LLMCompletionResponse = await self._model.completion_async( + messages=messages_builder.build(), + ) # type: ignore + results = response.content + messages_builder.add_assistant_message(results) claims = results.strip().removesuffix(COMPLETION_DELIMITER) # if gleanings are specified, enter a loop to extract more claims # there are two exit criteria: (a) we hit the configured max, (b) the model says there are no more claims if self._max_gleanings > 0: for i in range(self._max_gleanings): - response = await self._model.achat( - CONTINUE_PROMPT, - name=f"extract-continuation-{i}", - history=response.history, - ) - extension = response.output.content or "" + messages_builder.add_user_message(CONTINUE_PROMPT) + response: LLMCompletionResponse = await self._model.completion_async( + messages=messages_builder.build(), + ) # type: ignore + extension = response.content + messages_builder.add_assistant_message(extension) claims += RECORD_DELIMITER + extension.strip().removesuffix( COMPLETION_DELIMITER ) @@ -140,13 +152,12 @@ async def _process_document( if i >= self._max_gleanings - 1: break - response = await self._model.achat( - LOOP_PROMPT, - name=f"extract-loopcheck-{i}", - history=response.history, - ) + messages_builder.add_user_message(LOOP_PROMPT) + response: LLMCompletionResponse = await self._model.completion_async( + messages=messages_builder.build(), + ) # type: ignore - if response.output.content != "Y": + if response.content != "Y": break return self._parse_claim_tuples(results) diff --git a/packages/graphrag/graphrag/index/operations/extract_covariates/extract_covariates.py b/packages/graphrag/graphrag/index/operations/extract_covariates/extract_covariates.py index bc2e1fa9de..2d366511e2 100644 --- a/packages/graphrag/graphrag/index/operations/extract_covariates/extract_covariates.py +++ b/packages/graphrag/graphrag/index/operations/extract_covariates/extract_covariates.py @@ -6,7 +6,7 @@ import logging from collections.abc import Iterable from dataclasses import asdict -from typing import Any +from typing import TYPE_CHECKING, Any import pandas as pd @@ -18,7 +18,9 @@ CovariateExtractionResult, ) from 
graphrag.index.utils.derive_from_rows import derive_from_rows -from graphrag.language_model.protocol.base import ChatModel + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion logger = logging.getLogger(__name__) @@ -26,7 +28,7 @@ async def extract_covariates( input: pd.DataFrame, callbacks: WorkflowCallbacks, - model: ChatModel, + model: "LLMCompletion", column: str, covariate_type: str, max_gleanings: int, @@ -75,7 +77,7 @@ async def run_extract_claims( input: str | Iterable[str], entity_types: list[str], resolved_entities_map: dict[str, str], - model: ChatModel, + model: "LLMCompletion", max_gleanings: int, claim_description: str, prompt: str, diff --git a/packages/graphrag/graphrag/index/operations/extract_graph/extract_graph.py b/packages/graphrag/graphrag/index/operations/extract_graph/extract_graph.py index 96661c412d..cb7ccfa6ba 100644 --- a/packages/graphrag/graphrag/index/operations/extract_graph/extract_graph.py +++ b/packages/graphrag/graphrag/index/operations/extract_graph/extract_graph.py @@ -4,6 +4,7 @@ """A module containing extract_graph method.""" import logging +from typing import TYPE_CHECKING import pandas as pd @@ -11,7 +12,9 @@ from graphrag.config.enums import AsyncType from graphrag.index.operations.extract_graph.graph_extractor import GraphExtractor from graphrag.index.utils.derive_from_rows import derive_from_rows -from graphrag.language_model.protocol.base import ChatModel + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion logger = logging.getLogger(__name__) @@ -21,7 +24,7 @@ async def extract_graph( callbacks: WorkflowCallbacks, text_column: str, id_column: str, - model: ChatModel, + model: "LLMCompletion", prompt: str, entity_types: list[str], max_gleanings: int, @@ -72,7 +75,7 @@ async def _run_extract_graph( text: str, source_id: str, entity_types: list[str], - model: ChatModel, + model: "LLMCompletion", prompt: str, max_gleanings: int, ) -> tuple[pd.DataFrame, pd.DataFrame]: diff --git a/packages/graphrag/graphrag/index/operations/extract_graph/graph_extractor.py b/packages/graphrag/graphrag/index/operations/extract_graph/graph_extractor.py index 6d37bf4688..ce78c0f6b9 100644 --- a/packages/graphrag/graphrag/index/operations/extract_graph/graph_extractor.py +++ b/packages/graphrag/graphrag/index/operations/extract_graph/graph_extractor.py @@ -6,18 +6,24 @@ import logging import re import traceback -from typing import Any +from typing import TYPE_CHECKING, Any import pandas as pd +from graphrag_llm.utils import ( + CompletionMessagesBuilder, +) from graphrag.index.typing.error_handler import ErrorHandlerFn from graphrag.index.utils.string import clean_str -from graphrag.language_model.protocol.base import ChatModel from graphrag.prompts.index.extract_graph import ( CONTINUE_PROMPT, LOOP_PROMPT, ) +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + INPUT_TEXT_KEY = "input_text" RECORD_DELIMITER_KEY = "record_delimiter" COMPLETION_DELIMITER_KEY = "completion_delimiter" @@ -32,14 +38,14 @@ class GraphExtractor: """Unipartite graph extractor class definition.""" - _model: ChatModel + _model: "LLMCompletion" _extraction_prompt: str _max_gleanings: int _on_error: ErrorHandlerFn def __init__( self, - model: ChatModel, + model: "LLMCompletion", prompt: str, max_gleanings: int, on_error: ErrorHandlerFn | None = None, @@ -77,35 +83,40 @@ async def __call__( ) async def _process_document(self, text: str, entity_types: list[str]) -> str: - response = 
await self._model.achat( + messages_builder = CompletionMessagesBuilder().add_user_message( self._extraction_prompt.format(**{ INPUT_TEXT_KEY: text, ENTITY_TYPES_KEY: ",".join(entity_types), - }), + }) ) - results = response.output.content or "" + + response: LLMCompletionResponse = await self._model.completion_async( + messages=messages_builder.build(), + ) # type: ignore + results = response.content + messages_builder.add_assistant_message(results) # if gleanings are specified, enter a loop to extract more entities # there are two exit criteria: (a) we hit the configured max, (b) the model says there are no more entities if self._max_gleanings > 0: for i in range(self._max_gleanings): - response = await self._model.achat( - CONTINUE_PROMPT, - name=f"extract-continuation-{i}", - history=response.history, - ) - results += response.output.content or "" + messages_builder.add_user_message(CONTINUE_PROMPT) + response: LLMCompletionResponse = await self._model.completion_async( + messages=messages_builder.build(), + ) # type: ignore + response_text = response.content + messages_builder.add_assistant_message(response_text) + results += response_text # if this is the final glean, don't bother updating the continuation flag if i >= self._max_gleanings - 1: break - response = await self._model.achat( - LOOP_PROMPT, - name=f"extract-loopcheck-{i}", - history=response.history, - ) - if response.output.content != "Y": + messages_builder.add_user_message(LOOP_PROMPT) + response: LLMCompletionResponse = await self._model.completion_async( + messages=messages_builder.build(), + ) # type: ignore + if response.content != "Y": break return results diff --git a/packages/graphrag/graphrag/index/operations/summarize_communities/build_mixed_context.py b/packages/graphrag/graphrag/index/operations/summarize_communities/build_mixed_context.py index 6ae59a40c8..27f4d35e7d 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_communities/build_mixed_context.py +++ b/packages/graphrag/graphrag/index/operations/summarize_communities/build_mixed_context.py @@ -4,12 +4,12 @@ """A module containing build_mixed_context method definition.""" import pandas as pd +from graphrag_llm.tokenizer import Tokenizer import graphrag.data_model.schemas as schemas from graphrag.index.operations.summarize_communities.graph_context.sort_context import ( sort_context, ) -from graphrag.tokenizer.tokenizer import Tokenizer def build_mixed_context( diff --git a/packages/graphrag/graphrag/index/operations/summarize_communities/community_reports_extractor.py b/packages/graphrag/graphrag/index/operations/summarize_communities/community_reports_extractor.py index 3ca29251a0..4513abe95b 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_communities/community_reports_extractor.py +++ b/packages/graphrag/graphrag/index/operations/summarize_communities/community_reports_extractor.py @@ -6,11 +6,14 @@ import logging import traceback from dataclasses import dataclass +from typing import TYPE_CHECKING from pydantic import BaseModel, Field from graphrag.index.typing.error_handler import ErrorHandlerFn -from graphrag.language_model.protocol.base import ChatModel + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion logger = logging.getLogger(__name__) @@ -49,7 +52,7 @@ class CommunityReportsResult: class CommunityReportsExtractor: """Community reports extractor class definition.""" - _model: ChatModel + _model: "LLMCompletion" _extraction_prompt: str _output_formatter_prompt: str _on_error: ErrorHandlerFn 
@@ -57,7 +60,7 @@ class CommunityReportsExtractor: def __init__( self, - model: ChatModel, + model: "LLMCompletion", extraction_prompt: str, max_report_length: int, on_error: ErrorHandlerFn | None = None, @@ -76,14 +79,12 @@ async def __call__(self, input_text: str): INPUT_TEXT_KEY: input_text, MAX_LENGTH_KEY: str(self._max_report_length), }) - response = await self._model.achat( - prompt, - json=True, # Leaving this as True to avoid creating new cache entries - name="create_community_report", - json_model=CommunityReportResponse, # A model is required when using json mode + response = await self._model.completion_async( + messages=prompt, + response_format=CommunityReportResponse, # A model is required when using json mode ) - output = response.parsed_response + output = response.formatted_response # type: ignore except Exception as e: logger.exception("error generating community report") self._on_error(e, traceback.format_exc(), None) diff --git a/packages/graphrag/graphrag/index/operations/summarize_communities/graph_context/context_builder.py b/packages/graphrag/graphrag/index/operations/summarize_communities/graph_context/context_builder.py index 34d281b3f8..e3801f4ae3 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_communities/graph_context/context_builder.py +++ b/packages/graphrag/graphrag/index/operations/summarize_communities/graph_context/context_builder.py @@ -7,6 +7,7 @@ from typing import cast import pandas as pd +from graphrag_llm.tokenizer import Tokenizer import graphrag.data_model.schemas as schemas from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks @@ -30,7 +31,6 @@ where_column_equals, ) from graphrag.logger.progress import progress_iterable -from graphrag.tokenizer.tokenizer import Tokenizer logger = logging.getLogger(__name__) diff --git a/packages/graphrag/graphrag/index/operations/summarize_communities/graph_context/sort_context.py b/packages/graphrag/graphrag/index/operations/summarize_communities/graph_context/sort_context.py index 3b7863f2f8..18d843e4c5 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_communities/graph_context/sort_context.py +++ b/packages/graphrag/graphrag/index/operations/summarize_communities/graph_context/sort_context.py @@ -3,9 +3,9 @@ """Sort context by degree in descending order.""" import pandas as pd +from graphrag_llm.tokenizer import Tokenizer import graphrag.data_model.schemas as schemas -from graphrag.tokenizer.tokenizer import Tokenizer def sort_context( diff --git a/packages/graphrag/graphrag/index/operations/summarize_communities/summarize_communities.py b/packages/graphrag/graphrag/index/operations/summarize_communities/summarize_communities.py index 709a4e3b5a..ddc3706652 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_communities/summarize_communities.py +++ b/packages/graphrag/graphrag/index/operations/summarize_communities/summarize_communities.py @@ -5,8 +5,10 @@ import logging from collections.abc import Callable +from typing import TYPE_CHECKING import pandas as pd +from graphrag_llm.tokenizer import Tokenizer import graphrag.data_model.schemas as schemas from graphrag.callbacks.noop_workflow_callbacks import NoopWorkflowCallbacks @@ -24,9 +26,10 @@ get_levels, ) from graphrag.index.utils.derive_from_rows import derive_from_rows -from graphrag.language_model.protocol.base import ChatModel from graphrag.logger.progress import progress_ticker -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import 
LLMCompletion logger = logging.getLogger(__name__) @@ -37,7 +40,7 @@ async def summarize_communities( local_contexts, level_context_builder: Callable, callbacks: WorkflowCallbacks, - model: ChatModel, + model: "LLMCompletion", prompt: str, tokenizer: Tokenizer, max_input_length: int, @@ -98,7 +101,7 @@ async def run_generate(record): async def _generate_report( runner: CommunityReportsStrategy, - model: ChatModel, + model: "LLMCompletion", extraction_prompt: str, community_id: int, community_level: int, @@ -120,7 +123,7 @@ async def run_extractor( community: str | int, input: str, level: int, - model: ChatModel, + model: "LLMCompletion", extraction_prompt: str, max_report_length: int, ) -> CommunityReport | None: diff --git a/packages/graphrag/graphrag/index/operations/summarize_communities/text_unit_context/context_builder.py b/packages/graphrag/graphrag/index/operations/summarize_communities/text_unit_context/context_builder.py index 8d6e0ae2e0..6eaf05c948 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_communities/text_unit_context/context_builder.py +++ b/packages/graphrag/graphrag/index/operations/summarize_communities/text_unit_context/context_builder.py @@ -7,6 +7,7 @@ from typing import cast import pandas as pd +from graphrag_llm.tokenizer import Tokenizer import graphrag.data_model.schemas as schemas from graphrag.index.operations.summarize_communities.build_mixed_context import ( @@ -18,7 +19,6 @@ from graphrag.index.operations.summarize_communities.text_unit_context.sort_context import ( sort_context, ) -from graphrag.tokenizer.tokenizer import Tokenizer logger = logging.getLogger(__name__) diff --git a/packages/graphrag/graphrag/index/operations/summarize_communities/text_unit_context/sort_context.py b/packages/graphrag/graphrag/index/operations/summarize_communities/text_unit_context/sort_context.py index 7bad931b59..b062551337 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_communities/text_unit_context/sort_context.py +++ b/packages/graphrag/graphrag/index/operations/summarize_communities/text_unit_context/sort_context.py @@ -6,9 +6,9 @@ import logging import pandas as pd +from graphrag_llm.tokenizer import Tokenizer import graphrag.data_model.schemas as schemas -from graphrag.tokenizer.tokenizer import Tokenizer logger = logging.getLogger(__name__) diff --git a/packages/graphrag/graphrag/index/operations/summarize_communities/typing.py b/packages/graphrag/graphrag/index/operations/summarize_communities/typing.py index e59c4f33b1..73d8dd6f89 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_communities/typing.py +++ b/packages/graphrag/graphrag/index/operations/summarize_communities/typing.py @@ -4,11 +4,12 @@ """A module containing 'Finding' and 'CommunityReport' models.""" from collections.abc import Awaitable, Callable -from typing import Any +from typing import TYPE_CHECKING, Any from typing_extensions import TypedDict -from graphrag.language_model.protocol.base import ChatModel +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion RowContext = dict[str, Any] Claim = dict[str, Any] @@ -40,7 +41,7 @@ class CommunityReport(TypedDict): str | int, str, int, - ChatModel, + "LLMCompletion", str, int, ], diff --git a/packages/graphrag/graphrag/index/operations/summarize_descriptions/description_summary_extractor.py b/packages/graphrag/graphrag/index/operations/summarize_descriptions/description_summary_extractor.py index d31a12611b..f74a549ca6 100644 --- 
a/packages/graphrag/graphrag/index/operations/summarize_descriptions/description_summary_extractor.py +++ b/packages/graphrag/graphrag/index/operations/summarize_descriptions/description_summary_extractor.py @@ -5,10 +5,13 @@ import json from dataclasses import dataclass +from typing import TYPE_CHECKING from graphrag.index.typing.error_handler import ErrorHandlerFn -from graphrag.language_model.protocol.base import ChatModel -from graphrag.tokenizer.get_tokenizer import get_tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse # these tokens are used in the prompt ENTITY_NAME_KEY = "entity_name" @@ -27,7 +30,7 @@ class SummarizationResult: class SummarizeExtractor: """Unipartite graph extractor class definition.""" - _model: ChatModel + _model: "LLMCompletion" _summarization_prompt: str _on_error: ErrorHandlerFn _max_summary_length: int @@ -35,7 +38,7 @@ class SummarizeExtractor: def __init__( self, - model: ChatModel, + model: "LLMCompletion", max_summary_length: int, max_input_tokens: int, summarization_prompt: str, @@ -44,7 +47,7 @@ def __init__( """Init method definition.""" # TODO: streamline construction self._model = model - self._tokenizer = get_tokenizer(model.config) + self._tokenizer = model.tokenizer self._summarization_prompt = summarization_prompt self._on_error = on_error or (lambda _e, _s, _d: None) self._max_summary_length = max_summary_length @@ -118,15 +121,14 @@ async def _summarize_descriptions_with_llm( self, id: str | tuple[str, str] | list[str], descriptions: list[str] ): """Summarize descriptions using the LLM.""" - response = await self._model.achat( - self._summarization_prompt.format(**{ + response: LLMCompletionResponse = await self._model.completion_async( + messages=self._summarization_prompt.format(**{ ENTITY_NAME_KEY: json.dumps(id, ensure_ascii=False), DESCRIPTION_LIST_KEY: json.dumps( sorted(descriptions), ensure_ascii=False ), MAX_LENGTH_KEY: self._max_summary_length, }), - name="summarize", - ) + ) # type: ignore # Calculate result - return str(response.output.content) + return response.content diff --git a/packages/graphrag/graphrag/index/operations/summarize_descriptions/summarize_descriptions.py b/packages/graphrag/graphrag/index/operations/summarize_descriptions/summarize_descriptions.py index 48aaf37a00..a959afd79b 100644 --- a/packages/graphrag/graphrag/index/operations/summarize_descriptions/summarize_descriptions.py +++ b/packages/graphrag/graphrag/index/operations/summarize_descriptions/summarize_descriptions.py @@ -5,6 +5,7 @@ import asyncio import logging +from typing import TYPE_CHECKING import pandas as pd @@ -15,9 +16,11 @@ from graphrag.index.operations.summarize_descriptions.typing import ( SummarizedDescriptionResult, ) -from graphrag.language_model.protocol.base import ChatModel from graphrag.logger.progress import ProgressTicker, progress_ticker +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + logger = logging.getLogger(__name__) @@ -25,7 +28,7 @@ async def summarize_descriptions( entities_df: pd.DataFrame, relationships_df: pd.DataFrame, callbacks: WorkflowCallbacks, - model: ChatModel, + model: "LLMCompletion", max_summary_length: int, max_input_tokens: int, prompt: str, @@ -115,7 +118,7 @@ async def do_summarize_descriptions( async def run_summarize_descriptions( id: str | tuple[str, str], descriptions: list[str], - model: ChatModel, + model: "LLMCompletion", max_summary_length: int, max_input_tokens: int, prompt: str, diff 
--git a/packages/graphrag/graphrag/index/text_splitting/text_splitting.py b/packages/graphrag/graphrag/index/text_splitting/text_splitting.py index 7be6c538ae..0f42cd6ba5 100644 --- a/packages/graphrag/graphrag/index/text_splitting/text_splitting.py +++ b/packages/graphrag/graphrag/index/text_splitting/text_splitting.py @@ -9,9 +9,9 @@ from typing import cast import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer EncodedText = list[int] DecodeFn = Callable[[EncodedText], str] diff --git a/packages/graphrag/graphrag/index/validate_config.py b/packages/graphrag/graphrag/index/validate_config.py index 014592df47..4062b8de9a 100644 --- a/packages/graphrag/graphrag/index/validate_config.py +++ b/packages/graphrag/graphrag/index/validate_config.py @@ -7,47 +7,35 @@ import logging import sys -from graphrag.callbacks.noop_workflow_callbacks import NoopWorkflowCallbacks +from graphrag_llm.completion import create_completion +from graphrag_llm.embedding import create_embedding + from graphrag.config.models.graph_rag_config import GraphRagConfig -from graphrag.language_model.manager import ModelManager logger = logging.getLogger(__name__) def validate_config_names(parameters: GraphRagConfig) -> None: """Validate config file for model deployment name typos, by running a quick test message for each.""" - for id, config in parameters.models.items(): - if config.type == "chat": - llm = ModelManager().register_chat( - name="test-llm", - model_type=config.type, - config=config, - callbacks=NoopWorkflowCallbacks(), - cache=None, - ) - try: - asyncio.run( - llm.achat("This is an LLM connectivity test. Say Hello World") + for id, config in parameters.completion_models.items(): + llm = create_completion(config) + try: + llm.completion(messages="This is an LLM connectivity test. 
Say Hello World") + logger.info("LLM Config Params Validated") + except Exception as e: # noqa: BLE001 + logger.error(f"LLM configuration error detected.\n{e}") # noqa + print(f"Failed to validate language model ({id}) params", e) # noqa: T201 + sys.exit(1) + for id, config in parameters.embedding_models.items(): + embed_llm = create_embedding(config) + try: + asyncio.run( + embed_llm.embedding_async( + input=["This is an LLM Embedding Test String"] ) - logger.info("LLM Config Params Validated") - except Exception as e: # noqa: BLE001 - logger.error(f"LLM configuration error detected.\n{e}") # noqa - print(f"Failed to validate language model ({id}) params", e) # noqa: T201 - sys.exit(1) - elif config.type == "embedding": - embed_llm = ModelManager().register_embedding( - name="test-embed-llm", - model_type=config.type, - config=config, - callbacks=NoopWorkflowCallbacks(), - cache=None, ) - try: - asyncio.run( - embed_llm.aembed_batch(["This is an LLM Embedding Test String"]) - ) - logger.info("Embedding LLM Config Params Validated") - except Exception as e: # noqa: BLE001 - logger.error(f"Embedding configuration error detected.\n{e}") # noqa - print(f"Failed to validate embedding model ({id}) params", e) # noqa: T201 - sys.exit(1) + logger.info("Embedding LLM Config Params Validated") + except Exception as e: # noqa: BLE001 + logger.error(f"Embedding configuration error detected.\n{e}") # noqa + print(f"Failed to validate embedding model ({id}) params", e) # noqa: T201 + sys.exit(1) diff --git a/packages/graphrag/graphrag/index/workflows/create_base_text_units.py b/packages/graphrag/graphrag/index/workflows/create_base_text_units.py index 1b1d9b48a9..ec6abc2578 100644 --- a/packages/graphrag/graphrag/index/workflows/create_base_text_units.py +++ b/packages/graphrag/graphrag/index/workflows/create_base_text_units.py @@ -11,6 +11,7 @@ from graphrag_chunking.chunker_factory import create_chunker from graphrag_chunking.transformers import add_metadata from graphrag_input import TextDocument +from graphrag_llm.tokenizer import Tokenizer from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.config.models.graph_rag_config import GraphRagConfig @@ -19,7 +20,6 @@ from graphrag.index.utils.hashing import gen_sha512_hash from graphrag.logger.progress import progress_ticker from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer from graphrag.utils.storage import load_table_from_storage, write_table_to_storage logger = logging.getLogger(__name__) diff --git a/packages/graphrag/graphrag/index/workflows/create_community_reports.py b/packages/graphrag/graphrag/index/workflows/create_community_reports.py index 981e47227e..5fc2b0842b 100644 --- a/packages/graphrag/graphrag/index/workflows/create_community_reports.py +++ b/packages/graphrag/graphrag/index/workflows/create_community_reports.py @@ -4,10 +4,14 @@ """A module containing run_workflow method definition.""" import logging +from typing import TYPE_CHECKING import pandas as pd +from graphrag_llm.completion import create_completion +from graphrag_llm.tokenizer import Tokenizer import graphrag.data_model.schemas as schemas +from graphrag.cache.cache_key_creator import cache_key_creator from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.config.enums import AsyncType from graphrag.config.models.graph_rag_config import GraphRagConfig @@ -26,16 +30,15 @@ ) from graphrag.index.typing.context import PipelineRunContext from 
graphrag.index.typing.workflow import WorkflowFunctionOutput -from graphrag.language_model.manager import ModelManager -from graphrag.language_model.protocol.base import ChatModel -from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer from graphrag.utils.storage import ( load_table_from_storage, storage_has_table, write_table_to_storage, ) +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + logger = logging.getLogger(__name__) @@ -54,18 +57,18 @@ async def run_workflow( ): claims = await load_table_from_storage("covariates", context.output_storage) - model_config = config.get_language_model_config(config.community_reports.model_id) + model_config = config.get_completion_model_config( + config.community_reports.completion_model_id + ) prompts = config.community_reports.resolved_prompts() - model = ModelManager().get_or_create_chat_model( - name=config.community_reports.model_instance_name, - model_type=model_config.type, - config=model_config, - callbacks=context.callbacks, - cache=context.cache, + model = create_completion( + model_config, + cache=context.cache.child(config.community_reports.model_instance_name), + cache_key_creator=cache_key_creator, ) - tokenizer = get_tokenizer(model_config) + tokenizer = model.tokenizer output = await create_community_reports( edges_input=edges, @@ -78,8 +81,8 @@ async def run_workflow( prompt=prompts.graph_prompt, max_input_length=config.community_reports.max_input_length, max_report_length=config.community_reports.max_length, - num_threads=model_config.concurrent_requests, - async_type=model_config.async_mode, + num_threads=config.concurrent_requests, + async_type=config.async_mode, ) await write_table_to_storage(output, "community_reports", context.output_storage) @@ -94,7 +97,7 @@ async def create_community_reports( communities: pd.DataFrame, claims_input: pd.DataFrame | None, callbacks: WorkflowCallbacks, - model: ChatModel, + model: "LLMCompletion", tokenizer: Tokenizer, prompt: str, max_input_length: int, diff --git a/packages/graphrag/graphrag/index/workflows/create_community_reports_text.py b/packages/graphrag/graphrag/index/workflows/create_community_reports_text.py index ac269df8bc..8a6be96e68 100644 --- a/packages/graphrag/graphrag/index/workflows/create_community_reports_text.py +++ b/packages/graphrag/graphrag/index/workflows/create_community_reports_text.py @@ -4,9 +4,13 @@ """A module containing run_workflow method definition.""" import logging +from typing import TYPE_CHECKING import pandas as pd +from graphrag_llm.completion import create_completion +from graphrag_llm.tokenizer import Tokenizer +from graphrag.cache.cache_key_creator import cache_key_creator from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.config.enums import AsyncType from graphrag.config.models.graph_rag_config import GraphRagConfig @@ -25,12 +29,11 @@ ) from graphrag.index.typing.context import PipelineRunContext from graphrag.index.typing.workflow import WorkflowFunctionOutput -from graphrag.language_model.manager import ModelManager -from graphrag.language_model.protocol.base import ChatModel -from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer from graphrag.utils.storage import load_table_from_storage, write_table_to_storage +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + logger = logging.getLogger(__name__) @@ -45,16 +48,16 @@ async def run_workflow( text_units = await 
load_table_from_storage("text_units", context.output_storage) - model_config = config.get_language_model_config(config.community_reports.model_id) - model = ModelManager().get_or_create_chat_model( - name=config.community_reports.model_instance_name, - model_type=model_config.type, - config=model_config, - callbacks=context.callbacks, - cache=context.cache, + model_config = config.get_completion_model_config( + config.community_reports.completion_model_id + ) + model = create_completion( + model_config, + cache=context.cache.child(config.community_reports.model_instance_name), + cache_key_creator=cache_key_creator, ) - tokenizer = get_tokenizer(model_config) + tokenizer = model.tokenizer prompts = config.community_reports.resolved_prompts() @@ -68,8 +71,8 @@ async def run_workflow( prompt=prompts.text_prompt, max_input_length=config.community_reports.max_input_length, max_report_length=config.community_reports.max_length, - num_threads=model_config.concurrent_requests, - async_type=model_config.async_mode, + num_threads=config.concurrent_requests, + async_type=config.async_mode, ) await write_table_to_storage(output, "community_reports", context.output_storage) @@ -83,7 +86,7 @@ async def create_community_reports_text( communities: pd.DataFrame, text_units: pd.DataFrame, callbacks: WorkflowCallbacks, - model: ChatModel, + model: "LLMCompletion", tokenizer: Tokenizer, prompt: str, max_input_length: int, diff --git a/packages/graphrag/graphrag/index/workflows/extract_covariates.py b/packages/graphrag/graphrag/index/workflows/extract_covariates.py index 99a8f279ed..18b470a8b1 100644 --- a/packages/graphrag/graphrag/index/workflows/extract_covariates.py +++ b/packages/graphrag/graphrag/index/workflows/extract_covariates.py @@ -4,10 +4,13 @@ """A module containing run_workflow method definition.""" import logging +from typing import TYPE_CHECKING from uuid import uuid4 import pandas as pd +from graphrag_llm.completion import create_completion +from graphrag.cache.cache_key_creator import cache_key_creator from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.config.defaults import DEFAULT_ENTITY_TYPES from graphrag.config.enums import AsyncType @@ -18,10 +21,11 @@ ) from graphrag.index.typing.context import PipelineRunContext from graphrag.index.typing.workflow import WorkflowFunctionOutput -from graphrag.language_model.manager import ModelManager -from graphrag.language_model.protocol.base import ChatModel from graphrag.utils.storage import load_table_from_storage, write_table_to_storage +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + logger = logging.getLogger(__name__) @@ -35,14 +39,14 @@ async def run_workflow( if config.extract_claims.enabled: text_units = await load_table_from_storage("text_units", context.output_storage) - model_config = config.get_language_model_config(config.extract_claims.model_id) + model_config = config.get_completion_model_config( + config.extract_claims.completion_model_id + ) - model = ModelManager().get_or_create_chat_model( - name=config.extract_claims.model_instance_name, - model_type=model_config.type, - config=model_config, - callbacks=context.callbacks, - cache=context.cache, + model = create_completion( + model_config, + cache=context.cache.child(config.extract_claims.model_instance_name), + cache_key_creator=cache_key_creator, ) prompts = config.extract_claims.resolved_prompts() @@ -56,8 +60,8 @@ async def run_workflow( claim_description=config.extract_claims.description, 
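The same wiring pattern repeats across the workflow hunks in this diff (community reports, claim extraction, and the graph-extraction workflow that follows). A condensed sketch using names taken from those hunks, with `GraphRagConfig` and `PipelineRunContext` as they appear in this change; this is illustrative, not a new helper in the codebase.

```python
from graphrag_llm.completion import create_completion

from graphrag.cache.cache_key_creator import cache_key_creator
from graphrag.config.models.graph_rag_config import GraphRagConfig
from graphrag.index.typing.context import PipelineRunContext


def wire_community_reports_model(config: GraphRagConfig, context: PipelineRunContext):
    """Build a completion model and tokenizer the way the workflows above now do."""
    model_config = config.get_completion_model_config(
        config.community_reports.completion_model_id
    )
    model = create_completion(
        model_config,
        # each workflow scopes its own child cache by model instance name
        cache=context.cache.child(config.community_reports.model_instance_name),
        cache_key_creator=cache_key_creator,
    )
    # the tokenizer now comes from the model itself instead of get_tokenizer(model_config)
    return model, model.tokenizer
```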
prompt=prompts.extraction_prompt, entity_types=DEFAULT_ENTITY_TYPES, - num_threads=model_config.concurrent_requests, - async_type=model_config.async_mode, + num_threads=config.concurrent_requests, + async_type=config.async_mode, ) await write_table_to_storage(output, "covariates", context.output_storage) @@ -69,7 +73,7 @@ async def run_workflow( async def extract_covariates( text_units: pd.DataFrame, callbacks: WorkflowCallbacks, - model: ChatModel, + model: "LLMCompletion", covariate_type: str, max_gleanings: int, claim_description: str, diff --git a/packages/graphrag/graphrag/index/workflows/extract_graph.py b/packages/graphrag/graphrag/index/workflows/extract_graph.py index 8ce8669109..6d6520e401 100644 --- a/packages/graphrag/graphrag/index/workflows/extract_graph.py +++ b/packages/graphrag/graphrag/index/workflows/extract_graph.py @@ -4,9 +4,12 @@ """A module containing run_workflow method definition.""" import logging +from typing import TYPE_CHECKING import pandas as pd +from graphrag_llm.completion import create_completion +from graphrag.cache.cache_key_creator import cache_key_creator from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.config.enums import AsyncType from graphrag.config.models.graph_rag_config import GraphRagConfig @@ -18,10 +21,11 @@ ) from graphrag.index.typing.context import PipelineRunContext from graphrag.index.typing.workflow import WorkflowFunctionOutput -from graphrag.language_model.manager import ModelManager -from graphrag.language_model.protocol.base import ChatModel from graphrag.utils.storage import load_table_from_storage, write_table_to_storage +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + logger = logging.getLogger(__name__) @@ -33,26 +37,24 @@ async def run_workflow( logger.info("Workflow started: extract_graph") text_units = await load_table_from_storage("text_units", context.output_storage) - extraction_model_config = config.get_language_model_config( - config.extract_graph.model_id + extraction_model_config = config.get_completion_model_config( + config.extract_graph.completion_model_id ) extraction_prompts = config.extract_graph.resolved_prompts() - extraction_model = ModelManager().get_or_create_chat_model( - name=config.extract_graph.model_instance_name, - model_type=extraction_model_config.type, - config=extraction_model_config, - cache=context.cache, + extraction_model = create_completion( + extraction_model_config, + cache=context.cache.child(config.extract_graph.model_instance_name), + cache_key_creator=cache_key_creator, ) - summarization_model_config = config.get_language_model_config( - config.summarize_descriptions.model_id + summarization_model_config = config.get_completion_model_config( + config.summarize_descriptions.completion_model_id ) summarization_prompts = config.summarize_descriptions.resolved_prompts() - summarization_model = ModelManager().get_or_create_chat_model( - name=config.summarize_descriptions.model_instance_name, - model_type=summarization_model_config.type, - config=summarization_model_config, - cache=context.cache, + summarization_model = create_completion( + summarization_model_config, + cache=context.cache.child(config.summarize_descriptions.model_instance_name), + cache_key_creator=cache_key_creator, ) entities, relationships, raw_entities, raw_relationships = await extract_graph( @@ -62,13 +64,13 @@ async def run_workflow( extraction_prompt=extraction_prompts.extraction_prompt, entity_types=config.extract_graph.entity_types, 
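The `validate_config_names` rewrite a few hunks above boils down to one tiny request per configured model. A hedged sketch of that flow: the plain dict parameters stand in for `parameters.completion_models` / `parameters.embedding_models`, the message strings mirror the hunk, and the error handling is omitted.

```python
import asyncio

from graphrag_llm.completion import create_completion
from graphrag_llm.embedding import create_embedding


def smoke_test_models(completion_configs: dict, embedding_configs: dict) -> None:
    """Send one minimal request per configured model, as validate_config_names now does."""
    for model_id, cfg in completion_configs.items():
        llm = create_completion(cfg)
        # synchronous one-shot completion; a failure points at a bad deployment name or key
        llm.completion(messages="This is an LLM connectivity test. Say Hello World")
        print(f"completion model '{model_id}' OK")

    for model_id, cfg in embedding_configs.items():
        embed = create_embedding(cfg)
        # the embedding check is async in the hunk, so drive it to completion here
        asyncio.run(embed.embedding_async(input=["This is an LLM Embedding Test String"]))
        print(f"embedding model '{model_id}' OK")
```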
max_gleanings=config.extract_graph.max_gleanings, - extraction_num_threads=extraction_model_config.concurrent_requests, - extraction_async_type=extraction_model_config.async_mode, + extraction_num_threads=config.concurrent_requests, + extraction_async_type=config.async_mode, summarization_model=summarization_model, max_summary_length=config.summarize_descriptions.max_length, max_input_tokens=config.summarize_descriptions.max_input_tokens, summarization_prompt=summarization_prompts.summarize_prompt, - summarization_num_threads=summarization_model_config.concurrent_requests, + summarization_num_threads=config.concurrent_requests, ) await write_table_to_storage(entities, "entities", context.output_storage) @@ -94,13 +96,13 @@ async def run_workflow( async def extract_graph( text_units: pd.DataFrame, callbacks: WorkflowCallbacks, - extraction_model: ChatModel, + extraction_model: "LLMCompletion", extraction_prompt: str, entity_types: list[str], max_gleanings: int, extraction_num_threads: int, extraction_async_type: AsyncType, - summarization_model: ChatModel, + summarization_model: "LLMCompletion", max_summary_length: int, max_input_tokens: int, summarization_prompt: str, @@ -155,7 +157,7 @@ async def get_summarized_entities_relationships( extracted_entities: pd.DataFrame, extracted_relationships: pd.DataFrame, callbacks: WorkflowCallbacks, - model: ChatModel, + model: "LLMCompletion", max_summary_length: int, max_input_tokens: int, summarization_prompt: str, diff --git a/packages/graphrag/graphrag/index/workflows/generate_text_embeddings.py b/packages/graphrag/graphrag/index/workflows/generate_text_embeddings.py index 104631c451..16b726028e 100644 --- a/packages/graphrag/graphrag/index/workflows/generate_text_embeddings.py +++ b/packages/graphrag/graphrag/index/workflows/generate_text_embeddings.py @@ -4,13 +4,17 @@ """A module containing run_workflow method definition.""" import logging +from typing import TYPE_CHECKING import pandas as pd +from graphrag_llm.embedding import create_embedding +from graphrag_llm.tokenizer import Tokenizer from graphrag_vectors import ( VectorStoreConfig, create_vector_store, ) +from graphrag.cache.cache_key_creator import cache_key_creator from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.config.embeddings import ( community_full_content_embedding, @@ -21,15 +25,14 @@ from graphrag.index.operations.embed_text.embed_text import embed_text from graphrag.index.typing.context import PipelineRunContext from graphrag.index.typing.workflow import WorkflowFunctionOutput -from graphrag.language_model.manager import ModelManager -from graphrag.language_model.protocol.base import EmbeddingModel -from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer from graphrag.utils.storage import ( load_table_from_storage, write_table_to_storage, ) +if TYPE_CHECKING: + from graphrag_llm.embedding import LLMEmbedding + logger = logging.getLogger(__name__) @@ -53,17 +56,17 @@ async def run_workflow( "community_reports", context.output_storage ) - model_config = config.get_language_model_config(config.embed_text.model_id) + model_config = config.get_embedding_model_config( + config.embed_text.embedding_model_id + ) - model = ModelManager().get_or_create_embedding_model( - name=config.embed_text.model_instance_name, - model_type=model_config.type, - config=model_config, - callbacks=context.callbacks, - cache=context.cache, + model = create_embedding( + model_config, + 
cache=context.cache.child(config.embed_text.model_instance_name), + cache_key_creator=cache_key_creator, ) - tokenizer = get_tokenizer(model_config) + tokenizer = model.tokenizer output = await generate_text_embeddings( text_units=text_units, @@ -74,7 +77,7 @@ async def run_workflow( tokenizer=tokenizer, batch_size=config.embed_text.batch_size, batch_max_tokens=config.embed_text.batch_max_tokens, - num_threads=model_config.concurrent_requests, + num_threads=config.concurrent_requests, vector_store_config=config.vector_store, embedded_fields=embedded_fields, ) @@ -96,7 +99,7 @@ async def generate_text_embeddings( entities: pd.DataFrame | None, community_reports: pd.DataFrame | None, callbacks: WorkflowCallbacks, - model: EmbeddingModel, + model: "LLMEmbedding", tokenizer: Tokenizer, batch_size: int, batch_max_tokens: int, @@ -154,7 +157,7 @@ async def _run_embeddings( data: pd.DataFrame, embed_column: str, callbacks: WorkflowCallbacks, - model: EmbeddingModel, + model: "LLMEmbedding", tokenizer: Tokenizer, batch_size: int, batch_max_tokens: int, diff --git a/packages/graphrag/graphrag/index/workflows/update_entities_relationships.py b/packages/graphrag/graphrag/index/workflows/update_entities_relationships.py index 468f0e5d1a..225c12d9b9 100644 --- a/packages/graphrag/graphrag/index/workflows/update_entities_relationships.py +++ b/packages/graphrag/graphrag/index/workflows/update_entities_relationships.py @@ -7,8 +7,10 @@ import pandas as pd from graphrag_cache import Cache +from graphrag_llm.completion import create_completion from graphrag_storage import Storage +from graphrag.cache.cache_key_creator import cache_key_creator from graphrag.callbacks.workflow_callbacks import WorkflowCallbacks from graphrag.config.models.graph_rag_config import GraphRagConfig from graphrag.index.run.utils import get_update_storages @@ -17,7 +19,6 @@ from graphrag.index.update.entities import _group_and_resolve_entities from graphrag.index.update.relationships import _update_and_merge_relationships from graphrag.index.workflows.extract_graph import get_summarized_entities_relationships -from graphrag.language_model.manager import ModelManager from graphrag.utils.storage import load_table_from_storage, write_table_to_storage logger = logging.getLogger(__name__) @@ -78,15 +79,14 @@ async def _update_entities_and_relationships( delta_relationships, ) - summarization_model_config = config.get_language_model_config( - config.summarize_descriptions.model_id + summarization_model_config = config.get_completion_model_config( + config.summarize_descriptions.completion_model_id ) prompts = config.summarize_descriptions.resolved_prompts() - model = ModelManager().get_or_create_chat_model( - name="summarize_descriptions", - model_type=summarization_model_config.type, - config=summarization_model_config, - cache=cache, + model = create_completion( + summarization_model_config, + cache=cache.child("summarize_descriptions"), + cache_key_creator=cache_key_creator, ) ( @@ -100,7 +100,7 @@ async def _update_entities_and_relationships( max_summary_length=config.summarize_descriptions.max_length, max_input_tokens=config.summarize_descriptions.max_input_tokens, summarization_prompt=prompts.summarize_prompt, - num_threads=summarization_model_config.concurrent_requests, + num_threads=config.concurrent_requests, ) # Save the updated entities back to storage diff --git a/packages/graphrag/graphrag/index/workflows/update_text_embeddings.py b/packages/graphrag/graphrag/index/workflows/update_text_embeddings.py index 
4c6c280650..375bb69df4 100644 --- a/packages/graphrag/graphrag/index/workflows/update_text_embeddings.py +++ b/packages/graphrag/graphrag/index/workflows/update_text_embeddings.py @@ -5,13 +5,14 @@ import logging +from graphrag_llm.embedding import create_embedding + +from graphrag.cache.cache_key_creator import cache_key_creator from graphrag.config.models.graph_rag_config import GraphRagConfig from graphrag.index.run.utils import get_update_storages from graphrag.index.typing.context import PipelineRunContext from graphrag.index.typing.workflow import WorkflowFunctionOutput from graphrag.index.workflows.generate_text_embeddings import generate_text_embeddings -from graphrag.language_model.manager import ModelManager -from graphrag.tokenizer.get_tokenizer import get_tokenizer from graphrag.utils.storage import write_table_to_storage logger = logging.getLogger(__name__) @@ -34,17 +35,17 @@ async def run_workflow( embedded_fields = config.embed_text.names - model_config = config.get_language_model_config(config.embed_text.model_id) + model_config = config.get_embedding_model_config( + config.embed_text.embedding_model_id + ) - model = ModelManager().get_or_create_embedding_model( - name="text_embedding", - model_type=model_config.type, - config=model_config, - callbacks=context.callbacks, - cache=context.cache, + model = create_embedding( + model_config, + cache=context.cache.child("text_embedding"), + cache_key_creator=cache_key_creator, ) - tokenizer = get_tokenizer(model_config) + tokenizer = model.tokenizer result = await generate_text_embeddings( text_units=merged_text_units, @@ -55,7 +56,7 @@ async def run_workflow( tokenizer=tokenizer, batch_size=config.embed_text.batch_size, batch_max_tokens=config.embed_text.batch_max_tokens, - num_threads=model_config.concurrent_requests, + num_threads=config.concurrent_requests, vector_store_config=config.vector_store, embedded_fields=embedded_fields, ) diff --git a/packages/graphrag/graphrag/language_model/__init__.py b/packages/graphrag/graphrag/language_model/__init__.py deleted file mode 100644 index 1c84bfd23a..0000000000 --- a/packages/graphrag/graphrag/language_model/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""GraphRAG Language Models module. Allows for provider registrations while providing some out-of-the-box solutions.""" diff --git a/packages/graphrag/graphrag/language_model/cache/__init__.py b/packages/graphrag/graphrag/language_model/cache/__init__.py deleted file mode 100644 index 41cca7905f..0000000000 --- a/packages/graphrag/graphrag/language_model/cache/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Cache provider definitions for Language Models.""" diff --git a/packages/graphrag/graphrag/language_model/cache/base.py b/packages/graphrag/graphrag/language_model/cache/base.py deleted file mode 100644 index 554d02c8c5..0000000000 --- a/packages/graphrag/graphrag/language_model/cache/base.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Base cache protocol definition.""" - -from typing import Any, Protocol - - -class ModelCache(Protocol): - """Base cache protocol.""" - - async def has(self, key: str) -> bool: - """Check if the cache has a value.""" - ... - - async def get(self, key: str) -> Any | None: - """Retrieve a value from the cache.""" - ... 
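The embedding workflows above (`generate_text_embeddings`, `update_text_embeddings`) follow the same shape on the embedding side. A minimal sketch with names taken from those hunks; it is illustrative only, not a helper added by this change.

```python
from graphrag_llm.embedding import create_embedding

from graphrag.cache.cache_key_creator import cache_key_creator
from graphrag.config.models.graph_rag_config import GraphRagConfig
from graphrag.index.typing.context import PipelineRunContext


def wire_embedding_model(config: GraphRagConfig, context: PipelineRunContext):
    """Build the embedding model and tokenizer the way the embedding workflows above do."""
    model_config = config.get_embedding_model_config(config.embed_text.embedding_model_id)
    model = create_embedding(
        model_config,
        cache=context.cache.child(config.embed_text.model_instance_name),
        cache_key_creator=cache_key_creator,
    )
    # concurrency now comes from config.concurrent_requests rather than the model config
    return model, model.tokenizer, config.concurrent_requests
```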
- - async def set( - self, key: str, value: Any, metadata: dict[str, Any] | None = None - ) -> None: - """Write a value into the cache.""" - ... - - async def remove(self, key: str) -> None: - """Remove a value from the cache.""" - ... - - async def clear(self) -> None: - """Clear the cache.""" - ... - - def child(self, key: str) -> Any: - """Create a child cache.""" - ... diff --git a/packages/graphrag/graphrag/language_model/events/__init__.py b/packages/graphrag/graphrag/language_model/events/__init__.py deleted file mode 100644 index c6abec322f..0000000000 --- a/packages/graphrag/graphrag/language_model/events/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Model Event handler modules.""" diff --git a/packages/graphrag/graphrag/language_model/events/base.py b/packages/graphrag/graphrag/language_model/events/base.py deleted file mode 100644 index 940da3f1ff..0000000000 --- a/packages/graphrag/graphrag/language_model/events/base.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Base model events protocol.""" - -from typing import Any, Protocol - - -class ModelEventHandler(Protocol): - """Protocol for Model event handling.""" - - async def on_error( - self, - error: BaseException | None, - traceback: str | None = None, - arguments: dict[str, Any] | None = None, - ) -> None: - """Handle an model error.""" - ... diff --git a/packages/graphrag/graphrag/language_model/factory.py b/packages/graphrag/graphrag/language_model/factory.py deleted file mode 100644 index 2836d07d12..0000000000 --- a/packages/graphrag/graphrag/language_model/factory.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""A package containing a factory for supported llm types.""" - -from graphrag_common.factory import Factory - -from graphrag.config.enums import ModelType -from graphrag.language_model.protocol.base import ChatModel, EmbeddingModel -from graphrag.language_model.providers.litellm.chat_model import LitellmChatModel -from graphrag.language_model.providers.litellm.embedding_model import ( - LitellmEmbeddingModel, -) - - -class ChatModelFactory(Factory[ChatModel]): - """Singleton factory for creating ChatModel instances.""" - - -class EmbeddingModelFactory(Factory[EmbeddingModel]): - """Singleton factory for creating EmbeddingModel instances.""" - - -# --- Register default implementations --- -ChatModelFactory().register(ModelType.Chat, LitellmChatModel) -EmbeddingModelFactory().register(ModelType.Embedding, LitellmEmbeddingModel) diff --git a/packages/graphrag/graphrag/language_model/manager.py b/packages/graphrag/graphrag/language_model/manager.py deleted file mode 100644 index 29349cb278..0000000000 --- a/packages/graphrag/graphrag/language_model/manager.py +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Singleton LLM Manager for ChatLLM and EmbeddingsLLM instances. - -This manager lets you register chat and embeddings LLMs independently. -It leverages the LLMFactory for instantiation. 
-""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, ClassVar - -from typing_extensions import Self - -from graphrag.language_model.factory import ChatModelFactory, EmbeddingModelFactory - -if TYPE_CHECKING: - from graphrag.language_model.protocol.base import ChatModel, EmbeddingModel - - -class ModelManager: - """Singleton manager for LLM instances.""" - - _instance: ClassVar[ModelManager | None] = None - - def __new__(cls) -> Self: - """Create a new instance of LLMManager if it does not exist.""" - if cls._instance is None: - cls._instance = super().__new__(cls) - return cls._instance # type: ignore[return-value] - - def __init__(self) -> None: - # Avoid reinitialization in the singleton. - if not hasattr(self, "_initialized"): - self.chat_models: dict[str, ChatModel] = {} - self.embedding_models: dict[str, EmbeddingModel] = {} - self._initialized = True - - @classmethod - def get_instance(cls) -> ModelManager: - """Return the singleton instance of LLMManager.""" - return cls.__new__(cls) - - def register_chat( - self, name: str, model_type: str, **chat_kwargs: Any - ) -> ChatModel: - """ - Register a ChatLLM instance under a unique name. - - Args: - name: Unique identifier for the ChatLLM instance. - model_type: Key for the ChatLLM implementation in LLMFactory. - **chat_kwargs: Additional parameters for instantiation. - """ - chat_kwargs["name"] = name - self.chat_models[name] = ChatModelFactory().create(model_type, chat_kwargs) - return self.chat_models[name] - - def register_embedding( - self, name: str, model_type: str, **embedding_kwargs: Any - ) -> EmbeddingModel: - """ - Register an EmbeddingsLLM instance under a unique name. - - Args: - name: Unique identifier for the EmbeddingsLLM instance. - embedding_key: Key for the EmbeddingsLLM implementation in LLMFactory. - **embedding_kwargs: Additional parameters for instantiation. - """ - embedding_kwargs["name"] = name - self.embedding_models[name] = EmbeddingModelFactory().create( - model_type, embedding_kwargs - ) - return self.embedding_models[name] - - def get_chat_model(self, name: str) -> ChatModel | None: - """ - Retrieve the ChatLLM instance registered under the given name. - - Raises - ------ - ValueError: If no ChatLLM is registered under the name. - """ - if name not in self.chat_models: - msg = f"No ChatLLM registered under the name '{name}'." - raise ValueError(msg) - return self.chat_models[name] - - def get_embedding_model(self, name: str) -> EmbeddingModel | None: - """ - Retrieve the EmbeddingsLLM instance registered under the given name. - - Raises - ------ - ValueError: If no EmbeddingsLLM is registered under the name. - """ - if name not in self.embedding_models: - msg = f"No EmbeddingsLLM registered under the name '{name}'." - raise ValueError(msg) - return self.embedding_models[name] - - def get_or_create_chat_model( - self, name: str, model_type: str, **chat_kwargs: Any - ) -> ChatModel: - """ - Retrieve the ChatLLM instance registered under the given name. - - If the ChatLLM does not exist, it is created and registered. - - Args: - name: Unique identifier for the ChatLLM instance. - model_type: Key for the ChatModel implementation in LLMFactory. - **chat_kwargs: Additional parameters for instantiation. 
- """ - if name not in self.chat_models: - return self.register_chat(name, model_type, **chat_kwargs) - return self.chat_models[name] - - def get_or_create_embedding_model( - self, name: str, model_type: str, **embedding_kwargs: Any - ) -> EmbeddingModel: - """ - Retrieve the EmbeddingsLLM instance registered under the given name. - - If the EmbeddingsLLM does not exist, it is created and registered. - - Args: - name: Unique identifier for the EmbeddingsLLM instance. - model_type: Key for the EmbeddingsLLM implementation in LLMFactory. - **embedding_kwargs: Additional parameters for instantiation. - """ - if name not in self.embedding_models: - return self.register_embedding(name, model_type, **embedding_kwargs) - return self.embedding_models[name] - - def remove_chat(self, name: str) -> None: - """Remove the ChatLLM instance registered under the given name.""" - self.chat_models.pop(name, None) - - def remove_embedding(self, name: str) -> None: - """Remove the EmbeddingsLLM instance registered under the given name.""" - self.embedding_models.pop(name, None) - - def list_chat_models(self) -> dict[str, ChatModel]: - """Return a copy of all registered ChatLLM instances.""" - return dict(self.chat_models) - - def list_embedding_models(self) -> dict[str, EmbeddingModel]: - """Return a copy of all registered EmbeddingsLLM instances.""" - return dict(self.embedding_models) diff --git a/packages/graphrag/graphrag/language_model/protocol/__init__.py b/packages/graphrag/graphrag/language_model/protocol/__init__.py deleted file mode 100644 index 12432bd1f5..0000000000 --- a/packages/graphrag/graphrag/language_model/protocol/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Base protocol definitions for LLMs.""" diff --git a/packages/graphrag/graphrag/language_model/protocol/base.py b/packages/graphrag/graphrag/language_model/protocol/base.py deleted file mode 100644 index 74cd38746e..0000000000 --- a/packages/graphrag/graphrag/language_model/protocol/base.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Base llm protocol definitions.""" - -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Protocol - -if TYPE_CHECKING: - from collections.abc import AsyncGenerator, Generator - - from graphrag.config.models.language_model_config import LanguageModelConfig - from graphrag.language_model.response.base import ModelResponse - - -class EmbeddingModel(Protocol): - """ - Protocol for an embedding-based Language Model (LM). - - This protocol defines the methods required for an embedding-based LM. - """ - - config: LanguageModelConfig - """Passthrough of the config used to create the model instance.""" - - async def aembed_batch( - self, text_list: list[str], **kwargs: Any - ) -> list[list[float]]: - """ - Generate an embedding vector for the given list of strings. - - Args: - text: The text to generate an embedding for. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A collections of list of floats representing the embedding vector for each item in the batch. - """ - ... - - async def aembed(self, text: str, **kwargs: Any) -> list[float]: - """ - Generate an embedding vector for the given text. - - Args: - text: The text to generate an embedding for. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A list of floats representing the embedding vector. 
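With the `ModelManager` singleton removed above, call sites that previously registered a named chat model switch to the plain factory function plus an explicitly scoped cache, as the `update_entities_relationships.py` hunk earlier in this diff shows. A before/after sketch; the old call is quoted from the deleted API, the new one mirrors that hunk, and `model_config` is whatever `get_completion_model_config` returns.

```python
from graphrag_cache import Cache
from graphrag_llm.completion import create_completion

from graphrag.cache.cache_key_creator import cache_key_creator


def build_summarization_model(model_config, cache: Cache):
    """Replacement for the old ModelManager().get_or_create_chat_model(...) call."""
    # Before (deleted above):
    #   ModelManager().get_or_create_chat_model(
    #       name="summarize_descriptions",
    #       model_type=model_config.type,
    #       config=model_config,
    #       cache=cache,
    #   )
    # After: no process-wide registry; per-call-site scoping lives in the cache key space.
    return create_completion(
        model_config,
        cache=cache.child("summarize_descriptions"),
        cache_key_creator=cache_key_creator,
    )
```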
- """ - ... - - def embed_batch(self, text_list: list[str], **kwargs: Any) -> list[list[float]]: - """ - Generate an embedding vector for the given list of strings. - - Args: - text: The text to generate an embedding for. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A collections of list of floats representing the embedding vector for each item in the batch. - """ - ... - - def embed(self, text: str, **kwargs: Any) -> list[float]: - """ - Generate an embedding vector for the given text. - - Args: - text: The text to generate an embedding for. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A list of floats representing the embedding vector. - """ - ... - - -class ChatModel(Protocol): - """ - Protocol for a chat-based Language Model (LM). - - This protocol defines the methods required for a chat-based LM. - Prompt is always required for the chat method, and any other keyword arguments are forwarded to the Model provider. - """ - - config: LanguageModelConfig - """Passthrough of the config used to create the model instance.""" - - async def achat( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> ModelResponse: - """ - Generate a response for the given text. - - Args: - prompt: The text to generate a response for. - history: The conversation history. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A string representing the response. - - """ - ... - - async def achat_stream( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> AsyncGenerator[str, None]: - """ - Generate a response for the given text using a streaming interface. - - Args: - prompt: The text to generate a response for. - history: The conversation history. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A generator that yields strings representing the response. - """ - yield "" # Yield an empty string so that the function is recognized as a generator - ... - - def chat( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> ModelResponse: - """ - Generate a response for the given text. - - Args: - prompt: The text to generate a response for. - history: The conversation history. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A string representing the response. - - """ - ... - - def chat_stream( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> Generator[str, None]: - """ - Generate a response for the given text using a streaming interface. - - Args: - prompt: The text to generate a response for. - history: The conversation history. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A generator that yields strings representing the response. - """ - ... diff --git a/packages/graphrag/graphrag/language_model/providers/__init__.py b/packages/graphrag/graphrag/language_model/providers/__init__.py deleted file mode 100644 index d635f898ba..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. 
-# Licensed under the MIT License - -"""Model Providers module.""" diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/__init__.py b/packages/graphrag/graphrag/language_model/providers/litellm/__init__.py deleted file mode 100644 index a1f948aba8..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""GraphRAG LiteLLM module. Provides LiteLLM-based implementations of chat and embedding models.""" diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/chat_model.py b/packages/graphrag/graphrag/language_model/providers/litellm/chat_model.py deleted file mode 100644 index 4e198bb423..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/chat_model.py +++ /dev/null @@ -1,403 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Chat model implementation using Litellm.""" - -import inspect -import json -from collections.abc import AsyncGenerator, Generator -from typing import TYPE_CHECKING, Any, cast - -import litellm -from azure.identity import DefaultAzureCredential, get_bearer_token_provider -from litellm import ( - CustomStreamWrapper, - ModelResponse, # type: ignore - acompletion, - completion, -) -from pydantic import BaseModel, Field - -from graphrag.config.defaults import COGNITIVE_SERVICES_AUDIENCE -from graphrag.config.enums import AuthType -from graphrag.language_model.providers.litellm.request_wrappers.with_cache import ( - with_cache, -) -from graphrag.language_model.providers.litellm.request_wrappers.with_logging import ( - with_logging, -) -from graphrag.language_model.providers.litellm.request_wrappers.with_rate_limiter import ( - with_rate_limiter, -) -from graphrag.language_model.providers.litellm.request_wrappers.with_retries import ( - with_retries, -) -from graphrag.language_model.providers.litellm.types import ( - AFixedModelCompletion, - FixedModelCompletion, -) - -if TYPE_CHECKING: - from graphrag_cache import Cache - - from graphrag.config.models.language_model_config import LanguageModelConfig - from graphrag.language_model.response.base import ModelResponse as MR # noqa: N817 - -litellm.suppress_debug_info = True - - -def _create_base_completions( - model_config: "LanguageModelConfig", -) -> tuple[FixedModelCompletion, AFixedModelCompletion]: - """Wrap the base litellm completion function with the model configuration. - - Args - ---- - model_config: The configuration for the language model. - - Returns - ------- - A tuple containing the synchronous and asynchronous completion functions. - """ - model_provider = model_config.model_provider - model = model_config.deployment_name or model_config.model - - base_args: dict[str, Any] = { - "drop_params": True, # LiteLLM drop unsupported params for selected model. 
- "model": f"{model_provider}/{model}", - "timeout": model_config.request_timeout, - "top_p": model_config.top_p, - "n": model_config.n, - "temperature": model_config.temperature, - "frequency_penalty": model_config.frequency_penalty, - "presence_penalty": model_config.presence_penalty, - "api_base": model_config.api_base, - "api_version": model_config.api_version, - "api_key": model_config.api_key, - "organization": model_config.organization, - "proxy": model_config.proxy, - "audience": model_config.audience, - "max_tokens": model_config.max_tokens, - "max_completion_tokens": model_config.max_completion_tokens, - "reasoning_effort": model_config.reasoning_effort, - } - - if model_config.auth_type == AuthType.AzureManagedIdentity: - if model_config.model_provider != "azure": - msg = "Azure Managed Identity authentication is only supported for Azure models." - raise ValueError(msg) - - base_args["azure_scope"] = base_args.pop("audience") - base_args["azure_ad_token_provider"] = get_bearer_token_provider( - DefaultAzureCredential(), - model_config.audience or COGNITIVE_SERVICES_AUDIENCE, - ) - - def _base_completion(**kwargs: Any) -> ModelResponse | CustomStreamWrapper: - new_args = {**base_args, **kwargs} - - if "name" in new_args: - new_args.pop("name") - - return completion(**new_args) - - async def _base_acompletion(**kwargs: Any) -> ModelResponse | CustomStreamWrapper: - new_args = {**base_args, **kwargs} - - if "name" in new_args: - new_args.pop("name") - - return await acompletion(**new_args) - - return (_base_completion, _base_acompletion) - - -def _create_completions( - model_config: "LanguageModelConfig", - cache: "Cache | None", - cache_key_prefix: str, -) -> tuple[FixedModelCompletion, AFixedModelCompletion]: - """Wrap the base litellm completion function with the model configuration and additional features. - - Wrap the base litellm completion function with instance variables based on the model configuration. - Then wrap additional features such as rate limiting, retries, and caching, if enabled. - - Final function composition order: - - Logging(Cache(Retries(RateLimiter(ModelCompletion())))) - - Args - ---- - model_config: The configuration for the language model. - cache: Optional cache for storing responses. - cache_key_prefix: Prefix for cache keys. - - Returns - ------- - A tuple containing the synchronous and asynchronous completion functions. 
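The deleted `_create_completions` above documents its wrapper order as Logging(Cache(Retries(RateLimiter(ModelCompletion())))). As a generic illustration of that composition style only, in plain Python: this is neither the old provider's actual code nor the internals of the new `graphrag_llm` package.

```python
from collections.abc import Callable
from typing import Any

Completion = Callable[..., Any]


def with_stage(fn: Completion, label: str) -> Completion:
    """Wrap a completion callable with one cross-cutting stage (logging, cache, ...)."""
    def wrapped(**kwargs: Any) -> Any:
        # a real stage would rate-limit, retry, or consult a cache here
        print(f"entering stage: {label}")
        return fn(**kwargs)
    return wrapped


def base_completion(**kwargs: Any) -> str:
    return f"model response for {kwargs.get('messages')!r}"


# The innermost wrapper is applied first, so a call flows
# logging -> cache -> retries -> rate limiter -> model.
completion = base_completion
for stage in ("rate_limiter", "retries", "cache", "logging"):
    completion = with_stage(completion, stage)

print(completion(messages="hello"))
```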
- - """ - completion, acompletion = _create_base_completions(model_config) - - if model_config.rate_limit_strategy is not None and ( - model_config.requests_per_minute is not None - or model_config.tokens_per_minute is not None - ): - completion, acompletion = with_rate_limiter( - sync_fn=completion, - async_fn=acompletion, - model_config=model_config, - rpm=model_config.requests_per_minute, - tpm=model_config.tokens_per_minute, - ) - - if model_config.retry_strategy != "none": - completion, acompletion = with_retries( - sync_fn=completion, - async_fn=acompletion, - model_config=model_config, - ) - - if cache is not None: - completion, acompletion = with_cache( - sync_fn=completion, - async_fn=acompletion, - model_config=model_config, - cache=cache, - request_type="chat", - cache_key_prefix=cache_key_prefix, - ) - - completion, acompletion = with_logging( - sync_fn=completion, - async_fn=acompletion, - ) - - return (completion, acompletion) - - -class LitellmModelOutput(BaseModel): - """A model representing the output from a language model.""" - - content: str = Field(description="The generated text content") - full_response: None = Field( - default=None, description="The full response from the model, if available" - ) - - -class LitellmModelResponse(BaseModel): - """A model representing the response from a language model.""" - - output: LitellmModelOutput = Field(description="The output from the model") - parsed_response: BaseModel | None = Field( - default=None, description="Parsed response from the model" - ) - history: list = Field( - default_factory=list, - description="Conversation history including the prompt and response", - ) - - -class LitellmChatModel: - """LiteLLM-based Chat Model.""" - - def __init__( - self, - name: str, - config: "LanguageModelConfig", - cache: "Cache | None" = None, - **kwargs: Any, - ): - self.name = name - self.config = config - self.cache = cache.child(self.name) if cache else None - self.completion, self.acompletion = _create_completions( - config, self.cache, "chat" - ) - - def _get_kwargs(self, **kwargs: Any) -> dict[str, Any]: - """Get model arguments supported by litellm.""" - args_to_include = [ - "name", - "modalities", - "prediction", - "audio", - "logit_bias", - "metadata", - "user", - "response_format", - "seed", - "tools", - "tool_choice", - "logprobs", - "top_logprobs", - "parallel_tool_calls", - "web_search_options", - "extra_headers", - "functions", - "function_call", - "thinking", - ] - new_args = {k: v for k, v in kwargs.items() if k in args_to_include} - - # If using JSON, check if response_format should be a Pydantic model or just a general JSON object - if kwargs.get("json"): - new_args["response_format"] = {"type": "json_object"} - - if ( - "json_model" in kwargs - and inspect.isclass(kwargs["json_model"]) - and issubclass(kwargs["json_model"], BaseModel) - ): - new_args["response_format"] = kwargs["json_model"] - - return new_args - - async def achat( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> "MR": - """ - Generate a response for the given prompt and history. - - Args - ---- - prompt: The prompt to generate a response for. - history: Optional chat history. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - LitellmModelResponse: The generated model response. 
- """ - new_kwargs = self._get_kwargs(**kwargs) - messages: list[dict[str, str]] = history or [] - messages.append({"role": "user", "content": prompt}) - - response = await self.acompletion(messages=messages, stream=False, **new_kwargs) # type: ignore - - messages.append({ - "role": "assistant", - "content": response.choices[0].message.content or "", # type: ignore - }) - - parsed_response: BaseModel | None = None - if "response_format" in new_kwargs: - parsed_dict: dict[str, Any] = json.loads( - response.choices[0].message.content or "{}" # type: ignore - ) - parsed_response = parsed_dict # type: ignore - if inspect.isclass(new_kwargs["response_format"]) and issubclass( - new_kwargs["response_format"], BaseModel - ): - # If response_format is a pydantic model, instantiate it - model_initializer = cast( - "type[BaseModel]", new_kwargs["response_format"] - ) - parsed_response = model_initializer(**parsed_dict) - - return LitellmModelResponse( - output=LitellmModelOutput( - content=response.choices[0].message.content or "" # type: ignore - ), - parsed_response=parsed_response, - history=messages, - ) - - async def achat_stream( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> AsyncGenerator[str, None]: - """ - Generate a response for the given prompt and history. - - Args - ---- - prompt: The prompt to generate a response for. - history: Optional chat history. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - AsyncGenerator[str, None]: The generated response as a stream of strings. - """ - new_kwargs = self._get_kwargs(**kwargs) - messages: list[dict[str, str]] = history or [] - messages.append({"role": "user", "content": prompt}) - - response = await self.acompletion(messages=messages, stream=True, **new_kwargs) # type: ignore - - async for chunk in response: # type: ignore - if chunk.choices and chunk.choices[0].delta.content: - yield chunk.choices[0].delta.content - - def chat(self, prompt: str, history: list | None = None, **kwargs: Any) -> "MR": - """ - Generate a response for the given prompt and history. - - Args - ---- - prompt: The prompt to generate a response for. - history: Optional chat history. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - LitellmModelResponse: The generated model response. 
- """ - new_kwargs = self._get_kwargs(**kwargs) - messages: list[dict[str, str]] = history or [] - messages.append({"role": "user", "content": prompt}) - - response = self.completion(messages=messages, stream=False, **new_kwargs) # type: ignore - - messages.append({ - "role": "assistant", - "content": response.choices[0].message.content or "", # type: ignore - }) - - parsed_response: BaseModel | None = None - if "response_format" in new_kwargs: - parsed_dict: dict[str, Any] = json.loads( - response.choices[0].message.content or "{}" # type: ignore - ) - parsed_response = parsed_dict # type: ignore - if inspect.isclass(new_kwargs["response_format"]) and issubclass( - new_kwargs["response_format"], BaseModel - ): - # If response_format is a pydantic model, instantiate it - model_initializer = cast( - "type[BaseModel]", new_kwargs["response_format"] - ) - parsed_response = model_initializer(**parsed_dict) - - return LitellmModelResponse( - output=LitellmModelOutput( - content=response.choices[0].message.content or "" # type: ignore - ), - parsed_response=parsed_response, - history=messages, - ) - - def chat_stream( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> Generator[str, None]: - """ - Generate a response for the given prompt and history. - - Args - ---- - prompt: The prompt to generate a response for. - history: Optional chat history. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - Generator[str, None]: The generated response as a stream of strings. - """ - new_kwargs = self._get_kwargs(**kwargs) - messages: list[dict[str, str]] = history or [] - messages.append({"role": "user", "content": prompt}) - - response = self.completion(messages=messages, stream=True, **new_kwargs) # type: ignore - - for chunk in response: - if chunk.choices and chunk.choices[0].delta.content: # type: ignore - yield chunk.choices[0].delta.content # type: ignore diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/embedding_model.py b/packages/graphrag/graphrag/language_model/providers/litellm/embedding_model.py deleted file mode 100644 index 328eb2e16d..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/embedding_model.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. 
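Both the chat provider above and the embedding provider removed below build their request path by wrapping a base litellm call in successive layers, giving the composition Logging(Cache(Retries(RateLimiter(base)))). A minimal sketch of that wrapping pattern, using illustrative stand-in layers rather than the deleted wrappers themselves:

```python
from typing import Any, Callable

RequestFn = Callable[..., Any]


def with_layer(label: str, fn: RequestFn) -> RequestFn:
    """Stand-in for a wrapper such as rate limiting, retries, caching, or logging."""

    def wrapped(**kwargs: Any) -> Any:
        print(f"enter {label}")
        try:
            return fn(**kwargs)
        finally:
            print(f"exit {label}")

    return wrapped


def base_completion(**kwargs: Any) -> dict[str, Any]:
    """Stand-in for the provider's base completion/embedding call."""
    return {"echo": kwargs}


# The innermost wrapper is applied first, so "logging" is outermost at call time:
# Logging(Cache(Retries(RateLimiter(base_completion))))
completion: RequestFn = base_completion
for label in ("rate_limiter", "retries", "cache", "logging"):
    completion = with_layer(label, completion)

completion(messages=[{"role": "user", "content": "hi"}])
```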
-# Licensed under the MIT License - -"""Embedding model implementation using Litellm.""" - -from typing import TYPE_CHECKING, Any - -import litellm -from azure.identity import DefaultAzureCredential, get_bearer_token_provider -from litellm import ( - EmbeddingResponse, # type: ignore - aembedding, - embedding, -) - -from graphrag.config.defaults import COGNITIVE_SERVICES_AUDIENCE -from graphrag.config.enums import AuthType -from graphrag.language_model.providers.litellm.request_wrappers.with_cache import ( - with_cache, -) -from graphrag.language_model.providers.litellm.request_wrappers.with_logging import ( - with_logging, -) -from graphrag.language_model.providers.litellm.request_wrappers.with_rate_limiter import ( - with_rate_limiter, -) -from graphrag.language_model.providers.litellm.request_wrappers.with_retries import ( - with_retries, -) -from graphrag.language_model.providers.litellm.types import ( - AFixedModelEmbedding, - FixedModelEmbedding, -) - -if TYPE_CHECKING: - from graphrag_cache import Cache - - from graphrag.config.models.language_model_config import LanguageModelConfig - -litellm.suppress_debug_info = True - - -def _create_base_embeddings( - model_config: "LanguageModelConfig", -) -> tuple[FixedModelEmbedding, AFixedModelEmbedding]: - """Wrap the base litellm embedding function with the model configuration. - - Args - ---- - model_config: The configuration for the language model. - - Returns - ------- - A tuple containing the synchronous and asynchronous embedding functions. - """ - model_provider = model_config.model_provider - model = model_config.deployment_name or model_config.model - - base_args: dict[str, Any] = { - "drop_params": True, # LiteLLM drop unsupported params for selected model. - "model": f"{model_provider}/{model}", - "timeout": model_config.request_timeout, - "api_base": model_config.api_base, - "api_version": model_config.api_version, - "api_key": model_config.api_key, - "organization": model_config.organization, - "proxy": model_config.proxy, - "audience": model_config.audience, - } - - if model_config.auth_type == AuthType.AzureManagedIdentity: - if model_config.model_provider != "azure": - msg = "Azure Managed Identity authentication is only supported for Azure models." - raise ValueError(msg) - - base_args["azure_scope"] = base_args.pop("audience") - base_args["azure_ad_token_provider"] = get_bearer_token_provider( - DefaultAzureCredential(), - model_config.audience or COGNITIVE_SERVICES_AUDIENCE, - ) - - def _base_embedding(**kwargs: Any) -> EmbeddingResponse: - new_args = {**base_args, **kwargs} - - if "name" in new_args: - new_args.pop("name") - - return embedding(**new_args) - - async def _base_aembedding(**kwargs: Any) -> EmbeddingResponse: - new_args = {**base_args, **kwargs} - - if "name" in new_args: - new_args.pop("name") - - return await aembedding(**new_args) - - return (_base_embedding, _base_aembedding) - - -def _create_embeddings( - model_config: "LanguageModelConfig", - cache: "Cache | None", - cache_key_prefix: str, -) -> tuple[FixedModelEmbedding, AFixedModelEmbedding]: - """Wrap the base litellm embedding function with the model configuration and additional features. - - Wrap the base litellm embedding function with instance variables based on the model configuration. - Then wrap additional features such as rate limiting, retries, and caching, if enabled. - - Final function composition order: - - Logging(Cache(Retries(RateLimiter(ModelEmbedding())))) - - Args - ---- - model_config: The configuration for the language model. 
- cache: Optional cache for storing responses. - cache_key_prefix: Prefix for cache keys. - - Returns - ------- - A tuple containing the synchronous and asynchronous embedding functions. - - """ - embedding, aembedding = _create_base_embeddings(model_config) - - if model_config.rate_limit_strategy is not None and ( - model_config.requests_per_minute is not None - or model_config.tokens_per_minute is not None - ): - embedding, aembedding = with_rate_limiter( - sync_fn=embedding, - async_fn=aembedding, - model_config=model_config, - rpm=model_config.requests_per_minute, - tpm=model_config.tokens_per_minute, - ) - - if model_config.retry_strategy != "none": - embedding, aembedding = with_retries( - sync_fn=embedding, - async_fn=aembedding, - model_config=model_config, - ) - - if cache is not None: - embedding, aembedding = with_cache( - sync_fn=embedding, - async_fn=aembedding, - model_config=model_config, - cache=cache, - request_type="embedding", - cache_key_prefix=cache_key_prefix, - ) - - embedding, aembedding = with_logging( - sync_fn=embedding, - async_fn=aembedding, - ) - - return (embedding, aembedding) - - -class LitellmEmbeddingModel: - """LiteLLM-based Embedding Model.""" - - def __init__( - self, - name: str, - config: "LanguageModelConfig", - cache: "Cache | None" = None, - **kwargs: Any, - ): - self.name = name - self.config = config - self.cache = cache.child(self.name) if cache else None - self.embedding, self.aembedding = _create_embeddings( - config, self.cache, "embeddings" - ) - - def _get_kwargs(self, **kwargs: Any) -> dict[str, Any]: - """Get model arguments supported by litellm.""" - args_to_include = [ - "name", - "dimensions", - "encoding_format", - "timeout", - "user", - ] - return {k: v for k, v in kwargs.items() if k in args_to_include} - - async def aembed_batch( - self, text_list: list[str], **kwargs: Any - ) -> list[list[float]]: - """ - Batch generate embeddings. - - Args - ---- - text_list: A batch of text inputs to generate embeddings for. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A Batch of embeddings. - """ - new_kwargs = self._get_kwargs(**kwargs) - response = await self.aembedding(input=text_list, **new_kwargs) - - return [emb.get("embedding", []) for emb in response.data] - - async def aembed(self, text: str, **kwargs: Any) -> list[float]: - """ - Async embed. - - Args: - text: The text to generate an embedding for. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - An embedding. - """ - new_kwargs = self._get_kwargs(**kwargs) - response = await self.aembedding(input=[text], **new_kwargs) - - return ( - response.data[0].get("embedding", []) - if response.data and response.data[0] - else [] - ) - - def embed_batch(self, text_list: list[str], **kwargs: Any) -> list[list[float]]: - """ - Batch generate embeddings. - - Args: - text_list: A batch of text inputs to generate embeddings for. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - A Batch of embeddings. - """ - new_kwargs = self._get_kwargs(**kwargs) - response = self.embedding(input=text_list, **new_kwargs) - - return [emb.get("embedding", []) for emb in response.data] - - def embed(self, text: str, **kwargs: Any) -> list[float]: - """ - Embed a single text input. - - Args: - text: The text to generate an embedding for. - **kwargs: Additional keyword arguments (e.g., model parameters). - - Returns - ------- - An embedding. 
- """ - new_kwargs = self._get_kwargs(**kwargs) - response = self.embedding(input=[text], **new_kwargs) - - return ( - response.data[0].get("embedding", []) - if response.data and response.data[0] - else [] - ) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/get_cache_key.py b/packages/graphrag/graphrag/language_model/providers/litellm/get_cache_key.py deleted file mode 100644 index 0d6938d45a..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/get_cache_key.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -""" -LiteLLM cache key generation. - -Modeled after the fnllm cache key generation. -https://github.com/microsoft/essex-toolkit/blob/23d3077b65c0e8f1d89c397a2968fe570a25f790/python/fnllm/fnllm/caching/base.py#L50 -""" - -import hashlib -import inspect -import json -from typing import TYPE_CHECKING, Any - -from pydantic import BaseModel - -if TYPE_CHECKING: - from graphrag.config.models.language_model_config import LanguageModelConfig - - -_CACHE_VERSION = 3 -""" -If there's a breaking change in what we cache, we should increment this version number to invalidate existing caches. - -fnllm was on cache version 2 and though we generate -similar cache keys, the objects stored in cache by fnllm and litellm are different. -Using litellm model providers will not be able to reuse caches generated by fnllm -thus we start with version 3 for litellm. -""" - - -def get_cache_key( - model_config: "LanguageModelConfig", - prefix: str, - messages: str | None = None, - input: str | None = None, - **kwargs: Any, -) -> str: - """Generate a cache key based on the model configuration and input arguments. - - Modeled after the fnllm cache key generation. - https://github.com/microsoft/essex-toolkit/blob/23d3077b65c0e8f1d89c397a2968fe570a25f790/python/fnllm/fnllm/caching/base.py#L50 - - Args - ____ - model_config: The configuration of the language model. - prefix: A prefix for the cache key. - **kwargs: Additional model input parameters. - - Returns - ------- - `{prefix}_{data_hash}_v{version}` if prefix is provided. - """ - cache_key: dict[str, Any] = { - "parameters": _get_parameters(model_config, **kwargs), - } - - if messages is not None and input is not None: - msg = "Only one of 'messages' or 'input' should be provided." - raise ValueError(msg) - - if messages is not None: - cache_key["messages"] = messages - elif input is not None: - cache_key["input"] = input - else: - msg = "Either 'messages' or 'input' must be provided." - raise ValueError(msg) - - data_hash = _hash(json.dumps(cache_key, sort_keys=True)) - - name = kwargs.get("name") - - if name: - prefix += f"_{name}" - - return f"{prefix}_{data_hash}_v{_CACHE_VERSION}" - - -def _get_parameters( - model_config: "LanguageModelConfig", - **kwargs: Any, -) -> dict[str, Any]: - """Pluck out the parameters that define a cache key. - - Use the same parameters as fnllm except request timeout. - - embeddings: https://github.com/microsoft/essex-toolkit/blob/main/python/fnllm/fnllm/openai/types/embeddings/parameters.py#L12 - - chat: https://github.com/microsoft/essex-toolkit/blob/main/python/fnllm/fnllm/openai/types/chat/parameters.py#L25 - - Args - ____ - model_config: The configuration of the language model. - **kwargs: Additional model input parameters. - - Returns - ------- - dict[str, Any]: A dictionary of parameters that define the cache key. 
- """ - parameters = { - "model": model_config.deployment_name or model_config.model, - "frequency_penalty": model_config.frequency_penalty, - "max_tokens": model_config.max_tokens, - "max_completion_tokens": model_config.max_completion_tokens, - "n": model_config.n, - "presence_penalty": model_config.presence_penalty, - "temperature": model_config.temperature, - "top_p": model_config.top_p, - "reasoning_effort": model_config.reasoning_effort, - } - keys_to_cache = [ - "function_call", - "functions", - "logit_bias", - "logprobs", - "parallel_tool_calls", - "seed", - "service_tier", - "stop", - "tool_choice", - "tools", - "top_logprobs", - "user", - "dimensions", - "encoding_format", - ] - parameters.update({key: kwargs.get(key) for key in keys_to_cache if key in kwargs}) - - response_format = kwargs.get("response_format") - if inspect.isclass(response_format) and issubclass(response_format, BaseModel): - parameters["response_format"] = str(response_format) - elif response_format is not None: - parameters["response_format"] = response_format - - return parameters - - -def _hash(input: str) -> str: - """Generate a hash for the input string.""" - return hashlib.sha256(input.encode()).hexdigest() diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/__init__.py b/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/__init__.py deleted file mode 100644 index b1ba631645..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM completion/embedding function wrappers.""" diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_cache.py b/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_cache.py deleted file mode 100644 index c7299caaca..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_cache.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM completion/embedding cache wrapper.""" - -import asyncio -from typing import TYPE_CHECKING, Any, Literal - -from litellm import EmbeddingResponse, ModelResponse # type: ignore - -from graphrag.language_model.providers.litellm.get_cache_key import get_cache_key -from graphrag.language_model.providers.litellm.types import ( - AsyncLitellmRequestFunc, - LitellmRequestFunc, -) - -if TYPE_CHECKING: - from graphrag_cache import Cache - - from graphrag.config.models.language_model_config import LanguageModelConfig - - -def with_cache( - *, - sync_fn: LitellmRequestFunc, - async_fn: AsyncLitellmRequestFunc, - model_config: "LanguageModelConfig", - cache: "Cache", - request_type: Literal["chat", "embedding"], - cache_key_prefix: str, -) -> tuple[LitellmRequestFunc, AsyncLitellmRequestFunc]: - """ - Wrap the synchronous and asynchronous request functions with caching. - - Args - ---- - sync_fn: The synchronous chat/embedding request function to wrap. - async_fn: The asynchronous chat/embedding request function to wrap. - model_config: The configuration for the language model. - cache: The cache to use for storing responses. - request_type: The type of request being made, either "chat" or "embedding". - cache_key_prefix: The prefix to use for cache keys. 
- - Returns - ------- - A tuple containing the wrapped synchronous and asynchronous chat/embedding request functions. - """ - - def _wrapped_with_cache(**kwargs: Any) -> Any: - is_streaming = kwargs.get("stream", False) - if is_streaming: - return sync_fn(**kwargs) - cache_key = get_cache_key( - model_config=model_config, prefix=cache_key_prefix, **kwargs - ) - event_loop = asyncio.get_event_loop() - cached_response = event_loop.run_until_complete(cache.get(cache_key)) - if ( - cached_response is not None - and isinstance(cached_response, dict) - and "response" in cached_response - and cached_response["response"] is not None - and isinstance(cached_response["response"], dict) - ): - try: - if request_type == "chat": - return ModelResponse(**cached_response["response"]) - return EmbeddingResponse(**cached_response["response"]) - except Exception: # noqa: BLE001 - # Try to retrieve value from cache but if it fails, continue - # to make the request. - ... - response = sync_fn(**kwargs) - event_loop.run_until_complete( - cache.set(cache_key, {"response": response.model_dump()}) - ) - return response - - async def _wrapped_with_cache_async( - **kwargs: Any, - ) -> Any: - is_streaming = kwargs.get("stream", False) - if is_streaming: - return await async_fn(**kwargs) - cache_key = get_cache_key( - model_config=model_config, prefix=cache_key_prefix, **kwargs - ) - cached_response = await cache.get(cache_key) - if ( - cached_response is not None - and isinstance(cached_response, dict) - and "response" in cached_response - and cached_response["response"] is not None - and isinstance(cached_response["response"], dict) - ): - try: - if request_type == "chat": - return ModelResponse(**cached_response["response"]) - return EmbeddingResponse(**cached_response["response"]) - except Exception: # noqa: BLE001 - # Try to retrieve value from cache but if it fails, continue - # to make the request. - ... - response = await async_fn(**kwargs) - await cache.set(cache_key, {"response": response.model_dump()}) - return response - - return (_wrapped_with_cache, _wrapped_with_cache_async) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_logging.py b/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_logging.py deleted file mode 100644 index a353f455fc..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_logging.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM completion/embedding logging wrapper.""" - -import logging -from typing import Any - -from graphrag.language_model.providers.litellm.types import ( - AsyncLitellmRequestFunc, - LitellmRequestFunc, -) - -logger = logging.getLogger(__name__) - - -def with_logging( - *, - sync_fn: LitellmRequestFunc, - async_fn: AsyncLitellmRequestFunc, -) -> tuple[LitellmRequestFunc, AsyncLitellmRequestFunc]: - """ - Wrap the synchronous and asynchronous request functions with retries. - - Args - ---- - sync_fn: The synchronous chat/embedding request function to wrap. - async_fn: The asynchronous chat/embedding request function to wrap. - model_config: The configuration for the language model. - - Returns - ------- - A tuple containing the wrapped synchronous and asynchronous chat/embedding request functions. 
- """ - - def _wrapped_with_logging(**kwargs: Any) -> Any: - try: - return sync_fn(**kwargs) - except Exception as e: - logger.exception( - f"with_logging: Request failed with exception={e}", # noqa: G004, TRY401 - ) - raise - - async def _wrapped_with_logging_async( - **kwargs: Any, - ) -> Any: - try: - return await async_fn(**kwargs) - except Exception as e: - logger.exception( - f"with_logging: Async request failed with exception={e}", # noqa: G004, TRY401 - ) - raise - - return (_wrapped_with_logging, _wrapped_with_logging_async) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_rate_limiter.py b/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_rate_limiter.py deleted file mode 100644 index 108369444f..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_rate_limiter.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM completion/embedding rate limiter wrapper.""" - -from typing import TYPE_CHECKING, Any - -from litellm import token_counter # type: ignore - -from graphrag.language_model.providers.litellm.services.rate_limiter.rate_limiter_factory import ( - RateLimiterFactory, -) -from graphrag.language_model.providers.litellm.types import ( - AsyncLitellmRequestFunc, - LitellmRequestFunc, -) - -if TYPE_CHECKING: - from graphrag.config.models.language_model_config import LanguageModelConfig - - -def with_rate_limiter( - *, - sync_fn: LitellmRequestFunc, - async_fn: AsyncLitellmRequestFunc, - model_config: "LanguageModelConfig", - rpm: int | None = None, - tpm: int | None = None, -) -> tuple[LitellmRequestFunc, AsyncLitellmRequestFunc]: - """ - Wrap the synchronous and asynchronous request functions with rate limiting. - - Args - ---- - sync_fn: The synchronous chat/embedding request function to wrap. - async_fn: The asynchronous chat/embedding request function to wrap. - model_config: The configuration for the language model. - processing_event: A threading event that can be used to pause the rate limiter. - rpm: An optional requests per minute limit. - tpm: An optional tokens per minute limit. - - If `rpm` and `tpm` is set to 0 or None, rate limiting is disabled. - - Returns - ------- - A tuple containing the wrapped synchronous and asynchronous chat/embedding request functions. - """ - rate_limiter_factory = RateLimiterFactory() - - if ( - model_config.rate_limit_strategy is None - or model_config.rate_limit_strategy not in rate_limiter_factory - ): - msg = f"Rate Limiter strategy '{model_config.rate_limit_strategy}' is none or not registered. 
Available strategies: {', '.join(rate_limiter_factory.keys())}" - raise ValueError(msg) - - rate_limiter_service = rate_limiter_factory.create( - strategy=model_config.rate_limit_strategy, init_args={"rpm": rpm, "tpm": tpm} - ) - - max_tokens = model_config.max_completion_tokens or model_config.max_tokens or 0 - - def _wrapped_with_rate_limiter(**kwargs: Any) -> Any: - token_count = max_tokens - if "messages" in kwargs: - token_count += token_counter( - model=model_config.model, - messages=kwargs["messages"], - ) - elif "input" in kwargs: - token_count += token_counter( - model=model_config.model, - text=kwargs["input"], - ) - - with rate_limiter_service.acquire(token_count=token_count): - return sync_fn(**kwargs) - - async def _wrapped_with_rate_limiter_async( - **kwargs: Any, - ) -> Any: - token_count = max_tokens - if "messages" in kwargs: - token_count += token_counter( - model=model_config.model, - messages=kwargs["messages"], - ) - elif "input" in kwargs: - token_count += token_counter( - model=model_config.model, - text=kwargs["input"], - ) - - with rate_limiter_service.acquire(token_count=token_count): - return await async_fn(**kwargs) - - return (_wrapped_with_rate_limiter, _wrapped_with_rate_limiter_async) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_retries.py b/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_retries.py deleted file mode 100644 index 53e13f3fe9..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/request_wrappers/with_retries.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM completion/embedding retries wrapper.""" - -from typing import TYPE_CHECKING, Any - -from graphrag.language_model.providers.litellm.services.retry.retry_factory import ( - RetryFactory, -) -from graphrag.language_model.providers.litellm.types import ( - AsyncLitellmRequestFunc, - LitellmRequestFunc, -) - -if TYPE_CHECKING: - from graphrag.config.models.language_model_config import LanguageModelConfig - - -def with_retries( - *, - sync_fn: LitellmRequestFunc, - async_fn: AsyncLitellmRequestFunc, - model_config: "LanguageModelConfig", -) -> tuple[LitellmRequestFunc, AsyncLitellmRequestFunc]: - """ - Wrap the synchronous and asynchronous request functions with retries. - - Args - ---- - sync_fn: The synchronous chat/embedding request function to wrap. - async_fn: The asynchronous chat/embedding request function to wrap. - model_config: The configuration for the language model. - - Returns - ------- - A tuple containing the wrapped synchronous and asynchronous chat/embedding request functions. 
- """ - retry_factory = RetryFactory() - retry_service = retry_factory.create( - strategy=model_config.retry_strategy, - init_args={ - "max_retries": model_config.max_retries, - "max_retry_wait": model_config.max_retry_wait, - }, - ) - - def _wrapped_with_retries(**kwargs: Any) -> Any: - return retry_service.retry(func=sync_fn, **kwargs) - - async def _wrapped_with_retries_async( - **kwargs: Any, - ) -> Any: - return await retry_service.aretry(func=async_fn, **kwargs) - - return (_wrapped_with_retries, _wrapped_with_retries_async) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/__init__.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/__init__.py deleted file mode 100644 index 3c80d9f3f1..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM Rate Limiter.""" diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/rate_limiter_factory.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/rate_limiter_factory.py deleted file mode 100644 index 084f0bd0d1..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/rate_limiter_factory.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM Rate Limiter Factory.""" - -from graphrag_common.factory import Factory - -from graphrag.language_model.providers.litellm.services.rate_limiter.rate_limiter import ( - RateLimiter, -) -from graphrag.language_model.providers.litellm.services.rate_limiter.static_rate_limiter import ( - StaticRateLimiter, -) - - -class RateLimiterFactory(Factory[RateLimiter]): - """Singleton factory for creating rate limiter services.""" - - -rate_limiter_factory = RateLimiterFactory() -rate_limiter_factory.register("static", StaticRateLimiter) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/static_rate_limiter.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/static_rate_limiter.py deleted file mode 100644 index 43681ceb10..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/rate_limiter/static_rate_limiter.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM Static Rate Limiter.""" - -import threading -import time -from collections import deque -from collections.abc import Iterator -from contextlib import contextmanager -from typing import Any - -from graphrag.language_model.providers.litellm.services.rate_limiter.rate_limiter import ( - RateLimiter, -) - - -class StaticRateLimiter(RateLimiter): - """Static Rate Limiter implementation.""" - - def __init__( - self, - *, - rpm: int | None = None, - tpm: int | None = None, - default_stagger: float = 0.0, - period_in_seconds: int = 60, - **kwargs: Any, - ): - if rpm is None and tpm is None: - msg = "Both TPM and RPM cannot be None (disabled), one or both must be set to a positive integer." - raise ValueError(msg) - if (rpm is not None and rpm <= 0) or (tpm is not None and tpm <= 0): - msg = "RPM and TPM must be either None (disabled) or positive integers." - raise ValueError(msg) - if default_stagger < 0: - msg = "Default stagger must be a >= 0." 
- raise ValueError(msg) - if period_in_seconds <= 0: - msg = "Period in seconds must be a positive integer." - raise ValueError(msg) - self.rpm = rpm - self.tpm = tpm - self._lock = threading.Lock() - self.rate_queue: deque[float] = deque() - self.token_queue: deque[int] = deque() - self.period_in_seconds = period_in_seconds - self._last_time: float | None = None - - self.stagger = default_stagger - if self.rpm is not None and self.rpm > 0: - self.stagger = self.period_in_seconds / self.rpm - - @contextmanager - def acquire(self, *, token_count: int) -> Iterator[None]: - """ - Acquire Rate Limiter. - - Args - ---- - token_count: The estimated number of tokens for the current request. - - Yields - ------ - None: This context manager does not return any value. - """ - while True: - with self._lock: - current_time = time.time() - - # Use two sliding windows to keep track of #requests and tokens per period - # Drop old requests and tokens out of the sliding windows - while ( - len(self.rate_queue) > 0 - and self.rate_queue[0] < current_time - self.period_in_seconds - ): - self.rate_queue.popleft() - self.token_queue.popleft() - - # If sliding window still exceed request limit, wait again - # Waiting requires reacquiring the lock, allowing other threads - # to see if their request fits within the rate limiting windows - # Makes more sense for token limit than request limit - if ( - self.rpm is not None - and self.rpm > 0 - and len(self.rate_queue) >= self.rpm - ): - continue - - # Check if current token window exceeds token limit - # If it does, wait again - # This does not account for the tokens from the current request - # This is intentional, as we want to allow the current request - # to be processed if it is larger than the tpm but smaller than context window. - # tpm is a rate/soft limit and not the hard limit of context window limits. - if ( - self.tpm is not None - and self.tpm > 0 - and sum(self.token_queue) >= self.tpm - ): - continue - - # This check accounts for the current request token usage - # is within the token limits bound. - # If the current requests token limit exceeds the token limit, - # Then let it be processed. - if ( - self.tpm is not None - and self.tpm > 0 - and token_count <= self.tpm - and sum(self.token_queue) + token_count > self.tpm - ): - continue - - # If there was a previous call, check if we need to stagger - if ( - self.stagger > 0 - and ( - self._last_time # is None if this is the first hit to the rate limiter - and current_time - self._last_time - < self.stagger # If more time has passed than the stagger time, we can proceed - ) - ): - time.sleep(self.stagger - (current_time - self._last_time)) - current_time = time.time() - - # Add the current request to the sliding window - self.rate_queue.append(current_time) - self.token_queue.append(token_count) - self._last_time = current_time - break - yield diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/__init__.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/__init__.py deleted file mode 100644 index f01e0020e8..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. 
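The StaticRateLimiter deleted above maintains two sliding windows (request timestamps and token counts) plus an optional stagger between calls. Its core requests-per-minute behaviour reduces to a few lines; the following is a simplified, single-threaded sketch of the sliding-window idea, not the deleted class itself:

```python
import time
from collections import deque


class SimpleRpmLimiter:
    """Minimal requests-per-minute limiter using a sliding window of timestamps."""

    def __init__(self, rpm: int, period_in_seconds: float = 60.0):
        if rpm <= 0:
            msg = "rpm must be a positive integer"
            raise ValueError(msg)
        self.rpm = rpm
        self.period = period_in_seconds
        self._timestamps: deque[float] = deque()

    def acquire(self) -> None:
        while True:
            now = time.time()
            # Drop requests that have fallen out of the window.
            while self._timestamps and self._timestamps[0] < now - self.period:
                self._timestamps.popleft()
            if len(self._timestamps) < self.rpm:
                self._timestamps.append(now)
                return
            # Wait until the oldest request leaves the window, then re-check.
            time.sleep(self._timestamps[0] + self.period - now)


if __name__ == "__main__":
    limiter = SimpleRpmLimiter(rpm=2, period_in_seconds=1.0)
    for i in range(5):
        limiter.acquire()
        print(f"request {i} at {time.time():.2f}")
```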
-# Licensed under the MIT License - -"""LiteLLM Retry Services.""" diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/exponential_retry.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/exponential_retry.py deleted file mode 100644 index e008322be0..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/exponential_retry.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM Exponential Retry Service.""" - -import asyncio -import logging -import random -import time -from collections.abc import Awaitable, Callable -from typing import Any - -from graphrag.language_model.providers.litellm.services.retry.retry import Retry - -logger = logging.getLogger(__name__) - - -class ExponentialRetry(Retry): - """LiteLLM Exponential Retry Service.""" - - def __init__( - self, - *, - max_retries: int = 5, - base_delay: float = 2.0, - jitter: bool = True, - **kwargs: Any, - ): - if max_retries <= 0: - msg = "max_retries must be greater than 0." - raise ValueError(msg) - - if base_delay <= 1.0: - msg = "base_delay must be greater than 1.0." - raise ValueError(msg) - - self._max_retries = max_retries - self._base_delay = base_delay - self._jitter = jitter - - def retry(self, func: Callable[..., Any], **kwargs: Any) -> Any: - """Retry a synchronous function.""" - retries = 0 - delay = 1.0 # Initial delay in seconds - while True: - try: - return func(**kwargs) - except Exception as e: - if retries >= self._max_retries: - logger.exception( - f"ExponentialRetry: Max retries exceeded, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - raise - retries += 1 - delay *= self._base_delay - logger.exception( - f"ExponentialRetry: Request failed, retrying, retries={retries}, delay={delay}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - time.sleep(delay + (self._jitter * random.uniform(0, 1))) # noqa: S311 - - async def aretry( - self, - func: Callable[..., Awaitable[Any]], - **kwargs: Any, - ) -> Any: - """Retry an asynchronous function.""" - retries = 0 - delay = 1.0 # Initial delay in seconds - while True: - try: - return await func(**kwargs) - except Exception as e: - if retries >= self._max_retries: - logger.exception( - f"ExponentialRetry: Max retries exceeded, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - raise - retries += 1 - delay *= self._base_delay - logger.exception( - f"ExponentialRetry: Request failed, retrying, retries={retries}, delay={delay}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - await asyncio.sleep(delay + (self._jitter * random.uniform(0, 1))) # noqa: S311 diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/incremental_wait_retry.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/incremental_wait_retry.py deleted file mode 100644 index 97fbdbf9c9..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/incremental_wait_retry.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. 
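In the ExponentialRetry service just removed, the delay starts at 1.0 second and is multiplied by base_delay on every failed attempt, with up to one second of random jitter added when jitter is enabled. A small sketch that reproduces that delay schedule without performing any requests:

```python
import random


def backoff_delays(
    base_delay: float = 2.0, max_retries: int = 5, jitter: bool = True
) -> list[float]:
    """Mirror the deleted ExponentialRetry schedule: delay starts at 1.0 and is
    multiplied by base_delay after each failure, plus up to 1s of jitter."""
    delay = 1.0
    delays: list[float] = []
    for _ in range(max_retries):
        delay *= base_delay
        delays.append(delay + (random.uniform(0, 1) if jitter else 0.0))
    return delays


print(backoff_delays())  # e.g. [2.x, 4.x, 8.x, 16.x, 32.x]
```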
-# Licensed under the MIT License - -"""LiteLLM Incremental Wait Retry Service.""" - -import asyncio -import logging -import time -from collections.abc import Awaitable, Callable -from typing import Any - -from graphrag.language_model.providers.litellm.services.retry.retry import Retry - -logger = logging.getLogger(__name__) - - -class IncrementalWaitRetry(Retry): - """LiteLLM Incremental Wait Retry Service.""" - - def __init__( - self, - *, - max_retry_wait: float, - max_retries: int = 5, - **kwargs: Any, - ): - if max_retries <= 0: - msg = "max_retries must be greater than 0." - raise ValueError(msg) - - if max_retry_wait <= 0: - msg = "max_retry_wait must be greater than 0." - raise ValueError(msg) - - self._max_retries = max_retries - self._max_retry_wait = max_retry_wait - self._increment = max_retry_wait / max_retries - - def retry(self, func: Callable[..., Any], **kwargs: Any) -> Any: - """Retry a synchronous function.""" - retries = 0 - delay = 0.0 - while True: - try: - return func(**kwargs) - except Exception as e: - if retries >= self._max_retries: - logger.exception( - f"IncrementalWaitRetry: Max retries exceeded, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - raise - retries += 1 - delay += self._increment - logger.exception( - f"IncrementalWaitRetry: Request failed, retrying after incremental delay, retries={retries}, delay={delay}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - time.sleep(delay) - - async def aretry( - self, - func: Callable[..., Awaitable[Any]], - **kwargs: Any, - ) -> Any: - """Retry an asynchronous function.""" - retries = 0 - delay = 0.0 - while True: - try: - return await func(**kwargs) - except Exception as e: - if retries >= self._max_retries: - logger.exception( - f"IncrementalWaitRetry: Max retries exceeded, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - raise - retries += 1 - delay += self._increment - logger.exception( - f"IncrementalWaitRetry: Request failed, retrying after incremental delay, retries={retries}, delay={delay}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - await asyncio.sleep(delay) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/native_wait_retry.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/native_wait_retry.py deleted file mode 100644 index 088f454213..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/native_wait_retry.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM Native Retry Service.""" - -import logging -from collections.abc import Awaitable, Callable -from typing import Any - -from graphrag.language_model.providers.litellm.services.retry.retry import Retry - -logger = logging.getLogger(__name__) - - -class NativeRetry(Retry): - """LiteLLM Native Retry Service.""" - - def __init__( - self, - *, - max_retries: int = 5, - **kwargs: Any, - ): - if max_retries <= 0: - msg = "max_retries must be greater than 0." 
- raise ValueError(msg) - - self._max_retries = max_retries - - def retry(self, func: Callable[..., Any], **kwargs: Any) -> Any: - """Retry a synchronous function.""" - retries = 0 - while True: - try: - return func(**kwargs) - except Exception as e: - if retries >= self._max_retries: - logger.exception( - f"NativeRetry: Max retries exceeded, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - raise - retries += 1 - logger.exception( - f"NativeRetry: Request failed, immediately retrying, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - - async def aretry( - self, - func: Callable[..., Awaitable[Any]], - **kwargs: Any, - ) -> Any: - """Retry an asynchronous function.""" - retries = 0 - while True: - try: - return await func(**kwargs) - except Exception as e: - if retries >= self._max_retries: - logger.exception( - f"NativeRetry: Max retries exceeded, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - raise - retries += 1 - logger.exception( - f"NativeRetry: Request failed, immediately retrying, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/random_wait_retry.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/random_wait_retry.py deleted file mode 100644 index 603f439d1f..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/random_wait_retry.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM Random Wait Retry Service.""" - -import asyncio -import logging -import random -import time -from collections.abc import Awaitable, Callable -from typing import Any - -from graphrag.language_model.providers.litellm.services.retry.retry import Retry - -logger = logging.getLogger(__name__) - - -class RandomWaitRetry(Retry): - """LiteLLM Random Wait Retry Service.""" - - def __init__( - self, - *, - max_retry_wait: float, - max_retries: int = 5, - **kwargs: Any, - ): - if max_retries <= 0: - msg = "max_retries must be greater than 0." - raise ValueError(msg) - - if max_retry_wait <= 0: - msg = "max_retry_wait must be greater than 0." 
- raise ValueError(msg) - - self._max_retries = max_retries - self._max_retry_wait = max_retry_wait - - def retry(self, func: Callable[..., Any], **kwargs: Any) -> Any: - """Retry a synchronous function.""" - retries = 0 - while True: - try: - return func(**kwargs) - except Exception as e: - if retries >= self._max_retries: - logger.exception( - f"RandomWaitRetry: Max retries exceeded, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - raise - retries += 1 - delay = random.uniform(0, self._max_retry_wait) # noqa: S311 - logger.exception( - f"RandomWaitRetry: Request failed, retrying after random delay, retries={retries}, delay={delay}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - time.sleep(delay) - - async def aretry( - self, - func: Callable[..., Awaitable[Any]], - **kwargs: Any, - ) -> Any: - """Retry an asynchronous function.""" - retries = 0 - while True: - try: - return await func(**kwargs) - except Exception as e: - if retries >= self._max_retries: - logger.exception( - f"RandomWaitRetry: Max retries exceeded, retries={retries}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - raise - retries += 1 - delay = random.uniform(0, self._max_retry_wait) # noqa: S311 - logger.exception( - f"RandomWaitRetry: Request failed, retrying after random delay, retries={retries}, delay={delay}, max_retries={self._max_retries}, exception={e}", # noqa: G004, TRY401 - ) - await asyncio.sleep(delay) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/retry.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/retry.py deleted file mode 100644 index 4f53e598c6..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/retry.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM Retry Abstract Base Class.""" - -from abc import ABC, abstractmethod -from collections.abc import Awaitable, Callable -from typing import Any - - -class Retry(ABC): - """LiteLLM Retry Abstract Base Class.""" - - @abstractmethod - def __init__(self, /, **kwargs: Any): - msg = "Retry subclasses must implement the __init__ method." - raise NotImplementedError(msg) - - @abstractmethod - def retry(self, func: Callable[..., Any], **kwargs: Any) -> Any: - """Retry a synchronous function.""" - msg = "Subclasses must implement this method" - raise NotImplementedError(msg) - - @abstractmethod - async def aretry( - self, - func: Callable[..., Awaitable[Any]], - **kwargs: Any, - ) -> Any: - """Retry an asynchronous function.""" - msg = "Subclasses must implement this method" - raise NotImplementedError(msg) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/retry_factory.py b/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/retry_factory.py deleted file mode 100644 index 692463b0ca..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/services/retry/retry_factory.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. 
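The abstract Retry class above pairs with the factory module that follows: new strategies are added by subclassing Retry and registering the class under a strategy name. A hedged sketch of a custom strategy; the "fixed_wait" name is hypothetical, and the imports refer to modules deleted by this change, so this only applies to the pre-change layout:

```python
import asyncio
import time
from collections.abc import Awaitable, Callable
from typing import Any

from graphrag.language_model.providers.litellm.services.retry.retry import Retry
from graphrag.language_model.providers.litellm.services.retry.retry_factory import (
    retry_factory,
)


class FixedWaitRetry(Retry):
    """Retry with a constant delay between attempts (illustrative only)."""

    def __init__(self, *, wait_seconds: float = 1.0, max_retries: int = 5, **kwargs: Any):
        self._wait = wait_seconds
        self._max_retries = max_retries

    def retry(self, func: Callable[..., Any], **kwargs: Any) -> Any:
        for attempt in range(self._max_retries + 1):
            try:
                return func(**kwargs)
            except Exception:
                if attempt == self._max_retries:
                    raise
                time.sleep(self._wait)

    async def aretry(self, func: Callable[..., Awaitable[Any]], **kwargs: Any) -> Any:
        for attempt in range(self._max_retries + 1):
            try:
                return await func(**kwargs)
            except Exception:
                if attempt == self._max_retries:
                    raise
                await asyncio.sleep(self._wait)


retry_factory.register("fixed_wait", FixedWaitRetry)
```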
-# Licensed under the MIT License - -"""LiteLLM Retry Factory.""" - -from graphrag_common.factory import Factory - -from graphrag.language_model.providers.litellm.services.retry.exponential_retry import ( - ExponentialRetry, -) -from graphrag.language_model.providers.litellm.services.retry.incremental_wait_retry import ( - IncrementalWaitRetry, -) -from graphrag.language_model.providers.litellm.services.retry.native_wait_retry import ( - NativeRetry, -) -from graphrag.language_model.providers.litellm.services.retry.random_wait_retry import ( - RandomWaitRetry, -) -from graphrag.language_model.providers.litellm.services.retry.retry import Retry - - -class RetryFactory(Factory[Retry]): - """Singleton factory for creating retry services.""" - - -retry_factory = RetryFactory() - -retry_factory.register("native", NativeRetry) -retry_factory.register("exponential_backoff", ExponentialRetry) -retry_factory.register("random_wait", RandomWaitRetry) -retry_factory.register("incremental_wait", IncrementalWaitRetry) diff --git a/packages/graphrag/graphrag/language_model/providers/litellm/types.py b/packages/graphrag/graphrag/language_model/providers/litellm/types.py deleted file mode 100644 index cec39b13e2..0000000000 --- a/packages/graphrag/graphrag/language_model/providers/litellm/types.py +++ /dev/null @@ -1,235 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""LiteLLM types.""" - -from typing import ( - Any, - Protocol, - runtime_checkable, -) - -from litellm import ( - AnthropicThinkingParam, - BaseModel, - ChatCompletionAudioParam, - ChatCompletionModality, - ChatCompletionPredictionContentParam, - CustomStreamWrapper, - EmbeddingResponse, # type: ignore - ModelResponse, # type: ignore - OpenAIWebSearchOptions, -) -from openai.types.chat.chat_completion import ( - ChatCompletion, - Choice, -) -from openai.types.chat.chat_completion_chunk import ChatCompletionChunk, ChoiceDelta -from openai.types.chat.chat_completion_chunk import Choice as ChunkChoice -from openai.types.chat.chat_completion_message import ChatCompletionMessage -from openai.types.chat.chat_completion_message_param import ChatCompletionMessageParam -from openai.types.completion_usage import ( - CompletionTokensDetails, - CompletionUsage, - PromptTokensDetails, -) -from openai.types.create_embedding_response import CreateEmbeddingResponse, Usage -from openai.types.embedding import Embedding - -LMChatCompletionMessageParam = ChatCompletionMessageParam | dict[str, str] - -LMChatCompletion = ChatCompletion -LMChoice = Choice -LMChatCompletionMessage = ChatCompletionMessage - -LMChatCompletionChunk = ChatCompletionChunk -LMChoiceChunk = ChunkChoice -LMChoiceDelta = ChoiceDelta - -LMCompletionUsage = CompletionUsage -LMPromptTokensDetails = PromptTokensDetails -LMCompletionTokensDetails = CompletionTokensDetails - - -LMEmbeddingResponse = CreateEmbeddingResponse -LMEmbedding = Embedding -LMEmbeddingUsage = Usage - - -@runtime_checkable -class FixedModelCompletion(Protocol): - """ - Synchronous chat completion function. - - Same signature as litellm.completion but without the `model` parameter - as this is already set in the model configuration. 
- """ - - def __call__( - self, - *, - messages: list = [], # type: ignore # noqa: B006 - stream: bool | None = None, - stream_options: dict | None = None, # type: ignore - stop=None, # type: ignore - max_completion_tokens: int | None = None, - max_tokens: int | None = None, - modalities: list[ChatCompletionModality] | None = None, - prediction: ChatCompletionPredictionContentParam | None = None, - audio: ChatCompletionAudioParam | None = None, - logit_bias: dict | None = None, # type: ignore - user: str | None = None, - # openai v1.0+ new params - response_format: dict | type[BaseModel] | None = None, # type: ignore - seed: int | None = None, - tools: list | None = None, # type: ignore - tool_choice: str | dict | None = None, # type: ignore - logprobs: bool | None = None, - top_logprobs: int | None = None, - parallel_tool_calls: bool | None = None, - web_search_options: OpenAIWebSearchOptions | None = None, - deployment_id=None, # type: ignore - extra_headers: dict | None = None, # type: ignore - # soon to be deprecated params by OpenAI - functions: list | None = None, # type: ignore - function_call: str | None = None, - # Optional liteLLM function params - thinking: AnthropicThinkingParam | None = None, - **kwargs: Any, - ) -> ModelResponse | CustomStreamWrapper: - """Chat completion function.""" - ... - - -@runtime_checkable -class AFixedModelCompletion(Protocol): - """ - Asynchronous chat completion function. - - Same signature as litellm.acompletion but without the `model` parameter - as this is already set in the model configuration. - """ - - async def __call__( - self, - *, - # Optional OpenAI params: see https://platform.openai.com/docs/api-reference/chat/create - messages: list = [], # type: ignore # noqa: B006 - stream: bool | None = None, - stream_options: dict | None = None, # type: ignore - stop=None, # type: ignore - max_completion_tokens: int | None = None, - max_tokens: int | None = None, - modalities: list[ChatCompletionModality] | None = None, - prediction: ChatCompletionPredictionContentParam | None = None, - audio: ChatCompletionAudioParam | None = None, - logit_bias: dict | None = None, # type: ignore - user: str | None = None, - # openai v1.0+ new params - response_format: dict | type[BaseModel] | None = None, # type: ignore - seed: int | None = None, - tools: list | None = None, # type: ignore - tool_choice: str | dict | None = None, # type: ignore - logprobs: bool | None = None, - top_logprobs: int | None = None, - parallel_tool_calls: bool | None = None, - web_search_options: OpenAIWebSearchOptions | None = None, - deployment_id=None, # type: ignore - extra_headers: dict | None = None, # type: ignore - # soon to be deprecated params by OpenAI - functions: list | None = None, # type: ignore - function_call: str | None = None, - # Optional liteLLM function params - thinking: AnthropicThinkingParam | None = None, - **kwargs: Any, - ) -> ModelResponse | CustomStreamWrapper: - """Chat completion function.""" - ... - - -@runtime_checkable -class FixedModelEmbedding(Protocol): - """ - Synchronous embedding function. - - Same signature as litellm.embedding but without the `model` parameter - as this is already set in the model configuration. 
- """ - - def __call__( - self, - *, - request_id: str | None = None, - input: list = [], # type: ignore # noqa: B006 - # Optional params - dimensions: int | None = None, - encoding_format: str | None = None, - timeout: int = 600, # default to 10 minutes - # set api_base, api_version, api_key - api_base: str | None = None, - api_version: str | None = None, - api_key: str | None = None, - api_type: str | None = None, - caching: bool = False, - user: str | None = None, - **kwargs: Any, - ) -> EmbeddingResponse: - """Embedding function.""" - ... - - -@runtime_checkable -class AFixedModelEmbedding(Protocol): - """ - Asynchronous embedding function. - - Same signature as litellm.embedding but without the `model` parameter - as this is already set in the model configuration. - """ - - async def __call__( - self, - *, - request_id: str | None = None, - input: list = [], # type: ignore # noqa: B006 - # Optional params - dimensions: int | None = None, - encoding_format: str | None = None, - timeout: int = 600, # default to 10 minutes - # set api_base, api_version, api_key - api_base: str | None = None, - api_version: str | None = None, - api_key: str | None = None, - api_type: str | None = None, - caching: bool = False, - user: str | None = None, - **kwargs: Any, - ) -> EmbeddingResponse: - """Embedding function.""" - ... - - -@runtime_checkable -class LitellmRequestFunc(Protocol): - """ - Synchronous request function. - - Represents either a chat completion or embedding function. - """ - - def __call__(self, /, **kwargs: Any) -> Any: - """Request function.""" - ... - - -@runtime_checkable -class AsyncLitellmRequestFunc(Protocol): - """ - Asynchronous request function. - - Represents either a chat completion or embedding function. - """ - - async def __call__(self, /, **kwargs: Any) -> Any: - """Request function.""" - ... diff --git a/packages/graphrag/graphrag/language_model/response/__init__.py b/packages/graphrag/graphrag/language_model/response/__init__.py deleted file mode 100644 index 3c4721caab..0000000000 --- a/packages/graphrag/graphrag/language_model/response/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""A module containing Model response definitions.""" diff --git a/packages/graphrag/graphrag/language_model/response/base.py b/packages/graphrag/graphrag/language_model/response/base.py deleted file mode 100644 index 178259c4b7..0000000000 --- a/packages/graphrag/graphrag/language_model/response/base.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""Base llm response protocol.""" - -from typing import Any, Generic, Protocol, TypeVar - -from pydantic import BaseModel, Field - -T = TypeVar("T", bound=BaseModel, covariant=True) - - -class ModelOutput(Protocol): - """Protocol for Model response's output object.""" - - @property - def content(self) -> str: - """Return the textual content of the output.""" - ... - - @property - def full_response(self) -> dict[str, Any] | None: - """Return the complete JSON response returned by the model.""" - ... - - -class ModelResponse(Protocol, Generic[T]): - """Protocol for LLM response.""" - - @property - def output(self) -> ModelOutput: - """Return the output of the response.""" - ... - - @property - def parsed_response(self) -> T | None: - """Return the parsed response.""" - ... - - @property - def history(self) -> list: - """Return the history of the response.""" - ... 
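ModelOutput and ModelResponse above are plain typing.Protocol classes, so any object exposing the same properties satisfies them without inheriting from them; the Pydantic classes that follow are just one such implementation. A self-contained sketch of that structural-typing idea (PlainOutput is illustrative and not part of the codebase):

```python
from typing import Any, Protocol


class ModelOutput(Protocol):
    """Repeated here so the sketch is self-contained."""

    @property
    def content(self) -> str: ...

    @property
    def full_response(self) -> dict[str, Any] | None: ...


class PlainOutput:
    """Any class with the same properties satisfies the protocol structurally."""

    def __init__(self, content: str):
        self._content = content

    @property
    def content(self) -> str:
        return self._content

    @property
    def full_response(self) -> dict[str, Any] | None:
        return None


def show(output: ModelOutput) -> None:
    # Accepts PlainOutput (or the BaseModelOutput below) without explicit inheritance.
    print(output.content)


show(PlainOutput("structural typing in action"))
```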
- - -class BaseModelOutput(BaseModel): - """Base class for LLM output.""" - - content: str = Field(..., description="The textual content of the output.") - """The textual content of the output.""" - full_response: dict[str, Any] | None = Field( - None, description="The complete JSON response returned by the LLM provider." - ) - """The complete JSON response returned by the LLM provider.""" - - -class BaseModelResponse(BaseModel, Generic[T]): - """Base class for a Model response.""" - - output: BaseModelOutput - """""" - parsed_response: T | None = None - """Parsed response.""" - history: list[Any] = Field(default_factory=list) - """History of the response.""" - tool_calls: list = Field(default_factory=list) - """Tool calls required by the Model. These will be instances of the LLM tools (with filled parameters).""" - metrics: Any | None = None - """Request/response metrics.""" - cache_hit: bool | None = None - """Whether the response was a cache hit.""" diff --git a/packages/graphrag/graphrag/language_model/response/base.pyi b/packages/graphrag/graphrag/language_model/response/base.pyi deleted file mode 100644 index 7a33b0a304..0000000000 --- a/packages/graphrag/graphrag/language_model/response/base.pyi +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -from typing import Any, Generic, Protocol, TypeVar - -from pydantic import BaseModel - -_T = TypeVar("_T", bound=BaseModel, covariant=True) - -class ModelOutput(Protocol): - @property - def content(self) -> str: ... - @property - def full_response(self) -> dict[str, Any] | None: ... - -class ModelResponse(Protocol, Generic[_T]): - @property - def output(self) -> ModelOutput: ... - @property - def parsed_response(self) -> _T | None: ... - @property - def history(self) -> list[Any]: ... - -class BaseModelOutput(BaseModel): - content: str - full_response: dict[str, Any] | None - - def __init__( - self, - content: str, - full_response: dict[str, Any] | None = None, - ) -> None: ... - -class BaseModelResponse(BaseModel, Generic[_T]): - output: BaseModelOutput - parsed_response: _T | None - history: list[Any] - tool_calls: list[Any] - metrics: Any | None - cache_hit: bool | None - - def __init__( - self, - output: BaseModelOutput, - parsed_response: _T | None = None, - history: list[Any] = ..., # default provided by Pydantic - tool_calls: list[Any] = ..., # default provided by Pydantic - metrics: Any | None = None, - cache_hit: bool | None = None, - ) -> None: ... diff --git a/packages/graphrag/graphrag/language_model/util.py b/packages/graphrag/graphrag/language_model/util.py deleted file mode 100644 index fd33077afd..0000000000 --- a/packages/graphrag/graphrag/language_model/util.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (c) 2024 Microsoft Corporation. 
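The helper removed below branches on is_reasoning_model: OpenAI reasoning models (o1, o1-mini, o3-mini) receive max_completion_tokens and reasoning_effort, while other models receive the usual sampling parameters. A usage sketch, assuming the pre-change graphrag package where this utility still exists:

```python
# Illustrative only: this helper is deleted by this change.
from graphrag.language_model.util import get_openai_model_parameters_from_dict

reasoning = get_openai_model_parameters_from_dict({
    "model": "o3-mini",
    "n": 1,
    "max_completion_tokens": 2000,
    "reasoning_effort": "medium",
})
standard = get_openai_model_parameters_from_dict({
    "model": "gpt-4o",
    "n": 1,
    "max_tokens": 2000,
    "temperature": 0.0,
    "frequency_penalty": 0.0,
    "presence_penalty": 0.0,
    "top_p": 1.0,
})

print(reasoning)  # {'n': 1, 'max_completion_tokens': 2000, 'reasoning_effort': 'medium'}
print(standard)   # n, max_tokens, temperature, frequency/presence penalties, top_p
```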
-# Licensed under the MIT License - -"""Utility functions for language models.""" - -from typing import Any - -from graphrag.config.models.language_model_config import LanguageModelConfig - - -def is_reasoning_model(model: str) -> bool: - """Return whether the model uses a known OpenAI reasoning model.""" - return model.lower() in {"o1", "o1-mini", "o3-mini"} - - -def get_openai_model_parameters_from_config( - config: LanguageModelConfig, -) -> dict[str, Any]: - """Get the model parameters for a given config, adjusting for reasoning API differences.""" - return get_openai_model_parameters_from_dict(config.model_dump()) - - -def get_openai_model_parameters_from_dict(config: dict[str, Any]) -> dict[str, Any]: - """Get the model parameters for a given config, adjusting for reasoning API differences.""" - params = { - "n": config.get("n"), - } - if is_reasoning_model(config["model"]): - params["max_completion_tokens"] = config.get("max_completion_tokens") - params["reasoning_effort"] = config.get("reasoning_effort") - else: - params["max_tokens"] = config.get("max_tokens") - params["temperature"] = config.get("temperature") - params["frequency_penalty"] = config.get("frequency_penalty") - params["presence_penalty"] = config.get("presence_penalty") - params["top_p"] = config.get("top_p") - - if config.get("response_format"): - params["response_format"] = config["response_format"] - - return params diff --git a/packages/graphrag/graphrag/logger/standard_logging.py b/packages/graphrag/graphrag/logger/standard_logging.py index d9e4d0f26f..de62f5e031 100644 --- a/packages/graphrag/graphrag/logger/standard_logging.py +++ b/packages/graphrag/graphrag/logger/standard_logging.py @@ -67,13 +67,20 @@ def init_loggers( log_level = logging.DEBUG if verbose else logging.INFO logger.setLevel(log_level) - # clear any existing handlers to avoid duplicate logs - if logger.hasHandlers(): - # Close file handlers properly before removing them - for handler in logger.handlers: - if isinstance(handler, logging.FileHandler): - handler.close() - logger.handlers.clear() + llm_logger = logging.getLogger("graphrag_llm") + llm_logger.setLevel(log_level) + + def _clear_handlers(logger: logging.Logger) -> None: + # clear any existing handlers to avoid duplicate logs + if logger.hasHandlers(): + # Close file handlers properly before removing them + for handler in logger.handlers: + if isinstance(handler, logging.FileHandler): + handler.close() + logger.handlers.clear() + + _clear_handlers(logger) + _clear_handlers(llm_logger) reporting_config = config.reporting config_dict = reporting_config.model_dump() @@ -81,3 +88,4 @@ def init_loggers( handler = LoggerFactory().create(reporting_config.type, args) logger.addHandler(handler) + llm_logger.addHandler(handler) diff --git a/packages/graphrag/graphrag/prompt_tune/defaults.py b/packages/graphrag/graphrag/prompt_tune/defaults.py index 6fb84f170a..2095b5c8ac 100644 --- a/packages/graphrag/graphrag/prompt_tune/defaults.py +++ b/packages/graphrag/graphrag/prompt_tune/defaults.py @@ -17,4 +17,4 @@ MIN_CHUNK_SIZE = 200 N_SUBSET_MAX = 300 MIN_CHUNK_OVERLAP = 0 -PROMPT_TUNING_MODEL_ID = "default_chat_model" +PROMPT_TUNING_MODEL_ID = "default_completion_model" diff --git a/packages/graphrag/graphrag/prompt_tune/generator/community_report_rating.py b/packages/graphrag/graphrag/prompt_tune/generator/community_report_rating.py index 22cf73105f..a4cae4c75f 100644 --- a/packages/graphrag/graphrag/prompt_tune/generator/community_report_rating.py +++ 
b/packages/graphrag/graphrag/prompt_tune/generator/community_report_rating.py @@ -2,21 +2,25 @@ # Copyright (c) 2024 Microsoft Corporation. # Licensed under the MIT License +from typing import TYPE_CHECKING -from graphrag.language_model.protocol.base import ChatModel from graphrag.prompt_tune.prompt.community_report_rating import ( GENERATE_REPORT_RATING_PROMPT, ) +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + async def generate_community_report_rating( - model: ChatModel, domain: str, persona: str, docs: str | list[str] + model: "LLMCompletion", domain: str, persona: str, docs: str | list[str] ) -> str: """Generate an LLM persona to use for GraphRAG prompts. Parameters ---------- - - llm (CompletionLLM): The LLM to use for generation + - model (LLMCompletion): The LLM to use for generation - domain (str): The domain to generate a rating for - persona (str): The persona to generate a rating for for - docs (str | list[str]): Documents used to contextualize the rating @@ -30,6 +34,8 @@ async def generate_community_report_rating( domain=domain, persona=persona, input_text=docs_str ) - response = await model.achat(domain_prompt) + response: LLMCompletionResponse = await model.completion_async( + messages=domain_prompt + ) # type: ignore - return str(response.output.content).strip() + return response.content diff --git a/packages/graphrag/graphrag/prompt_tune/generator/community_reporter_role.py b/packages/graphrag/graphrag/prompt_tune/generator/community_reporter_role.py index d3c90d181e..b38d678b06 100644 --- a/packages/graphrag/graphrag/prompt_tune/generator/community_reporter_role.py +++ b/packages/graphrag/graphrag/prompt_tune/generator/community_reporter_role.py @@ -3,20 +3,25 @@ """Generate a community reporter role for community summarization.""" -from graphrag.language_model.protocol.base import ChatModel +from typing import TYPE_CHECKING + from graphrag.prompt_tune.prompt.community_reporter_role import ( GENERATE_COMMUNITY_REPORTER_ROLE_PROMPT, ) +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + async def generate_community_reporter_role( - model: ChatModel, domain: str, persona: str, docs: str | list[str] + model: "LLMCompletion", domain: str, persona: str, docs: str | list[str] ) -> str: """Generate an LLM persona to use for GraphRAG prompts. 
Parameters ---------- - - llm (CompletionLLM): The LLM to use for generation + - model (LLMCompletion): The LLM to use for generation - domain (str): The domain to generate a persona for - persona (str): The persona to generate a role for - docs (str | list[str]): The domain to generate a persona for @@ -30,6 +35,8 @@ async def generate_community_reporter_role( domain=domain, persona=persona, input_text=docs_str ) - response = await model.achat(domain_prompt) + response: LLMCompletionResponse = await model.completion_async( + messages=domain_prompt + ) # type: ignore - return str(response.output.content) + return response.content diff --git a/packages/graphrag/graphrag/prompt_tune/generator/domain.py b/packages/graphrag/graphrag/prompt_tune/generator/domain.py index 7838594ccc..1135d67d9f 100644 --- a/packages/graphrag/graphrag/prompt_tune/generator/domain.py +++ b/packages/graphrag/graphrag/prompt_tune/generator/domain.py @@ -3,16 +3,21 @@ """Domain generation for GraphRAG prompts.""" -from graphrag.language_model.protocol.base import ChatModel +from typing import TYPE_CHECKING + from graphrag.prompt_tune.prompt.domain import GENERATE_DOMAIN_PROMPT +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + -async def generate_domain(model: ChatModel, docs: str | list[str]) -> str: +async def generate_domain(model: "LLMCompletion", docs: str | list[str]) -> str: """Generate an LLM persona to use for GraphRAG prompts. Parameters ---------- - - llm (CompletionLLM): The LLM to use for generation + - model (LLMCompletion): The LLM to use for generation - docs (str | list[str]): The domain to generate a persona for Returns @@ -22,6 +27,8 @@ async def generate_domain(model: ChatModel, docs: str | list[str]) -> str: docs_str = " ".join(docs) if isinstance(docs, list) else docs domain_prompt = GENERATE_DOMAIN_PROMPT.format(input_text=docs_str) - response = await model.achat(domain_prompt) + response: LLMCompletionResponse = await model.completion_async( + messages=domain_prompt + ) # type: ignore - return str(response.output.content) + return response.content diff --git a/packages/graphrag/graphrag/prompt_tune/generator/entity_relationship.py b/packages/graphrag/graphrag/prompt_tune/generator/entity_relationship.py index 70225cbb02..9282366e45 100644 --- a/packages/graphrag/graphrag/prompt_tune/generator/entity_relationship.py +++ b/packages/graphrag/graphrag/prompt_tune/generator/entity_relationship.py @@ -4,19 +4,27 @@ """Entity relationship example generation module.""" import asyncio +from typing import TYPE_CHECKING + +from graphrag_llm.utils import ( + CompletionMessagesBuilder, +) -from graphrag.language_model.protocol.base import ChatModel from graphrag.prompt_tune.prompt.entity_relationship import ( ENTITY_RELATIONSHIPS_GENERATION_JSON_PROMPT, ENTITY_RELATIONSHIPS_GENERATION_PROMPT, UNTYPED_ENTITY_RELATIONSHIPS_GENERATION_PROMPT, ) +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + MAX_EXAMPLES = 5 async def generate_entity_relationship_examples( - model: ChatModel, + model: "LLMCompletion", persona: str, entity_types: str | list[str] | None, docs: str | list[str], @@ -29,7 +37,8 @@ async def generate_entity_relationship_examples( on the json_mode parameter. 
""" docs_list = [docs] if isinstance(docs, str) else docs - history = [{"content": persona, "role": "system"}] + + msg_builder = CompletionMessagesBuilder().add_system_message(persona) if entity_types: entity_types_str = ( @@ -57,9 +66,13 @@ async def generate_entity_relationship_examples( messages = messages[:MAX_EXAMPLES] tasks = [ - model.achat(message, history=history, json=json_mode) for message in messages + model.completion_async( + messages=msg_builder.add_user_message(message).build(), + response_format_json_object=json_mode, + ) + for message in messages ] - responses = await asyncio.gather(*tasks) + responses: list[LLMCompletionResponse] = await asyncio.gather(*tasks) # type: ignore - return [str(response.output.content) for response in responses] + return [response.content for response in responses] diff --git a/packages/graphrag/graphrag/prompt_tune/generator/entity_types.py b/packages/graphrag/graphrag/prompt_tune/generator/entity_types.py index d68ab52115..21c58086e7 100644 --- a/packages/graphrag/graphrag/prompt_tune/generator/entity_types.py +++ b/packages/graphrag/graphrag/prompt_tune/generator/entity_types.py @@ -3,15 +3,23 @@ """Entity type generation module for fine-tuning.""" +from typing import TYPE_CHECKING + +from graphrag_llm.utils import ( + CompletionMessagesBuilder, +) from pydantic import BaseModel -from graphrag.language_model.protocol.base import ChatModel from graphrag.prompt_tune.defaults import DEFAULT_TASK from graphrag.prompt_tune.prompt.entity_types import ( ENTITY_TYPE_GENERATION_JSON_PROMPT, ENTITY_TYPE_GENERATION_PROMPT, ) +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + class EntityTypesResponse(BaseModel): """Entity types response model.""" @@ -20,7 +28,7 @@ class EntityTypesResponse(BaseModel): async def generate_entity_types( - model: ChatModel, + model: "LLMCompletion", domain: str, persona: str, docs: str | list[str], @@ -43,17 +51,24 @@ async def generate_entity_types( else ENTITY_TYPE_GENERATION_PROMPT ).format(task=formatted_task, input_text=docs_str) - history = [{"role": "system", "content": persona}] + messages = ( + CompletionMessagesBuilder() + .add_system_message(persona) + .add_user_message(entity_types_prompt) + .build() + ) if json_mode: - response = await model.achat( - entity_types_prompt, - history=history, - json=json_mode, - json_model=EntityTypesResponse, - ) - parsed_model = response.parsed_response + response: LLMCompletionResponse[ + EntityTypesResponse + ] = await model.completion_async( + messages=messages, + response_format=EntityTypesResponse, + ) # type: ignore + parsed_model = response.formatted_response return parsed_model.entity_types if parsed_model else [] - response = await model.achat(entity_types_prompt, history=history, json=json_mode) - return str(response.output.content) + non_json_response: LLMCompletionResponse = await model.completion_async( + messages=messages + ) # type: ignore + return non_json_response.content diff --git a/packages/graphrag/graphrag/prompt_tune/generator/extract_graph_prompt.py b/packages/graphrag/graphrag/prompt_tune/generator/extract_graph_prompt.py index db0f87cf97..46707c45bf 100644 --- a/packages/graphrag/graphrag/prompt_tune/generator/extract_graph_prompt.py +++ b/packages/graphrag/graphrag/prompt_tune/generator/extract_graph_prompt.py @@ -5,6 +5,8 @@ from pathlib import Path +from graphrag_llm.tokenizer import Tokenizer + from graphrag.prompt_tune.template.extract_graph import ( 
EXAMPLE_EXTRACTION_TEMPLATE, GRAPH_EXTRACTION_JSON_PROMPT, @@ -13,7 +15,6 @@ UNTYPED_GRAPH_EXTRACTION_PROMPT, ) from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer EXTRACT_GRAPH_FILENAME = "extract_graph.txt" diff --git a/packages/graphrag/graphrag/prompt_tune/generator/language.py b/packages/graphrag/graphrag/prompt_tune/generator/language.py index 5c00fd6b5a..029f31180b 100644 --- a/packages/graphrag/graphrag/prompt_tune/generator/language.py +++ b/packages/graphrag/graphrag/prompt_tune/generator/language.py @@ -3,16 +3,21 @@ """Language detection for GraphRAG prompts.""" -from graphrag.language_model.protocol.base import ChatModel +from typing import TYPE_CHECKING + from graphrag.prompt_tune.prompt.language import DETECT_LANGUAGE_PROMPT +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + -async def detect_language(model: ChatModel, docs: str | list[str]) -> str: +async def detect_language(model: "LLMCompletion", docs: str | list[str]) -> str: """Detect input language to use for GraphRAG prompts. Parameters ---------- - - llm (CompletionLLM): The LLM to use for generation + - model (LLMCompletion): The LLM to use for generation - docs (str | list[str]): The docs to detect language from Returns @@ -22,6 +27,8 @@ async def detect_language(model: ChatModel, docs: str | list[str]) -> str: docs_str = " ".join(docs) if isinstance(docs, list) else docs language_prompt = DETECT_LANGUAGE_PROMPT.format(input_text=docs_str) - response = await model.achat(language_prompt) + response: LLMCompletionResponse = await model.completion_async( + messages=language_prompt + ) # type: ignore - return str(response.output.content) + return response.content diff --git a/packages/graphrag/graphrag/prompt_tune/generator/persona.py b/packages/graphrag/graphrag/prompt_tune/generator/persona.py index b9bf485d8d..b6fdc18ff2 100644 --- a/packages/graphrag/graphrag/prompt_tune/generator/persona.py +++ b/packages/graphrag/graphrag/prompt_tune/generator/persona.py @@ -3,25 +3,32 @@ """Persona generating module for fine-tuning GraphRAG prompts.""" -from graphrag.language_model.protocol.base import ChatModel +from typing import TYPE_CHECKING + from graphrag.prompt_tune.defaults import DEFAULT_TASK from graphrag.prompt_tune.prompt.persona import GENERATE_PERSONA_PROMPT +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionResponse + async def generate_persona( - model: ChatModel, domain: str, task: str = DEFAULT_TASK + model: "LLMCompletion", domain: str, task: str = DEFAULT_TASK ) -> str: """Generate an LLM persona to use for GraphRAG prompts. Parameters ---------- - - llm (CompletionLLM): The LLM to use for generation + - model (LLMCompletion): The LLM to use for generation - domain (str): The domain to generate a persona for - task (str): The task to generate a persona for. 
Default is DEFAULT_TASK """ formatted_task = task.format(domain=domain) persona_prompt = GENERATE_PERSONA_PROMPT.format(sample_task=formatted_task) - response = await model.achat(persona_prompt) + response: LLMCompletionResponse = await model.completion_async( + messages=persona_prompt + ) # type: ignore - return str(response.output.content) + return response.content diff --git a/packages/graphrag/graphrag/prompt_tune/loader/input.py b/packages/graphrag/graphrag/prompt_tune/loader/input.py index 2b4e2b1861..0cfdb2299a 100644 --- a/packages/graphrag/graphrag/prompt_tune/loader/input.py +++ b/packages/graphrag/graphrag/prompt_tune/loader/input.py @@ -8,9 +8,9 @@ import numpy as np import pandas as pd -from graphrag_cache.noop_cache import NoopCache from graphrag_chunking.chunker_factory import create_chunker from graphrag_input import create_input_reader +from graphrag_llm.embedding import create_embedding from graphrag_storage import create_storage from graphrag.callbacks.noop_workflow_callbacks import NoopWorkflowCallbacks @@ -19,14 +19,12 @@ run_embed_text, ) from graphrag.index.workflows.create_base_text_units import create_base_text_units -from graphrag.language_model.manager import ModelManager from graphrag.prompt_tune.defaults import ( LIMIT, N_SUBSET_MAX, K, ) from graphrag.prompt_tune.types import DocSelectionType -from graphrag.tokenizer.get_tokenizer import get_tokenizer def _sample_chunks_from_embeddings( @@ -51,17 +49,11 @@ async def load_docs_in_chunks( k: int = K, ) -> list[str]: """Load docs into chunks for generating prompts.""" - embeddings_llm_settings = config.get_language_model_config( - config.embed_text.model_id + embeddings_llm_settings = config.get_embedding_model_config( + config.embed_text.embedding_model_id ) - model = ModelManager().get_or_create_embedding_model( - name="text_embedding", - model_type=embeddings_llm_settings.type, - config=embeddings_llm_settings, - callbacks=NoopWorkflowCallbacks(), - cache=NoopCache(), - ) - tokenizer = get_tokenizer(embeddings_llm_settings) + model = create_embedding(embeddings_llm_settings) + tokenizer = model.tokenizer chunker = create_chunker(config.chunking, tokenizer.encode, tokenizer.decode) input_storage = create_storage(config.input_storage) input_reader = create_input_reader(config.input, input_storage) @@ -99,7 +91,7 @@ async def load_docs_in_chunks( tokenizer=tokenizer, batch_size=config.embed_text.batch_size, batch_max_tokens=config.embed_text.batch_max_tokens, - num_threads=embeddings_llm_settings.concurrent_requests, + num_threads=config.concurrent_requests, ) embeddings = np.array(embedding_results.embeddings) chunks_df = _sample_chunks_from_embeddings(chunks_df, embeddings, k=k) diff --git a/packages/graphrag/graphrag/query/context_builder/community_context.py b/packages/graphrag/graphrag/query/context_builder/community_context.py index 4917be0186..2d3238b0d0 100644 --- a/packages/graphrag/graphrag/query/context_builder/community_context.py +++ b/packages/graphrag/graphrag/query/context_builder/community_context.py @@ -8,11 +8,11 @@ from typing import Any, cast import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag.data_model.community_report import CommunityReport from graphrag.data_model.entity import Entity from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer logger = logging.getLogger(__name__) diff --git a/packages/graphrag/graphrag/query/context_builder/conversation_history.py 
b/packages/graphrag/graphrag/query/context_builder/conversation_history.py index c20998121c..1170c7c0da 100644 --- a/packages/graphrag/graphrag/query/context_builder/conversation_history.py +++ b/packages/graphrag/graphrag/query/context_builder/conversation_history.py @@ -7,9 +7,9 @@ from enum import Enum import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer """ Enum for conversation roles diff --git a/packages/graphrag/graphrag/query/context_builder/dynamic_community_selection.py b/packages/graphrag/graphrag/query/context_builder/dynamic_community_selection.py index 3981e5c8e5..0ef22b9176 100644 --- a/packages/graphrag/graphrag/query/context_builder/dynamic_community_selection.py +++ b/packages/graphrag/graphrag/query/context_builder/dynamic_community_selection.py @@ -8,14 +8,17 @@ from collections import Counter from copy import deepcopy from time import time -from typing import Any +from typing import TYPE_CHECKING, Any + +from graphrag_llm.tokenizer import Tokenizer from graphrag.data_model.community import Community from graphrag.data_model.community_report import CommunityReport -from graphrag.language_model.protocol.base import ChatModel from graphrag.query.context_builder.rate_prompt import RATE_QUERY from graphrag.query.context_builder.rate_relevancy import rate_relevancy -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion logger = logging.getLogger(__name__) @@ -30,7 +33,7 @@ def __init__( self, community_reports: list[CommunityReport], communities: list[Community], - model: ChatModel, + model: "LLMCompletion", tokenizer: Tokenizer, rate_query: str = RATE_QUERY, use_summary: bool = False, diff --git a/packages/graphrag/graphrag/query/context_builder/entity_extraction.py b/packages/graphrag/graphrag/query/context_builder/entity_extraction.py index 289b3b7ea0..6eab03b5cc 100644 --- a/packages/graphrag/graphrag/query/context_builder/entity_extraction.py +++ b/packages/graphrag/graphrag/query/context_builder/entity_extraction.py @@ -4,18 +4,21 @@ """Orchestration Context Builders.""" from enum import Enum +from typing import TYPE_CHECKING from graphrag_vectors import VectorStore from graphrag.data_model.entity import Entity from graphrag.data_model.relationship import Relationship -from graphrag.language_model.protocol.base import EmbeddingModel from graphrag.query.input.retrieval.entities import ( get_entity_by_id, get_entity_by_key, get_entity_by_name, ) +if TYPE_CHECKING: + from graphrag_llm.embedding import LLMEmbedding + class EntityVectorStoreKey(str, Enum): """Keys used as ids in the entity embedding vectorstores.""" @@ -38,7 +41,7 @@ def from_string(value: str) -> "EntityVectorStoreKey": def map_query_to_entities( query: str, text_embedding_vectorstore: VectorStore, - text_embedder: EmbeddingModel, + text_embedder: "LLMEmbedding", all_entities_dict: dict[str, Entity], embedding_vectorstore_key: str = EntityVectorStoreKey.ID, include_entity_names: list[str] | None = None, @@ -58,7 +61,7 @@ def map_query_to_entities( # oversample to account for excluded entities search_results = text_embedding_vectorstore.similarity_search_by_text( text=query, - text_embedder=lambda t: text_embedder.embed(t), + text_embedder=lambda t: text_embedder.embedding(input=[t]).first_embedding, k=k * oversample_scaler, ) for result in search_results: diff --git 
a/packages/graphrag/graphrag/query/context_builder/local_context.py b/packages/graphrag/graphrag/query/context_builder/local_context.py index a2d8a54533..b84566bde0 100644 --- a/packages/graphrag/graphrag/query/context_builder/local_context.py +++ b/packages/graphrag/graphrag/query/context_builder/local_context.py @@ -7,6 +7,7 @@ from typing import Any, cast import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag.data_model.covariate import Covariate from graphrag.data_model.entity import Entity @@ -24,7 +25,6 @@ to_relationship_dataframe, ) from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer def build_entity_context( diff --git a/packages/graphrag/graphrag/query/context_builder/rate_relevancy.py b/packages/graphrag/graphrag/query/context_builder/rate_relevancy.py index 6fa128999f..16474369ac 100644 --- a/packages/graphrag/graphrag/query/context_builder/rate_relevancy.py +++ b/packages/graphrag/graphrag/query/context_builder/rate_relevancy.py @@ -6,14 +6,20 @@ import asyncio import logging from contextlib import nullcontext -from typing import Any +from typing import TYPE_CHECKING, Any import numpy as np +from graphrag_llm.tokenizer import Tokenizer +from graphrag_llm.utils import ( + CompletionMessagesBuilder, + gather_completion_response_async, +) -from graphrag.language_model.protocol.base import ChatModel from graphrag.query.context_builder.rate_prompt import RATE_QUERY from graphrag.query.llm.text_utils import try_parse_json_object -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion logger = logging.getLogger(__name__) @@ -21,7 +27,7 @@ async def rate_relevancy( query: str, description: str, - model: ChatModel, + model: "LLMCompletion", tokenizer: Tokenizer, rate_query: str = RATE_QUERY, num_repeats: int = 1, @@ -42,18 +48,21 @@ async def rate_relevancy( semaphore: asyncio.Semaphore to limit the number of concurrent LLM calls (default: None) """ llm_calls, prompt_tokens, output_tokens, ratings = 0, 0, 0, [] - messages = [ - { - "role": "system", - "content": rate_query.format(description=description, question=query), - }, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(rate_query.format(description=description, question=query)) + .add_user_message(query) + ) + for _ in range(num_repeats): async with semaphore if semaphore is not None else nullcontext(): - model_response = await model.achat( - prompt=query, history=messages, model_parameters=model_params, json=True + model_response = await model.completion_async( + messages=messages_builder.build(), + response_format_json_object=True, + **model_params, ) - response = model_response.output.content + response = await gather_completion_response_async(model_response) try: _, parsed_response = try_parse_json_object(response) ratings.append(parsed_response["rating"]) @@ -63,7 +72,7 @@ async def rate_relevancy( logger.warning("Error parsing json response, defaulting to rating 1") ratings.append(1) llm_calls += 1 - prompt_tokens += tokenizer.num_tokens(messages[0]["content"]) + prompt_tokens += tokenizer.num_prompt_tokens(messages_builder.build()) output_tokens += tokenizer.num_tokens(response) # select the decision with the most votes options, counts = np.unique(ratings, return_counts=True) diff --git a/packages/graphrag/graphrag/query/context_builder/source_context.py b/packages/graphrag/graphrag/query/context_builder/source_context.py index 
eaf308f629..ff121b8cdd 100644 --- a/packages/graphrag/graphrag/query/context_builder/source_context.py +++ b/packages/graphrag/graphrag/query/context_builder/source_context.py @@ -7,11 +7,11 @@ from typing import Any, cast import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag.data_model.relationship import Relationship from graphrag.data_model.text_unit import TextUnit from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer """ Contain util functions to build text unit context for the search's system prompt diff --git a/packages/graphrag/graphrag/query/factory.py b/packages/graphrag/graphrag/query/factory.py index 4ff36100cc..5655074ed0 100644 --- a/packages/graphrag/graphrag/query/factory.py +++ b/packages/graphrag/graphrag/query/factory.py @@ -3,6 +3,8 @@ """Query Factory methods to support CLI.""" +from graphrag_llm.completion import create_completion +from graphrag_llm.embedding import create_embedding from graphrag_vectors import VectorStore from graphrag.callbacks.query_callbacks import QueryCallbacks @@ -13,10 +15,6 @@ from graphrag.data_model.entity import Entity from graphrag.data_model.relationship import Relationship from graphrag.data_model.text_unit import TextUnit -from graphrag.language_model.manager import ModelManager -from graphrag.language_model.util import ( - get_openai_model_parameters_from_config, -) from graphrag.query.context_builder.entity_extraction import EntityVectorStoreKey from graphrag.query.structured_search.basic_search.basic_context import ( BasicSearchContext, @@ -34,7 +32,6 @@ LocalSearchMixedContext, ) from graphrag.query.structured_search.local_search.search import LocalSearch -from graphrag.tokenizer.get_tokenizer import get_tokenizer def get_local_search_engine( @@ -50,29 +47,23 @@ def get_local_search_engine( callbacks: list[QueryCallbacks] | None = None, ) -> LocalSearch: """Create a local search engine based on data + configuration.""" - model_settings = config.get_language_model_config(config.local_search.chat_model_id) - - chat_model = ModelManager().get_or_create_chat_model( - name="local_search_chat", - model_type=model_settings.type, - config=model_settings, + model_settings = config.get_completion_model_config( + config.local_search.completion_model_id ) - embedding_settings = config.get_language_model_config( + chat_model = create_completion(model_settings) + + embedding_settings = config.get_embedding_model_config( config.local_search.embedding_model_id ) - embedding_model = ModelManager().get_or_create_embedding_model( - name="local_search_embedding", - model_type=embedding_settings.type, - config=embedding_settings, - ) + embedding_model = create_embedding(embedding_settings) - tokenizer = get_tokenizer(model_config=model_settings) + tokenizer = chat_model.tokenizer ls_config = config.local_search - model_params = get_openai_model_parameters_from_config(model_settings) + model_params = model_settings.call_args return LocalSearch( model=chat_model, @@ -122,20 +113,16 @@ def get_global_search_engine( callbacks: list[QueryCallbacks] | None = None, ) -> GlobalSearch: """Create a global search engine based on data + configuration.""" - model_settings = config.get_language_model_config( - config.global_search.chat_model_id + model_settings = config.get_completion_model_config( + config.global_search.completion_model_id ) - model = ModelManager().get_or_create_chat_model( - name="global_search", - model_type=model_settings.type, - config=model_settings, - ) + 
model = create_completion(model_settings) - model_params = get_openai_model_parameters_from_config(model_settings) + model_params = model_settings.call_args # Here we get encoding based on specified encoding name - tokenizer = get_tokenizer(model_config=model_settings) + tokenizer = model.tokenizer gs_config = config.global_search dynamic_community_selection_kwargs = {} @@ -148,7 +135,7 @@ def get_global_search_engine( "keep_parent": gs_config.dynamic_search_keep_parent, "num_repeats": gs_config.dynamic_search_num_repeats, "use_summary": gs_config.dynamic_search_use_summary, - "concurrent_coroutines": model_settings.concurrent_requests, + "concurrent_coroutines": config.concurrent_requests, "threshold": gs_config.dynamic_search_threshold, "max_level": gs_config.dynamic_search_max_level, "model_params": {**model_params}, @@ -187,7 +174,7 @@ def get_global_search_engine( "max_context_tokens": gs_config.max_context_tokens, "context_name": "Reports", }, - concurrent_coroutines=model_settings.concurrent_requests, + concurrent_coroutines=config.concurrent_requests, response_type=response_type, callbacks=callbacks, ) @@ -206,27 +193,19 @@ def get_drift_search_engine( callbacks: list[QueryCallbacks] | None = None, ) -> DRIFTSearch: """Create a local search engine based on data + configuration.""" - chat_model_settings = config.get_language_model_config( - config.drift_search.chat_model_id + chat_model_settings = config.get_completion_model_config( + config.drift_search.completion_model_id ) - chat_model = ModelManager().get_or_create_chat_model( - name="drift_search_chat", - model_type=chat_model_settings.type, - config=chat_model_settings, - ) + chat_model = create_completion(chat_model_settings) - embedding_model_settings = config.get_language_model_config( + embedding_model_settings = config.get_embedding_model_config( config.drift_search.embedding_model_id ) - embedding_model = ModelManager().get_or_create_embedding_model( - name="drift_search_embedding", - model_type=embedding_model_settings.type, - config=embedding_model_settings, - ) + embedding_model = create_embedding(embedding_model_settings) - tokenizer = get_tokenizer(model_config=chat_model_settings) + tokenizer = chat_model.tokenizer return DRIFTSearch( model=chat_model, @@ -257,31 +236,23 @@ def get_basic_search_engine( callbacks: list[QueryCallbacks] | None = None, ) -> BasicSearch: """Create a basic search engine based on data + configuration.""" - chat_model_settings = config.get_language_model_config( - config.basic_search.chat_model_id + chat_model_settings = config.get_completion_model_config( + config.basic_search.completion_model_id ) - chat_model = ModelManager().get_or_create_chat_model( - name="basic_search_chat", - model_type=chat_model_settings.type, - config=chat_model_settings, - ) + chat_model = create_completion(chat_model_settings) - embedding_model_settings = config.get_language_model_config( + embedding_model_settings = config.get_embedding_model_config( config.basic_search.embedding_model_id ) - embedding_model = ModelManager().get_or_create_embedding_model( - name="basic_search_embedding", - model_type=embedding_model_settings.type, - config=embedding_model_settings, - ) + embedding_model = create_embedding(embedding_model_settings) - tokenizer = get_tokenizer(model_config=chat_model_settings) + tokenizer = chat_model.tokenizer bs_config = config.basic_search - model_params = get_openai_model_parameters_from_config(chat_model_settings) + model_params = chat_model_settings.call_args return BasicSearch( 
model=chat_model, diff --git a/packages/graphrag/graphrag/query/indexer_adapters.py b/packages/graphrag/graphrag/query/indexer_adapters.py index f0d5ff7dae..c347fa11de 100644 --- a/packages/graphrag/graphrag/query/indexer_adapters.py +++ b/packages/graphrag/graphrag/query/indexer_adapters.py @@ -7,7 +7,7 @@ """ import logging -from typing import cast +from typing import TYPE_CHECKING, cast import pandas as pd from graphrag_vectors import VectorStore @@ -18,7 +18,6 @@ from graphrag.data_model.entity import Entity from graphrag.data_model.relationship import Relationship from graphrag.data_model.text_unit import TextUnit -from graphrag.language_model.protocol.base import EmbeddingModel from graphrag.query.input.loaders.dfs import ( read_communities, read_community_reports, @@ -28,6 +27,9 @@ read_text_units, ) +if TYPE_CHECKING: + from graphrag_llm.embedding import LLMEmbedding + logger = logging.getLogger(__name__) @@ -192,7 +194,7 @@ def read_indexer_communities( def embed_community_reports( reports_df: pd.DataFrame, - embedder: EmbeddingModel, + embedder: "LLMEmbedding", source_col: str = "full_content", embedding_col: str = "full_content_embedding", ) -> pd.DataFrame: @@ -203,7 +205,7 @@ def embed_community_reports( if embedding_col not in reports_df.columns: reports_df[embedding_col] = reports_df.loc[:, source_col].apply( - lambda x: embedder.embed(x) + lambda x: embedder.embedding(input=[x]).first_embedding ) return reports_df diff --git a/packages/graphrag/graphrag/query/llm/text_utils.py b/packages/graphrag/graphrag/query/llm/text_utils.py index ddd6abe1ef..b5b13d22a1 100644 --- a/packages/graphrag/graphrag/query/llm/text_utils.py +++ b/packages/graphrag/graphrag/query/llm/text_utils.py @@ -9,10 +9,10 @@ from collections.abc import Iterator from itertools import islice +from graphrag_llm.tokenizer import Tokenizer from json_repair import repair_json from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer logger = logging.getLogger(__name__) diff --git a/packages/graphrag/graphrag/query/question_gen/base.py b/packages/graphrag/graphrag/query/question_gen/base.py index 2195aee52a..d45ec60317 100644 --- a/packages/graphrag/graphrag/query/question_gen/base.py +++ b/packages/graphrag/graphrag/query/question_gen/base.py @@ -5,15 +5,17 @@ from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import Any +from typing import TYPE_CHECKING, Any + +from graphrag_llm.tokenizer import Tokenizer -from graphrag.language_model.protocol.base import ChatModel from graphrag.query.context_builder.builders import ( GlobalContextBuilder, LocalContextBuilder, ) -from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion @dataclass @@ -32,7 +34,7 @@ class BaseQuestionGen(ABC): def __init__( self, - model: ChatModel, + model: "LLMCompletion", context_builder: GlobalContextBuilder | LocalContextBuilder, tokenizer: Tokenizer | None = None, model_params: dict[str, Any] | None = None, @@ -40,7 +42,7 @@ def __init__( ): self.model = model self.context_builder = context_builder - self.tokenizer = tokenizer or get_tokenizer(model.config) + self.tokenizer = tokenizer or model.tokenizer self.model_params = model_params or {} self.context_builder_params = context_builder_params or {} diff --git a/packages/graphrag/graphrag/query/question_gen/local_gen.py b/packages/graphrag/graphrag/query/question_gen/local_gen.py 
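Editor's note: a sketch of the new query-factory wiring shown above, using only accessors that appear in this diff; `config` is assumed to be a loaded GraphRAG configuration object with the methods used in `factory.py`.

```python
# Completion and embedding models are now built straight from config, the tokenizer
# comes from the completion model, and call parameters come from settings.call_args.
from graphrag_llm.completion import create_completion
from graphrag_llm.embedding import create_embedding


def build_models(config):
    model_settings = config.get_completion_model_config(
        config.local_search.completion_model_id
    )
    chat_model = create_completion(model_settings)

    embedding_settings = config.get_embedding_model_config(
        config.local_search.embedding_model_id
    )
    embedding_model = create_embedding(embedding_settings)

    tokenizer = chat_model.tokenizer          # replaces get_tokenizer(model_config=...)
    model_params = model_settings.call_args   # replaces get_openai_model_parameters_from_config

    # Single-text embedding, as used by the vector-store callbacks above:
    #   embedding_model.embedding(input=[text]).first_embedding
    return chat_model, embedding_model, tokenizer, model_params
```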
index 82795e9cf3..7e4fb3dadb 100644 --- a/packages/graphrag/graphrag/query/question_gen/local_gen.py +++ b/packages/graphrag/graphrag/query/question_gen/local_gen.py @@ -5,10 +5,12 @@ import logging import time -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast + +from graphrag_llm.tokenizer import Tokenizer +from graphrag_llm.utils import CompletionMessagesBuilder from graphrag.callbacks.llm_callbacks import BaseLLMCallback -from graphrag.language_model.protocol.base import ChatModel from graphrag.prompts.query.question_gen_system_prompt import QUESTION_SYSTEM_PROMPT from graphrag.query.context_builder.builders import ( ContextBuilderResult, @@ -18,7 +20,12 @@ ConversationHistory, ) from graphrag.query.question_gen.base import BaseQuestionGen, QuestionResult -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from collections.abc import AsyncIterator + + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionChunk logger = logging.getLogger(__name__) @@ -28,7 +35,7 @@ class LocalQuestionGen(BaseQuestionGen): def __init__( self, - model: ChatModel, + model: "LLMCompletion", context_builder: LocalContextBuilder, tokenizer: Tokenizer | None = None, system_prompt: str = QUESTION_SYSTEM_PROMPT, @@ -94,19 +101,28 @@ async def agenerate( system_prompt = self.system_prompt.format( context_data=context_data, question_count=question_count ) - question_messages = [ - {"role": "system", "content": system_prompt}, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(system_prompt) + .add_user_message(question_text) + ) response = "" - async for chunk in self.model.achat_stream( - prompt=question_text, - history=question_messages, - model_parameters=self.model_params, - ): - response += chunk + + response_stream: AsyncIterator[ + LLMCompletionChunk + ] = await self.model.completion_async( + messages=messages_builder.build(), + stream=True, + **self.model_params, + ) # type: ignore + + async for chunk in response_stream: + response_text = chunk.choices[0].delta.content or "" + response += response_text for callback in self.callbacks: - callback.on_llm_new_token(chunk) + callback.on_llm_new_token(response_text) return QuestionResult( response=response.split("\n"), @@ -176,20 +192,28 @@ async def generate( system_prompt = self.system_prompt.format( context_data=context_data, question_count=question_count ) - question_messages = [ - {"role": "system", "content": system_prompt}, - {"role": "user", "content": question_text}, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(system_prompt) + .add_user_message(question_text) + ) response = "" - async for chunk in self.model.achat_stream( - prompt=question_text, - history=question_messages, - model_parameters=self.model_params, - ): - response += chunk + + response_stream: AsyncIterator[ + LLMCompletionChunk + ] = await self.model.completion_async( + messages=messages_builder.build(), + stream=True, + **self.model_params, + ) # type: ignore + + async for chunk in response_stream: + response_text = chunk.choices[0].delta.content or "" + response += response_text for callback in self.callbacks: - callback.on_llm_new_token(chunk) + callback.on_llm_new_token(response_text) return QuestionResult( response=response.split("\n"), diff --git a/packages/graphrag/graphrag/query/structured_search/base.py b/packages/graphrag/graphrag/query/structured_search/base.py index 753b419f69..1b5d1ca4eb 100644 --- 
a/packages/graphrag/graphrag/query/structured_search/base.py +++ b/packages/graphrag/graphrag/query/structured_search/base.py @@ -6,11 +6,11 @@ from abc import ABC, abstractmethod from collections.abc import AsyncGenerator from dataclasses import dataclass -from typing import Any, Generic, TypeVar +from typing import TYPE_CHECKING, Any, Generic, TypeVar import pandas as pd +from graphrag_llm.tokenizer import Tokenizer -from graphrag.language_model.protocol.base import ChatModel from graphrag.query.context_builder.builders import ( BasicContextBuilder, DRIFTContextBuilder, @@ -20,8 +20,9 @@ from graphrag.query.context_builder.conversation_history import ( ConversationHistory, ) -from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion @dataclass @@ -57,7 +58,7 @@ class BaseSearch(ABC, Generic[T]): def __init__( self, - model: ChatModel, + model: "LLMCompletion", context_builder: T, tokenizer: Tokenizer | None = None, model_params: dict[str, Any] | None = None, @@ -65,7 +66,7 @@ def __init__( ): self.model = model self.context_builder = context_builder - self.tokenizer = tokenizer or get_tokenizer() + self.tokenizer = tokenizer or model.tokenizer self.model_params = model_params or {} self.context_builder_params = context_builder_params or {} diff --git a/packages/graphrag/graphrag/query/structured_search/basic_search/basic_context.py b/packages/graphrag/graphrag/query/structured_search/basic_search/basic_context.py index 57bb16efca..882ee3d49e 100644 --- a/packages/graphrag/graphrag/query/structured_search/basic_search/basic_context.py +++ b/packages/graphrag/graphrag/query/structured_search/basic_search/basic_context.py @@ -4,20 +4,22 @@ """Basic Context Builder implementation.""" import logging -from typing import cast +from typing import TYPE_CHECKING, cast import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag_vectors import VectorStore from graphrag.data_model.text_unit import TextUnit -from graphrag.language_model.protocol.base import EmbeddingModel from graphrag.query.context_builder.builders import ( BasicContextBuilder, ContextBuilderResult, ) from graphrag.query.context_builder.conversation_history import ConversationHistory from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.embedding import LLMEmbedding logger = logging.getLogger(__name__) @@ -27,7 +29,7 @@ class BasicSearchContext(BasicContextBuilder): def __init__( self, - text_embedder: EmbeddingModel, + text_embedder: "LLMEmbedding", text_unit_embeddings: VectorStore, text_units: list[TextUnit] | None = None, tokenizer: Tokenizer | None = None, @@ -55,7 +57,9 @@ def build_context( if query != "": related_texts = self.text_unit_embeddings.similarity_search_by_text( text=query, - text_embedder=lambda t: self.text_embedder.embed(t), + text_embedder=lambda t: self.text_embedder.embedding( + input=[t] + ).first_embedding, k=k, ) diff --git a/packages/graphrag/graphrag/query/structured_search/basic_search/search.py b/packages/graphrag/graphrag/query/structured_search/basic_search/search.py index ce5f656845..a8672fb4e8 100644 --- a/packages/graphrag/graphrag/query/structured_search/basic_search/search.py +++ b/packages/graphrag/graphrag/query/structured_search/basic_search/search.py @@ -5,18 +5,23 @@ import logging import time -from collections.abc import AsyncGenerator -from typing 
import Any +from collections.abc import AsyncGenerator, AsyncIterator +from typing import TYPE_CHECKING, Any + +from graphrag_llm.tokenizer import Tokenizer +from graphrag_llm.utils import CompletionMessagesBuilder from graphrag.callbacks.query_callbacks import QueryCallbacks -from graphrag.language_model.protocol.base import ChatModel from graphrag.prompts.query.basic_search_system_prompt import ( BASIC_SEARCH_SYSTEM_PROMPT, ) from graphrag.query.context_builder.builders import BasicContextBuilder from graphrag.query.context_builder.conversation_history import ConversationHistory from graphrag.query.structured_search.base import BaseSearch, SearchResult -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionChunk logger = logging.getLogger(__name__) """ @@ -29,7 +34,7 @@ class BasicSearch(BaseSearch[BasicContextBuilder]): def __init__( self, - model: ChatModel, + model: "LLMCompletion", context_builder: BasicContextBuilder, tokenizer: Tokenizer | None = None, system_prompt: str | None = None, @@ -77,19 +82,28 @@ async def search( context_data=context_result.context_chunks, response_type=self.response_type, ) - search_messages = [ - {"role": "system", "content": search_prompt}, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) + ) response = "" - async for chunk in self.model.achat_stream( - prompt=query, - history=search_messages, - model_parameters=self.model_params, - ): + + response_stream: AsyncIterator[LLMCompletionChunk] = ( + self.model.completion_async( + messages=messages_builder.build(), + stream=True, + **self.model_params, + ) + ) # type: ignore + + async for chunk in response_stream: + response_text = chunk.choices[0].delta.content or "" for callback in self.callbacks: - callback.on_llm_new_token(chunk) - response += chunk + callback.on_llm_new_token(response_text) + response += response_text llm_calls["response"] = 1 prompt_tokens["response"] = len(self.tokenizer.encode(search_prompt)) @@ -143,18 +157,26 @@ async def stream_search( search_prompt = self.system_prompt.format( context_data=context_result.context_chunks, response_type=self.response_type ) - search_messages = [ - {"role": "system", "content": search_prompt}, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) + ) for callback in self.callbacks: callback.on_context(context_result.context_records) - async for chunk_response in self.model.achat_stream( - prompt=query, - history=search_messages, - model_parameters=self.model_params, - ): + response_stream: AsyncIterator[ + LLMCompletionChunk + ] = await self.model.completion_async( + messages=messages_builder.build(), + stream=True, + **self.model_params, + ) # type: ignore + + async for chunk in response_stream: + response_text = chunk.choices[0].delta.content or "" for callback in self.callbacks: - callback.on_llm_new_token(chunk_response) - yield chunk_response + callback.on_llm_new_token(response_text) + yield response_text diff --git a/packages/graphrag/graphrag/query/structured_search/drift_search/drift_context.py b/packages/graphrag/graphrag/query/structured_search/drift_search/drift_context.py index 41649f2f6c..bacc39cf99 100644 --- a/packages/graphrag/graphrag/query/structured_search/drift_search/drift_context.py +++ b/packages/graphrag/graphrag/query/structured_search/drift_search/drift_context.py @@ 
-5,10 +5,11 @@ import logging from dataclasses import asdict -from typing import Any +from typing import TYPE_CHECKING, Any import numpy as np import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag_vectors import VectorStore from graphrag.config.models.drift_search_config import DRIFTSearchConfig @@ -17,7 +18,6 @@ from graphrag.data_model.entity import Entity from graphrag.data_model.relationship import Relationship from graphrag.data_model.text_unit import TextUnit -from graphrag.language_model.protocol.base import ChatModel, EmbeddingModel from graphrag.prompts.query.drift_search_system_prompt import ( DRIFT_LOCAL_SYSTEM_PROMPT, DRIFT_REDUCE_PROMPT, @@ -28,8 +28,10 @@ from graphrag.query.structured_search.local_search.mixed_context import ( LocalSearchMixedContext, ) -from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.embedding import LLMEmbedding logger = logging.getLogger(__name__) @@ -39,9 +41,9 @@ class DRIFTSearchContextBuilder(DRIFTContextBuilder): def __init__( self, - model: ChatModel, + model: "LLMCompletion", config: DRIFTSearchConfig, - text_embedder: EmbeddingModel, + text_embedder: "LLMEmbedding", entities: list[Entity], entity_text_embeddings: VectorStore, text_units: list[TextUnit] | None = None, @@ -59,7 +61,7 @@ def __init__( self.config = config self.model = model self.text_embedder = text_embedder - self.tokenizer = tokenizer or get_tokenizer() + self.tokenizer = tokenizer or model.tokenizer self.local_system_prompt = local_system_prompt or DRIFT_LOCAL_SYSTEM_PROMPT self.reduce_system_prompt = reduce_system_prompt or DRIFT_REDUCE_PROMPT diff --git a/packages/graphrag/graphrag/query/structured_search/drift_search/primer.py b/packages/graphrag/graphrag/query/structured_search/drift_search/primer.py index 2a7f145711..36723064db 100644 --- a/packages/graphrag/graphrag/query/structured_search/drift_search/primer.py +++ b/packages/graphrag/graphrag/query/structured_search/drift_search/primer.py @@ -3,35 +3,53 @@ """Primer for DRIFT search.""" -import json import logging import secrets import time +from typing import TYPE_CHECKING import numpy as np import pandas as pd +from graphrag_llm.tokenizer import Tokenizer +from pydantic import BaseModel, Field from tqdm.asyncio import tqdm_asyncio from graphrag.config.models.drift_search_config import DRIFTSearchConfig from graphrag.data_model.community_report import CommunityReport -from graphrag.language_model.protocol.base import ChatModel, EmbeddingModel from graphrag.prompts.query.drift_search_system_prompt import ( DRIFT_PRIMER_PROMPT, ) from graphrag.query.structured_search.base import SearchResult -from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.embedding import LLMEmbedding + from graphrag_llm.types import LLMCompletionResponse logger = logging.getLogger(__name__) +class PrimerResponse(BaseModel): + """Response model for the primer.""" + + intermediate_answer: str = Field( + description="This answer should match the level of detail and length found in the community summaries. The intermediate answer should be exactly 2000 characters long. 
This must be formatted in markdown and must begin with a header that explains how the following text is related to the query.", + ) + score: int = Field( + description="A score on how well the intermediate answer addresses the query. A score of 0 indicates a poor, unfocused answer, while a score of 100 indicates a highly focused, relevant answer that addresses the query in its entirety." + ) + follow_up_queries: list[str] = Field( + description="A list of follow-up queries that could be asked to further explore the topic. These should be formatted as a list of strings. Generate at least five good follow-up queries." + ) + + class PrimerQueryProcessor: """Process the query by expanding it using community reports and generate follow-up actions.""" def __init__( self, - chat_model: ChatModel, - text_embedder: EmbeddingModel, + chat_model: "LLMCompletion", + text_embedder: "LLMEmbedding", reports: list[CommunityReport], tokenizer: Tokenizer | None = None, ): @@ -46,7 +64,7 @@ def __init__( """ self.chat_model = chat_model self.text_embedder = text_embedder - self.tokenizer = tokenizer or get_tokenizer() + self.tokenizer = tokenizer or chat_model.tokenizer self.reports = reports async def expand_query(self, query: str) -> tuple[str, dict[str, int]]: @@ -67,8 +85,10 @@ async def expand_query(self, query: str) -> tuple[str, dict[str, int]]: {template}\n" Ensure that the hypothetical answer does not reference new named entities that are not present in the original query.""" - model_response = await self.chat_model.achat(prompt) - text = model_response.output.content + model_response: LLMCompletionResponse = await self.chat_model.completion_async( + messages=prompt + ) # type: ignore + text = model_response.content prompt_tokens = len(self.tokenizer.encode(prompt)) output_tokens = len(self.tokenizer.encode(text)) @@ -95,7 +115,9 @@ async def __call__(self, query: str) -> tuple[list[float], dict[str, int]]: """ hyde_query, token_ct = await self.expand_query(query) logger.debug("Expanded query: %s", hyde_query) - return self.text_embedder.embed(hyde_query), token_ct + return self.text_embedder.embedding( + input=[hyde_query] + ).first_embedding, token_ct class DRIFTPrimer: @@ -104,7 +126,7 @@ class DRIFTPrimer: def __init__( self, config: DRIFTSearchConfig, - chat_model: ChatModel, + chat_model: "LLMCompletion", tokenizer: Tokenizer | None = None, ): """ @@ -117,7 +139,7 @@ def __init__( """ self.chat_model = chat_model self.config = config - self.tokenizer = tokenizer or get_tokenizer() + self.tokenizer = tokenizer or chat_model.tokenizer async def decompose_query( self, query: str, reports: pd.DataFrame @@ -137,15 +159,18 @@ async def decompose_query( prompt = DRIFT_PRIMER_PROMPT.format( query=query, community_reports=community_reports ) - model_response = await self.chat_model.achat(prompt, json=True) - response = model_response.output.content + model_response: LLMCompletionResponse[ + PrimerResponse + ] = await self.chat_model.completion_async( + messages=prompt, response_format=PrimerResponse + ) # type: ignore - parsed_response = json.loads(response) + parsed_response = model_response.formatted_response.model_dump() # type: ignore token_ct = { "llm_calls": 1, "prompt_tokens": len(self.tokenizer.encode(prompt)), - "output_tokens": len(self.tokenizer.encode(response)), + "output_tokens": len(self.tokenizer.encode(model_response.content)), } return parsed_response, token_ct diff --git a/packages/graphrag/graphrag/query/structured_search/drift_search/search.py 
b/packages/graphrag/graphrag/query/structured_search/drift_search/search.py index 14f208c77a..070c5b5519 100644 --- a/packages/graphrag/graphrag/query/structured_search/drift_search/search.py +++ b/packages/graphrag/graphrag/query/structured_search/drift_search/search.py @@ -5,16 +5,17 @@ import logging import time -from collections.abc import AsyncGenerator -from typing import Any +from collections.abc import AsyncGenerator, AsyncIterator +from typing import TYPE_CHECKING, Any +from graphrag_llm.tokenizer import Tokenizer +from graphrag_llm.utils import ( + CompletionMessagesBuilder, + gather_completion_response_async, +) from tqdm.asyncio import tqdm_asyncio from graphrag.callbacks.query_callbacks import QueryCallbacks -from graphrag.language_model.protocol.base import ChatModel -from graphrag.language_model.util import ( - get_openai_model_parameters_from_dict, -) from graphrag.query.context_builder.conversation_history import ConversationHistory from graphrag.query.context_builder.entity_extraction import EntityVectorStoreKey from graphrag.query.structured_search.base import BaseSearch, SearchResult @@ -25,8 +26,10 @@ from graphrag.query.structured_search.drift_search.primer import DRIFTPrimer from graphrag.query.structured_search.drift_search.state import QueryState from graphrag.query.structured_search.local_search.search import LocalSearch -from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionChunk logger = logging.getLogger(__name__) @@ -36,7 +39,7 @@ class DRIFTSearch(BaseSearch[DRIFTSearchContextBuilder]): def __init__( self, - model: ChatModel, + model: "LLMCompletion", context_builder: DRIFTSearchContextBuilder, tokenizer: Tokenizer | None = None, query_state: QueryState | None = None, @@ -55,7 +58,7 @@ def __init__( super().__init__(model, context_builder, tokenizer) self.context_builder = context_builder - self.tokenizer = tokenizer or get_tokenizer() + self.tokenizer = tokenizer or model.tokenizer self.query_state = query_state or QueryState() self.primer = DRIFTPrimer( config=self.context_builder.config, @@ -86,15 +89,13 @@ def init_local_search(self) -> LocalSearch: "max_context_tokens": self.context_builder.config.local_search_max_data_tokens, } - model_params = get_openai_model_parameters_from_dict({ - "model": self.model.config.model, - "max_tokens": self.context_builder.config.local_search_llm_max_gen_tokens, + model_params = { "temperature": self.context_builder.config.local_search_temperature, "n": self.context_builder.config.local_search_n, "top_p": self.context_builder.config.local_search_top_p, "max_completion_tokens": self.context_builder.config.local_search_llm_max_gen_completion_tokens, - "response_format": {"type": "json_object"}, - }) + "response_format_json_object": True, + } return LocalSearch( model=self.model, @@ -280,12 +281,10 @@ async def search( for callback in self.callbacks: callback.on_reduce_response_start(response_state) - model_params = get_openai_model_parameters_from_dict({ - "model": self.model.config.model, - "max_tokens": self.context_builder.config.reduce_max_tokens, + model_params = { "temperature": self.context_builder.config.reduce_temperature, "max_completion_tokens": self.context_builder.config.reduce_max_completion_tokens, - }) + } reduced_response = await self._reduce_response( responses=response_state, @@ -331,12 +330,10 @@ async def stream_search( for 
callback in self.callbacks: callback.on_reduce_response_start(result.response) - model_params = get_openai_model_parameters_from_dict({ - "model": self.model.config.model, - "max_tokens": self.context_builder.config.reduce_max_tokens, + model_params = { "temperature": self.context_builder.config.reduce_temperature, "max_completion_tokens": self.context_builder.config.reduce_max_completion_tokens, - }) + } full_response = "" async for resp in self._reduce_response_streaming( @@ -390,17 +387,19 @@ async def _reduce_response( context_data=reduce_responses, response_type=self.context_builder.response_type, ) - search_messages = [ - {"role": "system", "content": search_prompt}, - ] - model_response = await self.model.achat( - prompt=query, - history=search_messages, - model_parameters=llm_kwargs.get("model_params", {}), + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) ) - reduced_response = model_response.output.content + model_response = await self.model.completion_async( + messages=messages_builder.build(), + **llm_kwargs.get("model_params", {}), + ) + + reduced_response = await gather_completion_response_async(model_response) llm_calls["reduce"] = 1 prompt_tokens["reduce"] = len(self.tokenizer.encode(search_prompt)) + len( @@ -445,15 +444,21 @@ async def _reduce_response_streaming( context_data=reduce_responses, response_type=self.context_builder.response_type, ) - search_messages = [ - {"role": "system", "content": search_prompt}, - ] - async for response in self.model.achat_stream( - prompt=query, - history=search_messages, - model_parameters=model_params, - ): + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) + ) + + response_search: AsyncIterator[ + LLMCompletionChunk + ] = await self.model.completion_async( + messages=messages_builder.build(), stream=True, **model_params + ) # type: ignore + + async for chunk in response_search: + response_text = chunk.choices[0].delta.content or "" for callback in self.callbacks: - callback.on_llm_new_token(response) - yield response + callback.on_llm_new_token(response_text) + yield response_text diff --git a/packages/graphrag/graphrag/query/structured_search/global_search/community_context.py b/packages/graphrag/graphrag/query/structured_search/global_search/community_context.py index 1709aab1a8..eb63073095 100644 --- a/packages/graphrag/graphrag/query/structured_search/global_search/community_context.py +++ b/packages/graphrag/graphrag/query/structured_search/global_search/community_context.py @@ -5,6 +5,8 @@ from typing import Any +from graphrag_llm.tokenizer import Tokenizer + from graphrag.data_model.community import Community from graphrag.data_model.community_report import CommunityReport from graphrag.data_model.entity import Entity @@ -20,7 +22,6 @@ ) from graphrag.query.structured_search.base import GlobalContextBuilder from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer class GlobalCommunityContext(GlobalContextBuilder): diff --git a/packages/graphrag/graphrag/query/structured_search/global_search/search.py b/packages/graphrag/graphrag/query/structured_search/global_search/search.py index 86b95d0088..b84043dbde 100644 --- a/packages/graphrag/graphrag/query/structured_search/global_search/search.py +++ b/packages/graphrag/graphrag/query/structured_search/global_search/search.py @@ -7,14 +7,18 @@ import json import logging import time -from 
collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, AsyncIterator from dataclasses import dataclass -from typing import Any +from typing import TYPE_CHECKING, Any import pandas as pd +from graphrag_llm.tokenizer import Tokenizer +from graphrag_llm.utils import ( + CompletionMessagesBuilder, + gather_completion_response_async, +) from graphrag.callbacks.query_callbacks import QueryCallbacks -from graphrag.language_model.protocol.base import ChatModel from graphrag.prompts.query.global_search_knowledge_system_prompt import ( GENERAL_KNOWLEDGE_INSTRUCTION, ) @@ -31,7 +35,10 @@ ) from graphrag.query.llm.text_utils import try_parse_json_object from graphrag.query.structured_search.base import BaseSearch, SearchResult -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionChunk logger = logging.getLogger(__name__) @@ -50,7 +57,7 @@ class GlobalSearch(BaseSearch[GlobalContextBuilder]): def __init__( self, - model: ChatModel, + model: "LLMCompletion", context_builder: GlobalContextBuilder, tokenizer: Tokenizer | None = None, map_system_prompt: str | None = None, @@ -87,7 +94,7 @@ def __init__( self.map_llm_params = map_llm_params if map_llm_params else {} self.reduce_llm_params = reduce_llm_params if reduce_llm_params else {} if json_mode: - self.map_llm_params["response_format"] = {"type": "json_object"} + self.map_llm_params["response_format_json_object"] = True else: # remove response_format key if json_mode is False self.map_llm_params.pop("response_format", None) @@ -220,17 +227,20 @@ async def _map_response_single_batch( search_prompt = self.map_system_prompt.format( context_data=context_data, max_length=max_length ) - search_messages = [ - {"role": "system", "content": search_prompt}, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) + ) + async with self.semaphore: - model_response = await self.model.achat( - prompt=query, - history=search_messages, - model_parameters=llm_kwargs, - json=True, + model_response = await self.model.completion_async( + messages=messages_builder.build(), + response_format_json_object=True, + **llm_kwargs, ) - search_response = model_response.output.content + search_response = await gather_completion_response_async(model_response) logger.debug("Map response: %s", search_response) try: # parse search response json @@ -376,20 +386,28 @@ async def _reduce_response( ) if self.allow_general_knowledge: search_prompt += "\n" + self.general_knowledge_inclusion_prompt - search_messages = [ - {"role": "system", "content": search_prompt}, - {"role": "user", "content": query}, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) + ) search_response = "" - async for chunk_response in self.model.achat_stream( - prompt=query, - history=search_messages, - model_parameters=llm_kwargs, - ): - search_response += chunk_response + + response_search: AsyncIterator[ + LLMCompletionChunk + ] = await self.model.completion_async( + messages=messages_builder.build(), + stream=True, + **llm_kwargs, + ) # type: ignore + + async for chunk in response_search: + response_text = chunk.choices[0].delta.content or "" + search_response += response_text for callback in self.callbacks: - callback.on_llm_new_token(chunk_response) + callback.on_llm_new_token(response_text) return SearchResult( 
response=search_response, @@ -481,15 +499,23 @@ async def _stream_reduce_response( ) if self.allow_general_knowledge: search_prompt += "\n" + self.general_knowledge_inclusion_prompt - search_messages = [ - {"role": "system", "content": search_prompt}, - ] - async for chunk_response in self.model.achat_stream( - prompt=query, - history=search_messages, - **llm_kwargs, - ): + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) + ) + + response_search: AsyncIterator[ + LLMCompletionChunk + ] = await self.model.completion_async( + messages=messages_builder.build(), + stream=True, + **llm_kwargs.get("model_parameters", {}), + ) # type: ignore + + async for chunk in response_search: + response_text = chunk.choices[0].delta.content or "" for callback in self.callbacks: - callback.on_llm_new_token(chunk_response) - yield chunk_response + callback.on_llm_new_token(response_text) + yield response_text diff --git a/packages/graphrag/graphrag/query/structured_search/local_search/mixed_context.py b/packages/graphrag/graphrag/query/structured_search/local_search/mixed_context.py index a3540dfcb2..34d8cf7d93 100644 --- a/packages/graphrag/graphrag/query/structured_search/local_search/mixed_context.py +++ b/packages/graphrag/graphrag/query/structured_search/local_search/mixed_context.py @@ -4,9 +4,10 @@ import logging from copy import deepcopy -from typing import Any +from typing import TYPE_CHECKING, Any import pandas as pd +from graphrag_llm.tokenizer import Tokenizer from graphrag_vectors import VectorStore from graphrag.data_model.community_report import CommunityReport @@ -14,7 +15,6 @@ from graphrag.data_model.entity import Entity from graphrag.data_model.relationship import Relationship from graphrag.data_model.text_unit import TextUnit -from graphrag.language_model.protocol.base import EmbeddingModel from graphrag.query.context_builder.builders import ContextBuilderResult from graphrag.query.context_builder.community_context import ( build_community_context, @@ -42,7 +42,9 @@ from graphrag.query.input.retrieval.text_units import get_candidate_text_units from graphrag.query.structured_search.base import LocalContextBuilder from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.embedding import LLMEmbedding logger = logging.getLogger(__name__) @@ -54,7 +56,7 @@ def __init__( self, entities: list[Entity], entity_text_embeddings: VectorStore, - text_embedder: EmbeddingModel, + text_embedder: "LLMEmbedding", text_units: list[TextUnit] | None = None, community_reports: list[CommunityReport] | None = None, relationships: list[Relationship] | None = None, diff --git a/packages/graphrag/graphrag/query/structured_search/local_search/search.py b/packages/graphrag/graphrag/query/structured_search/local_search/search.py index 64fc884213..728bbcc246 100644 --- a/packages/graphrag/graphrag/query/structured_search/local_search/search.py +++ b/packages/graphrag/graphrag/query/structured_search/local_search/search.py @@ -5,11 +5,13 @@ import logging import time -from collections.abc import AsyncGenerator -from typing import Any +from collections.abc import AsyncGenerator, AsyncIterator +from typing import TYPE_CHECKING, Any + +from graphrag_llm.tokenizer import Tokenizer +from graphrag_llm.utils import CompletionMessagesBuilder from graphrag.callbacks.query_callbacks import QueryCallbacks -from graphrag.language_model.protocol.base import ChatModel from 
graphrag.prompts.query.local_search_system_prompt import ( LOCAL_SEARCH_SYSTEM_PROMPT, ) @@ -18,7 +20,10 @@ ConversationHistory, ) from graphrag.query.structured_search.base import BaseSearch, SearchResult -from graphrag.tokenizer.tokenizer import Tokenizer + +if TYPE_CHECKING: + from graphrag_llm.completion import LLMCompletion + from graphrag_llm.types import LLMCompletionChunk logger = logging.getLogger(__name__) @@ -28,7 +33,7 @@ class LocalSearch(BaseSearch[LocalContextBuilder]): def __init__( self, - model: ChatModel, + model: "LLMCompletion", context_builder: LocalContextBuilder, tokenizer: Tokenizer | None = None, system_prompt: str | None = None, @@ -83,20 +88,28 @@ async def search( context_data=context_result.context_chunks, response_type=self.response_type, ) - history_messages = [ - {"role": "system", "content": search_prompt}, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) + ) full_response = "" - async for response in self.model.achat_stream( - prompt=query, - history=history_messages, - model_parameters=self.model_params, - ): - full_response += response + response: AsyncIterator[ + LLMCompletionChunk + ] = await self.model.completion_async( + messages=messages_builder.build(), + stream=True, + **self.model_params, + ) # type: ignore + + async for chunk in response: + response_text = chunk.choices[0].delta.content or "" + full_response += response_text for callback in self.callbacks: - callback.on_llm_new_token(response) + callback.on_llm_new_token(response_text) llm_calls["response"] = 1 prompt_tokens["response"] = len(self.tokenizer.encode(search_prompt)) @@ -147,18 +160,24 @@ async def stream_search( search_prompt = self.system_prompt.format( context_data=context_result.context_chunks, response_type=self.response_type ) - history_messages = [ - {"role": "system", "content": search_prompt}, - ] + + messages_builder = ( + CompletionMessagesBuilder() + .add_system_message(search_prompt) + .add_user_message(query) + ) for callback in self.callbacks: callback.on_context(context_result.context_records) - async for response in self.model.achat_stream( - prompt=query, - history=history_messages, - model_parameters=self.model_params, - ): + response: AsyncIterator[LLMCompletionChunk] = await self.model.completion_async( + messages=messages_builder.build(), + stream=True, + **self.model_params, + ) # type: ignore + + async for chunk in response: + response_text = chunk.choices[0].delta.content or "" for callback in self.callbacks: - callback.on_llm_new_token(response) - yield response + callback.on_llm_new_token(response_text) + yield response_text diff --git a/packages/graphrag/graphrag/tokenizer/get_tokenizer.py b/packages/graphrag/graphrag/tokenizer/get_tokenizer.py index d1bac40a1a..ed1ffec99c 100644 --- a/packages/graphrag/graphrag/tokenizer/get_tokenizer.py +++ b/packages/graphrag/graphrag/tokenizer/get_tokenizer.py @@ -3,15 +3,14 @@ """Get Tokenizer.""" +from graphrag_llm.config import ModelConfig, TokenizerConfig, TokenizerType +from graphrag_llm.tokenizer import Tokenizer, create_tokenizer + from graphrag.config.defaults import ENCODING_MODEL -from graphrag.config.models.language_model_config import LanguageModelConfig -from graphrag.tokenizer.litellm_tokenizer import LitellmTokenizer -from graphrag.tokenizer.tiktoken_tokenizer import TiktokenTokenizer -from graphrag.tokenizer.tokenizer import Tokenizer def get_tokenizer( - model_config: LanguageModelConfig | None = None, + model_config: 
"ModelConfig | None" = None, encoding_model: str | None = None, ) -> Tokenizer: """ @@ -32,12 +31,18 @@ def get_tokenizer( An instance of a Tokenizer. """ if model_config is not None: - if model_config.encoding_model.strip() != "": - # User has manually specified a tiktoken encoding model to use for the provided model configuration. - return TiktokenTokenizer(encoding_name=model_config.encoding_model) - - return LitellmTokenizer(model_name=model_config.model) + return create_tokenizer( + TokenizerConfig( + type=TokenizerType.LiteLLM, + model_id=f"{model_config.model_provider}/{model_config.model}", + ) + ) if encoding_model is None: encoding_model = ENCODING_MODEL - return TiktokenTokenizer(encoding_name=encoding_model) + return create_tokenizer( + TokenizerConfig( + type=TokenizerType.Tiktoken, + encoding_name=encoding_model, + ) + ) diff --git a/packages/graphrag/graphrag/tokenizer/tokenizer.py b/packages/graphrag/graphrag/tokenizer/tokenizer.py deleted file mode 100644 index 32c0b2bd23..0000000000 --- a/packages/graphrag/graphrag/tokenizer/tokenizer.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright (c) 2024 Microsoft Corporation. -# Licensed under the MIT License - -"""Tokenizer Abstract Base Class.""" - -from abc import ABC, abstractmethod - - -class Tokenizer(ABC): - """Tokenizer Abstract Base Class.""" - - @abstractmethod - def encode(self, text: str) -> list[int]: - """Encode the given text into a list of tokens. - - Args - ---- - text (str): The input text to encode. - - Returns - ------- - list[int]: A list of tokens representing the encoded text. - """ - msg = "The encode method must be implemented by subclasses." - raise NotImplementedError(msg) - - @abstractmethod - def decode(self, tokens: list[int]) -> str: - """Decode a list of tokens back into a string. - - Args - ---- - tokens (list[int]): A list of tokens to decode. - - Returns - ------- - str: The decoded string from the list of tokens. - """ - msg = "The decode method must be implemented by subclasses." - raise NotImplementedError(msg) - - def num_tokens(self, text: str) -> int: - """Return the number of tokens in the given text. - - Args - ---- - text (str): The input text to analyze. - - Returns - ------- - int: The number of tokens in the input text. 
- """ - return len(self.encode(text)) diff --git a/packages/graphrag/pyproject.toml b/packages/graphrag/pyproject.toml index 2db8c4f835..c752339b29 100644 --- a/packages/graphrag/pyproject.toml +++ b/packages/graphrag/pyproject.toml @@ -32,34 +32,27 @@ classifiers = [ ] dependencies = [ - "aiofiles~=24.1", - "azure-cosmos~=4.9", "azure-identity~=1.19", "azure-search-documents~=11.5", "azure-storage-blob~=12.24", "devtools~=0.12", - "environs~=11.0", "graphrag-cache==2.7.0", "graphrag-common==2.7.0", "graphrag-input==2.7.0", + "graphrag-llm==2.7.0", "graphrag-storage==2.7.0", "graphrag-vectors==2.7.0", "graspologic-native~=1.2", "json-repair~=0.30", - "lancedb~=0.24.1", - "litellm~=1.77", - "fastuuid~=0.13", "networkx~=3.4", "nltk==3.9.1", "numpy>=2.1.0", - "openai~=1.68", "pandas>=2.3.0", "pyarrow~=22.0", "pydantic~=2.10", "spacy~=3.8", "blis~=1.0", "textblob~=0.18", - "tiktoken~=0.11", "tqdm~=4.67", "typing-extensions~=4.12", "typer~=0.16", diff --git a/pyproject.toml b/pyproject.toml index a14efa5ab0..cf3a9aebb3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ dev = [ "ruff~=0.8", "semversioner~=2.0", "update-toml~=0.2", + "pytest-xdist[psutil]~=3.8.0", ] [tool.uv] @@ -59,6 +60,7 @@ graphrag-input = { workspace = true } graphrag-storage = { workspace = true } graphrag-cache = { workspace = true } graphrag-vectors = { workspace = true } +graphrag-llm = { workspace = true } # Keep poethepoet for task management to minimize changes [tool.poe.tasks] @@ -79,6 +81,7 @@ _semversioner_update_graphrag_storage_toml_version = "update-toml update --file _semversioner_update_graphrag_cache_toml_version = "update-toml update --file packages/graphrag-cache/pyproject.toml --path project.version --value $(uv run semversioner current-version)" _semversioner_update_graphrag_input_toml_version = "update-toml update --file packages/graphrag-input/pyproject.toml --path project.version --value $(uv run semversioner current-version)" _semversioner_update_graphrag_vectors_toml_version = "update-toml update --file packages/graphrag-vectors/pyproject.toml --path project.version --value $(uv run semversioner current-version)" +_semversioner_update_graphrag_llm_toml_version = "update-toml update --file packages/graphrag-llm/pyproject.toml --path project.version --value $(uv run semversioner current-version)" _semversioner_update_workspace_dependency_versions = "python -m scripts.update_workspace_dependency_versions" semversioner_add = "semversioner add-change" coverage_report = 'coverage report --omit "**/tests/**" --show-missing' @@ -89,7 +92,7 @@ _test_all = "coverage run -m pytest ./tests" test_unit = "pytest ./tests/unit" test_integration = "pytest ./tests/integration" test_smoke = "pytest ./tests/smoke" -test_notebook = "pytest ./tests/notebook" +test_notebook = "pytest -n auto ./tests/notebook" test_verbs = "pytest ./tests/verbs" index = "python -m graphrag index" update = "python -m graphrag update" @@ -117,6 +120,8 @@ sequence = [ '_semversioner_update_graphrag_input_toml_version', '_semversioner_update_graphrag_storage_toml_version', '_semversioner_update_graphrag_cache_toml_version', + "_semversioner_update_graphrag_vectors_toml_version", + '_semversioner_update_graphrag_llm_toml_version', '_semversioner_update_workspace_dependency_versions', '_sync', ] @@ -235,6 +240,8 @@ include = [ "packages/graphrag/graphrag", "packages/graphrag-common/graphrag_common", "packages/graphrag-storage/graphrag_storage", + "packages/graphrag-cache/graphrag_cache", + "packages/graphrag-llm/graphrag_llm", 
"tests" ] exclude = ["**/node_modules", "**/__pycache__"] diff --git a/tests/__init__.py b/tests/__init__.py index 5f525950fb..2d6d270212 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -3,14 +3,3 @@ """Tests for the GraphRAG LLM module.""" - -# Register MOCK providers -from graphrag.config.enums import ModelType -from graphrag.language_model.factory import ChatModelFactory, EmbeddingModelFactory - -from tests.mock_provider import MockChatLLM, MockEmbeddingLLM - -ChatModelFactory().register(ModelType.MockChat, lambda **kwargs: MockChatLLM(**kwargs)) -EmbeddingModelFactory().register( - ModelType.MockEmbedding, lambda **kwargs: MockEmbeddingLLM(**kwargs) -) diff --git a/tests/fixtures/min-csv/settings.yml b/tests/fixtures/min-csv/settings.yml index bf7aa59644..30717727c2 100644 --- a/tests/fixtures/min-csv/settings.yml +++ b/tests/fixtures/min-csv/settings.yml @@ -1,37 +1,32 @@ -models: - default_chat_model: - azure_auth_type: api_key - type: chat +completion_models: + default_completion_model: model_provider: azure api_key: ${GRAPHRAG_API_KEY} api_base: ${GRAPHRAG_API_BASE} api_version: "2025-04-01-preview" model: gpt-4.1 - retry_strategy: exponential_backoff - rate_limit_strategy: static - tokens_per_minute: 250_000 - requests_per_minute: 250 - model_supports_json: true - concurrent_requests: 25 + azure_deployment_name: gpt-4.1 + rate_limit: + type: sliding_window + tokens_per_period: 250_000 + requests_per_period: 250 +embedding_models: default_embedding_model: - azure_auth_type: api_key - type: embedding model_provider: azure api_key: ${GRAPHRAG_API_KEY} api_base: ${GRAPHRAG_API_BASE} api_version: "2025-04-01-preview" model: text-embedding-3-large - retry_strategy: exponential_backoff - rate_limit_strategy: static - tokens_per_minute: 250_000 - requests_per_minute: 250 - concurrent_requests: 25 + azure_deployment_name: text-embedding-3-large + rate_limit: + type: sliding_window + tokens_per_period: 250_000 + requests_per_period: 250 vector_store: - default_vector_store: - type: "lancedb" - db_uri: "./tests/fixtures/min-csv/lancedb" - container_name: "lancedb_ci" + type: "lancedb" + db_uri: "./tests/fixtures/min-csv/lancedb" + overwrite: True input: type: csv diff --git a/tests/fixtures/text/settings.yml b/tests/fixtures/text/settings.yml index 24e59b5942..ed4138e3c6 100644 --- a/tests/fixtures/text/settings.yml +++ b/tests/fixtures/text/settings.yml @@ -1,31 +1,27 @@ -models: - default_chat_model: - azure_auth_type: api_key - type: chat +completion_models: + default_completion_model: model_provider: azure api_key: ${GRAPHRAG_API_KEY} api_base: ${GRAPHRAG_API_BASE} api_version: "2025-04-01-preview" model: gpt-4.1 - retry_strategy: exponential_backoff - rate_limit_strategy: static - tokens_per_minute: 250_000 - requests_per_minute: 250 - model_supports_json: true - concurrent_requests: 25 + azure_deployment_name: gpt-4.1 + rate_limit: + type: sliding_window + tokens_per_period: 250_000 + requests_per_period: 250 +embedding_models: default_embedding_model: - azure_auth_type: api_key - type: embedding model_provider: azure api_key: ${GRAPHRAG_API_KEY} api_base: ${GRAPHRAG_API_BASE} api_version: "2025-04-01-preview" model: text-embedding-3-large - retry_strategy: exponential_backoff - rate_limit_strategy: static - tokens_per_minute: 250_000 - requests_per_minute: 250 - concurrent_requests: 25 + azure_deployment_name: text-embedding-3-large + rate_limit: + type: sliding_window + tokens_per_period: 250_000 + requests_per_period: 250 vector_store: default_vector_store: diff --git 
a/tests/integration/language_model/test_factory.py b/tests/integration/language_model/test_factory.py index 9757c68d99..526cb3e8dd 100644 --- a/tests/integration/language_model/test_factory.py +++ b/tests/integration/language_model/test_factory.py @@ -6,92 +6,99 @@ These tests will test the LLMFactory class and the creation of custom and provided LLMs. """ -from collections.abc import AsyncGenerator, Generator -from typing import Any - -from graphrag.language_model.factory import ChatModelFactory, EmbeddingModelFactory -from graphrag.language_model.manager import ModelManager -from graphrag.language_model.response.base import ( - BaseModelOutput, - BaseModelResponse, - ModelResponse, -) - +from typing import TYPE_CHECKING, Any, Unpack -async def test_create_custom_chat_model(): - class CustomChatModel: +from graphrag_llm.completion import ( + LLMCompletion, + create_completion, + register_completion, +) +from graphrag_llm.config import ModelConfig +from graphrag_llm.embedding import LLMEmbedding, create_embedding, register_embedding + +if TYPE_CHECKING: + from collections.abc import AsyncIterator, Iterator + + from graphrag_llm.metrics import MetricsStore + from graphrag_llm.tokenizer import Tokenizer + from graphrag_llm.types import ( + LLMCompletionArgs, + LLMCompletionChunk, + LLMCompletionResponse, + LLMEmbeddingArgs, + LLMEmbeddingResponse, + ResponseFormat, + ) + + +def test_create_custom_chat_model(): + class CustomChatModel(LLMCompletion): config: Any def __init__(self, **kwargs): pass - async def achat( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> ModelResponse: - return BaseModelResponse(output=BaseModelOutput(content="content")) - - def chat( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> ModelResponse: - return BaseModelResponse( - output=BaseModelOutput( - content="content", full_response={"key": "value"} - ) - ) - - async def achat_stream( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> AsyncGenerator[str, None]: - yield "" - - def chat_stream( - self, prompt: str, history: list | None = None, **kwargs: Any - ) -> Generator[str, None]: ... - - ChatModelFactory().register("custom_chat", CustomChatModel) - model = ModelManager().get_or_create_chat_model("custom", "custom_chat") + def supports_structured_response(self) -> bool: + return True + + def completion( + self, + /, + **kwargs: Unpack["LLMCompletionArgs[ResponseFormat]"], + ) -> "LLMCompletionResponse[ResponseFormat] | Iterator[LLMCompletionChunk]": ... + + async def completion_async( + self, + /, + **kwargs: Unpack["LLMCompletionArgs[ResponseFormat]"], + ) -> ( + "LLMCompletionResponse[ResponseFormat] | AsyncIterator[LLMCompletionChunk]" + ): ... + + @property + def metrics_store(self) -> "MetricsStore": ... + + @property + def tokenizer(self) -> "Tokenizer": ... 
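# --- Illustrative usage sketch (not part of this diff) --------------------------------
# A minimal example, under stated assumptions, of how the refactored search code earlier
# in this diff drives the new graphrag_llm completion API: messages are assembled with
# CompletionMessagesBuilder, then the response is either gathered into a single string or
# consumed as a stream of chunks. The ModelConfig values (provider, model name, API key)
# are placeholders; real values come from the completion_models section of settings.yml.
import asyncio

from graphrag_llm.completion import create_completion
from graphrag_llm.config import LLMProviderType, ModelConfig
from graphrag_llm.utils import CompletionMessagesBuilder, gather_completion_response_async


async def demo() -> None:
    # Placeholder configuration; substitute your own provider, model, and key.
    model = create_completion(
        ModelConfig(
            type=LLMProviderType.LiteLLM,
            model_provider="openai",
            model="gpt-4.1",
            api_key="<YOUR_API_KEY>",
        )
    )

    messages = (
        CompletionMessagesBuilder()
        .add_system_message("You answer questions about the indexed corpus.")
        .add_user_message("What are the main themes?")
        .build()
    )

    # Non-streaming: gather the complete response text.
    response = await model.completion_async(messages=messages)
    print(await gather_completion_response_async(response))

    # Streaming: iterate over chunks, as LocalSearch/GlobalSearch/DRIFTSearch now do.
    stream = await model.completion_async(messages=messages, stream=True)
    async for chunk in stream:
        print(chunk.choices[0].delta.content or "", end="")


if __name__ == "__main__":
    asyncio.run(demo())
# ---------------------------------------------------------------------------------------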
+ + register_completion("custom_chat", CustomChatModel) + + model = create_completion( + ModelConfig( + type="custom_chat", + model_provider="custom_provider", + model="custom_chat_model", + ) + ) assert isinstance(model, CustomChatModel) - response = await model.achat("prompt") - assert response.output.content == "content" - assert response.output.full_response is None - - response = model.chat("prompt") - assert response.output.content == "content" - assert response.output.full_response == {"key": "value"} - -async def test_create_custom_embedding_llm(): - class CustomEmbeddingModel: - config: Any - - def __init__(self, **kwargs): - pass - async def aembed(self, text: str, **kwargs) -> list[float]: - return [1.0] +def test_create_custom_embedding_llm(): + class CustomEmbeddingModel(LLMEmbedding): + def __init__(self, **kwargs): ... - def embed(self, text: str, **kwargs) -> list[float]: - return [1.0] + def embedding( + self, /, **kwargs: Unpack["LLMEmbeddingArgs"] + ) -> "LLMEmbeddingResponse": ... - async def aembed_batch( - self, text_list: list[str], **kwargs - ) -> list[list[float]]: - return [[1.0]] + async def embedding_async( + self, /, **kwargs: Unpack["LLMEmbeddingArgs"] + ) -> "LLMEmbeddingResponse": ... - def embed_batch(self, text_list: list[str], **kwargs) -> list[list[float]]: - return [[1.0]] + @property + def metrics_store(self) -> "MetricsStore": ... - EmbeddingModelFactory().register("custom_embedding", CustomEmbeddingModel) - llm = ModelManager().get_or_create_embedding_model("custom", "custom_embedding") - assert isinstance(llm, CustomEmbeddingModel) - response = await llm.aembed("text") - assert response == [1.0] + @property + def tokenizer(self) -> "Tokenizer": ... - response = llm.embed("text") - assert response == [1.0] + register_embedding("custom_embedding", CustomEmbeddingModel) - response = await llm.aembed_batch(["text"]) - assert response == [[1.0]] + model = create_embedding( + ModelConfig( + type="custom_embedding", + model_provider="custom_provider", + model="custom_embedding_model", + ) + ) - response = llm.embed_batch(["text"]) - assert response == [[1.0]] + assert isinstance(model, CustomEmbeddingModel) diff --git a/tests/integration/language_model/test_rate_limiter.py b/tests/integration/language_model/test_rate_limiter.py index a2e2f882b8..1bc541b16a 100644 --- a/tests/integration/language_model/test_rate_limiter.py +++ b/tests/integration/language_model/test_rate_limiter.py @@ -8,13 +8,8 @@ from math import ceil from queue import Queue -import pytest -from graphrag.language_model.providers.litellm.services.rate_limiter.rate_limiter import ( - RateLimiter, -) -from graphrag.language_model.providers.litellm.services.rate_limiter.rate_limiter_factory import ( - RateLimiterFactory, -) +from graphrag_llm.config import RateLimitConfig, RateLimitType +from graphrag_llm.rate_limit import RateLimiter, create_rate_limiter from tests.integration.language_model.utils import ( assert_max_num_values_per_period, @@ -22,8 +17,6 @@ bin_time_intervals, ) -rate_limiter_factory = RateLimiterFactory() - _period_in_seconds = 1 _rpm = 4 _tpm = 75 @@ -46,59 +39,14 @@ def test_binning(): ] -def test_rate_limiter_validation(): - """Test that the rate limiter can be created with valid parameters.""" - - # Valid parameters - rate_limiter = rate_limiter_factory.create( - strategy="static", init_args={"rpm": 60, "tpm": 10000, "period_in_seconds": 60} - ) - assert rate_limiter is not None - - # Invalid strategy - with pytest.raises( - ValueError, - match=r"Strategy 
'invalid_strategy' is not registered.", - ): - rate_limiter_factory.create( - strategy="invalid_strategy", init_args={"rpm": 60, "tpm": 10000} - ) - - # Both rpm and tpm are None - with pytest.raises( - ValueError, - match=r"Both TPM and RPM cannot be None \(disabled\), one or both must be set to a positive integer.", - ): - rate_limiter_factory.create(strategy="static") - - # Invalid rpm - with pytest.raises( - ValueError, - match=r"RPM and TPM must be either None \(disabled\) or positive integers.", - ): - rate_limiter_factory.create(strategy="static", init_args={"rpm": -10}) - - # Invalid tpm - with pytest.raises( - ValueError, - match=r"RPM and TPM must be either None \(disabled\) or positive integers.", - ): - rate_limiter_factory.create(strategy="static", init_args={"tpm": -10}) - - # Invalid period_in_seconds - with pytest.raises( - ValueError, match=r"Period in seconds must be a positive integer." - ): - rate_limiter_factory.create( - strategy="static", init_args={"rpm": 10, "period_in_seconds": -10} - ) - - def test_rpm(): """Test that the rate limiter enforces RPM limits.""" - rate_limiter = rate_limiter_factory.create( - strategy="static", - init_args={"rpm": _rpm, "period_in_seconds": _period_in_seconds}, + rate_limiter = create_rate_limiter( + RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=_period_in_seconds, + requests_per_period=_rpm, + ) ) time_values: list[float] = [] @@ -125,9 +73,12 @@ def test_rpm(): def test_tpm(): """Test that the rate limiter enforces TPM limits.""" - rate_limiter = rate_limiter_factory.create( - strategy="static", - init_args={"tpm": _tpm, "period_in_seconds": _period_in_seconds}, + rate_limiter = create_rate_limiter( + RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=_period_in_seconds, + tokens_per_period=_tpm, + ) ) time_values: list[float] = [] @@ -159,9 +110,12 @@ def test_token_in_request_exceeds_tpm(): greater than the tpm limit but still below the context window limit of the underlying model. In this case, the request should still be allowed to proceed but may take up its own rate limit bin. 
""" - rate_limiter = rate_limiter_factory.create( - strategy="static", - init_args={"tpm": _tpm, "period_in_seconds": _period_in_seconds}, + rate_limiter = create_rate_limiter( + RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=_period_in_seconds, + tokens_per_period=_tpm, + ) ) time_values: list[float] = [] @@ -184,9 +138,13 @@ def test_token_in_request_exceeds_tpm(): def test_rpm_and_tpm_with_rpm_as_limiting_factor(): """Test that the rate limiter enforces RPM and TPM limits.""" - rate_limiter = rate_limiter_factory.create( - strategy="static", - init_args={"rpm": _rpm, "tpm": _tpm, "period_in_seconds": _period_in_seconds}, + rate_limiter = create_rate_limiter( + RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=_period_in_seconds, + requests_per_period=_rpm, + tokens_per_period=_tpm, + ) ) time_values: list[float] = [] @@ -214,9 +172,13 @@ def test_rpm_and_tpm_with_rpm_as_limiting_factor(): def test_rpm_and_tpm_with_tpm_as_limiting_factor(): """Test that the rate limiter enforces TPM limits.""" - rate_limiter = rate_limiter_factory.create( - strategy="static", - init_args={"rpm": _rpm, "tpm": _tpm, "period_in_seconds": _period_in_seconds}, + rate_limiter = create_rate_limiter( + RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=_period_in_seconds, + requests_per_period=_rpm, + tokens_per_period=_tpm, + ) ) time_values: list[float] = [] @@ -259,9 +221,13 @@ def _run_rate_limiter( def test_rpm_threaded(): """Test that the rate limiter enforces RPM limits in a threaded environment.""" - rate_limiter = rate_limiter_factory.create( - strategy="static", - init_args={"rpm": _rpm, "tpm": _tpm, "period_in_seconds": _period_in_seconds}, + rate_limiter = create_rate_limiter( + RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=_period_in_seconds, + requests_per_period=_rpm, + tokens_per_period=_tpm, + ) ) input_queue: Queue[int | None] = Queue() @@ -320,9 +286,13 @@ def test_rpm_threaded(): def test_tpm_threaded(): """Test that the rate limiter enforces TPM limits in a threaded environment.""" - rate_limiter = rate_limiter_factory.create( - strategy="static", - init_args={"rpm": _rpm, "tpm": _tpm, "period_in_seconds": _period_in_seconds}, + rate_limiter = create_rate_limiter( + RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=_period_in_seconds, + requests_per_period=_rpm, + tokens_per_period=_tpm, + ) ) input_queue: Queue[int | None] = Queue() diff --git a/tests/integration/language_model/test_retries.py b/tests/integration/language_model/test_retries.py index 912eb00e4b..98e25b209e 100644 --- a/tests/integration/language_model/test_retries.py +++ b/tests/integration/language_model/test_retries.py @@ -4,65 +4,97 @@ """Test LiteLLM Retries.""" import time +from typing import Any +import httpx +import litellm.exceptions as exceptions import pytest -from graphrag.language_model.providers.litellm.services.retry.retry_factory import ( - RetryFactory, -) - -retry_factory = RetryFactory() +from graphrag_llm.config import RetryConfig, RetryType +from graphrag_llm.retry import create_retry @pytest.mark.parametrize( - ("strategy", "max_retries", "max_retry_wait", "expected_time"), + ("config", "max_retries", "expected_time"), [ ( - "native", - 3, # 3 retries - 0, # native retry does not adhere to max_retry_wait - 0, # immediate retry, expect 0 seconds elapsed time + RetryConfig( + type=RetryType.ExponentialBackoff, + max_retries=3, + base_delay=2.0, + jitter=False, + ), + 3, + 2 + 4 + 8, # No 
jitter, so exact times ), ( - "exponential_backoff", - 3, # 3 retries - 0, # exponential retry does not adhere to max_retry_wait - 14, # (2^1 + jitter) + (2^2 + jitter) + (2^3 + jitter) = 2 + 4 + 8 + 3*jitter = 14 seconds min total runtime + RetryConfig( + type=RetryType.Immediate, + max_retries=3, + ), + 3, + 0, # Immediate retry, so no delay ), + ], +) +def test_retries(config: RetryConfig, max_retries: int, expected_time: float) -> None: + """ + Test various retry strategies with various configurations. + """ + retry_service = create_retry(config) + + # start at -1 because the first call is not a retry + retries = -1 + + def mock_func(): + nonlocal retries + retries += 1 + msg = "Mock error for testing retries" + raise ValueError(msg) + + start_time = time.time() + with pytest.raises(ValueError, match="Mock error for testing retries"): + retry_service.retry(func=mock_func, input_args={}) + elapsed_time = time.time() - start_time + + assert retries == max_retries, f"Expected {max_retries} retries, got {retries}" + assert elapsed_time >= expected_time, ( + f"Expected elapsed time >= {expected_time}, got {elapsed_time}" + ) + + +@pytest.mark.parametrize( + ("config", "max_retries", "expected_time"), + [ ( - "random_wait", - 3, # 3 retries - 2, # random wait [0, 2] seconds - 0, # unpredictable, don't know what the total runtime will be + RetryConfig( + type=RetryType.ExponentialBackoff, + max_retries=3, + base_delay=2.0, + jitter=False, + ), + 3, + 2 + 4 + 8, # No jitter, so exact times ), ( - "incremental_wait", - 3, # 3 retries - 3, # wait for a max of 3 seconds on a single retry. - 6, # Wait 3/3 * 1 on first retry, 3/3 * 2 on second, 3/3 * 3 on third, 1 + 2 + 3 = 6 seconds total runtime. + RetryConfig( + type=RetryType.Immediate, + max_retries=3, + ), + 3, + 0, # Immediate retry, so no delay ), ], ) -def test_retries( - strategy: str, max_retries: int, max_retry_wait: int, expected_time: float +async def test_retries_async( + config: RetryConfig, max_retries: int, expected_time: float ) -> None: """ Test various retry strategies with various configurations. - - Args - ---- - strategy: The retry strategy to use. - max_retries: The maximum number of retry attempts. - max_retry_wait: The maximum wait time between retries. 
""" - retry_service = retry_factory.create( - strategy=strategy, - init_args={ - "max_retries": max_retries, - "max_retry_wait": max_retry_wait, - }, - ) + retry_service = create_retry(config) - retries = 0 + # start at -1 because the first call is not a retry + retries = -1 def mock_func(): nonlocal retries @@ -72,80 +104,140 @@ def mock_func(): start_time = time.time() with pytest.raises(ValueError, match="Mock error for testing retries"): - retry_service.retry(func=mock_func) + await retry_service.retry_async(func=mock_func, input_args={}) elapsed_time = time.time() - start_time - # subtract 1 from retries because the first call is not a retry - assert retries - 1 == max_retries, f"Expected {max_retries} retries, got {retries}" + assert retries == max_retries, f"Expected {max_retries} retries, got {retries}" assert elapsed_time >= expected_time, ( f"Expected elapsed time >= {expected_time}, got {elapsed_time}" ) @pytest.mark.parametrize( - ("strategy", "max_retries", "max_retry_wait", "expected_time"), + "config", + [ + ( + RetryConfig( + type=RetryType.ExponentialBackoff, + max_retries=3, + base_delay=2.0, + jitter=False, + ) + ), + ( + RetryConfig( + type=RetryType.Immediate, + max_retries=3, + ) + ), + ], +) +@pytest.mark.parametrize( + ("exception", "exception_args"), [ ( - "native", - 3, # 3 retries - 0, # native retry does not adhere to max_retry_wait - 0, # immediate retry, expect 0 seconds elapsed time + "BadRequestError", + ["Oh no!", "", ""], + ), + ( + "UnsupportedParamsError", + ["Oh no!", "", ""], + ), + ( + "ContextWindowExceededError", + ["Oh no!", "", ""], + ), + ( + "ContentPolicyViolationError", + ["Oh no!", "", ""], + ), + ( + "ImageFetchError", + ["Oh no!", "", ""], + ), + ( + "InvalidRequestError", + ["Oh no!", "", ""], + ), + ( + "AuthenticationError", + ["Oh no!", "", ""], + ), + ( + "PermissionDeniedError", + [ + "Oh no!", + "", + "", + httpx.Response( + status_code=403, + request=httpx.Request( + method="GET", url="https://litellm.ai" + ), # mock request object + ), + ], + ), + ( + "NotFoundError", + ["Oh no!", "", ""], ), ( - "exponential_backoff", - 3, # 3 retries - 0, # exponential retry does not adhere to max_retry_wait - 14, # (2^1 + jitter) + (2^2 + jitter) + (2^3 + jitter) = 2 + 4 + 8 + 3*jitter = 14 seconds min total runtime + "UnprocessableEntityError", + [ + "Oh no!", + "", + "", + httpx.Response( + status_code=403, + request=httpx.Request( + method="GET", url="https://litellm.ai" + ), # mock request object + ), + ], ), ( - "random_wait", - 3, # 3 retries - 2, # random wait [0, 2] seconds - 0, # unpredictable, don't know what the total runtime will be + "APIConnectionError", + ["Oh no!", "", ""], ), ( - "incremental_wait", - 3, # 3 retries - 3, # wait for a max of 3 seconds on a single retry. - 6, # Wait 3/3 * 1 on first retry, 3/3 * 2 on second, 3/3 * 3 on third, 1 + 2 + 3 = 6 seconds total runtime. + "APIError", + [500, "Oh no!", "", ""], + ), + ( + "ServiceUnavailableError", + ["Oh no!", "", ""], + ), + ( + "APIResponseValidationError", + ["Oh no!", "", ""], + ), + ( + "BudgetExceededError", + ["Oh no!", "", ""], ), ], ) -async def test_retries_async( - strategy: str, max_retries: int, max_retry_wait: int, expected_time: float +def test_exponential_backoff_skipping_exceptions( + config: RetryConfig, exception: str, exception_args: list[Any] ) -> None: """ - Test various retry strategies with various configurations. - - Args - ---- - strategy: The retry strategy to use. - max_retries: The maximum number of retry attempts. 
- max_retry_wait: The maximum wait time between retries. + Test skipping retries for exceptions that should not cause a retry. """ - retry_service = retry_factory.create( - strategy=strategy, - init_args={ - "max_retries": max_retries, - "max_retry_wait": max_retry_wait, - }, - ) + retry_service = create_retry(config) - retries = 0 + # start at -1 because the first call is not a retry + retries = -1 + exception_cls = exceptions.__dict__[exception] - async def mock_func(): # noqa: RUF029 + def mock_func(): nonlocal retries retries += 1 - msg = "Mock error for testing retries" - raise ValueError(msg) + raise exception_cls(*exception_args) - start_time = time.time() - with pytest.raises(ValueError, match="Mock error for testing retries"): - await retry_service.aretry(func=mock_func) - elapsed_time = time.time() - start_time + with pytest.raises(exception_cls, match="Oh no!"): + retry_service.retry(func=mock_func, input_args={}) # subtract 1 from retries because the first call is not a retry - assert retries - 1 == max_retries, f"Expected {max_retries} retries, got {retries}" - assert elapsed_time >= expected_time, ( - f"Expected elapsed time >= {expected_time}, got {elapsed_time}" + assert retries == 0, ( + f"Expected not to retry for '{exception}' exception. Got {retries} retries." ) diff --git a/tests/mock_provider.py b/tests/mock_provider.py deleted file mode 100644 index 97b7aa9076..0000000000 --- a/tests/mock_provider.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (c) 2025 Microsoft Corporation. -# Licensed under the MIT License - -"""A module containing 'MockChatLLM' and 'MockEmbeddingLLM' models.""" - -from collections.abc import AsyncGenerator, Generator -from typing import Any - -from graphrag.config.enums import ModelType -from graphrag.config.models.language_model_config import LanguageModelConfig -from graphrag.language_model.response.base import ( - BaseModelOutput, - BaseModelResponse, - ModelResponse, -) -from pydantic import BaseModel - - -class MockChatLLM: - """A mock chat LLM provider.""" - - def __init__( - self, - responses: list[str | BaseModel] | None = None, - config: LanguageModelConfig | None = None, - json: bool = False, - **kwargs: Any, - ): - self.responses = config.responses if config and config.responses else responses - self.response_index = 0 - self.config = config or LanguageModelConfig( - type=ModelType.MockChat, model="gpt-4.1", api_key="mock" - ) - - async def achat( - self, - prompt: str, - history: list | None = None, - **kwargs, - ) -> ModelResponse: - """Return the next response in the list.""" - return self.chat(prompt, history, **kwargs) - - async def achat_stream( - self, - prompt: str, - history: list | None = None, - **kwargs, - ) -> AsyncGenerator[str, None]: - """Return the next response in the list.""" - if not self.responses: - return - - for response in self.responses: - response = ( - response.model_dump_json() - if isinstance(response, BaseModel) - else response - ) - - yield response - - def chat( - self, - prompt: str, - history: list | None = None, - **kwargs, - ) -> ModelResponse: - """Return the next response in the list.""" - if not self.responses: - return BaseModelResponse(output=BaseModelOutput(content="")) - - response = self.responses[self.response_index % len(self.responses)] - self.response_index += 1 - - parsed_json = response if isinstance(response, BaseModel) else None - response = ( - response.model_dump_json() if isinstance(response, BaseModel) else response - ) - - return BaseModelResponse( - 
output=BaseModelOutput(content=response), - parsed_response=parsed_json, - ) - - def chat_stream( - self, - prompt: str, - history: list | None = None, - **kwargs, - ) -> Generator[str, None]: - """Return the next response in the list.""" - raise NotImplementedError - - -class MockEmbeddingLLM: - """A mock embedding LLM provider.""" - - def __init__(self, **kwargs: Any): - self.config = LanguageModelConfig( - type=ModelType.MockEmbedding, model="text-embedding-3-large", api_key="mock" - ) - self.vectors = 3072 * [1.0] - - def embed_batch(self, text_list: list[str], **kwargs: Any) -> list[list[float]]: - """Generate an embedding for the input text.""" - return [self.vectors for _ in text_list] - - def embed(self, text: str, **kwargs: Any) -> list[float]: - """Generate an embedding for the input text.""" - return self.vectors - - async def aembed(self, text: str, **kwargs: Any) -> list[float]: - """Generate an embedding for the input text.""" - return self.vectors - - async def aembed_batch( - self, text_list: list[str], **kwargs: Any - ) -> list[list[float]]: - """Generate an embedding for the input text.""" - return [self.vectors for _ in text_list] diff --git a/tests/notebook/test_notebooks.py b/tests/notebook/test_notebooks.py index 9f9d9b1222..47d759fcea 100644 --- a/tests/notebook/test_notebooks.py +++ b/tests/notebook/test_notebooks.py @@ -1,18 +1,36 @@ # Copyright (c) 2024 Microsoft Corporation. # Licensed under the MIT License import subprocess +from dataclasses import dataclass from pathlib import Path import nbformat import pytest -NOTEBOOKS_PATH = Path("examples_notebooks") -EXCLUDED_PATH = NOTEBOOKS_PATH / "community_contrib" + +@dataclass +class NotebookDetails: + dir: Path + excluded_filenames: list[str] + + +NOTEBOOKS: list[NotebookDetails] = [ + NotebookDetails( + dir=Path("packages/graphrag-llm/notebooks"), + excluded_filenames=[], + ), + # Was in previous test but not actually pointing at a notebooks location + # NotebookDetails( + # dir=Path("examples_notebooks"), # noqa: ERA001 + # excluded_filenames=["community_contrib"], # noqa: ERA001 + # ), +] notebooks_list = [ - notebook - for notebook in NOTEBOOKS_PATH.rglob("*.ipynb") - if EXCLUDED_PATH not in notebook.parents + nb + for details in NOTEBOOKS + for nb in details.dir.rglob("*.ipynb") + if nb.stem not in details.excluded_filenames ] @@ -21,6 +39,8 @@ def _notebook_run(filepath: Path): :returns execution errors """ args = [ + "uv", + "run", "jupyter", "nbconvert", "--to", @@ -29,7 +49,7 @@ def _notebook_run(filepath: Path): "-y", "--no-prompt", "--stdout", - str(filepath.absolute().resolve()), + str(filepath.resolve()), ] notebook = subprocess.check_output(args) nb = nbformat.reads(notebook, nbformat.current_nbformat) @@ -43,6 +63,18 @@ def _notebook_run(filepath: Path): ] +def clear_cache(): + cache_dir = Path("packages/graphrag-llm/notebooks/cache") + if cache_dir.exists(): + for file in cache_dir.iterdir(): + if file.is_file(): + file.unlink() + cache_dir.rmdir() + + +clear_cache() + + @pytest.mark.parametrize("notebook_path", notebooks_list) def test_notebook(notebook_path: Path): assert _notebook_run(notebook_path) == [] diff --git a/tests/unit/chunking/test_chunker.py b/tests/unit/chunking/test_chunker.py index dd68b0ab6f..5addee34d2 100644 --- a/tests/unit/chunking/test_chunker.py +++ b/tests/unit/chunking/test_chunker.py @@ -1,10 +1,10 @@ # Copyright (c) 2024 Microsoft Corporation. 
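# --- Illustrative usage sketch (not part of this diff) --------------------------------
# The Tokenizer base class now comes from graphrag_llm, and graphrag's get_tokenizer is a
# thin wrapper over create_tokenizer (see the get_tokenizer.py hunk earlier in this diff).
# A small sketch of building both tokenizer flavours directly; the encoding name and the
# "<provider>/<model>" id are examples only, and only encode() is exercised because that
# is the method the search code relies on for token counting.
from graphrag_llm.config import TokenizerConfig, TokenizerType
from graphrag_llm.tokenizer import create_tokenizer

# tiktoken-backed tokenizer, selected by encoding name
tiktoken_tokenizer = create_tokenizer(
    TokenizerConfig(type=TokenizerType.Tiktoken, encoding_name="cl100k_base")
)

# LiteLLM-backed tokenizer, selected by a "<provider>/<model>" id
litellm_tokenizer = create_tokenizer(
    TokenizerConfig(type=TokenizerType.LiteLLM, model_id="openai/gpt-4.1")
)

prompt = "How many tokens is this prompt?"
print(len(tiktoken_tokenizer.encode(prompt)))
print(len(litellm_tokenizer.encode(prompt)))
# ---------------------------------------------------------------------------------------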
# Licensed under the MIT License +from typing import Any from unittest.mock import Mock, patch from graphrag.tokenizer.get_tokenizer import get_tokenizer -from graphrag.tokenizer.tokenizer import Tokenizer from graphrag_chunking.bootstrap_nltk import bootstrap from graphrag_chunking.chunk_strategy_type import ChunkerType from graphrag_chunking.chunker_factory import create_chunker @@ -12,9 +12,13 @@ from graphrag_chunking.token_chunker import ( split_text_on_tokens, ) +from graphrag_llm.tokenizer import Tokenizer class MockTokenizer(Tokenizer): + def __init__(self, **kwargs: Any) -> None: + """Initialize the LiteLLM Tokenizer.""" + def encode(self, text) -> list[int]: return [ord(char) for char in text] diff --git a/tests/unit/config/fixtures/minimal_config/settings.yaml b/tests/unit/config/fixtures/minimal_config/settings.yaml index 0321b5cf6f..049c2fa131 100644 --- a/tests/unit/config/fixtures/minimal_config/settings.yaml +++ b/tests/unit/config/fixtures/minimal_config/settings.yaml @@ -1,11 +1,11 @@ -models: - default_chat_model: +completion_models: + default_completion_model: api_key: ${CUSTOM_API_KEY} - type: chat model_provider: openai model: gpt-4.1 + +embedding_models: default_embedding_model: api_key: ${CUSTOM_API_KEY} - type: embedding model_provider: openai model: text-embedding-3-large diff --git a/tests/unit/config/fixtures/minimal_config_missing_env_var/settings.yaml b/tests/unit/config/fixtures/minimal_config_missing_env_var/settings.yaml index 764306275f..ab370e4ab6 100644 --- a/tests/unit/config/fixtures/minimal_config_missing_env_var/settings.yaml +++ b/tests/unit/config/fixtures/minimal_config_missing_env_var/settings.yaml @@ -1,11 +1,11 @@ -models: - default_chat_model: +completion_models: + default_completion_model: api_key: ${SOME_NON_EXISTENT_ENV_VAR} - type: chat model_provider: openai model: gpt-4.1 + +embedding_models: default_embedding_model: api_key: ${SOME_NON_EXISTENT_ENV_VAR} - type: embedding model_provider: openai model: text-embedding-3-large diff --git a/tests/unit/config/test_config.py b/tests/unit/config/test_config.py index 545e66b7c5..67962673e4 100644 --- a/tests/unit/config/test_config.py +++ b/tests/unit/config/test_config.py @@ -5,142 +5,24 @@ from pathlib import Path from unittest import mock -import graphrag.config.defaults as defs -import pytest -from graphrag.config.enums import AuthType, ModelType from graphrag.config.load_config import load_config from graphrag.config.models.graph_rag_config import GraphRagConfig -from pydantic import ValidationError from tests.unit.config.utils import ( - DEFAULT_EMBEDDING_MODEL_CONFIG, - DEFAULT_MODEL_CONFIG, + DEFAULT_COMPLETION_MODELS, + DEFAULT_EMBEDDING_MODELS, FAKE_API_KEY, assert_graphrag_configs, get_default_graphrag_config, ) -def test_missing_openai_required_api_key() -> None: - model_config_missing_api_key = { - defs.DEFAULT_CHAT_MODEL_ID: { - "type": ModelType.Chat, - "model_provider": "openai", - "model": defs.DEFAULT_CHAT_MODEL, - }, - defs.DEFAULT_EMBEDDING_MODEL_ID: DEFAULT_EMBEDDING_MODEL_CONFIG, - } - - # API Key required for OpenAIChat - with pytest.raises(ValidationError): - GraphRagConfig(models=model_config_missing_api_key) - - # API Key required for OpenAIEmbedding - model_config_missing_api_key[defs.DEFAULT_CHAT_MODEL_ID]["type"] = ( - ModelType.Embedding - ) - with pytest.raises(ValidationError): - GraphRagConfig(models=model_config_missing_api_key) - - -def test_missing_azure_api_key() -> None: - model_config_missing_api_key = { - defs.DEFAULT_CHAT_MODEL_ID: { - "type": 
ModelType.Chat, - "model_provider": "azure", - "auth_type": AuthType.APIKey, - "model": defs.DEFAULT_CHAT_MODEL, - "api_base": "some_api_base", - "api_version": "some_api_version", - "deployment_name": "some_deployment_name", - }, - defs.DEFAULT_EMBEDDING_MODEL_ID: DEFAULT_EMBEDDING_MODEL_CONFIG, - } - - with pytest.raises(ValidationError): - GraphRagConfig(models=model_config_missing_api_key) - - # API Key not required for managed identity - model_config_missing_api_key[defs.DEFAULT_CHAT_MODEL_ID]["auth_type"] = ( - AuthType.AzureManagedIdentity - ) - GraphRagConfig(models=model_config_missing_api_key) - - -def test_conflicting_auth_type() -> None: - model_config_invalid_auth_type = { - defs.DEFAULT_CHAT_MODEL_ID: { - "auth_type": AuthType.AzureManagedIdentity, - "type": ModelType.Chat, - "model_provider": "openai", - "model": defs.DEFAULT_CHAT_MODEL, - }, - defs.DEFAULT_EMBEDDING_MODEL_ID: DEFAULT_EMBEDDING_MODEL_CONFIG, - } - - with pytest.raises(ValidationError): - GraphRagConfig(models=model_config_invalid_auth_type) - - -def test_conflicting_azure_api_key() -> None: - model_config_conflicting_api_key = { - defs.DEFAULT_CHAT_MODEL_ID: { - "type": ModelType.Chat, - "model_provider": "azure", - "auth_type": AuthType.AzureManagedIdentity, - "model": defs.DEFAULT_CHAT_MODEL, - "api_base": "some_api_base", - "api_version": "some_api_version", - "deployment_name": "some_deployment_name", - "api_key": "THIS_SHOULD_NOT_BE_SET_WHEN_USING_MANAGED_IDENTITY", - }, - defs.DEFAULT_EMBEDDING_MODEL_ID: DEFAULT_EMBEDDING_MODEL_CONFIG, - } - - with pytest.raises(ValidationError): - GraphRagConfig(models=model_config_conflicting_api_key) - - -base_azure_model_config = { - "type": ModelType.Chat, - "model_provider": "azure", - "auth_type": AuthType.AzureManagedIdentity, - "model": defs.DEFAULT_CHAT_MODEL, - "api_base": "some_api_base", - "api_version": "some_api_version", - "deployment_name": "some_deployment_name", -} - - -def test_missing_azure_api_base() -> None: - missing_api_base_config = base_azure_model_config.copy() - del missing_api_base_config["api_base"] - - with pytest.raises(ValidationError): - GraphRagConfig( - models={ - defs.DEFAULT_CHAT_MODEL_ID: missing_api_base_config, - defs.DEFAULT_EMBEDDING_MODEL_ID: DEFAULT_EMBEDDING_MODEL_CONFIG, - } # type: ignore - ) - - -def test_missing_azure_api_version() -> None: - missing_api_version_config = base_azure_model_config.copy() - del missing_api_version_config["api_version"] - - with pytest.raises(ValidationError): - GraphRagConfig( - models={ - defs.DEFAULT_CHAT_MODEL_ID: missing_api_version_config, - defs.DEFAULT_EMBEDDING_MODEL_ID: DEFAULT_EMBEDDING_MODEL_CONFIG, - } # type: ignore - ) - - def test_default_config() -> None: expected = get_default_graphrag_config() - actual = GraphRagConfig(models=DEFAULT_MODEL_CONFIG) # type: ignore + actual = GraphRagConfig( + completion_models=DEFAULT_COMPLETION_MODELS, # type: ignore + embedding_models=DEFAULT_EMBEDDING_MODELS, # type: ignore + ) assert_graphrag_configs(actual, expected) diff --git a/tests/unit/config/test_metrics_config.py b/tests/unit/config/test_metrics_config.py new file mode 100644 index 0000000000..967389c252 --- /dev/null +++ b/tests/unit/config/test_metrics_config.py @@ -0,0 +1,29 @@ +# Copyright (c) 2024 Microsoft Corporation. 
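# --- Illustrative configuration sketch (not part of this diff) ------------------------
# ModelConfig replaces LanguageModelConfig and backs both the completion_models and
# embedding_models sections of settings.yml. These two constructions mirror the
# validation rules exercised by test_model_config.py below; the endpoint, deployment
# name, and API key are placeholders.
from graphrag_llm.config import AuthMethod, LLMProviderType, ModelConfig

# OpenAI-style provider: an api_key is required with the default auth method.
openai_model = ModelConfig(
    type=LLMProviderType.LiteLLM,
    model_provider="openai",
    model="gpt-4.1",
    api_key="<YOUR_API_KEY>",
)

# Azure with managed identity: api_base and azure_deployment_name are required,
# and api_key must be omitted.
azure_model = ModelConfig(
    type=LLMProviderType.LiteLLM,
    model_provider="azure",
    model="gpt-4.1",
    azure_deployment_name="gpt-4.1",
    api_base="https://<your-endpoint>.openai.azure.com/",
    api_version="2025-04-01-preview",
    auth_method=AuthMethod.AzureManagedIdentity,
)
# ---------------------------------------------------------------------------------------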
+# Licensed under the MIT License + +"""Test metrics configuration loading.""" + +import pytest +from graphrag_llm.config import ( + MetricsConfig, + MetricsWriterType, +) + + +def test_file_metrics_writer_validation() -> None: + """Test that missing required parameters raise validation errors.""" + + with pytest.raises( + ValueError, + match="base_dir must be specified for file-based metrics writer\\.", + ): + _ = MetricsConfig( + writer=MetricsWriterType.File, + base_dir=" ", + ) + + # passes validation + _ = MetricsConfig( + writer=MetricsWriterType.File, + base_dir="./metrics", + ) diff --git a/tests/unit/config/test_model_config.py b/tests/unit/config/test_model_config.py new file mode 100644 index 0000000000..67de71bf7f --- /dev/null +++ b/tests/unit/config/test_model_config.py @@ -0,0 +1,121 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Test model configuration loading.""" + +import pytest +from graphrag_llm.config import AuthMethod, LLMProviderType, ModelConfig +from pydantic import ValidationError + + +def test_litellm_provider_validation() -> None: + """Test that missing required parameters raise validation errors.""" + + with pytest.raises(ValidationError): + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="openai", + model="", + ) + + with pytest.raises(ValidationError): + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="", + model="gpt-4o", + ) + + with pytest.raises( + ValueError, + match="api_key must be set when auth_method=api_key\\.", + ): + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="openai", + model="gpt-4o", + ) + + with pytest.raises( + ValueError, + match="azure_deployment_name should not be specified for non-Azure model providers\\.", + ): + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="openai", + model="gpt-4o", + azure_deployment_name="some-deployment", + ) + + with pytest.raises( + ValueError, + match="azure_deployment_name and api_base must be specified with the 'azure' model provider\\.", + ): + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="azure", + model="gpt-4o", + ) + + with pytest.raises( + ValueError, + match="azure_deployment_name and api_base must be specified with the 'azure' model provider\\.", + ): + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="azure", + model="gpt-4o", + azure_deployment_name="my-deployment", + ) + + with pytest.raises( + ValueError, + match="api_key should not be set when using Azure Managed Identity\\.", + ): + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="azure", + model="gpt-4o", + azure_deployment_name="gpt-4o", + api_base="https://my-azure-endpoint/", + api_version="2024-06-01", + auth_method=AuthMethod.AzureManagedIdentity, + api_key="some-api-key", + ) + + with pytest.raises( + ValueError, + match="api_key must be set when auth_method=api_key\\.", + ): + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="azure", + azure_deployment_name="gpt-4o", + api_base="https://my-azure-endpoint/", + api_version="2024-06-01", + model="gpt-4o", + ) + + # pass validation + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="openai", + model="gpt-4o", + api_key="NOT_A_REAL_API_KEY", + ) + _ = ModelConfig( + type=LLMProviderType.LiteLLM, + model_provider="azure", + model="gpt-4o", + azure_deployment_name="gpt-4o", + api_base="https://my-azure-endpoint/", + api_key="NOT_A_REAL_API_KEY", + ) + _ = ModelConfig( + 
type=LLMProviderType.LiteLLM, + model_provider="azure", + model="gpt-4o", + azure_deployment_name="gpt-4o", + api_base="https://my-azure-endpoint/", + api_version="2024-06-01", + auth_method=AuthMethod.AzureManagedIdentity, + ) diff --git a/tests/unit/config/test_rate_limit_config.py b/tests/unit/config/test_rate_limit_config.py new file mode 100644 index 0000000000..938e1d4d4a --- /dev/null +++ b/tests/unit/config/test_rate_limit_config.py @@ -0,0 +1,66 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Test rate limit configuration loading.""" + +import pytest +from graphrag_llm.config import RateLimitConfig, RateLimitType + + +def test_sliding_window_validation() -> None: + """Test that missing required parameters raise validation errors.""" + + with pytest.raises( + ValueError, + match="period_in_seconds must be a positive integer for Sliding Window rate limit\\.", + ): + _ = RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=0, + requests_per_period=100, + tokens_per_period=1000, + ) + + with pytest.raises( + ValueError, + match="At least one of requests_per_period or tokens_per_period must be specified for Sliding Window rate limit\\.", + ): + _ = RateLimitConfig( + type=RateLimitType.SlidingWindow, + ) + + with pytest.raises( + ValueError, + match="requests_per_period must be a positive integer for Sliding Window rate limit\\.", + ): + _ = RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=60, + requests_per_period=-10, + ) + + with pytest.raises( + ValueError, + match="tokens_per_period must be a positive integer for Sliding Window rate limit\\.", + ): + _ = RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=60, + tokens_per_period=-10, + ) + + # passes validation + _ = RateLimitConfig( + type=RateLimitType.SlidingWindow, + requests_per_period=100, + ) + _ = RateLimitConfig( + type=RateLimitType.SlidingWindow, + tokens_per_period=1000, + ) + _ = RateLimitConfig( + type=RateLimitType.SlidingWindow, + period_in_seconds=60, + requests_per_period=100, + tokens_per_period=1000, + ) diff --git a/tests/unit/config/test_retry_config.py b/tests/unit/config/test_retry_config.py new file mode 100644 index 0000000000..2c3aff8375 --- /dev/null +++ b/tests/unit/config/test_retry_config.py @@ -0,0 +1,67 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Test retry configuration loading.""" + +import pytest +from graphrag_llm.config import RetryConfig, RetryType + + +def test_exponential_backoff_validation() -> None: + """Test that missing required parameters raise validation errors.""" + + with pytest.raises( + ValueError, + match="max_retries must be greater than 1 for Exponential Backoff retry\\.", + ): + _ = RetryConfig( + type=RetryType.ExponentialBackoff, + max_retries=0, + ) + + with pytest.raises( + ValueError, + match="base_delay must be greater than 1\\.0 for Exponential Backoff retry\\.", + ): + _ = RetryConfig( + type=RetryType.ExponentialBackoff, + base_delay=0.5, + ) + + with pytest.raises( + ValueError, + match="max_delay must be greater than 1 for Exponential Backoff retry\\.", + ): + _ = RetryConfig( + type=RetryType.ExponentialBackoff, + max_delay=0.5, + ) + + # passes validation + _ = RetryConfig(type=RetryType.ExponentialBackoff) + _ = RetryConfig( + type=RetryType.ExponentialBackoff, + max_retries=5, + base_delay=2.0, + max_delay=30, + ) + + +def test_immediate_validation() -> None: + """Test that missing required parameters raise validation errors.""" + + with pytest.raises( + ValueError, + match="max_retries must be greater than 1 for Immediate retry\\.", + ): + _ = RetryConfig( + type=RetryType.Immediate, + max_retries=0, + ) + + # passes validation + _ = RetryConfig(type=RetryType.Immediate) + _ = RetryConfig( + type=RetryType.Immediate, + max_retries=3, + ) diff --git a/tests/unit/config/test_template_engine_config.py b/tests/unit/config/test_template_engine_config.py new file mode 100644 index 0000000000..26aa01b895 --- /dev/null +++ b/tests/unit/config/test_template_engine_config.py @@ -0,0 +1,44 @@ +# Copyright (c) 2024 Microsoft Corporation. +# Licensed under the MIT License + +"""Test metrics configuration loading.""" + +import pytest +from graphrag_llm.config import ( + TemplateEngineConfig, + TemplateEngineType, + TemplateManagerType, +) + + +def test_template_engine_config_validation() -> None: + """Test that missing required parameters raise validation errors.""" + + with pytest.raises( + ValueError, + match="base_dir must be specified for file-based template managers\\.", + ): + _ = TemplateEngineConfig( + type=TemplateEngineType.Jinja, + template_manager=TemplateManagerType.File, + base_dir=" ", + ) + + with pytest.raises( + ValueError, + match="template_extension cannot be an empty string for file-based template managers\\.", + ): + _ = TemplateEngineConfig( + type=TemplateEngineType.Jinja, + template_manager=TemplateManagerType.File, + base_dir="./templates", + template_extension=" ", + ) + + # passes validation + _ = TemplateEngineConfig( + type=TemplateEngineType.Jinja, + template_manager=TemplateManagerType.File, + base_dir="./templates", + template_extension=".jinja", + ) diff --git a/tests/unit/config/test_tokenizer_config.py b/tests/unit/config/test_tokenizer_config.py new file mode 100644 index 0000000000..62b7f6ebf6 --- /dev/null +++ b/tests/unit/config/test_tokenizer_config.py @@ -0,0 +1,39 @@ +# Copyright (c) 2024 Microsoft Corporation. 
+# Licensed under the MIT License + +"""Test tokenizer configuration loading.""" + +import pytest +from graphrag_llm.config import TokenizerConfig, TokenizerType + + +def test_litellm_tokenizer_validation() -> None: + """Test that missing required parameters raise validation errors.""" + + with pytest.raises( + ValueError, + match="model_id must be specified for LiteLLM tokenizer\\.", + ): + _ = TokenizerConfig( + type=TokenizerType.LiteLLM, + model_id="", + ) + + with pytest.raises( + ValueError, + match="encoding_name must be specified for TikToken tokenizer\\.", + ): + _ = TokenizerConfig( + type=TokenizerType.Tiktoken, + encoding_name="", + ) + + # passes validation + _ = TokenizerConfig( + type=TokenizerType.LiteLLM, + model_id="openai/gpt-4o", + ) + _ = TokenizerConfig( + type=TokenizerType.Tiktoken, + encoding_name="o200k-base", + ) diff --git a/tests/unit/config/utils.py b/tests/unit/config/utils.py index 813502d548..b9c72a6ec2 100644 --- a/tests/unit/config/utils.py +++ b/tests/unit/config/utils.py @@ -17,7 +17,6 @@ ) from graphrag.config.models.global_search_config import GlobalSearchConfig from graphrag.config.models.graph_rag_config import GraphRagConfig -from graphrag.config.models.language_model_config import LanguageModelConfig from graphrag.config.models.local_search_config import LocalSearchConfig from graphrag.config.models.prune_graph_config import PruneGraphConfig from graphrag.config.models.reporting_config import ReportingConfig @@ -28,28 +27,29 @@ from graphrag_cache import CacheConfig from graphrag_chunking.chunking_config import ChunkingConfig from graphrag_input import InputConfig +from graphrag_llm.config import MetricsConfig, ModelConfig, RateLimitConfig, RetryConfig from graphrag_storage import StorageConfig from graphrag_vectors import VectorStoreConfig -from pydantic import BaseModel FAKE_API_KEY = "NOT_AN_API_KEY" -DEFAULT_CHAT_MODEL_CONFIG = { +DEFAULT_COMPLETION_MODEL_CONFIG = { "api_key": FAKE_API_KEY, - "type": defs.DEFAULT_CHAT_MODEL_TYPE.value, - "model": defs.DEFAULT_CHAT_MODEL, + "model": defs.DEFAULT_COMPLETION_MODEL, "model_provider": defs.DEFAULT_MODEL_PROVIDER, } DEFAULT_EMBEDDING_MODEL_CONFIG = { "api_key": FAKE_API_KEY, - "type": defs.DEFAULT_EMBEDDING_MODEL_TYPE.value, "model": defs.DEFAULT_EMBEDDING_MODEL, "model_provider": defs.DEFAULT_MODEL_PROVIDER, } -DEFAULT_MODEL_CONFIG = { - defs.DEFAULT_CHAT_MODEL_ID: DEFAULT_CHAT_MODEL_CONFIG, +DEFAULT_COMPLETION_MODELS = { + defs.DEFAULT_COMPLETION_MODEL_ID: DEFAULT_COMPLETION_MODEL_CONFIG, +} + +DEFAULT_EMBEDDING_MODELS = { defs.DEFAULT_EMBEDDING_MODEL_ID: DEFAULT_EMBEDDING_MODEL_CONFIG, } @@ -57,49 +57,59 @@ def get_default_graphrag_config() -> GraphRagConfig: return GraphRagConfig(**{ **asdict(defs.graphrag_config_defaults), - "models": DEFAULT_MODEL_CONFIG, + "completion_models": DEFAULT_COMPLETION_MODELS, + "embedding_models": DEFAULT_EMBEDDING_MODELS, }) -def assert_language_model_configs( - actual: LanguageModelConfig, expected: LanguageModelConfig +def assert_retry_configs(actual: RetryConfig, expected: RetryConfig) -> None: + assert actual.type == expected.type + assert actual.max_retries == expected.max_retries + assert actual.base_delay == expected.base_delay + assert actual.jitter == expected.jitter + assert actual.max_delay == expected.max_delay + + +def assert_rate_limit_configs( + actual: RateLimitConfig, expected: RateLimitConfig ) -> None: - assert actual.api_key == expected.api_key - assert actual.auth_type == expected.auth_type assert actual.type == expected.type + assert 
actual.period_in_seconds == expected.period_in_seconds + assert actual.requests_per_period == expected.requests_per_period + assert actual.tokens_per_period == expected.tokens_per_period + + +def assert_metrics_configs(actual: MetricsConfig, expected: MetricsConfig) -> None: + assert actual.type == expected.type + assert actual.store == expected.store + assert actual.writer == expected.writer + assert actual.log_level == expected.log_level + assert actual.base_dir == expected.base_dir + + +def assert_model_configs(actual: ModelConfig, expected: ModelConfig) -> None: + assert actual.type == expected.type + assert actual.model_provider == expected.model_provider assert actual.model == expected.model - assert actual.encoding_model == expected.encoding_model - assert actual.max_tokens == expected.max_tokens - assert actual.temperature == expected.temperature - assert actual.max_completion_tokens == expected.max_completion_tokens - assert actual.top_p == expected.top_p - assert actual.n == expected.n - assert actual.frequency_penalty == expected.frequency_penalty - assert actual.presence_penalty == expected.presence_penalty - assert actual.request_timeout == expected.request_timeout + assert actual.call_args == expected.call_args assert actual.api_base == expected.api_base assert actual.api_version == expected.api_version - assert actual.deployment_name == expected.deployment_name - assert actual.organization == expected.organization - assert actual.proxy == expected.proxy - assert actual.audience == expected.audience - assert actual.model_supports_json == expected.model_supports_json - assert actual.tokens_per_minute == expected.tokens_per_minute - assert actual.requests_per_minute == expected.requests_per_minute - assert actual.retry_strategy == expected.retry_strategy - assert actual.max_retries == expected.max_retries - assert actual.max_retry_wait == expected.max_retry_wait - assert actual.concurrent_requests == expected.concurrent_requests - assert actual.async_mode == expected.async_mode - if actual.responses is not None: - assert expected.responses is not None - assert len(actual.responses) == len(expected.responses) - for e, a in zip(actual.responses, expected.responses, strict=True): - assert isinstance(e, BaseModel) - assert isinstance(a, BaseModel) - assert e.model_dump() == a.model_dump() + assert actual.api_key == expected.api_key + assert actual.auth_method == expected.auth_method + assert actual.azure_deployment_name == expected.azure_deployment_name + if actual.retry and expected.retry: + assert_retry_configs(actual.retry, expected.retry) + else: + assert actual.retry == expected.retry + if actual.rate_limit and expected.rate_limit: + assert_rate_limit_configs(actual.rate_limit, expected.rate_limit) + else: + assert actual.rate_limit == expected.rate_limit + if actual.metrics and expected.metrics: + assert_metrics_configs(actual.metrics, expected.metrics) else: - assert expected.responses is None + assert actual.metrics == expected.metrics + assert actual.mock_responses == expected.mock_responses def assert_vector_store_configs( @@ -155,7 +165,7 @@ def assert_text_embedding_configs( assert actual.batch_size == expected.batch_size assert actual.batch_max_tokens == expected.batch_max_tokens assert actual.names == expected.names - assert actual.model_id == expected.model_id + assert actual.embedding_model_id == expected.embedding_model_id def assert_chunking_configs(actual: ChunkingConfig, expected: ChunkingConfig) -> None: @@ -179,7 +189,7 @@ def assert_extract_graph_configs( 
assert actual.prompt == expected.prompt assert actual.entity_types == expected.entity_types assert actual.max_gleanings == expected.max_gleanings - assert actual.model_id == expected.model_id + assert actual.completion_model_id == expected.completion_model_id def assert_text_analyzer_configs( @@ -222,7 +232,7 @@ def assert_summarize_descriptions_configs( ) -> None: assert actual.prompt == expected.prompt assert actual.max_length == expected.max_length - assert actual.model_id == expected.model_id + assert actual.completion_model_id == expected.completion_model_id def assert_community_reports_configs( @@ -232,7 +242,7 @@ def assert_community_reports_configs( assert actual.text_prompt == expected.text_prompt assert actual.max_length == expected.max_length assert actual.max_input_length == expected.max_input_length - assert actual.model_id == expected.model_id + assert actual.completion_model_id == expected.completion_model_id def assert_extract_claims_configs( @@ -242,7 +252,7 @@ def assert_extract_claims_configs( assert actual.prompt == expected.prompt assert actual.description == expected.description assert actual.max_gleanings == expected.max_gleanings - assert actual.model_id == expected.model_id + assert actual.completion_model_id == expected.completion_model_id def assert_cluster_graph_configs( @@ -325,12 +335,19 @@ def assert_basic_search_configs( def assert_graphrag_configs(actual: GraphRagConfig, expected: GraphRagConfig) -> None: - a_keys = sorted(actual.models.keys()) - e_keys = sorted(expected.models.keys()) - assert len(a_keys) == len(e_keys) - for a, e in zip(a_keys, e_keys, strict=False): + completion_keys = sorted(actual.completion_models.keys()) + expected_completion_keys = sorted(expected.completion_models.keys()) + assert len(completion_keys) == len(expected_completion_keys) + for a, e in zip(completion_keys, expected_completion_keys, strict=False): + assert a == e + assert_model_configs(actual.completion_models[a], expected.completion_models[e]) + + embedding_keys = sorted(actual.embedding_models.keys()) + expected_embedding_keys = sorted(expected.embedding_models.keys()) + assert len(embedding_keys) == len(expected_embedding_keys) + for a, e in zip(embedding_keys, expected_embedding_keys, strict=False): assert a == e - assert_language_model_configs(actual.models[a], expected.models[e]) + assert_model_configs(actual.embedding_models[a], expected.embedding_models[e]) assert_vector_store_configs(actual.vector_store, expected.vector_store) assert_reporting_configs(actual.reporting, expected.reporting) diff --git a/tests/unit/graphrag_factory/test_factory.py b/tests/unit/graphrag_factory/test_factory.py index 5ddfdd8d1a..94e59b9c02 100644 --- a/tests/unit/graphrag_factory/test_factory.py +++ b/tests/unit/graphrag_factory/test_factory.py @@ -59,7 +59,7 @@ class TestFactory(Factory[TestABC]): assert trans2.get_value() == "test2" single1 = factory.create("singleton_strategy", {"value": "singleton"}) - single2 = factory.create("singleton_strategy", {"value": "ignored"}) + single2 = factory.create("singleton_strategy", {"value": "singleton"}) assert single1 is single2 assert single1.get_value() == "singleton" diff --git a/tests/unit/indexing/verbs/entities/extraction/strategies/graph_intelligence/test_gi_entity_extraction.py b/tests/unit/indexing/verbs/entities/extraction/strategies/graph_intelligence/test_gi_entity_extraction.py index 7d89d54220..486bf5fcdf 100644 --- a/tests/unit/indexing/verbs/entities/extraction/strategies/graph_intelligence/test_gi_entity_extraction.py +++ 
b/tests/unit/indexing/verbs/entities/extraction/strategies/graph_intelligence/test_gi_entity_extraction.py @@ -4,8 +4,8 @@ from graphrag.index.operations.extract_graph.extract_graph import _run_extract_graph from graphrag.prompts.index.extract_graph import GRAPH_EXTRACTION_PROMPT - -from tests.unit.indexing.verbs.helpers.mock_llm import create_mock_llm +from graphrag_llm.completion import create_completion +from graphrag_llm.config import LLMProviderType, ModelConfig SIMPLE_EXTRACTION_RESPONSE = """ ("entity"<|>TEST_ENTITY_1<|>COMPANY<|>TEST_ENTITY_1 is a test company) @@ -20,6 +20,16 @@ """.strip() +model = create_completion( + ModelConfig( + type=LLMProviderType.MockLLM, + model_provider="openai", + model="gpt-4o", + mock_responses=[SIMPLE_EXTRACTION_RESPONSE], + ) +) + + class TestRunChain(unittest.IsolatedAsyncioTestCase): async def test_run_extract_graph_single_document_correct_entities_returned(self): entities_df, _ = await _run_extract_graph( @@ -27,10 +37,7 @@ async def test_run_extract_graph_single_document_correct_entities_returned(self) source_id="1", entity_types=["person"], max_gleanings=0, - model=create_mock_llm( - responses=[SIMPLE_EXTRACTION_RESPONSE], - name="test_run_extract_graph_single_document_correct_entities_returned", - ), + model=model, prompt=GRAPH_EXTRACTION_PROMPT, ) @@ -44,10 +51,7 @@ async def test_run_extract_graph_single_document_correct_edges_returned(self): source_id="1", entity_types=["person"], max_gleanings=0, - model=create_mock_llm( - responses=[SIMPLE_EXTRACTION_RESPONSE], - name="test_run_extract_graph_single_document_correct_edges_returned", - ), + model=model, prompt=GRAPH_EXTRACTION_PROMPT, ) @@ -66,10 +70,7 @@ async def test_run_extract_graph_single_document_source_ids_mapped(self): source_id="1", entity_types=["person"], max_gleanings=0, - model=create_mock_llm( - responses=[SIMPLE_EXTRACTION_RESPONSE], - name="test_run_extract_graph_single_document_source_ids_mapped", - ), + model=model, prompt=GRAPH_EXTRACTION_PROMPT, ) diff --git a/tests/unit/indexing/verbs/helpers/mock_llm.py b/tests/unit/indexing/verbs/helpers/mock_llm.py deleted file mode 100644 index efb89158b3..0000000000 --- a/tests/unit/indexing/verbs/helpers/mock_llm.py +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (c) 2024 Microsoft Corporation. 
-# Licensed under the MIT License -from graphrag.language_model.manager import ModelManager -from graphrag.language_model.protocol.base import ChatModel -from pydantic import BaseModel - - -def create_mock_llm(responses: list[str | BaseModel], name: str = "mock") -> ChatModel: - """Creates a mock LLM that returns the given responses.""" - return ModelManager().get_or_create_chat_model( - name, "mock_chat", responses=responses - ) diff --git a/tests/unit/query/context_builder/test_entity_extraction.py b/tests/unit/query/context_builder/test_entity_extraction.py index 0d1c7c0018..c20c34c395 100644 --- a/tests/unit/query/context_builder/test_entity_extraction.py +++ b/tests/unit/query/context_builder/test_entity_extraction.py @@ -4,11 +4,12 @@ from typing import Any from graphrag.data_model.entity import Entity -from graphrag.language_model.manager import ModelManager from graphrag.query.context_builder.entity_extraction import ( EntityVectorStoreKey, map_query_to_entities, ) +from graphrag_llm.config import LLMProviderType, ModelConfig +from graphrag_llm.embedding import create_embedding from graphrag_vectors import ( TextEmbedder, VectorStore, @@ -16,6 +17,15 @@ VectorStoreSearchResult, ) +embedding_model = create_embedding( + ModelConfig( + type=LLMProviderType.MockLLM, + model_provider="openai", + model="text-embedding-3-small", + mock_responses=[1.0, 1.0, 1.0], + ) +) + class MockVectorStore(VectorStore): def __init__(self, documents: list[VectorStoreDocument]) -> None: @@ -92,9 +102,7 @@ def test_map_query_to_entities(): text_embedding_vectorstore=MockVectorStore([ VectorStoreDocument(id=entity.title, vector=None) for entity in entities ]), - text_embedder=ModelManager().get_or_create_embedding_model( - model_type="mock_embedding", name="mock" - ), + text_embedder=embedding_model, all_entities_dict={entity.id: entity for entity in entities}, embedding_vectorstore_key=EntityVectorStoreKey.TITLE, k=1, @@ -113,9 +121,7 @@ def test_map_query_to_entities(): text_embedding_vectorstore=MockVectorStore([ VectorStoreDocument(id=entity.id, vector=None) for entity in entities ]), - text_embedder=ModelManager().get_or_create_embedding_model( - model_type="mock_embedding", name="mock" - ), + text_embedder=embedding_model, all_entities_dict={entity.id: entity for entity in entities}, embedding_vectorstore_key=EntityVectorStoreKey.TITLE, k=2, diff --git a/tests/verbs/test_create_base_text_units.py b/tests/verbs/test_create_base_text_units.py index 77546c4c88..34bad99dc7 100644 --- a/tests/verbs/test_create_base_text_units.py +++ b/tests/verbs/test_create_base_text_units.py @@ -1,12 +1,12 @@ # Copyright (c) 2024 Microsoft Corporation. 
# Licensed under the MIT License -from graphrag.config.models.graph_rag_config import GraphRagConfig from graphrag.index.workflows.create_base_text_units import run_workflow from graphrag.utils.storage import load_table_from_storage +from tests.unit.config.utils import get_default_graphrag_config + from .util import ( - DEFAULT_MODEL_CONFIG, compare_outputs, create_test_context, load_test_table, @@ -18,7 +18,7 @@ async def test_create_base_text_units(): context = await create_test_context() - config = GraphRagConfig(models=DEFAULT_MODEL_CONFIG) # type: ignore + config = get_default_graphrag_config() config.chunking.prepend_metadata = ["title"] await run_workflow(config, context) diff --git a/tests/verbs/test_create_communities.py b/tests/verbs/test_create_communities.py index 5754c814a4..d5505d7a31 100644 --- a/tests/verbs/test_create_communities.py +++ b/tests/verbs/test_create_communities.py @@ -1,15 +1,15 @@ # Copyright (c) 2024 Microsoft Corporation. # Licensed under the MIT License -from graphrag.config.models.graph_rag_config import GraphRagConfig from graphrag.data_model.schemas import COMMUNITIES_FINAL_COLUMNS from graphrag.index.workflows.create_communities import ( run_workflow, ) from graphrag.utils.storage import load_table_from_storage +from tests.unit.config.utils import get_default_graphrag_config + from .util import ( - DEFAULT_MODEL_CONFIG, compare_outputs, create_test_context, load_test_table, @@ -26,7 +26,7 @@ async def test_create_communities(): ], ) - config = GraphRagConfig(models=DEFAULT_MODEL_CONFIG) # type: ignore + config = get_default_graphrag_config() await run_workflow( config, diff --git a/tests/verbs/test_create_community_reports.py b/tests/verbs/test_create_community_reports.py index d479120ce2..a36b6c7a66 100644 --- a/tests/verbs/test_create_community_reports.py +++ b/tests/verbs/test_create_community_reports.py @@ -2,7 +2,6 @@ # Licensed under the MIT License -from graphrag.config.models.graph_rag_config import GraphRagConfig from graphrag.data_model.schemas import COMMUNITY_REPORTS_FINAL_COLUMNS from graphrag.index.operations.summarize_communities.community_reports_extractor import ( CommunityReportResponse, @@ -13,8 +12,9 @@ ) from graphrag.utils.storage import load_table_from_storage +from tests.unit.config.utils import get_default_graphrag_config + from .util import ( - DEFAULT_MODEL_CONFIG, compare_outputs, create_test_context, load_test_table, @@ -34,7 +34,7 @@ summary="", explanation=" PipelineRunContext: """Create a test context with tables loaded into storage.""" diff --git a/uv.lock b/uv.lock index ddff9f008b..e1fe637635 100644 --- a/uv.lock +++ b/uv.lock @@ -1,11 +1,9 @@ version = 1 -revision = 1 -requires-python = ">=3.11, <3.14" +revision = 3 +requires-python = ">=3.11, <3.13" resolution-markers = [ - "python_full_version >= '3.13' and sys_platform == 'win32'", - "python_full_version >= '3.13' and sys_platform != 'win32'", - "python_full_version == '3.12.*' and sys_platform == 'win32'", - "python_full_version == '3.12.*' and sys_platform != 'win32'", + "python_full_version >= '3.12' and sys_platform == 'win32'", + "python_full_version >= '3.12' and sys_platform != 'win32'", "python_full_version < '3.12' and sys_platform == 'win32'", "python_full_version < '3.12' and sys_platform != 'win32'", ] @@ -17,6 +15,7 @@ members = [ "graphrag-chunking", "graphrag-common", "graphrag-input", + "graphrag-llm", "graphrag-monorepo", "graphrag-storage", "graphrag-vectors", ] @@ -26,18 +25,18 @@ name = "aiofiles" version = "24.1.0" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 } +sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247, upload-time = "2024-06-24T11:02:03.584Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 }, + { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896, upload-time = "2024-06-24T11:02:01.529Z" }, ] [[package]] name = "aiohappyeyeballs" version = "2.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, ] [[package]] @@ -53,59 +52,42 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/62/f1/8515650ac3121a9e55c7b217c60e7fae3e0134b5acfe65691781b5356929/aiohttp-3.13.0.tar.gz", hash = "sha256:378dbc57dd8cf341ce243f13fa1fa5394d68e2e02c15cd5f28eae35a70ec7f67", size = 7832348 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/db/df80cacac46cd548a736c5535b13cc18925cf6f9f83cd128cf3839842219/aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99eb94e97a42367fef5fc11e28cb2362809d3e70837f6e60557816c7106e2e20", size = 741374 }, - { url = "https://files.pythonhosted.org/packages/ae/f9/2d6d93fd57ab4726e18a7cdab083772eda8302d682620fbf2aef48322351/aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4696665b2713021c6eba3e2b882a86013763b442577fe5d2056a42111e732eca", size = 494956 }, - { url = "https://files.pythonhosted.org/packages/89/a6/e1c061b079fed04ffd6777950c82f2e8246fd08b7b3c4f56fdd47f697e5a/aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3e6a38366f7f0d0f6ed7a1198055150c52fda552b107dad4785c0852ad7685d1", size = 491154 }, - { url = 
"https://files.pythonhosted.org/packages/fe/4d/ee8913c0d2c7da37fdc98673a342b51611eaa0871682b37b8430084e35b5/aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aab715b1a0c37f7f11f9f1f579c6fbaa51ef569e47e3c0a4644fba46077a9409", size = 1745707 }, - { url = "https://files.pythonhosted.org/packages/f9/70/26b2c97e8fa68644aec43d788940984c5f3b53a8d1468d5baaa328f809c9/aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7972c82bed87d7bd8e374b60a6b6e816d75ba4f7c2627c2d14eed216e62738e1", size = 1702404 }, - { url = "https://files.pythonhosted.org/packages/65/1e/c8aa3c293a0e8b18968b1b88e9bd8fb269eb67eb7449f504a4c3e175b159/aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca8313cb852af788c78d5afdea24c40172cbfff8b35e58b407467732fde20390", size = 1805519 }, - { url = "https://files.pythonhosted.org/packages/51/b6/a3753fe86249eb441768658cfc00f8c4e0913b255c13be00ddb8192775e1/aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c333a2385d2a6298265f4b3e960590f787311b87f6b5e6e21bb8375914ef504", size = 1893904 }, - { url = "https://files.pythonhosted.org/packages/51/6d/7b1e020fe1d2a2be7cf0ce5e35922f345e3507cf337faa1a6563c42065c1/aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc6d5fc5edbfb8041d9607f6a417997fa4d02de78284d386bea7ab767b5ea4f3", size = 1745043 }, - { url = "https://files.pythonhosted.org/packages/e6/df/aad5dce268f9d4f29759c3eeb5fb5995c569d76abb267468dc1075218d5b/aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ddedba3d0043349edc79df3dc2da49c72b06d59a45a42c1c8d987e6b8d175b8", size = 1604765 }, - { url = "https://files.pythonhosted.org/packages/1c/19/a84a0e97b2da2224c8b85e1aef5cac834d07b2903c17bff1a6bdbc7041d2/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23ca762140159417a6bbc959ca1927f6949711851e56f2181ddfe8d63512b5ad", size = 1721737 }, - { url = "https://files.pythonhosted.org/packages/6c/61/ca6ad390128d964a08554fd63d6df5810fb5fbc7e599cb9e617f1729ae19/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bfe824d6707a5dc3c5676685f624bc0c63c40d79dc0239a7fd6c034b98c25ebe", size = 1716052 }, - { url = "https://files.pythonhosted.org/packages/2a/71/769e249e6625372c7d14be79b8b8c3b0592963a09793fb3d36758e60952c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3c11fa5dd2ef773a8a5a6daa40243d83b450915992eab021789498dc87acc114", size = 1783532 }, - { url = "https://files.pythonhosted.org/packages/66/64/b9cd03cdbb629bc492e4a744fbe96550a8340b0cd7a0cc4a9c90cfecd8d3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00fdfe370cffede3163ba9d3f190b32c0cfc8c774f6f67395683d7b0e48cdb8a", size = 1593072 }, - { url = "https://files.pythonhosted.org/packages/24/0e/87922c8cfdbd09f5e2197e9d87714a98c99c423560d44739e3af55400fe3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6475e42ef92717a678bfbf50885a682bb360a6f9c8819fb1a388d98198fdcb80", size = 1798613 }, - { url = "https://files.pythonhosted.org/packages/c5/bb/a3adfe2af76e1ee9e3b5464522004b148b266bc99d7ec424ca7843d64a3c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:77da5305a410910218b99f2a963092f4277d8a9c1f429c1ff1b026d1826bd0b6", size = 1737480 }, - { url = 
"https://files.pythonhosted.org/packages/ad/53/e124dcbd64e6365602f3493fe37a11ca5b7ac0a40822a6e2bc8260cd08e0/aiohttp-3.13.0-cp311-cp311-win32.whl", hash = "sha256:2f9d9ea547618d907f2ee6670c9a951f059c5994e4b6de8dcf7d9747b420c820", size = 429824 }, - { url = "https://files.pythonhosted.org/packages/3e/bd/485d98b372a2cd6998484a93ddd401ec6b6031657661c36846a10e2a1f6e/aiohttp-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f19f7798996d4458c669bd770504f710014926e9970f4729cf55853ae200469", size = 454137 }, - { url = "https://files.pythonhosted.org/packages/3a/95/7e8bdfa6e79099a086d59d42589492f1fe9d29aae3cefb58b676015ce278/aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1c272a9a18a5ecc48a7101882230046b83023bb2a662050ecb9bfcb28d9ab53a", size = 735585 }, - { url = "https://files.pythonhosted.org/packages/9f/20/2f1d3ee06ee94eafe516810705219bff234d09f135d6951661661d5595ae/aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:97891a23d7fd4e1afe9c2f4473e04595e4acb18e4733b910b6577b74e7e21985", size = 490613 }, - { url = "https://files.pythonhosted.org/packages/74/15/ab8600ef6dc1dcd599009a81acfed2ea407037e654d32e47e344e0b08c34/aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:475bd56492ce5f4cffe32b5533c6533ee0c406d1d0e6924879f83adcf51da0ae", size = 489750 }, - { url = "https://files.pythonhosted.org/packages/33/59/752640c2b86ca987fe5703a01733b00d375e6cd2392bc7574489934e64e5/aiohttp-3.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c32ada0abb4bc94c30be2b681c42f058ab104d048da6f0148280a51ce98add8c", size = 1736812 }, - { url = "https://files.pythonhosted.org/packages/3d/c6/dd6b86ddb852a7fdbcdc7a45b6bdc80178aef713c08279afcaee7a5a9f07/aiohttp-3.13.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4af1f8877ca46ecdd0bc0d4a6b66d4b2bddc84a79e2e8366bc0d5308e76bceb8", size = 1698535 }, - { url = "https://files.pythonhosted.org/packages/33/e2/27c92d205b9e8cee7661670e8e9f187931b71e26d42796b153d2a0ba6949/aiohttp-3.13.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e04ab827ec4f775817736b20cdc8350f40327f9b598dec4e18c9ffdcbea88a93", size = 1766573 }, - { url = "https://files.pythonhosted.org/packages/df/6a/1fc1ad71d130a30f7a207d8d958a41224c29b834463b5185efb2dbff6ad4/aiohttp-3.13.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a6d9487b9471ec36b0faedf52228cd732e89be0a2bbd649af890b5e2ce422353", size = 1865229 }, - { url = "https://files.pythonhosted.org/packages/14/51/d0c1701a79fcb0109cff5304da16226581569b89a282d8e7f1549a7e3ec0/aiohttp-3.13.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e66c57416352f36bf98f6641ddadd47c93740a22af7150d3e9a1ef6e983f9a8", size = 1750379 }, - { url = "https://files.pythonhosted.org/packages/ae/3d/2ec4b934f85856de1c0c18e90adc8902adadbfac2b3c0b831bfeb7214fc8/aiohttp-3.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:469167d5372f5bb3aedff4fc53035d593884fff2617a75317740e885acd48b04", size = 1560798 }, - { url = "https://files.pythonhosted.org/packages/38/56/e23d9c3e13006e599fdce3851517c70279e177871e3e567d22cf3baf5d6c/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a9f3546b503975a69b547c9fd1582cad10ede1ce6f3e313a2f547c73a3d7814f", size = 1697552 }, - { url = 
"https://files.pythonhosted.org/packages/56/cb/caa32c2ccaeca0a3dc39129079fd2ad02f9406c3a5f7924340435b87d4cd/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6b4174fcec98601f0cfdf308ee29a6ae53c55f14359e848dab4e94009112ee7d", size = 1718609 }, - { url = "https://files.pythonhosted.org/packages/fb/c0/5911856fef9e40fd1ccbb8c54a90116875d5753a92c1cac66ce2059b390d/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a533873a7a4ec2270fb362ee5a0d3b98752e4e1dc9042b257cd54545a96bd8ed", size = 1735887 }, - { url = "https://files.pythonhosted.org/packages/0e/48/8d6f4757a24c02f0a454c043556593a00645d10583859f7156db44d8b7d3/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ce887c5e54411d607ee0959cac15bb31d506d86a9bcaddf0b7e9d63325a7a802", size = 1553079 }, - { url = "https://files.pythonhosted.org/packages/39/fa/e82c9445e40b50e46770702b5b6ca2f767966d53e1a5eef03583ceac6df6/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d871f6a30d43e32fc9252dc7b9febe1a042b3ff3908aa83868d7cf7c9579a59b", size = 1762750 }, - { url = "https://files.pythonhosted.org/packages/3d/e6/9d30554e7f1e700bfeae4ab6b153d5dc7441606a9ec5e929288fa93a1477/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:222c828243b4789d79a706a876910f656fad4381661691220ba57b2ab4547865", size = 1717461 }, - { url = "https://files.pythonhosted.org/packages/1f/e5/29cca547990a59ea54f0674fc01de98519fc628cfceeab6175711750eca7/aiohttp-3.13.0-cp312-cp312-win32.whl", hash = "sha256:682d2e434ff2f1108314ff7f056ce44e457f12dbed0249b24e106e385cf154b9", size = 424633 }, - { url = "https://files.pythonhosted.org/packages/8b/68/46dd042d7bc62eab30bafdb8569f55ef125c3a88bb174270324224f8df56/aiohttp-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a2be20eb23888df130214b91c262a90e2de1553d6fb7de9e9010cec994c0ff2", size = 451401 }, - { url = "https://files.pythonhosted.org/packages/86/2c/ac53efdc9c10e41399acc2395af98f835b86d0141d5c3820857eb9f6a14a/aiohttp-3.13.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:00243e51f16f6ec0fb021659d4af92f675f3cf9f9b39efd142aa3ad641d8d1e6", size = 730090 }, - { url = "https://files.pythonhosted.org/packages/13/18/1ac95683e1c1d48ef4503965c96f5401618a04c139edae12e200392daae8/aiohttp-3.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059978d2fddc462e9211362cbc8446747ecd930537fa559d3d25c256f032ff54", size = 488041 }, - { url = "https://files.pythonhosted.org/packages/fd/79/ef0d477c771a642d1a881b92d226314c43d3c74bc674c93e12e679397a97/aiohttp-3.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:564b36512a7da3b386143c611867e3f7cfb249300a1bf60889bd9985da67ab77", size = 486989 }, - { url = "https://files.pythonhosted.org/packages/37/b4/0e440481a0e77a551d6c5dcab5d11f1ff6b2b2ddb8dedc24f54f5caad732/aiohttp-3.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4aa995b9156ae499393d949a456a7ab0b994a8241a96db73a3b73c7a090eff6a", size = 1718331 }, - { url = "https://files.pythonhosted.org/packages/e6/59/76c421cc4a75bb1aceadb92f20ee6f05a990aa6960c64b59e8e0d340e3f5/aiohttp-3.13.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55ca0e95a3905f62f00900255ed807c580775174252999286f283e646d675a49", size = 1686263 }, - { url = "https://files.pythonhosted.org/packages/ec/ac/5095f12a79c7775f402cfc3e83651b6e0a92ade10ddf7f2c78c4fed79f71/aiohttp-3.13.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:49ce7525853a981fc35d380aa2353536a01a9ec1b30979ea4e35966316cace7e", size = 1754265 }, - { url = "https://files.pythonhosted.org/packages/05/d7/a48e4989bd76cc70600c505bbdd0d90ca1ad7f9053eceeb9dbcf9345a9ec/aiohttp-3.13.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2117be9883501eaf95503bd313eb4c7a23d567edd44014ba15835a1e9ec6d852", size = 1856486 }, - { url = "https://files.pythonhosted.org/packages/1e/02/45b388b49e37933f316e1fb39c0de6fb1d77384b0c8f4cf6af5f2cbe3ea6/aiohttp-3.13.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d169c47e40c911f728439da853b6fd06da83761012e6e76f11cb62cddae7282b", size = 1737545 }, - { url = "https://files.pythonhosted.org/packages/6c/a7/4fde058f1605c34a219348a83a99f14724cc64e68a42480fc03cf40f9ea3/aiohttp-3.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:703ad3f742fc81e543638a7bebddd35acadaa0004a5e00535e795f4b6f2c25ca", size = 1552958 }, - { url = "https://files.pythonhosted.org/packages/d1/12/0bac4d29231981e3aa234e88d1931f6ba38135ff4c2cf3afbb7895527630/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5bf635c3476f4119b940cc8d94ad454cbe0c377e61b4527f0192aabeac1e9370", size = 1681166 }, - { url = "https://files.pythonhosted.org/packages/71/95/b829eb5f8ac1ca1d8085bb8df614c8acf3ff32e23ad5ad1173c7c9761daa/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:cfe6285ef99e7ee51cef20609be2bc1dd0e8446462b71c9db8bb296ba632810a", size = 1710516 }, - { url = "https://files.pythonhosted.org/packages/47/6d/15ccf4ef3c254d899f62580e0c7fc717014f4d14a3ac31771e505d2c736c/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:34d8af6391c5f2e69749d7f037b614b8c5c42093c251f336bdbfa4b03c57d6c4", size = 1731354 }, - { url = "https://files.pythonhosted.org/packages/46/6a/8acf6c57e03b6fdcc8b4c06392e66abaff3213ea275e41db3edb20738d91/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:12f5d820fadc5848d4559ea838aef733cf37ed2a1103bba148ac2f5547c14c29", size = 1548040 }, - { url = "https://files.pythonhosted.org/packages/75/7d/fbfd59ab2a83fe2578ce79ac3db49727b81e9f4c3376217ad09c03c6d279/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f1338b61ea66f4757a0544ed8a02ccbf60e38d9cfb3225888888dd4475ebb96", size = 1756031 }, - { url = "https://files.pythonhosted.org/packages/99/e7/cc9f0fdf06cab3ca61e6b62bff9a4b978b8ca736e9d76ddf54365673ab19/aiohttp-3.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:582770f82513419512da096e8df21ca44f86a2e56e25dc93c5ab4df0fe065bf0", size = 1714933 }, - { url = "https://files.pythonhosted.org/packages/db/43/7abbe1de94748a58a71881163ee280fd3217db36e8344d109f63638fe16a/aiohttp-3.13.0-cp313-cp313-win32.whl", hash = "sha256:3194b8cab8dbc882f37c13ef1262e0a3d62064fa97533d3aa124771f7bf1ecee", size = 423799 }, - { url = "https://files.pythonhosted.org/packages/c9/58/afab7f2b9e7df88c995995172eb78cae8a3d5a62d5681abaade86b3f0089/aiohttp-3.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:7897298b3eedc790257fef8a6ec582ca04e9dbe568ba4a9a890913b925b8ea21", size = 450138 }, +sdist = { url = "https://files.pythonhosted.org/packages/62/f1/8515650ac3121a9e55c7b217c60e7fae3e0134b5acfe65691781b5356929/aiohttp-3.13.0.tar.gz", hash = "sha256:378dbc57dd8cf341ce243f13fa1fa5394d68e2e02c15cd5f28eae35a70ec7f67", size = 7832348, upload-time = "2025-10-06T19:58:48.089Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b1/db/df80cacac46cd548a736c5535b13cc18925cf6f9f83cd128cf3839842219/aiohttp-3.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:99eb94e97a42367fef5fc11e28cb2362809d3e70837f6e60557816c7106e2e20", size = 741374, upload-time = "2025-10-06T19:55:13.095Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f9/2d6d93fd57ab4726e18a7cdab083772eda8302d682620fbf2aef48322351/aiohttp-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4696665b2713021c6eba3e2b882a86013763b442577fe5d2056a42111e732eca", size = 494956, upload-time = "2025-10-06T19:55:14.687Z" }, + { url = "https://files.pythonhosted.org/packages/89/a6/e1c061b079fed04ffd6777950c82f2e8246fd08b7b3c4f56fdd47f697e5a/aiohttp-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3e6a38366f7f0d0f6ed7a1198055150c52fda552b107dad4785c0852ad7685d1", size = 491154, upload-time = "2025-10-06T19:55:16.661Z" }, + { url = "https://files.pythonhosted.org/packages/fe/4d/ee8913c0d2c7da37fdc98673a342b51611eaa0871682b37b8430084e35b5/aiohttp-3.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aab715b1a0c37f7f11f9f1f579c6fbaa51ef569e47e3c0a4644fba46077a9409", size = 1745707, upload-time = "2025-10-06T19:55:18.376Z" }, + { url = "https://files.pythonhosted.org/packages/f9/70/26b2c97e8fa68644aec43d788940984c5f3b53a8d1468d5baaa328f809c9/aiohttp-3.13.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7972c82bed87d7bd8e374b60a6b6e816d75ba4f7c2627c2d14eed216e62738e1", size = 1702404, upload-time = "2025-10-06T19:55:20.098Z" }, + { url = "https://files.pythonhosted.org/packages/65/1e/c8aa3c293a0e8b18968b1b88e9bd8fb269eb67eb7449f504a4c3e175b159/aiohttp-3.13.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca8313cb852af788c78d5afdea24c40172cbfff8b35e58b407467732fde20390", size = 1805519, upload-time = "2025-10-06T19:55:21.811Z" }, + { url = "https://files.pythonhosted.org/packages/51/b6/a3753fe86249eb441768658cfc00f8c4e0913b255c13be00ddb8192775e1/aiohttp-3.13.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c333a2385d2a6298265f4b3e960590f787311b87f6b5e6e21bb8375914ef504", size = 1893904, upload-time = "2025-10-06T19:55:23.462Z" }, + { url = "https://files.pythonhosted.org/packages/51/6d/7b1e020fe1d2a2be7cf0ce5e35922f345e3507cf337faa1a6563c42065c1/aiohttp-3.13.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc6d5fc5edbfb8041d9607f6a417997fa4d02de78284d386bea7ab767b5ea4f3", size = 1745043, upload-time = "2025-10-06T19:55:25.208Z" }, + { url = "https://files.pythonhosted.org/packages/e6/df/aad5dce268f9d4f29759c3eeb5fb5995c569d76abb267468dc1075218d5b/aiohttp-3.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ddedba3d0043349edc79df3dc2da49c72b06d59a45a42c1c8d987e6b8d175b8", size = 1604765, upload-time = "2025-10-06T19:55:27.157Z" }, + { url = "https://files.pythonhosted.org/packages/1c/19/a84a0e97b2da2224c8b85e1aef5cac834d07b2903c17bff1a6bdbc7041d2/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23ca762140159417a6bbc959ca1927f6949711851e56f2181ddfe8d63512b5ad", size = 1721737, upload-time = "2025-10-06T19:55:28.854Z" }, + { url = "https://files.pythonhosted.org/packages/6c/61/ca6ad390128d964a08554fd63d6df5810fb5fbc7e599cb9e617f1729ae19/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:bfe824d6707a5dc3c5676685f624bc0c63c40d79dc0239a7fd6c034b98c25ebe", size = 1716052, upload-time = "2025-10-06T19:55:30.563Z" }, + { url = "https://files.pythonhosted.org/packages/2a/71/769e249e6625372c7d14be79b8b8c3b0592963a09793fb3d36758e60952c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3c11fa5dd2ef773a8a5a6daa40243d83b450915992eab021789498dc87acc114", size = 1783532, upload-time = "2025-10-06T19:55:32.798Z" }, + { url = "https://files.pythonhosted.org/packages/66/64/b9cd03cdbb629bc492e4a744fbe96550a8340b0cd7a0cc4a9c90cfecd8d3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00fdfe370cffede3163ba9d3f190b32c0cfc8c774f6f67395683d7b0e48cdb8a", size = 1593072, upload-time = "2025-10-06T19:55:34.686Z" }, + { url = "https://files.pythonhosted.org/packages/24/0e/87922c8cfdbd09f5e2197e9d87714a98c99c423560d44739e3af55400fe3/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6475e42ef92717a678bfbf50885a682bb360a6f9c8819fb1a388d98198fdcb80", size = 1798613, upload-time = "2025-10-06T19:55:36.393Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/a3adfe2af76e1ee9e3b5464522004b148b266bc99d7ec424ca7843d64a3c/aiohttp-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:77da5305a410910218b99f2a963092f4277d8a9c1f429c1ff1b026d1826bd0b6", size = 1737480, upload-time = "2025-10-06T19:55:38.043Z" }, + { url = "https://files.pythonhosted.org/packages/ad/53/e124dcbd64e6365602f3493fe37a11ca5b7ac0a40822a6e2bc8260cd08e0/aiohttp-3.13.0-cp311-cp311-win32.whl", hash = "sha256:2f9d9ea547618d907f2ee6670c9a951f059c5994e4b6de8dcf7d9747b420c820", size = 429824, upload-time = "2025-10-06T19:55:39.595Z" }, + { url = "https://files.pythonhosted.org/packages/3e/bd/485d98b372a2cd6998484a93ddd401ec6b6031657661c36846a10e2a1f6e/aiohttp-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f19f7798996d4458c669bd770504f710014926e9970f4729cf55853ae200469", size = 454137, upload-time = "2025-10-06T19:55:41.617Z" }, + { url = "https://files.pythonhosted.org/packages/3a/95/7e8bdfa6e79099a086d59d42589492f1fe9d29aae3cefb58b676015ce278/aiohttp-3.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1c272a9a18a5ecc48a7101882230046b83023bb2a662050ecb9bfcb28d9ab53a", size = 735585, upload-time = "2025-10-06T19:55:43.401Z" }, + { url = "https://files.pythonhosted.org/packages/9f/20/2f1d3ee06ee94eafe516810705219bff234d09f135d6951661661d5595ae/aiohttp-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:97891a23d7fd4e1afe9c2f4473e04595e4acb18e4733b910b6577b74e7e21985", size = 490613, upload-time = "2025-10-06T19:55:45.237Z" }, + { url = "https://files.pythonhosted.org/packages/74/15/ab8600ef6dc1dcd599009a81acfed2ea407037e654d32e47e344e0b08c34/aiohttp-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:475bd56492ce5f4cffe32b5533c6533ee0c406d1d0e6924879f83adcf51da0ae", size = 489750, upload-time = "2025-10-06T19:55:46.937Z" }, + { url = "https://files.pythonhosted.org/packages/33/59/752640c2b86ca987fe5703a01733b00d375e6cd2392bc7574489934e64e5/aiohttp-3.13.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c32ada0abb4bc94c30be2b681c42f058ab104d048da6f0148280a51ce98add8c", size = 1736812, upload-time = "2025-10-06T19:55:48.917Z" }, + { url = "https://files.pythonhosted.org/packages/3d/c6/dd6b86ddb852a7fdbcdc7a45b6bdc80178aef713c08279afcaee7a5a9f07/aiohttp-3.13.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:4af1f8877ca46ecdd0bc0d4a6b66d4b2bddc84a79e2e8366bc0d5308e76bceb8", size = 1698535, upload-time = "2025-10-06T19:55:50.75Z" }, + { url = "https://files.pythonhosted.org/packages/33/e2/27c92d205b9e8cee7661670e8e9f187931b71e26d42796b153d2a0ba6949/aiohttp-3.13.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e04ab827ec4f775817736b20cdc8350f40327f9b598dec4e18c9ffdcbea88a93", size = 1766573, upload-time = "2025-10-06T19:55:53.106Z" }, + { url = "https://files.pythonhosted.org/packages/df/6a/1fc1ad71d130a30f7a207d8d958a41224c29b834463b5185efb2dbff6ad4/aiohttp-3.13.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a6d9487b9471ec36b0faedf52228cd732e89be0a2bbd649af890b5e2ce422353", size = 1865229, upload-time = "2025-10-06T19:55:55.01Z" }, + { url = "https://files.pythonhosted.org/packages/14/51/d0c1701a79fcb0109cff5304da16226581569b89a282d8e7f1549a7e3ec0/aiohttp-3.13.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e66c57416352f36bf98f6641ddadd47c93740a22af7150d3e9a1ef6e983f9a8", size = 1750379, upload-time = "2025-10-06T19:55:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3d/2ec4b934f85856de1c0c18e90adc8902adadbfac2b3c0b831bfeb7214fc8/aiohttp-3.13.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:469167d5372f5bb3aedff4fc53035d593884fff2617a75317740e885acd48b04", size = 1560798, upload-time = "2025-10-06T19:55:58.888Z" }, + { url = "https://files.pythonhosted.org/packages/38/56/e23d9c3e13006e599fdce3851517c70279e177871e3e567d22cf3baf5d6c/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a9f3546b503975a69b547c9fd1582cad10ede1ce6f3e313a2f547c73a3d7814f", size = 1697552, upload-time = "2025-10-06T19:56:01.172Z" }, + { url = "https://files.pythonhosted.org/packages/56/cb/caa32c2ccaeca0a3dc39129079fd2ad02f9406c3a5f7924340435b87d4cd/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6b4174fcec98601f0cfdf308ee29a6ae53c55f14359e848dab4e94009112ee7d", size = 1718609, upload-time = "2025-10-06T19:56:03.102Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c0/5911856fef9e40fd1ccbb8c54a90116875d5753a92c1cac66ce2059b390d/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a533873a7a4ec2270fb362ee5a0d3b98752e4e1dc9042b257cd54545a96bd8ed", size = 1735887, upload-time = "2025-10-06T19:56:04.841Z" }, + { url = "https://files.pythonhosted.org/packages/0e/48/8d6f4757a24c02f0a454c043556593a00645d10583859f7156db44d8b7d3/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ce887c5e54411d607ee0959cac15bb31d506d86a9bcaddf0b7e9d63325a7a802", size = 1553079, upload-time = "2025-10-06T19:56:07.197Z" }, + { url = "https://files.pythonhosted.org/packages/39/fa/e82c9445e40b50e46770702b5b6ca2f767966d53e1a5eef03583ceac6df6/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d871f6a30d43e32fc9252dc7b9febe1a042b3ff3908aa83868d7cf7c9579a59b", size = 1762750, upload-time = "2025-10-06T19:56:09.376Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e6/9d30554e7f1e700bfeae4ab6b153d5dc7441606a9ec5e929288fa93a1477/aiohttp-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:222c828243b4789d79a706a876910f656fad4381661691220ba57b2ab4547865", size = 1717461, upload-time = "2025-10-06T19:56:11.551Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/e5/29cca547990a59ea54f0674fc01de98519fc628cfceeab6175711750eca7/aiohttp-3.13.0-cp312-cp312-win32.whl", hash = "sha256:682d2e434ff2f1108314ff7f056ce44e457f12dbed0249b24e106e385cf154b9", size = 424633, upload-time = "2025-10-06T19:56:13.316Z" }, + { url = "https://files.pythonhosted.org/packages/8b/68/46dd042d7bc62eab30bafdb8569f55ef125c3a88bb174270324224f8df56/aiohttp-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a2be20eb23888df130214b91c262a90e2de1553d6fb7de9e9010cec994c0ff2", size = 451401, upload-time = "2025-10-06T19:56:15.188Z" }, ] [[package]] @@ -114,20 +96,20 @@ version = "1.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "frozenlist" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007 } +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490 }, + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] [[package]] @@ -137,20 +119,20 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094 } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097 }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] name = "appnope" version = "0.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170 } +sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321 }, + { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" }, ] [[package]] @@ -160,9 +142,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "argon2-cffi-bindings" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0e/89/ce5af8a7d472a67cc819d5d998aa8c82c5d860608c4db9f46f1162d7dab9/argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1", size = 45706 } +sdist = { url = "https://files.pythonhosted.org/packages/0e/89/ce5af8a7d472a67cc819d5d998aa8c82c5d860608c4db9f46f1162d7dab9/argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1", size = 45706, upload-time = "2025-06-03T06:55:32.073Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741", size = 14657 }, + { url = "https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741", size = 14657, upload-time = "2025-06-03T06:55:30.804Z" }, ] [[package]] @@ -172,18 +154,18 @@ source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "cffi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5c/2d/db8af0df73c1cf454f71b2bbe5e356b8c1f8041c979f505b3d3186e520a9/argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d", size = 1783441 } +sdist = { url = "https://files.pythonhosted.org/packages/5c/2d/db8af0df73c1cf454f71b2bbe5e356b8c1f8041c979f505b3d3186e520a9/argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d", size = 1783441, upload-time = "2025-07-30T10:02:05.147Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/57/96b8b9f93166147826da5f90376e784a10582dd39a393c99bb62cfcf52f0/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500", size = 54121 }, - { url = "https://files.pythonhosted.org/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44", size = 29177 }, - { url = "https://files.pythonhosted.org/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0", size = 31090 }, - { url = "https://files.pythonhosted.org/packages/c1/93/44365f3d75053e53893ec6d733e4a5e3147502663554b4d864587c7828a7/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6", size = 81246 }, - { url = "https://files.pythonhosted.org/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a", size = 87126 }, - { url = "https://files.pythonhosted.org/packages/72/70/7a2993a12b0ffa2a9271259b79cc616e2389ed1a4d93842fac5a1f923ffd/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d", size = 80343 }, - { url = "https://files.pythonhosted.org/packages/78/9a/4e5157d893ffc712b74dbd868c7f62365618266982b64accab26bab01edc/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99", size = 86777 }, - { url = "https://files.pythonhosted.org/packages/74/cd/15777dfde1c29d96de7f18edf4cc94c385646852e7c7b0320aa91ccca583/argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2", size = 27180 }, - { url = "https://files.pythonhosted.org/packages/e2/c6/a759ece8f1829d1f162261226fbfd2c6832b3ff7657384045286d2afa384/argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98", size = 31715 }, - { url = "https://files.pythonhosted.org/packages/42/b9/f8d6fa329ab25128b7e98fd83a3cb34d9db5b059a9847eddb840a0af45dd/argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94", size = 27149 }, + { url = 
"https://files.pythonhosted.org/packages/1d/57/96b8b9f93166147826da5f90376e784a10582dd39a393c99bb62cfcf52f0/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500", size = 54121, upload-time = "2025-07-30T10:01:50.815Z" }, + { url = "https://files.pythonhosted.org/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44", size = 29177, upload-time = "2025-07-30T10:01:51.681Z" }, + { url = "https://files.pythonhosted.org/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0", size = 31090, upload-time = "2025-07-30T10:01:53.184Z" }, + { url = "https://files.pythonhosted.org/packages/c1/93/44365f3d75053e53893ec6d733e4a5e3147502663554b4d864587c7828a7/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6", size = 81246, upload-time = "2025-07-30T10:01:54.145Z" }, + { url = "https://files.pythonhosted.org/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a", size = 87126, upload-time = "2025-07-30T10:01:55.074Z" }, + { url = "https://files.pythonhosted.org/packages/72/70/7a2993a12b0ffa2a9271259b79cc616e2389ed1a4d93842fac5a1f923ffd/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d", size = 80343, upload-time = "2025-07-30T10:01:56.007Z" }, + { url = "https://files.pythonhosted.org/packages/78/9a/4e5157d893ffc712b74dbd868c7f62365618266982b64accab26bab01edc/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99", size = 86777, upload-time = "2025-07-30T10:01:56.943Z" }, + { url = "https://files.pythonhosted.org/packages/74/cd/15777dfde1c29d96de7f18edf4cc94c385646852e7c7b0320aa91ccca583/argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2", size = 27180, upload-time = "2025-07-30T10:01:57.759Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/a759ece8f1829d1f162261226fbfd2c6832b3ff7657384045286d2afa384/argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98", size = 31715, upload-time = "2025-07-30T10:01:58.56Z" }, + { url = "https://files.pythonhosted.org/packages/42/b9/f8d6fa329ab25128b7e98fd83a3cb34d9db5b059a9847eddb840a0af45dd/argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94", size = 27149, upload-time = "2025-07-30T10:01:59.329Z" }, ] [[package]] @@ -194,9 +176,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "types-python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = 
"sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960 } +sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960, upload-time = "2023-09-30T22:11:18.25Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419 }, + { url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419, upload-time = "2023-09-30T22:11:16.072Z" }, ] [[package]] @@ -206,36 +188,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/1d/f03bcb60c4a3212e15f99a56085d93093a497718adf828d050b9d675da81/asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0", size = 62284 } +sdist = { url = "https://files.pythonhosted.org/packages/45/1d/f03bcb60c4a3212e15f99a56085d93093a497718adf828d050b9d675da81/asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0", size = 62284, upload-time = "2023-10-26T10:03:05.06Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/86/4736ac618d82a20d87d2f92ae19441ebc7ac9e7a581d7e58bbe79233b24a/asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24", size = 27764 }, + { url = "https://files.pythonhosted.org/packages/45/86/4736ac618d82a20d87d2f92ae19441ebc7ac9e7a581d7e58bbe79233b24a/asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24", size = 27764, upload-time = "2023-10-26T10:03:01.789Z" }, ] [[package]] name = "async-lru" version = "2.0.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380 } +sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069 }, + { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = "2025-03-16T17:25:35.422Z" }, ] [[package]] name = "attrs" version = "25.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251 } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615 }, + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] [[package]] name = "azure-common" version = "1.1.28" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3e/71/f6f71a276e2e69264a97ad39ef850dca0a04fce67b12570730cb38d0ccac/azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3", size = 20914 } +sdist = { url = "https://files.pythonhosted.org/packages/3e/71/f6f71a276e2e69264a97ad39ef850dca0a04fce67b12570730cb38d0ccac/azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3", size = 20914, upload-time = "2022-02-03T19:39:44.373Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/55/7f118b9c1b23ec15ca05d15a578d8207aa1706bc6f7c87218efffbbf875d/azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad", size = 14462 }, + { url = "https://files.pythonhosted.org/packages/62/55/7f118b9c1b23ec15ca05d15a578d8207aa1706bc6f7c87218efffbbf875d/azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad", size = 14462, upload-time = "2022-02-03T19:39:42.417Z" }, ] [[package]] @@ -247,9 +229,9 @@ dependencies = [ { name = "six" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/15/6b/2653adc0f33adba8f11b1903701e6b1c10d34ce5d8e25dfa13a422f832b0/azure_core-1.35.1.tar.gz", hash = "sha256:435d05d6df0fff2f73fb3c15493bb4721ede14203f1ff1382aa6b6b2bdd7e562", size = 345290 } +sdist = { url = "https://files.pythonhosted.org/packages/15/6b/2653adc0f33adba8f11b1903701e6b1c10d34ce5d8e25dfa13a422f832b0/azure_core-1.35.1.tar.gz", hash = "sha256:435d05d6df0fff2f73fb3c15493bb4721ede14203f1ff1382aa6b6b2bdd7e562", size = 345290, upload-time = "2025-09-11T22:58:04.481Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/52/805980aa1ba18282077c484dba634ef0ede1e84eec8be9c92b2e162d0ed6/azure_core-1.35.1-py3-none-any.whl", hash = "sha256:12da0c9e08e48e198f9158b56ddbe33b421477e1dc98c2e1c8f9e254d92c468b", size = 211800 }, + { url = "https://files.pythonhosted.org/packages/27/52/805980aa1ba18282077c484dba634ef0ede1e84eec8be9c92b2e162d0ed6/azure_core-1.35.1-py3-none-any.whl", hash = "sha256:12da0c9e08e48e198f9158b56ddbe33b421477e1dc98c2e1c8f9e254d92c468b", size = 211800, upload-time = "2025-09-11T22:58:06.281Z" }, ] [[package]] @@ -260,14 +242,14 @@ dependencies = [ { name = 
"azure-core" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/be/7c/a4e7810f85e7f83d94265ef5ff0fb1efad55a768de737d940151ea2eec45/azure_cosmos-4.9.0.tar.gz", hash = "sha256:c70db4cbf55b0ff261ed7bb8aa325a5dfa565d3c6eaa43d75d26ae5e2ad6d74f", size = 1824155 } +sdist = { url = "https://files.pythonhosted.org/packages/be/7c/a4e7810f85e7f83d94265ef5ff0fb1efad55a768de737d940151ea2eec45/azure_cosmos-4.9.0.tar.gz", hash = "sha256:c70db4cbf55b0ff261ed7bb8aa325a5dfa565d3c6eaa43d75d26ae5e2ad6d74f", size = 1824155, upload-time = "2024-11-19T04:09:30.195Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/dc/380f843744535497acd0b85aacb59565c84fc28bf938c8d6e897a858cd95/azure_cosmos-4.9.0-py3-none-any.whl", hash = "sha256:3b60eaa01a16a857d0faf0cec304bac6fa8620a81bc268ce760339032ef617fe", size = 303157 }, + { url = "https://files.pythonhosted.org/packages/61/dc/380f843744535497acd0b85aacb59565c84fc28bf938c8d6e897a858cd95/azure_cosmos-4.9.0-py3-none-any.whl", hash = "sha256:3b60eaa01a16a857d0faf0cec304bac6fa8620a81bc268ce760339032ef617fe", size = 303157, upload-time = "2024-11-19T04:09:32.148Z" }, ] [[package]] name = "azure-identity" -version = "1.25.1" +version = "1.19.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-core" }, @@ -276,9 +258,9 @@ dependencies = [ { name = "msal-extensions" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/8d/1a6c41c28a37eab26dc85ab6c86992c700cd3f4a597d9ed174b0e9c69489/azure_identity-1.25.1.tar.gz", hash = "sha256:87ca8328883de6036443e1c37b40e8dc8fb74898240f61071e09d2e369361456", size = 279826 } +sdist = { url = "https://files.pythonhosted.org/packages/aa/91/cbaeff9eb0b838f0d35b4607ac1c6195c735c8eb17db235f8f60e622934c/azure_identity-1.19.0.tar.gz", hash = "sha256:500144dc18197d7019b81501165d4fa92225f03778f17d7ca8a2a180129a9c83", size = 263058, upload-time = "2024-10-08T15:41:33.554Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/7b/5652771e24fff12da9dde4c20ecf4682e606b104f26419d139758cc935a6/azure_identity-1.25.1-py3-none-any.whl", hash = "sha256:e9edd720af03dff020223cd269fa3a61e8f345ea75443858273bcb44844ab651", size = 191317 }, + { url = "https://files.pythonhosted.org/packages/f0/d5/3995ed12f941f4a41a273d9b1709282e825ef87ed8eab3833038fee54d59/azure_identity-1.19.0-py3-none-any.whl", hash = "sha256:e3f6558c181692d7509f09de10cca527c7dce426776454fb97df512a46527e81", size = 187587, upload-time = "2024-10-08T15:41:36.423Z" }, ] [[package]] @@ -291,9 +273,9 @@ dependencies = [ { name = "isodate" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/68/9d59a0bed5fd9581b45444e8abc3ecda97e0466ae0f03affc7cddfb9fa74/azure_search_documents-11.6.0.tar.gz", hash = "sha256:fcc807076ff82024be576ffccb0d0f3261e5c2a112a6666b86ec70bbdb2e1d64", size = 311194 } +sdist = { url = "https://files.pythonhosted.org/packages/cf/68/9d59a0bed5fd9581b45444e8abc3ecda97e0466ae0f03affc7cddfb9fa74/azure_search_documents-11.6.0.tar.gz", hash = "sha256:fcc807076ff82024be576ffccb0d0f3261e5c2a112a6666b86ec70bbdb2e1d64", size = 311194, upload-time = "2025-10-09T22:04:03.655Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/4c/d74e5c3ccc0b9ead0e400a2d70ded67554b56a5d799aaa8bf5baaacf4aea/azure_search_documents-11.6.0-py3-none-any.whl", hash = "sha256:c3eb2deaf7926844e99a881830861225ef68e8b3bc067a76019e87fc7f5586dc", size = 307935 }, + { url = 
"https://files.pythonhosted.org/packages/c5/4c/d74e5c3ccc0b9ead0e400a2d70ded67554b56a5d799aaa8bf5baaacf4aea/azure_search_documents-11.6.0-py3-none-any.whl", hash = "sha256:c3eb2deaf7926844e99a881830861225ef68e8b3bc067a76019e87fc7f5586dc", size = 307935, upload-time = "2025-10-09T22:04:05.008Z" }, ] [[package]] @@ -306,31 +288,30 @@ dependencies = [ { name = "isodate" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332 } +sdist = { url = "https://files.pythonhosted.org/packages/96/95/3e3414491ce45025a1cde107b6ae72bf72049e6021597c201cd6a3029b9a/azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f", size = 583332, upload-time = "2025-07-16T21:34:07.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907 }, + { url = "https://files.pythonhosted.org/packages/5b/64/63dbfdd83b31200ac58820a7951ddfdeed1fbee9285b0f3eae12d1357155/azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe", size = 412907, upload-time = "2025-07-16T21:34:09.367Z" }, ] [[package]] name = "babel" version = "2.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852 } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537 }, + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, ] [[package]] name = "backrefs" version = "5.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/a7/312f673df6a79003279e1f55619abbe7daebbb87c17c976ddc0345c04c7b/backrefs-5.9.tar.gz", hash = "sha256:808548cb708d66b82ee231f962cb36faaf4f2baab032f2fbb783e9c2fdddaa59", size = 5765857 } +sdist = { url = "https://files.pythonhosted.org/packages/eb/a7/312f673df6a79003279e1f55619abbe7daebbb87c17c976ddc0345c04c7b/backrefs-5.9.tar.gz", hash = "sha256:808548cb708d66b82ee231f962cb36faaf4f2baab032f2fbb783e9c2fdddaa59", size = 5765857, upload-time = "2025-06-22T19:34:13.97Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/19/4d/798dc1f30468134906575156c089c492cf79b5a5fd373f07fe26c4d046bf/backrefs-5.9-py310-none-any.whl", hash = 
"sha256:db8e8ba0e9de81fcd635f440deab5ae5f2591b54ac1ebe0550a2ca063488cd9f", size = 380267 }, - { url = "https://files.pythonhosted.org/packages/55/07/f0b3375bf0d06014e9787797e6b7cc02b38ac9ff9726ccfe834d94e9991e/backrefs-5.9-py311-none-any.whl", hash = "sha256:6907635edebbe9b2dc3de3a2befff44d74f30a4562adbb8b36f21252ea19c5cf", size = 392072 }, - { url = "https://files.pythonhosted.org/packages/9d/12/4f345407259dd60a0997107758ba3f221cf89a9b5a0f8ed5b961aef97253/backrefs-5.9-py312-none-any.whl", hash = "sha256:7fdf9771f63e6028d7fee7e0c497c81abda597ea45d6b8f89e8ad76994f5befa", size = 397947 }, - { url = "https://files.pythonhosted.org/packages/10/bf/fa31834dc27a7f05e5290eae47c82690edc3a7b37d58f7fb35a1bdbf355b/backrefs-5.9-py313-none-any.whl", hash = "sha256:cc37b19fa219e93ff825ed1fed8879e47b4d89aa7a1884860e2db64ccd7c676b", size = 399843 }, - { url = "https://files.pythonhosted.org/packages/41/ff/392bff89415399a979be4a65357a41d92729ae8580a66073d8ec8d810f98/backrefs-5.9-py39-none-any.whl", hash = "sha256:f48ee18f6252b8f5777a22a00a09a85de0ca931658f1dd96d4406a34f3748c60", size = 380265 }, + { url = "https://files.pythonhosted.org/packages/19/4d/798dc1f30468134906575156c089c492cf79b5a5fd373f07fe26c4d046bf/backrefs-5.9-py310-none-any.whl", hash = "sha256:db8e8ba0e9de81fcd635f440deab5ae5f2591b54ac1ebe0550a2ca063488cd9f", size = 380267, upload-time = "2025-06-22T19:34:05.252Z" }, + { url = "https://files.pythonhosted.org/packages/55/07/f0b3375bf0d06014e9787797e6b7cc02b38ac9ff9726ccfe834d94e9991e/backrefs-5.9-py311-none-any.whl", hash = "sha256:6907635edebbe9b2dc3de3a2befff44d74f30a4562adbb8b36f21252ea19c5cf", size = 392072, upload-time = "2025-06-22T19:34:06.743Z" }, + { url = "https://files.pythonhosted.org/packages/9d/12/4f345407259dd60a0997107758ba3f221cf89a9b5a0f8ed5b961aef97253/backrefs-5.9-py312-none-any.whl", hash = "sha256:7fdf9771f63e6028d7fee7e0c497c81abda597ea45d6b8f89e8ad76994f5befa", size = 397947, upload-time = "2025-06-22T19:34:08.172Z" }, + { url = "https://files.pythonhosted.org/packages/41/ff/392bff89415399a979be4a65357a41d92729ae8580a66073d8ec8d810f98/backrefs-5.9-py39-none-any.whl", hash = "sha256:f48ee18f6252b8f5777a22a00a09a85de0ca931658f1dd96d4406a34f3748c60", size = 380265, upload-time = "2025-06-22T19:34:12.405Z" }, ] [[package]] @@ -341,9 +322,9 @@ dependencies = [ { name = "soupsieve" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/77/e9/df2358efd7659577435e2177bfa69cba6c33216681af51a707193dec162a/beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e", size = 625822 } +sdist = { url = "https://files.pythonhosted.org/packages/77/e9/df2358efd7659577435e2177bfa69cba6c33216681af51a707193dec162a/beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e", size = 625822, upload-time = "2025-09-29T10:05:42.613Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/fe/3aed5d0be4d404d12d36ab97e2f1791424d9ca39c2f754a6285d59a3b01d/beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515", size = 106392 }, + { url = "https://files.pythonhosted.org/packages/94/fe/3aed5d0be4d404d12d36ab97e2f1791424d9ca39c2f754a6285d59a3b01d/beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515", size = 106392, upload-time = "2025-09-29T10:05:43.771Z" }, ] [[package]] @@ -353,9 +334,9 @@ source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "webencodings" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083 } +sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083, upload-time = "2024-10-29T18:30:40.477Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406 }, + { url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406, upload-time = "2024-10-29T18:30:38.186Z" }, ] [package.optional-dependencies] @@ -370,47 +351,40 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/d0/d8cc8c9a4488a787e7fa430f6055e5bd1ddb22c340a751d9e901b82e2efe/blis-1.3.3.tar.gz", hash = "sha256:034d4560ff3cc43e8aa37e188451b0440e3261d989bb8a42ceee865607715ecd", size = 2644873 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/0a/a4c8736bc497d386b0ffc76d321f478c03f1a4725e52092f93b38beb3786/blis-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e10c8d3e892b1dbdff365b9d00e08291876fc336915bf1a5e9f188ed087e1a91", size = 6925522 }, - { url = "https://files.pythonhosted.org/packages/83/5a/3437009282f23684ecd3963a8b034f9307cdd2bf4484972e5a6b096bf9ac/blis-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66e6249564f1db22e8af1e0513ff64134041fa7e03c8dd73df74db3f4d8415a7", size = 1232787 }, - { url = "https://files.pythonhosted.org/packages/d1/0e/82221910d16259ce3017c1442c468a3f206a4143a96fbba9f5b5b81d62e8/blis-1.3.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7260da065958b4e5475f62f44895ef9d673b0f47dcf61b672b22b7dae1a18505", size = 2844596 }, - { url = "https://files.pythonhosted.org/packages/6c/93/ab547f1a5c23e20bca16fbcf04021c32aac3f969be737ea4980509a7ca90/blis-1.3.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e9327a6ca67de8ae76fe071e8584cc7f3b2e8bfadece4961d40f2826e1cda2df", size = 11377746 }, - { url = "https://files.pythonhosted.org/packages/6e/a6/7733820aa62da32526287a63cd85c103b2b323b186c8ee43b7772ff7017c/blis-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c4ae70629cf302035d268858a10ca4eb6242a01b2dc8d64422f8e6dcb8a8ee74", size = 3041954 }, - { url = "https://files.pythonhosted.org/packages/87/53/e39d67fd3296b649772780ca6aab081412838ecb54e0b0c6432d01626a50/blis-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45866a9027d43b93e8b59980a23c5d7358b6536fc04606286e39fdcfce1101c2", size = 14251222 }, - { url = "https://files.pythonhosted.org/packages/ea/44/b749f8777b020b420bceaaf60f66432fc30cc904ca5b69640ec9cbef11ed/blis-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:27f82b8633030f8d095d2b412dffa7eb6dbc8ee43813139909a20012e54422ea", size = 6171233 }, - { url = 
"https://files.pythonhosted.org/packages/16/d1/429cf0cf693d4c7dc2efed969bd474e315aab636e4a95f66c4ed7264912d/blis-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a1c74e100665f8e918ebdbae2794576adf1f691680b5cdb8b29578432f623ef", size = 6929663 }, - { url = "https://files.pythonhosted.org/packages/11/69/363c8df8d98b3cc97be19aad6aabb2c9c53f372490d79316bdee92d476e7/blis-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3f6c595185176ce021316263e1a1d636a3425b6c48366c1fd712d08d0b71849a", size = 1230939 }, - { url = "https://files.pythonhosted.org/packages/96/2a/fbf65d906d823d839076c5150a6f8eb5ecbc5f9135e0b6510609bda1e6b7/blis-1.3.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d734b19fba0be7944f272dfa7b443b37c61f9476d9ab054a9ac53555ceadd2e0", size = 2818835 }, - { url = "https://files.pythonhosted.org/packages/d5/ad/58deaa3ad856dd3cc96493e40ffd2ed043d18d4d304f85a65cde1ccbf644/blis-1.3.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ef6d6e2b599a3a2788eb6d9b443533961265aa4ec49d574ed4bb846e548dcdb", size = 11366550 }, - { url = "https://files.pythonhosted.org/packages/78/82/816a7adfe1f7acc8151f01ec86ef64467a3c833932d8f19f8e06613b8a4e/blis-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8c888438ae99c500422d50698e3028b65caa8ebb44e24204d87fda2df64058f7", size = 3023686 }, - { url = "https://files.pythonhosted.org/packages/1e/e2/0e93b865f648b5519360846669a35f28ee8f4e1d93d054f6850d8afbabde/blis-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8177879fd3590b5eecdd377f9deafb5dc8af6d684f065bd01553302fb3fcf9a7", size = 14250939 }, - { url = "https://files.pythonhosted.org/packages/20/07/fb43edc2ff0a6a367e4a94fc39eb3b85aa1e55e24cc857af2db145ce9f0d/blis-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:f20f7ad69aaffd1ce14fe77de557b6df9b61e0c9e582f75a843715d836b5c8af", size = 6192759 }, - { url = "https://files.pythonhosted.org/packages/e6/f7/d26e62d9be3d70473a63e0a5d30bae49c2fe138bebac224adddcdef8a7ce/blis-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1e647341f958421a86b028a2efe16ce19c67dba2a05f79e8f7e80b1ff45328aa", size = 6928322 }, - { url = "https://files.pythonhosted.org/packages/4a/78/750d12da388f714958eb2f2fd177652323bbe7ec528365c37129edd6eb84/blis-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d563160f874abb78a57e346f07312c5323f7ad67b6370052b6b17087ef234a8e", size = 1229635 }, - { url = "https://files.pythonhosted.org/packages/e8/36/eac4199c5b200a5f3e93cad197da8d26d909f218eb444c4f552647c95240/blis-1.3.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:30b8a5b90cb6cb81d1ada9ae05aa55fb8e70d9a0ae9db40d2401bb9c1c8f14c4", size = 2815650 }, - { url = "https://files.pythonhosted.org/packages/bf/51/472e7b36a6bedb5242a9757e7486f702c3619eff76e256735d0c8b1679c6/blis-1.3.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e9f5c53b277f6ac5b3ca30bc12ebab7ea16c8f8c36b14428abb56924213dc127", size = 11359008 }, - { url = "https://files.pythonhosted.org/packages/84/da/d0dfb6d6e6321ae44df0321384c32c322bd07b15740d7422727a1a49fc5d/blis-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6297e7616c158b305c9a8a4e47ca5fc9b0785194dd96c903b1a1591a7ca21ddf", size = 3011959 }, - { url = "https://files.pythonhosted.org/packages/20/c5/2b0b5e556fa0364ed671051ea078a6d6d7b979b1cfef78d64ad3ca5f0c7f/blis-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3f966ca74f89f8a33e568b9a1d71992fc9a0d29a423e047f0a212643e21b5458", size = 14232456 }, - 
{ url = "https://files.pythonhosted.org/packages/31/07/4cdc81a47bf862c0b06d91f1bc6782064e8b69ac9b5d4ff51d97e4ff03da/blis-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:7a0fc4b237a3a453bdc3c7ab48d91439fcd2d013b665c46948d9eaf9c3e45a97", size = 6192624 }, +sdist = { url = "https://files.pythonhosted.org/packages/d0/d0/d8cc8c9a4488a787e7fa430f6055e5bd1ddb22c340a751d9e901b82e2efe/blis-1.3.3.tar.gz", hash = "sha256:034d4560ff3cc43e8aa37e188451b0440e3261d989bb8a42ceee865607715ecd", size = 2644873, upload-time = "2025-11-17T12:28:30.511Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/0a/a4c8736bc497d386b0ffc76d321f478c03f1a4725e52092f93b38beb3786/blis-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e10c8d3e892b1dbdff365b9d00e08291876fc336915bf1a5e9f188ed087e1a91", size = 6925522, upload-time = "2025-11-17T12:27:29.199Z" }, + { url = "https://files.pythonhosted.org/packages/83/5a/3437009282f23684ecd3963a8b034f9307cdd2bf4484972e5a6b096bf9ac/blis-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66e6249564f1db22e8af1e0513ff64134041fa7e03c8dd73df74db3f4d8415a7", size = 1232787, upload-time = "2025-11-17T12:27:30.996Z" }, + { url = "https://files.pythonhosted.org/packages/d1/0e/82221910d16259ce3017c1442c468a3f206a4143a96fbba9f5b5b81d62e8/blis-1.3.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7260da065958b4e5475f62f44895ef9d673b0f47dcf61b672b22b7dae1a18505", size = 2844596, upload-time = "2025-11-17T12:27:32.601Z" }, + { url = "https://files.pythonhosted.org/packages/6c/93/ab547f1a5c23e20bca16fbcf04021c32aac3f969be737ea4980509a7ca90/blis-1.3.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e9327a6ca67de8ae76fe071e8584cc7f3b2e8bfadece4961d40f2826e1cda2df", size = 11377746, upload-time = "2025-11-17T12:27:35.342Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a6/7733820aa62da32526287a63cd85c103b2b323b186c8ee43b7772ff7017c/blis-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c4ae70629cf302035d268858a10ca4eb6242a01b2dc8d64422f8e6dcb8a8ee74", size = 3041954, upload-time = "2025-11-17T12:27:37.479Z" }, + { url = "https://files.pythonhosted.org/packages/87/53/e39d67fd3296b649772780ca6aab081412838ecb54e0b0c6432d01626a50/blis-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45866a9027d43b93e8b59980a23c5d7358b6536fc04606286e39fdcfce1101c2", size = 14251222, upload-time = "2025-11-17T12:27:39.705Z" }, + { url = "https://files.pythonhosted.org/packages/ea/44/b749f8777b020b420bceaaf60f66432fc30cc904ca5b69640ec9cbef11ed/blis-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:27f82b8633030f8d095d2b412dffa7eb6dbc8ee43813139909a20012e54422ea", size = 6171233, upload-time = "2025-11-17T12:27:41.921Z" }, + { url = "https://files.pythonhosted.org/packages/16/d1/429cf0cf693d4c7dc2efed969bd474e315aab636e4a95f66c4ed7264912d/blis-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a1c74e100665f8e918ebdbae2794576adf1f691680b5cdb8b29578432f623ef", size = 6929663, upload-time = "2025-11-17T12:27:44.482Z" }, + { url = "https://files.pythonhosted.org/packages/11/69/363c8df8d98b3cc97be19aad6aabb2c9c53f372490d79316bdee92d476e7/blis-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3f6c595185176ce021316263e1a1d636a3425b6c48366c1fd712d08d0b71849a", size = 1230939, upload-time = "2025-11-17T12:27:46.19Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/2a/fbf65d906d823d839076c5150a6f8eb5ecbc5f9135e0b6510609bda1e6b7/blis-1.3.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d734b19fba0be7944f272dfa7b443b37c61f9476d9ab054a9ac53555ceadd2e0", size = 2818835, upload-time = "2025-11-17T12:27:48.167Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ad/58deaa3ad856dd3cc96493e40ffd2ed043d18d4d304f85a65cde1ccbf644/blis-1.3.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ef6d6e2b599a3a2788eb6d9b443533961265aa4ec49d574ed4bb846e548dcdb", size = 11366550, upload-time = "2025-11-17T12:27:49.958Z" }, + { url = "https://files.pythonhosted.org/packages/78/82/816a7adfe1f7acc8151f01ec86ef64467a3c833932d8f19f8e06613b8a4e/blis-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8c888438ae99c500422d50698e3028b65caa8ebb44e24204d87fda2df64058f7", size = 3023686, upload-time = "2025-11-17T12:27:52.062Z" }, + { url = "https://files.pythonhosted.org/packages/1e/e2/0e93b865f648b5519360846669a35f28ee8f4e1d93d054f6850d8afbabde/blis-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8177879fd3590b5eecdd377f9deafb5dc8af6d684f065bd01553302fb3fcf9a7", size = 14250939, upload-time = "2025-11-17T12:27:53.847Z" }, + { url = "https://files.pythonhosted.org/packages/20/07/fb43edc2ff0a6a367e4a94fc39eb3b85aa1e55e24cc857af2db145ce9f0d/blis-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:f20f7ad69aaffd1ce14fe77de557b6df9b61e0c9e582f75a843715d836b5c8af", size = 6192759, upload-time = "2025-11-17T12:27:56.176Z" }, ] [[package]] name = "catalogue" version = "2.0.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/38/b4/244d58127e1cdf04cf2dc7d9566f0d24ef01d5ce21811bab088ecc62b5ea/catalogue-2.0.10.tar.gz", hash = "sha256:4f56daa940913d3f09d589c191c74e5a6d51762b3a9e37dd53b7437afd6cda15", size = 19561 } +sdist = { url = "https://files.pythonhosted.org/packages/38/b4/244d58127e1cdf04cf2dc7d9566f0d24ef01d5ce21811bab088ecc62b5ea/catalogue-2.0.10.tar.gz", hash = "sha256:4f56daa940913d3f09d589c191c74e5a6d51762b3a9e37dd53b7437afd6cda15", size = 19561, upload-time = "2023-09-25T06:29:24.962Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/96/d32b941a501ab566a16358d68b6eb4e4acc373fab3c3c4d7d9e649f7b4bb/catalogue-2.0.10-py3-none-any.whl", hash = "sha256:58c2de0020aa90f4a2da7dfad161bf7b3b054c86a5f09fcedc0b2b740c109a9f", size = 17325 }, + { url = "https://files.pythonhosted.org/packages/9e/96/d32b941a501ab566a16358d68b6eb4e4acc373fab3c3c4d7d9e649f7b4bb/catalogue-2.0.10-py3-none-any.whl", hash = "sha256:58c2de0020aa90f4a2da7dfad161bf7b3b054c86a5f09fcedc0b2b740c109a9f", size = 17325, upload-time = "2023-09-25T06:29:23.337Z" }, ] [[package]] name = "certifi" version = "2025.10.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519 } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286 }, + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, ] [[package]] @@ -420,87 +394,64 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser", marker = "implementation_name != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344 }, - { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560 }, - { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613 }, - { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476 }, - { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374 }, - { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597 }, - { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574 }, - { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971 }, - { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972 }, - { url = 
"https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078 }, - { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076 }, - { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820 }, - { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635 }, - { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271 }, - { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048 }, - { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529 }, - { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097 }, - { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983 }, - { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519 }, - { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572 }, - { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963 }, - { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 
221361 }, - { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932 }, - { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557 }, - { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762 }, - { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230 }, - { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043 }, - { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446 }, - { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101 }, - { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948 }, - { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422 }, - { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499 }, - { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928 }, - { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302 }, - { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", 
size = 172909 }, - { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402 }, - { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780 }, +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, ] [[package]] name = "charset-normalizer" version = "3.4.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483 }, - { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520 }, - { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876 }, - { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083 }, - { url = 
"https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295 }, - { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379 }, - { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018 }, - { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430 }, - { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600 }, - { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616 }, - { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108 }, - { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655 }, - { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223 }, - { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366 }, - { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104 }, - { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 
151830 }, - { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854 }, - { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670 }, - { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501 }, - { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173 }, - { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822 }, - { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543 }, - { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326 }, - { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008 }, - { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196 }, - { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819 }, - { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350 }, - { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644 }, - { url = 
"https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468 }, - { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187 }, - { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699 }, - { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580 }, - { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366 }, - { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175 }, +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, ] [[package]] @@ -510,27 +461,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943 } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = 
"sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295 }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] name = "cloudpathlib" version = "0.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f4/18/2ac35d6b3015a0c74e923d94fc69baf8307f7c3233de015d69f99e17afa8/cloudpathlib-0.23.0.tar.gz", hash = "sha256:eb38a34c6b8a048ecfd2b2f60917f7cbad4a105b7c979196450c2f541f4d6b4b", size = 53126 } +sdist = { url = "https://files.pythonhosted.org/packages/f4/18/2ac35d6b3015a0c74e923d94fc69baf8307f7c3233de015d69f99e17afa8/cloudpathlib-0.23.0.tar.gz", hash = "sha256:eb38a34c6b8a048ecfd2b2f60917f7cbad4a105b7c979196450c2f541f4d6b4b", size = 53126, upload-time = "2025-10-07T22:47:56.278Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/8a/c4bb04426d608be4a3171efa2e233d2c59a5c8937850c10d098e126df18e/cloudpathlib-0.23.0-py3-none-any.whl", hash = "sha256:8520b3b01468fee77de37ab5d50b1b524ea6b4a8731c35d1b7407ac0cd716002", size = 62755 }, + { url = "https://files.pythonhosted.org/packages/ae/8a/c4bb04426d608be4a3171efa2e233d2c59a5c8937850c10d098e126df18e/cloudpathlib-0.23.0-py3-none-any.whl", hash = "sha256:8520b3b01468fee77de37ab5d50b1b524ea6b4a8731c35d1b7407ac0cd716002", size = 62755, upload-time = "2025-10-07T22:47:54.905Z" }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] [[package]] @@ -540,18 +491,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "humanfriendly" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520 } +sdist = { url = 
"https://files.pythonhosted.org/packages/cc/c7/eed8f27100517e8c0e6b923d5f0845d0cb99763da6fdee00478f91db7325/coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0", size = 278520, upload-time = "2021-06-11T10:22:45.202Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018 }, + { url = "https://files.pythonhosted.org/packages/a7/06/3d6badcf13db419e25b07041d9c7b4a2c331d3f4e7134445ec5df57714cd/coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934", size = 46018, upload-time = "2021-06-11T10:22:42.561Z" }, ] [[package]] name = "comm" version = "0.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319 } +sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319, upload-time = "2025-07-25T14:02:04.452Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294 }, + { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294, upload-time = "2025-07-25T14:02:02.896Z" }, ] [[package]] @@ -562,70 +513,44 @@ dependencies = [ { name = "pydantic" }, { name = "srsly" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/51/d3/57c6631159a1b48d273b40865c315cf51f89df7a9d1101094ef12e3a37c2/confection-0.1.5.tar.gz", hash = "sha256:8e72dd3ca6bd4f48913cd220f10b8275978e740411654b6e8ca6d7008c590f0e", size = 38924 } +sdist = { url = "https://files.pythonhosted.org/packages/51/d3/57c6631159a1b48d273b40865c315cf51f89df7a9d1101094ef12e3a37c2/confection-0.1.5.tar.gz", hash = "sha256:8e72dd3ca6bd4f48913cd220f10b8275978e740411654b6e8ca6d7008c590f0e", size = 38924, upload-time = "2024-05-31T16:17:01.559Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/00/3106b1854b45bd0474ced037dfe6b73b90fe68a68968cef47c23de3d43d2/confection-0.1.5-py3-none-any.whl", hash = "sha256:e29d3c3f8eac06b3f77eb9dfb4bf2fc6bcc9622a98ca00a698e3d019c6430b14", size = 35451 }, + { url = "https://files.pythonhosted.org/packages/0c/00/3106b1854b45bd0474ced037dfe6b73b90fe68a68968cef47c23de3d43d2/confection-0.1.5-py3-none-any.whl", hash = "sha256:e29d3c3f8eac06b3f77eb9dfb4bf2fc6bcc9622a98ca00a698e3d019c6430b14", size = 35451, upload-time = "2024-05-31T16:16:59.075Z" }, ] [[package]] name = "coverage" version = "7.10.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704 } 
-wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102 }, - { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505 }, - { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898 }, - { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831 }, - { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937 }, - { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021 }, - { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626 }, - { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682 }, - { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402 }, - { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320 }, - { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536 }, - { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425 }, - { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", 
hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103 }, - { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290 }, - { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515 }, - { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020 }, - { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769 }, - { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901 }, - { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413 }, - { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820 }, - { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941 }, - { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519 }, - { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375 }, - { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699 }, - { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512 }, - { url = 
"https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147 }, - { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320 }, - { url = "https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575 }, - { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568 }, - { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174 }, - { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447 }, - { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779 }, - { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604 }, - { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497 }, - { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350 }, - { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111 }, - { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746 }, - { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = 
"sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541 }, - { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170 }, - { url = "https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029 }, - { url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259 }, - { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592 }, - { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768 }, - { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995 }, - { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546 }, - { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544 }, - { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308 }, - { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920 }, - { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434 }, - { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403 }, - { url = 
"https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469 }, - { url = "https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731 }, - { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952 }, +sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = "2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, ] [[package]] @@ -635,112 +560,101 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4a/9b/e301418629f7bfdf72db9e80ad6ed9d1b83c487c471803eaa6464c511a01/cryptography-46.0.2.tar.gz", hash = "sha256:21b6fc8c71a3f9a604f028a329e5560009cc4a3a828bfea5fcba8eb7647d88fe", size = 749293 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/98/7a8df8c19a335c8028414738490fc3955c0cecbfdd37fcc1b9c3d04bd561/cryptography-46.0.2-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3e32ab7dd1b1ef67b9232c4cf5e2ee4cd517d4316ea910acaaa9c5712a1c663", size = 7261255 }, - { url = 
"https://files.pythonhosted.org/packages/c6/38/b2adb2aa1baa6706adc3eb746691edd6f90a656a9a65c3509e274d15a2b8/cryptography-46.0.2-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1fd1a69086926b623ef8126b4c33d5399ce9e2f3fac07c9c734c2a4ec38b6d02", size = 4297596 }, - { url = "https://files.pythonhosted.org/packages/e4/27/0f190ada240003119488ae66c897b5e97149292988f556aef4a6a2a57595/cryptography-46.0.2-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb7fb9cd44c2582aa5990cf61a4183e6f54eea3172e54963787ba47287edd135", size = 4450899 }, - { url = "https://files.pythonhosted.org/packages/85/d5/e4744105ab02fdf6bb58ba9a816e23b7a633255987310b4187d6745533db/cryptography-46.0.2-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9066cfd7f146f291869a9898b01df1c9b0e314bfa182cef432043f13fc462c92", size = 4300382 }, - { url = "https://files.pythonhosted.org/packages/33/fb/bf9571065c18c04818cb07de90c43fc042c7977c68e5de6876049559c72f/cryptography-46.0.2-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:97e83bf4f2f2c084d8dd792d13841d0a9b241643151686010866bbd076b19659", size = 4017347 }, - { url = "https://files.pythonhosted.org/packages/35/72/fc51856b9b16155ca071080e1a3ad0c3a8e86616daf7eb018d9565b99baa/cryptography-46.0.2-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:4a766d2a5d8127364fd936572c6e6757682fc5dfcbdba1632d4554943199f2fa", size = 4983500 }, - { url = "https://files.pythonhosted.org/packages/c1/53/0f51e926799025e31746d454ab2e36f8c3f0d41592bc65cb9840368d3275/cryptography-46.0.2-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fab8f805e9675e61ed8538f192aad70500fa6afb33a8803932999b1049363a08", size = 4482591 }, - { url = "https://files.pythonhosted.org/packages/86/96/4302af40b23ab8aa360862251fb8fc450b2a06ff24bc5e261c2007f27014/cryptography-46.0.2-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:1e3b6428a3d56043bff0bb85b41c535734204e599c1c0977e1d0f261b02f3ad5", size = 4300019 }, - { url = "https://files.pythonhosted.org/packages/9b/59/0be12c7fcc4c5e34fe2b665a75bc20958473047a30d095a7657c218fa9e8/cryptography-46.0.2-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:1a88634851d9b8de8bb53726f4300ab191d3b2f42595e2581a54b26aba71b7cc", size = 4950006 }, - { url = "https://files.pythonhosted.org/packages/55/1d/42fda47b0111834b49e31590ae14fd020594d5e4dadd639bce89ad790fba/cryptography-46.0.2-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:be939b99d4e091eec9a2bcf41aaf8f351f312cd19ff74b5c83480f08a8a43e0b", size = 4482088 }, - { url = "https://files.pythonhosted.org/packages/17/50/60f583f69aa1602c2bdc7022dae86a0d2b837276182f8c1ec825feb9b874/cryptography-46.0.2-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f13b040649bc18e7eb37936009b24fd31ca095a5c647be8bb6aaf1761142bd1", size = 4425599 }, - { url = "https://files.pythonhosted.org/packages/d1/57/d8d4134cd27e6e94cf44adb3f3489f935bde85f3a5508e1b5b43095b917d/cryptography-46.0.2-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bdc25e4e01b261a8fda4e98618f1c9515febcecebc9566ddf4a70c63967043b", size = 4697458 }, - { url = "https://files.pythonhosted.org/packages/d1/2b/531e37408573e1da33adfb4c58875013ee8ac7d548d1548967d94a0ae5c4/cryptography-46.0.2-cp311-abi3-win32.whl", hash = "sha256:8b9bf67b11ef9e28f4d78ff88b04ed0929fcd0e4f70bb0f704cfc32a5c6311ee", size = 3056077 }, - { url = "https://files.pythonhosted.org/packages/a8/cd/2f83cafd47ed2dc5a3a9c783ff5d764e9e70d3a160e0df9a9dcd639414ce/cryptography-46.0.2-cp311-abi3-win_amd64.whl", hash = 
"sha256:758cfc7f4c38c5c5274b55a57ef1910107436f4ae842478c4989abbd24bd5acb", size = 3512585 }, - { url = "https://files.pythonhosted.org/packages/00/36/676f94e10bfaa5c5b86c469ff46d3e0663c5dc89542f7afbadac241a3ee4/cryptography-46.0.2-cp311-abi3-win_arm64.whl", hash = "sha256:218abd64a2e72f8472c2102febb596793347a3e65fafbb4ad50519969da44470", size = 2927474 }, - { url = "https://files.pythonhosted.org/packages/d5/bb/fa95abcf147a1b0bb94d95f53fbb09da77b24c776c5d87d36f3d94521d2c/cryptography-46.0.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a08e7401a94c002e79dc3bc5231b6558cd4b2280ee525c4673f650a37e2c7685", size = 7248090 }, - { url = "https://files.pythonhosted.org/packages/b7/66/f42071ce0e3ffbfa80a88feadb209c779fda92a23fbc1e14f74ebf72ef6b/cryptography-46.0.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d30bc11d35743bf4ddf76674a0a369ec8a21f87aaa09b0661b04c5f6c46e8d7b", size = 4293123 }, - { url = "https://files.pythonhosted.org/packages/a8/5d/1fdbd2e5c1ba822828d250e5a966622ef00185e476d1cd2726b6dd135e53/cryptography-46.0.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bca3f0ce67e5a2a2cf524e86f44697c4323a86e0fd7ba857de1c30d52c11ede1", size = 4439524 }, - { url = "https://files.pythonhosted.org/packages/c8/c1/5e4989a7d102d4306053770d60f978c7b6b1ea2ff8c06e0265e305b23516/cryptography-46.0.2-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff798ad7a957a5021dcbab78dfff681f0cf15744d0e6af62bd6746984d9c9e9c", size = 4297264 }, - { url = "https://files.pythonhosted.org/packages/28/78/b56f847d220cb1d6d6aef5a390e116ad603ce13a0945a3386a33abc80385/cryptography-46.0.2-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cb5e8daac840e8879407acbe689a174f5ebaf344a062f8918e526824eb5d97af", size = 4011872 }, - { url = "https://files.pythonhosted.org/packages/e1/80/2971f214b066b888944f7b57761bf709ee3f2cf805619a18b18cab9b263c/cryptography-46.0.2-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:3f37aa12b2d91e157827d90ce78f6180f0c02319468a0aea86ab5a9566da644b", size = 4978458 }, - { url = "https://files.pythonhosted.org/packages/a5/84/0cb0a2beaa4f1cbe63ebec4e97cd7e0e9f835d0ba5ee143ed2523a1e0016/cryptography-46.0.2-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e38f203160a48b93010b07493c15f2babb4e0f2319bbd001885adb3f3696d21", size = 4472195 }, - { url = "https://files.pythonhosted.org/packages/30/8b/2b542ddbf78835c7cd67b6fa79e95560023481213a060b92352a61a10efe/cryptography-46.0.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d19f5f48883752b5ab34cff9e2f7e4a7f216296f33714e77d1beb03d108632b6", size = 4296791 }, - { url = "https://files.pythonhosted.org/packages/78/12/9065b40201b4f4876e93b9b94d91feb18de9150d60bd842a16a21565007f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:04911b149eae142ccd8c9a68892a70c21613864afb47aba92d8c7ed9cc001023", size = 4939629 }, - { url = "https://files.pythonhosted.org/packages/f6/9e/6507dc048c1b1530d372c483dfd34e7709fc542765015425f0442b08547f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8b16c1ede6a937c291d41176934268e4ccac2c6521c69d3f5961c5a1e11e039e", size = 4471988 }, - { url = "https://files.pythonhosted.org/packages/b1/86/d025584a5f7d5c5ec8d3633dbcdce83a0cd579f1141ceada7817a4c26934/cryptography-46.0.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:747b6f4a4a23d5a215aadd1d0b12233b4119c4313df83ab4137631d43672cc90", size = 4422989 }, - { url = 
"https://files.pythonhosted.org/packages/4b/39/536370418b38a15a61bbe413006b79dfc3d2b4b0eafceb5581983f973c15/cryptography-46.0.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6b275e398ab3a7905e168c036aad54b5969d63d3d9099a0a66cc147a3cc983be", size = 4685578 }, - { url = "https://files.pythonhosted.org/packages/15/52/ea7e2b1910f547baed566c866fbb86de2402e501a89ecb4871ea7f169a81/cryptography-46.0.2-cp38-abi3-win32.whl", hash = "sha256:0b507c8e033307e37af61cb9f7159b416173bdf5b41d11c4df2e499a1d8e007c", size = 3036711 }, - { url = "https://files.pythonhosted.org/packages/71/9e/171f40f9c70a873e73c2efcdbe91e1d4b1777a03398fa1c4af3c56a2477a/cryptography-46.0.2-cp38-abi3-win_amd64.whl", hash = "sha256:f9b2dc7668418fb6f221e4bf701f716e05e8eadb4f1988a2487b11aedf8abe62", size = 3500007 }, - { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153 }, - { url = "https://files.pythonhosted.org/packages/b7/8c/1aabe338149a7d0f52c3e30f2880b20027ca2a485316756ed6f000462db3/cryptography-46.0.2-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1d3b3edd145953832e09607986f2bd86f85d1dc9c48ced41808b18009d9f30e5", size = 3714495 }, - { url = "https://files.pythonhosted.org/packages/e3/0a/0d10eb970fe3e57da9e9ddcfd9464c76f42baf7b3d0db4a782d6746f788f/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fe245cf4a73c20592f0f48da39748b3513db114465be78f0a36da847221bd1b4", size = 4243379 }, - { url = "https://files.pythonhosted.org/packages/7d/60/e274b4d41a9eb82538b39950a74ef06e9e4d723cb998044635d9deb1b435/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2b9cad9cf71d0c45566624ff76654e9bae5f8a25970c250a26ccfc73f8553e2d", size = 4409533 }, - { url = "https://files.pythonhosted.org/packages/19/9a/fb8548f762b4749aebd13b57b8f865de80258083fe814957f9b0619cfc56/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9bd26f2f75a925fdf5e0a446c0de2714f17819bf560b44b7480e4dd632ad6c46", size = 4243120 }, - { url = "https://files.pythonhosted.org/packages/71/60/883f24147fd4a0c5cab74ac7e36a1ff3094a54ba5c3a6253d2ff4b19255b/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:7282d8f092b5be7172d6472f29b0631f39f18512a3642aefe52c3c0e0ccfad5a", size = 4408940 }, - { url = "https://files.pythonhosted.org/packages/d9/b5/c5e179772ec38adb1c072b3aa13937d2860509ba32b2462bf1dda153833b/cryptography-46.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c4b93af7920cdf80f71650769464ccf1fb49a4b56ae0024173c24c48eb6b1612", size = 3438518 }, +sdist = { url = "https://files.pythonhosted.org/packages/4a/9b/e301418629f7bfdf72db9e80ad6ed9d1b83c487c471803eaa6464c511a01/cryptography-46.0.2.tar.gz", hash = "sha256:21b6fc8c71a3f9a604f028a329e5560009cc4a3a828bfea5fcba8eb7647d88fe", size = 749293, upload-time = "2025-10-01T00:29:11.856Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/98/7a8df8c19a335c8028414738490fc3955c0cecbfdd37fcc1b9c3d04bd561/cryptography-46.0.2-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3e32ab7dd1b1ef67b9232c4cf5e2ee4cd517d4316ea910acaaa9c5712a1c663", size = 7261255, upload-time = "2025-10-01T00:27:22.947Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/38/b2adb2aa1baa6706adc3eb746691edd6f90a656a9a65c3509e274d15a2b8/cryptography-46.0.2-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1fd1a69086926b623ef8126b4c33d5399ce9e2f3fac07c9c734c2a4ec38b6d02", size = 4297596, upload-time = "2025-10-01T00:27:25.258Z" }, + { url = "https://files.pythonhosted.org/packages/e4/27/0f190ada240003119488ae66c897b5e97149292988f556aef4a6a2a57595/cryptography-46.0.2-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb7fb9cd44c2582aa5990cf61a4183e6f54eea3172e54963787ba47287edd135", size = 4450899, upload-time = "2025-10-01T00:27:27.458Z" }, + { url = "https://files.pythonhosted.org/packages/85/d5/e4744105ab02fdf6bb58ba9a816e23b7a633255987310b4187d6745533db/cryptography-46.0.2-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9066cfd7f146f291869a9898b01df1c9b0e314bfa182cef432043f13fc462c92", size = 4300382, upload-time = "2025-10-01T00:27:29.091Z" }, + { url = "https://files.pythonhosted.org/packages/33/fb/bf9571065c18c04818cb07de90c43fc042c7977c68e5de6876049559c72f/cryptography-46.0.2-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:97e83bf4f2f2c084d8dd792d13841d0a9b241643151686010866bbd076b19659", size = 4017347, upload-time = "2025-10-01T00:27:30.767Z" }, + { url = "https://files.pythonhosted.org/packages/35/72/fc51856b9b16155ca071080e1a3ad0c3a8e86616daf7eb018d9565b99baa/cryptography-46.0.2-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:4a766d2a5d8127364fd936572c6e6757682fc5dfcbdba1632d4554943199f2fa", size = 4983500, upload-time = "2025-10-01T00:27:32.741Z" }, + { url = "https://files.pythonhosted.org/packages/c1/53/0f51e926799025e31746d454ab2e36f8c3f0d41592bc65cb9840368d3275/cryptography-46.0.2-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fab8f805e9675e61ed8538f192aad70500fa6afb33a8803932999b1049363a08", size = 4482591, upload-time = "2025-10-01T00:27:34.869Z" }, + { url = "https://files.pythonhosted.org/packages/86/96/4302af40b23ab8aa360862251fb8fc450b2a06ff24bc5e261c2007f27014/cryptography-46.0.2-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:1e3b6428a3d56043bff0bb85b41c535734204e599c1c0977e1d0f261b02f3ad5", size = 4300019, upload-time = "2025-10-01T00:27:37.029Z" }, + { url = "https://files.pythonhosted.org/packages/9b/59/0be12c7fcc4c5e34fe2b665a75bc20958473047a30d095a7657c218fa9e8/cryptography-46.0.2-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:1a88634851d9b8de8bb53726f4300ab191d3b2f42595e2581a54b26aba71b7cc", size = 4950006, upload-time = "2025-10-01T00:27:40.272Z" }, + { url = "https://files.pythonhosted.org/packages/55/1d/42fda47b0111834b49e31590ae14fd020594d5e4dadd639bce89ad790fba/cryptography-46.0.2-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:be939b99d4e091eec9a2bcf41aaf8f351f312cd19ff74b5c83480f08a8a43e0b", size = 4482088, upload-time = "2025-10-01T00:27:42.668Z" }, + { url = "https://files.pythonhosted.org/packages/17/50/60f583f69aa1602c2bdc7022dae86a0d2b837276182f8c1ec825feb9b874/cryptography-46.0.2-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f13b040649bc18e7eb37936009b24fd31ca095a5c647be8bb6aaf1761142bd1", size = 4425599, upload-time = "2025-10-01T00:27:44.616Z" }, + { url = "https://files.pythonhosted.org/packages/d1/57/d8d4134cd27e6e94cf44adb3f3489f935bde85f3a5508e1b5b43095b917d/cryptography-46.0.2-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bdc25e4e01b261a8fda4e98618f1c9515febcecebc9566ddf4a70c63967043b", size = 4697458, upload-time = 
"2025-10-01T00:27:46.209Z" }, + { url = "https://files.pythonhosted.org/packages/d1/2b/531e37408573e1da33adfb4c58875013ee8ac7d548d1548967d94a0ae5c4/cryptography-46.0.2-cp311-abi3-win32.whl", hash = "sha256:8b9bf67b11ef9e28f4d78ff88b04ed0929fcd0e4f70bb0f704cfc32a5c6311ee", size = 3056077, upload-time = "2025-10-01T00:27:48.424Z" }, + { url = "https://files.pythonhosted.org/packages/a8/cd/2f83cafd47ed2dc5a3a9c783ff5d764e9e70d3a160e0df9a9dcd639414ce/cryptography-46.0.2-cp311-abi3-win_amd64.whl", hash = "sha256:758cfc7f4c38c5c5274b55a57ef1910107436f4ae842478c4989abbd24bd5acb", size = 3512585, upload-time = "2025-10-01T00:27:50.521Z" }, + { url = "https://files.pythonhosted.org/packages/00/36/676f94e10bfaa5c5b86c469ff46d3e0663c5dc89542f7afbadac241a3ee4/cryptography-46.0.2-cp311-abi3-win_arm64.whl", hash = "sha256:218abd64a2e72f8472c2102febb596793347a3e65fafbb4ad50519969da44470", size = 2927474, upload-time = "2025-10-01T00:27:52.91Z" }, + { url = "https://files.pythonhosted.org/packages/d5/bb/fa95abcf147a1b0bb94d95f53fbb09da77b24c776c5d87d36f3d94521d2c/cryptography-46.0.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a08e7401a94c002e79dc3bc5231b6558cd4b2280ee525c4673f650a37e2c7685", size = 7248090, upload-time = "2025-10-01T00:28:22.846Z" }, + { url = "https://files.pythonhosted.org/packages/b7/66/f42071ce0e3ffbfa80a88feadb209c779fda92a23fbc1e14f74ebf72ef6b/cryptography-46.0.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d30bc11d35743bf4ddf76674a0a369ec8a21f87aaa09b0661b04c5f6c46e8d7b", size = 4293123, upload-time = "2025-10-01T00:28:25.072Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/1fdbd2e5c1ba822828d250e5a966622ef00185e476d1cd2726b6dd135e53/cryptography-46.0.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bca3f0ce67e5a2a2cf524e86f44697c4323a86e0fd7ba857de1c30d52c11ede1", size = 4439524, upload-time = "2025-10-01T00:28:26.808Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c1/5e4989a7d102d4306053770d60f978c7b6b1ea2ff8c06e0265e305b23516/cryptography-46.0.2-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff798ad7a957a5021dcbab78dfff681f0cf15744d0e6af62bd6746984d9c9e9c", size = 4297264, upload-time = "2025-10-01T00:28:29.327Z" }, + { url = "https://files.pythonhosted.org/packages/28/78/b56f847d220cb1d6d6aef5a390e116ad603ce13a0945a3386a33abc80385/cryptography-46.0.2-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:cb5e8daac840e8879407acbe689a174f5ebaf344a062f8918e526824eb5d97af", size = 4011872, upload-time = "2025-10-01T00:28:31.479Z" }, + { url = "https://files.pythonhosted.org/packages/e1/80/2971f214b066b888944f7b57761bf709ee3f2cf805619a18b18cab9b263c/cryptography-46.0.2-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:3f37aa12b2d91e157827d90ce78f6180f0c02319468a0aea86ab5a9566da644b", size = 4978458, upload-time = "2025-10-01T00:28:33.267Z" }, + { url = "https://files.pythonhosted.org/packages/a5/84/0cb0a2beaa4f1cbe63ebec4e97cd7e0e9f835d0ba5ee143ed2523a1e0016/cryptography-46.0.2-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e38f203160a48b93010b07493c15f2babb4e0f2319bbd001885adb3f3696d21", size = 4472195, upload-time = "2025-10-01T00:28:36.039Z" }, + { url = "https://files.pythonhosted.org/packages/30/8b/2b542ddbf78835c7cd67b6fa79e95560023481213a060b92352a61a10efe/cryptography-46.0.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d19f5f48883752b5ab34cff9e2f7e4a7f216296f33714e77d1beb03d108632b6", size = 4296791, upload-time = 
"2025-10-01T00:28:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/78/12/9065b40201b4f4876e93b9b94d91feb18de9150d60bd842a16a21565007f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:04911b149eae142ccd8c9a68892a70c21613864afb47aba92d8c7ed9cc001023", size = 4939629, upload-time = "2025-10-01T00:28:39.654Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9e/6507dc048c1b1530d372c483dfd34e7709fc542765015425f0442b08547f/cryptography-46.0.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8b16c1ede6a937c291d41176934268e4ccac2c6521c69d3f5961c5a1e11e039e", size = 4471988, upload-time = "2025-10-01T00:28:41.822Z" }, + { url = "https://files.pythonhosted.org/packages/b1/86/d025584a5f7d5c5ec8d3633dbcdce83a0cd579f1141ceada7817a4c26934/cryptography-46.0.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:747b6f4a4a23d5a215aadd1d0b12233b4119c4313df83ab4137631d43672cc90", size = 4422989, upload-time = "2025-10-01T00:28:43.608Z" }, + { url = "https://files.pythonhosted.org/packages/4b/39/536370418b38a15a61bbe413006b79dfc3d2b4b0eafceb5581983f973c15/cryptography-46.0.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6b275e398ab3a7905e168c036aad54b5969d63d3d9099a0a66cc147a3cc983be", size = 4685578, upload-time = "2025-10-01T00:28:45.361Z" }, + { url = "https://files.pythonhosted.org/packages/15/52/ea7e2b1910f547baed566c866fbb86de2402e501a89ecb4871ea7f169a81/cryptography-46.0.2-cp38-abi3-win32.whl", hash = "sha256:0b507c8e033307e37af61cb9f7159b416173bdf5b41d11c4df2e499a1d8e007c", size = 3036711, upload-time = "2025-10-01T00:28:47.096Z" }, + { url = "https://files.pythonhosted.org/packages/71/9e/171f40f9c70a873e73c2efcdbe91e1d4b1777a03398fa1c4af3c56a2477a/cryptography-46.0.2-cp38-abi3-win_amd64.whl", hash = "sha256:f9b2dc7668418fb6f221e4bf701f716e05e8eadb4f1988a2487b11aedf8abe62", size = 3500007, upload-time = "2025-10-01T00:28:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/3e/7c/15ad426257615f9be8caf7f97990cf3dcbb5b8dd7ed7e0db581a1c4759dd/cryptography-46.0.2-cp38-abi3-win_arm64.whl", hash = "sha256:91447f2b17e83c9e0c89f133119d83f94ce6e0fb55dd47da0a959316e6e9cfa1", size = 2918153, upload-time = "2025-10-01T00:28:51.003Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/1aabe338149a7d0f52c3e30f2880b20027ca2a485316756ed6f000462db3/cryptography-46.0.2-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1d3b3edd145953832e09607986f2bd86f85d1dc9c48ced41808b18009d9f30e5", size = 3714495, upload-time = "2025-10-01T00:28:57.222Z" }, + { url = "https://files.pythonhosted.org/packages/e3/0a/0d10eb970fe3e57da9e9ddcfd9464c76f42baf7b3d0db4a782d6746f788f/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:fe245cf4a73c20592f0f48da39748b3513db114465be78f0a36da847221bd1b4", size = 4243379, upload-time = "2025-10-01T00:28:58.989Z" }, + { url = "https://files.pythonhosted.org/packages/7d/60/e274b4d41a9eb82538b39950a74ef06e9e4d723cb998044635d9deb1b435/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2b9cad9cf71d0c45566624ff76654e9bae5f8a25970c250a26ccfc73f8553e2d", size = 4409533, upload-time = "2025-10-01T00:29:00.785Z" }, + { url = "https://files.pythonhosted.org/packages/19/9a/fb8548f762b4749aebd13b57b8f865de80258083fe814957f9b0619cfc56/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9bd26f2f75a925fdf5e0a446c0de2714f17819bf560b44b7480e4dd632ad6c46", size = 4243120, upload-time = "2025-10-01T00:29:02.515Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/60/883f24147fd4a0c5cab74ac7e36a1ff3094a54ba5c3a6253d2ff4b19255b/cryptography-46.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:7282d8f092b5be7172d6472f29b0631f39f18512a3642aefe52c3c0e0ccfad5a", size = 4408940, upload-time = "2025-10-01T00:29:04.42Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b5/c5e179772ec38adb1c072b3aa13937d2860509ba32b2462bf1dda153833b/cryptography-46.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c4b93af7920cdf80f71650769464ccf1fb49a4b56ae0024173c24c48eb6b1612", size = 3438518, upload-time = "2025-10-01T00:29:06.139Z" }, ] [[package]] name = "cymem" version = "2.0.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/4a/1acd761fb6ac4c560e823ce40536a62f886f2d59b2763b5c3fc7e9d92101/cymem-2.0.11.tar.gz", hash = "sha256:efe49a349d4a518be6b6c6b255d4a80f740a341544bde1a807707c058b88d0bd", size = 10346 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/03/e3/d98e3976f4ffa99cddebc1ce379d4d62e3eb1da22285267f902c99cc3395/cymem-2.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3ee54039aad3ef65de82d66c40516bf54586287b46d32c91ea0530c34e8a2745", size = 42005 }, - { url = "https://files.pythonhosted.org/packages/41/b4/7546faf2ab63e59befc95972316d62276cec153f7d4d60e7b0d5e08f0602/cymem-2.0.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c05ef75b5db217be820604e43a47ccbbafea98ab6659d07cea92fa3c864ea58", size = 41747 }, - { url = "https://files.pythonhosted.org/packages/7d/4e/042f372e5b3eb7f5f3dd7677161771d301de2b6fa3f7c74e1cebcd502552/cymem-2.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8d5381e5793ce531bac0dbc00829c8381f18605bb67e4b61d34f8850463da40", size = 217647 }, - { url = "https://files.pythonhosted.org/packages/48/cb/2207679e4b92701f78cf141e1ab4f81f55247dbe154eb426b842a0a993de/cymem-2.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2b9d3f42d7249ac81802135cad51d707def058001a32f73fc7fbf3de7045ac7", size = 218857 }, - { url = "https://files.pythonhosted.org/packages/31/7a/76ae3b7a39ab2531029d281e43fcfcaad728c2341b150a81a3a1f5587cf3/cymem-2.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:39b78f2195d20b75c2d465732f6b8e8721c5d4eb012777c2cb89bdb45a043185", size = 206148 }, - { url = "https://files.pythonhosted.org/packages/25/f9/d0fc0191ac79f15638ddb59237aa76f234691374d7d7950e10f384bd8a25/cymem-2.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2203bd6525a80d8fd0c94654a263af21c0387ae1d5062cceaebb652bf9bad7bc", size = 207112 }, - { url = "https://files.pythonhosted.org/packages/56/c8/75f75889401b20f4c3a7c5965dda09df42913e904ddc2ffe7ef3bdf25061/cymem-2.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:aa54af7314de400634448da1f935b61323da80a49484074688d344fb2036681b", size = 39360 }, - { url = "https://files.pythonhosted.org/packages/71/67/0d74f7e9d79f934368a78fb1d1466b94bebdbff14f8ae94dd3e4ea8738bb/cymem-2.0.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a0fbe19ce653cd688842d81e5819dc63f911a26e192ef30b0b89f0ab2b192ff2", size = 42621 }, - { url = "https://files.pythonhosted.org/packages/4a/d6/f7a19c63b48efc3f00a3ee8d69070ac90202e1e378f6cf81b8671f0cf762/cymem-2.0.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de72101dc0e6326f6a2f73e05a438d1f3c6110d41044236d0fbe62925091267d", size = 42249 }, - { url = 
"https://files.pythonhosted.org/packages/d7/60/cdc434239813eef547fb99b6d0bafe31178501702df9b77c4108c9a216f6/cymem-2.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee4395917f6588b8ac1699499128842768b391fe8896e8626950b4da5f9a406", size = 224758 }, - { url = "https://files.pythonhosted.org/packages/1d/68/8fa6efae17cd3b2ba9a2f83b824867c5b65b06f7aec3f8a0d0cabdeffb9b/cymem-2.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02f2b17d760dc3fe5812737b1ce4f684641cdd751d67761d333a3b5ea97b83", size = 227995 }, - { url = "https://files.pythonhosted.org/packages/e4/f3/ceda70bf6447880140602285b7c6fa171cb7c78b623d35345cc32505cd06/cymem-2.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:04ee6b4041ddec24512d6e969ed6445e57917f01e73b9dabbe17b7e6b27fef05", size = 215325 }, - { url = "https://files.pythonhosted.org/packages/d3/47/6915eaa521e1ce7a0ba480eecb6870cb4f681bcd64ced88c2f0ed7a744b4/cymem-2.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e1048dae7e627ee25f22c87bb670b13e06bc0aecc114b89b959a798d487d1bf4", size = 216447 }, - { url = "https://files.pythonhosted.org/packages/7b/be/8e02bdd31e557f642741a06c8e886782ef78f0b00daffd681922dc9bbc88/cymem-2.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:0c269c7a867d74adeb9db65fa1d226342aacf44d64b7931282f0b0eb22eb6275", size = 39283 }, - { url = "https://files.pythonhosted.org/packages/bd/90/b064e2677e27a35cf3605146abc3285d4f599cc1b6c18fc445ae876dd1e3/cymem-2.0.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4a311c82f743275c84f708df89ac5bf60ddefe4713d532000c887931e22941f", size = 42389 }, - { url = "https://files.pythonhosted.org/packages/fd/60/7aa0561a6c1f0d42643b02c4fdeb2a16181b0ff4e85d73d2d80c6689e92a/cymem-2.0.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:02ed92bead896cca36abad00502b14fa651bdf5d8319461126a2d5ac8c9674c5", size = 41948 }, - { url = "https://files.pythonhosted.org/packages/5f/4e/88a29cc5575374982e527b4ebcab3781bdc826ce693c6418a0f836544246/cymem-2.0.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44ddd3588379f8f376116384af99e3fb5f90091d90f520c341942618bf22f05e", size = 219382 }, - { url = "https://files.pythonhosted.org/packages/9b/3a/8f96e167e93b7f7ec105ed7b25c77bbf215d15bcbf4a24082cdc12234cd6/cymem-2.0.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ec985623624bbd298762d8163fc194a096cb13282731a017e09ff8a60bb8b1", size = 222974 }, - { url = "https://files.pythonhosted.org/packages/6a/fc/ce016bb0c66a4776345fac7508fddec3b739b9dd4363094ac89cce048832/cymem-2.0.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3385a47285435848e0ed66cfd29b35f3ed8703218e2b17bd7a0c053822f26bf", size = 213426 }, - { url = "https://files.pythonhosted.org/packages/5c/c8/accf7cc768f751447a5050b14a195af46798bc22767ac25f49b02861b1eb/cymem-2.0.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5461e65340d6572eb64deadce79242a446a1d39cb7bf70fe7b7e007eb0d799b0", size = 219195 }, - { url = "https://files.pythonhosted.org/packages/74/65/c162fbac63e867a055240b6600b92ef96c0eb7a1895312ac53c4be93d056/cymem-2.0.11-cp313-cp313-win_amd64.whl", hash = "sha256:25da111adf425c29af0cfd9fecfec1c71c8d82e2244a85166830a0817a66ada7", size = 39090 }, +sdist = { url = "https://files.pythonhosted.org/packages/f2/4a/1acd761fb6ac4c560e823ce40536a62f886f2d59b2763b5c3fc7e9d92101/cymem-2.0.11.tar.gz", hash = "sha256:efe49a349d4a518be6b6c6b255d4a80f740a341544bde1a807707c058b88d0bd", size = 10346, upload-time = 
"2025-01-16T21:50:41.045Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/e3/d98e3976f4ffa99cddebc1ce379d4d62e3eb1da22285267f902c99cc3395/cymem-2.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3ee54039aad3ef65de82d66c40516bf54586287b46d32c91ea0530c34e8a2745", size = 42005, upload-time = "2025-01-16T21:49:34.977Z" }, + { url = "https://files.pythonhosted.org/packages/41/b4/7546faf2ab63e59befc95972316d62276cec153f7d4d60e7b0d5e08f0602/cymem-2.0.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c05ef75b5db217be820604e43a47ccbbafea98ab6659d07cea92fa3c864ea58", size = 41747, upload-time = "2025-01-16T21:49:36.108Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4e/042f372e5b3eb7f5f3dd7677161771d301de2b6fa3f7c74e1cebcd502552/cymem-2.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8d5381e5793ce531bac0dbc00829c8381f18605bb67e4b61d34f8850463da40", size = 217647, upload-time = "2025-01-16T21:49:37.433Z" }, + { url = "https://files.pythonhosted.org/packages/48/cb/2207679e4b92701f78cf141e1ab4f81f55247dbe154eb426b842a0a993de/cymem-2.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2b9d3f42d7249ac81802135cad51d707def058001a32f73fc7fbf3de7045ac7", size = 218857, upload-time = "2025-01-16T21:49:40.09Z" }, + { url = "https://files.pythonhosted.org/packages/31/7a/76ae3b7a39ab2531029d281e43fcfcaad728c2341b150a81a3a1f5587cf3/cymem-2.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:39b78f2195d20b75c2d465732f6b8e8721c5d4eb012777c2cb89bdb45a043185", size = 206148, upload-time = "2025-01-16T21:49:41.383Z" }, + { url = "https://files.pythonhosted.org/packages/25/f9/d0fc0191ac79f15638ddb59237aa76f234691374d7d7950e10f384bd8a25/cymem-2.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2203bd6525a80d8fd0c94654a263af21c0387ae1d5062cceaebb652bf9bad7bc", size = 207112, upload-time = "2025-01-16T21:49:43.986Z" }, + { url = "https://files.pythonhosted.org/packages/56/c8/75f75889401b20f4c3a7c5965dda09df42913e904ddc2ffe7ef3bdf25061/cymem-2.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:aa54af7314de400634448da1f935b61323da80a49484074688d344fb2036681b", size = 39360, upload-time = "2025-01-16T21:49:45.479Z" }, + { url = "https://files.pythonhosted.org/packages/71/67/0d74f7e9d79f934368a78fb1d1466b94bebdbff14f8ae94dd3e4ea8738bb/cymem-2.0.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a0fbe19ce653cd688842d81e5819dc63f911a26e192ef30b0b89f0ab2b192ff2", size = 42621, upload-time = "2025-01-16T21:49:46.585Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d6/f7a19c63b48efc3f00a3ee8d69070ac90202e1e378f6cf81b8671f0cf762/cymem-2.0.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de72101dc0e6326f6a2f73e05a438d1f3c6110d41044236d0fbe62925091267d", size = 42249, upload-time = "2025-01-16T21:49:48.973Z" }, + { url = "https://files.pythonhosted.org/packages/d7/60/cdc434239813eef547fb99b6d0bafe31178501702df9b77c4108c9a216f6/cymem-2.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee4395917f6588b8ac1699499128842768b391fe8896e8626950b4da5f9a406", size = 224758, upload-time = "2025-01-16T21:49:51.382Z" }, + { url = "https://files.pythonhosted.org/packages/1d/68/8fa6efae17cd3b2ba9a2f83b824867c5b65b06f7aec3f8a0d0cabdeffb9b/cymem-2.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02f2b17d760dc3fe5812737b1ce4f684641cdd751d67761d333a3b5ea97b83", size = 227995, upload-time = "2025-01-16T21:49:54.538Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/f3/ceda70bf6447880140602285b7c6fa171cb7c78b623d35345cc32505cd06/cymem-2.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:04ee6b4041ddec24512d6e969ed6445e57917f01e73b9dabbe17b7e6b27fef05", size = 215325, upload-time = "2025-01-16T21:49:57.229Z" }, + { url = "https://files.pythonhosted.org/packages/d3/47/6915eaa521e1ce7a0ba480eecb6870cb4f681bcd64ced88c2f0ed7a744b4/cymem-2.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e1048dae7e627ee25f22c87bb670b13e06bc0aecc114b89b959a798d487d1bf4", size = 216447, upload-time = "2025-01-16T21:50:00.432Z" }, + { url = "https://files.pythonhosted.org/packages/7b/be/8e02bdd31e557f642741a06c8e886782ef78f0b00daffd681922dc9bbc88/cymem-2.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:0c269c7a867d74adeb9db65fa1d226342aacf44d64b7931282f0b0eb22eb6275", size = 39283, upload-time = "2025-01-16T21:50:03.384Z" }, ] [[package]] name = "debugpy" version = "1.8.17" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/ad/71e708ff4ca377c4230530d6a7aa7992592648c122a2cd2b321cf8b35a76/debugpy-1.8.17.tar.gz", hash = "sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e", size = 1644129 } +sdist = { url = "https://files.pythonhosted.org/packages/15/ad/71e708ff4ca377c4230530d6a7aa7992592648c122a2cd2b321cf8b35a76/debugpy-1.8.17.tar.gz", hash = "sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e", size = 1644129, upload-time = "2025-09-17T16:33:20.633Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/53/3af72b5c159278c4a0cf4cffa518675a0e73bdb7d1cac0239b815502d2ce/debugpy-1.8.17-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:d3fce3f0e3de262a3b67e69916d001f3e767661c6e1ee42553009d445d1cd840", size = 2207154 }, - { url = "https://files.pythonhosted.org/packages/8f/6d/204f407df45600e2245b4a39860ed4ba32552330a0b3f5f160ae4cc30072/debugpy-1.8.17-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:c6bdf134457ae0cac6fb68205776be635d31174eeac9541e1d0c062165c6461f", size = 3170322 }, - { url = "https://files.pythonhosted.org/packages/f2/13/1b8f87d39cf83c6b713de2620c31205299e6065622e7dd37aff4808dd410/debugpy-1.8.17-cp311-cp311-win32.whl", hash = "sha256:e79a195f9e059edfe5d8bf6f3749b2599452d3e9380484cd261f6b7cd2c7c4da", size = 5155078 }, - { url = "https://files.pythonhosted.org/packages/c2/c5/c012c60a2922cc91caa9675d0ddfbb14ba59e1e36228355f41cab6483469/debugpy-1.8.17-cp311-cp311-win_amd64.whl", hash = "sha256:b532282ad4eca958b1b2d7dbcb2b7218e02cb934165859b918e3b6ba7772d3f4", size = 5179011 }, - { url = "https://files.pythonhosted.org/packages/08/2b/9d8e65beb2751876c82e1aceb32f328c43ec872711fa80257c7674f45650/debugpy-1.8.17-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d", size = 2549522 }, - { url = "https://files.pythonhosted.org/packages/b4/78/eb0d77f02971c05fca0eb7465b18058ba84bd957062f5eec82f941ac792a/debugpy-1.8.17-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc", size = 4309417 }, - { url = "https://files.pythonhosted.org/packages/37/42/c40f1d8cc1fed1e75ea54298a382395b8b937d923fcf41ab0797a554f555/debugpy-1.8.17-cp312-cp312-win32.whl", hash = "sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf", size = 5277130 }, - { url = 
"https://files.pythonhosted.org/packages/72/22/84263b205baad32b81b36eac076de0cdbe09fe2d0637f5b32243dc7c925b/debugpy-1.8.17-cp312-cp312-win_amd64.whl", hash = "sha256:e8f8f61c518952fb15f74a302e068b48d9c4691768ade433e4adeea961993464", size = 5319053 }, - { url = "https://files.pythonhosted.org/packages/50/76/597e5cb97d026274ba297af8d89138dfd9e695767ba0e0895edb20963f40/debugpy-1.8.17-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:857c1dd5d70042502aef1c6d1c2801211f3ea7e56f75e9c335f434afb403e464", size = 2538386 }, - { url = "https://files.pythonhosted.org/packages/5f/60/ce5c34fcdfec493701f9d1532dba95b21b2f6394147234dce21160bd923f/debugpy-1.8.17-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:3bea3b0b12f3946e098cce9b43c3c46e317b567f79570c3f43f0b96d00788088", size = 4292100 }, - { url = "https://files.pythonhosted.org/packages/e8/95/7873cf2146577ef71d2a20bf553f12df865922a6f87b9e8ee1df04f01785/debugpy-1.8.17-cp313-cp313-win32.whl", hash = "sha256:e34ee844c2f17b18556b5bbe59e1e2ff4e86a00282d2a46edab73fd7f18f4a83", size = 5277002 }, - { url = "https://files.pythonhosted.org/packages/46/11/18c79a1cee5ff539a94ec4aa290c1c069a5580fd5cfd2fb2e282f8e905da/debugpy-1.8.17-cp313-cp313-win_amd64.whl", hash = "sha256:6c5cd6f009ad4fca8e33e5238210dc1e5f42db07d4b6ab21ac7ffa904a196420", size = 5319047 }, - { url = "https://files.pythonhosted.org/packages/b0/d0/89247ec250369fc76db477720a26b2fce7ba079ff1380e4ab4529d2fe233/debugpy-1.8.17-py2.py3-none-any.whl", hash = "sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef", size = 5283210 }, + { url = "https://files.pythonhosted.org/packages/d8/53/3af72b5c159278c4a0cf4cffa518675a0e73bdb7d1cac0239b815502d2ce/debugpy-1.8.17-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:d3fce3f0e3de262a3b67e69916d001f3e767661c6e1ee42553009d445d1cd840", size = 2207154, upload-time = "2025-09-17T16:33:29.457Z" }, + { url = "https://files.pythonhosted.org/packages/8f/6d/204f407df45600e2245b4a39860ed4ba32552330a0b3f5f160ae4cc30072/debugpy-1.8.17-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:c6bdf134457ae0cac6fb68205776be635d31174eeac9541e1d0c062165c6461f", size = 3170322, upload-time = "2025-09-17T16:33:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/f2/13/1b8f87d39cf83c6b713de2620c31205299e6065622e7dd37aff4808dd410/debugpy-1.8.17-cp311-cp311-win32.whl", hash = "sha256:e79a195f9e059edfe5d8bf6f3749b2599452d3e9380484cd261f6b7cd2c7c4da", size = 5155078, upload-time = "2025-09-17T16:33:33.331Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c5/c012c60a2922cc91caa9675d0ddfbb14ba59e1e36228355f41cab6483469/debugpy-1.8.17-cp311-cp311-win_amd64.whl", hash = "sha256:b532282ad4eca958b1b2d7dbcb2b7218e02cb934165859b918e3b6ba7772d3f4", size = 5179011, upload-time = "2025-09-17T16:33:35.711Z" }, + { url = "https://files.pythonhosted.org/packages/08/2b/9d8e65beb2751876c82e1aceb32f328c43ec872711fa80257c7674f45650/debugpy-1.8.17-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d", size = 2549522, upload-time = "2025-09-17T16:33:38.466Z" }, + { url = "https://files.pythonhosted.org/packages/b4/78/eb0d77f02971c05fca0eb7465b18058ba84bd957062f5eec82f941ac792a/debugpy-1.8.17-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc", size = 4309417, upload-time = "2025-09-17T16:33:41.299Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/42/c40f1d8cc1fed1e75ea54298a382395b8b937d923fcf41ab0797a554f555/debugpy-1.8.17-cp312-cp312-win32.whl", hash = "sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf", size = 5277130, upload-time = "2025-09-17T16:33:43.554Z" }, + { url = "https://files.pythonhosted.org/packages/72/22/84263b205baad32b81b36eac076de0cdbe09fe2d0637f5b32243dc7c925b/debugpy-1.8.17-cp312-cp312-win_amd64.whl", hash = "sha256:e8f8f61c518952fb15f74a302e068b48d9c4691768ade433e4adeea961993464", size = 5319053, upload-time = "2025-09-17T16:33:53.033Z" }, + { url = "https://files.pythonhosted.org/packages/b0/d0/89247ec250369fc76db477720a26b2fce7ba079ff1380e4ab4529d2fe233/debugpy-1.8.17-py2.py3-none-any.whl", hash = "sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef", size = 5283210, upload-time = "2025-09-17T16:34:25.835Z" }, ] [[package]] name = "decorator" version = "5.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711 } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190 }, + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, ] [[package]] name = "defusedxml" version = "0.7.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520 } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604 }, + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, ] [[package]] @@ -750,9 +664,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788 } +sdist = { url = "https://files.pythonhosted.org/packages/5a/d3/8ae2869247df154b64c1884d7346d412fed0c49df84db635aab2d1c40e62/deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff", size = 173788, upload-time = "2020-04-20T14:23:38.738Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178 }, + { url = "https://files.pythonhosted.org/packages/02/c3/253a89ee03fc9b9682f1541728eb66db7db22148cd94f89ab22528cd1e1b/deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a", size = 11178, upload-time = "2020-04-20T14:23:36.581Z" }, ] [[package]] @@ -765,16 +679,16 @@ dependencies = [ { name = "packaging" }, { name = "requirements-parser" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/31/3e2f4a9b43bd807b28a49d673b9b5f8dcc7265d43950b24e875ba90e6205/deptry-0.23.1.tar.gz", hash = "sha256:5d23e0ef25f3c56405c05383a476edda55944563c5c47a3e9249ed3ec860d382", size = 460016 } +sdist = { url = "https://files.pythonhosted.org/packages/a3/31/3e2f4a9b43bd807b28a49d673b9b5f8dcc7265d43950b24e875ba90e6205/deptry-0.23.1.tar.gz", hash = "sha256:5d23e0ef25f3c56405c05383a476edda55944563c5c47a3e9249ed3ec860d382", size = 460016, upload-time = "2025-07-31T05:54:49.681Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/d0/9785c0e7fdab12f5324467d70ba65ad03b9d4071a13fc182b6d98bab6208/deptry-0.23.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f0b231d098fb5b48d8973c9f192c353ffdd395770063424969fa7f15ddfea7d8", size = 1768731 }, - { url = "https://files.pythonhosted.org/packages/c5/4b/46aded35e0de153936b2214e49e5935179eed9f23cbd3a9a0cd9a5ab0abd/deptry-0.23.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:bf057f514bb2fa18a2b192a7f7372bd14577ff46b11486933e8383dfef461983", size = 1667240 }, - { url = "https://files.pythonhosted.org/packages/ef/f7/206330f68280a1af7edb8bea87f383dbaa4e3b02b37199d40f86e4c43048/deptry-0.23.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ee3f5663bb1c048e2aaf25a4d9e6d09cc1f3b3396ee248980878c6a6c9c0e21", size = 1772019 }, - { url = "https://files.pythonhosted.org/packages/c5/80/51a9e94349b47013e2fd78fd221b12202a7866cd2e0882cfd87d63055e88/deptry-0.23.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae0366dc5f50a5fb29cf90de1110c5e368513de6c1b2dac439f2817f3f752616", size = 1855973 }, - { url = "https://files.pythonhosted.org/packages/d5/7a/bff10ddd26ce39c56a9a35bdc98fcf44c2befe5954c8da4bb895e3f750bb/deptry-0.23.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ab156a90a9eda5819aeb1c1da585dd4d5ec509029399a38771a49e78f40db90f", size = 1946957 }, - { url = "https://files.pythonhosted.org/packages/7e/b6/c80b190cbd817d1f75f8d02d4b6f4d430b2f3014a09d3895684e291e473b/deptry-0.23.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:651c7eb168233755152fcc468713c024d64a03069645187edb4a17ba61ce6133", size = 2025282 }, - { url = 
"https://files.pythonhosted.org/packages/3c/58/1dfb7a6c4ec2daf123264d2c30f53f45791fee46cd0244be5bf97597d2aa/deptry-0.23.1-cp39-abi3-win_amd64.whl", hash = "sha256:8da1e8f70e7086ebc228f3a4a3cfb5aa127b09b5eef60d694503d6bb79809025", size = 1631377 }, - { url = "https://files.pythonhosted.org/packages/18/d3/667b974cf42fc50245a8028beb9966643ee214ca567cc6df6e876feca5ed/deptry-0.23.1-cp39-abi3-win_arm64.whl", hash = "sha256:f589497a5809717db4dcf2aa840f2847c0a4c489331608e538850b6a9ab1c30b", size = 1551113 }, + { url = "https://files.pythonhosted.org/packages/cb/d0/9785c0e7fdab12f5324467d70ba65ad03b9d4071a13fc182b6d98bab6208/deptry-0.23.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:f0b231d098fb5b48d8973c9f192c353ffdd395770063424969fa7f15ddfea7d8", size = 1768731, upload-time = "2025-07-31T05:54:47.348Z" }, + { url = "https://files.pythonhosted.org/packages/c5/4b/46aded35e0de153936b2214e49e5935179eed9f23cbd3a9a0cd9a5ab0abd/deptry-0.23.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:bf057f514bb2fa18a2b192a7f7372bd14577ff46b11486933e8383dfef461983", size = 1667240, upload-time = "2025-07-31T05:54:43.956Z" }, + { url = "https://files.pythonhosted.org/packages/ef/f7/206330f68280a1af7edb8bea87f383dbaa4e3b02b37199d40f86e4c43048/deptry-0.23.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ee3f5663bb1c048e2aaf25a4d9e6d09cc1f3b3396ee248980878c6a6c9c0e21", size = 1772019, upload-time = "2025-07-31T05:54:31.165Z" }, + { url = "https://files.pythonhosted.org/packages/c5/80/51a9e94349b47013e2fd78fd221b12202a7866cd2e0882cfd87d63055e88/deptry-0.23.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae0366dc5f50a5fb29cf90de1110c5e368513de6c1b2dac439f2817f3f752616", size = 1855973, upload-time = "2025-07-31T05:54:37.733Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/bff10ddd26ce39c56a9a35bdc98fcf44c2befe5954c8da4bb895e3f750bb/deptry-0.23.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ab156a90a9eda5819aeb1c1da585dd4d5ec509029399a38771a49e78f40db90f", size = 1946957, upload-time = "2025-07-31T05:54:34.567Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/c80b190cbd817d1f75f8d02d4b6f4d430b2f3014a09d3895684e291e473b/deptry-0.23.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:651c7eb168233755152fcc468713c024d64a03069645187edb4a17ba61ce6133", size = 2025282, upload-time = "2025-07-31T05:54:40.906Z" }, + { url = "https://files.pythonhosted.org/packages/3c/58/1dfb7a6c4ec2daf123264d2c30f53f45791fee46cd0244be5bf97597d2aa/deptry-0.23.1-cp39-abi3-win_amd64.whl", hash = "sha256:8da1e8f70e7086ebc228f3a4a3cfb5aa127b09b5eef60d694503d6bb79809025", size = 1631377, upload-time = "2025-07-31T05:54:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/18/d3/667b974cf42fc50245a8028beb9966643ee214ca567cc6df6e876feca5ed/deptry-0.23.1-cp39-abi3-win_arm64.whl", hash = "sha256:f589497a5809717db4dcf2aa840f2847c0a4c489331608e538850b6a9ab1c30b", size = 1551113, upload-time = "2025-07-31T05:54:50.679Z" }, ] [[package]] @@ -786,99 +700,84 @@ dependencies = [ { name = "executing" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/84/75/b78198620640d394bc435c17bb49db18419afdd6cfa3ed8bcfe14034ec80/devtools-0.12.2.tar.gz", hash = "sha256:efceab184cb35e3a11fa8e602cc4fadacaa2e859e920fc6f87bf130b69885507", size = 75005 } +sdist = { url = "https://files.pythonhosted.org/packages/84/75/b78198620640d394bc435c17bb49db18419afdd6cfa3ed8bcfe14034ec80/devtools-0.12.2.tar.gz", hash = 
"sha256:efceab184cb35e3a11fa8e602cc4fadacaa2e859e920fc6f87bf130b69885507", size = 75005, upload-time = "2023-09-03T16:57:00.679Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/ae/afb1487556e2dc827a17097aac8158a25b433a345386f0e249f6d2694ccb/devtools-0.12.2-py3-none-any.whl", hash = "sha256:c366e3de1df4cdd635f1ad8cbcd3af01a384d7abda71900e68d43b04eb6aaca7", size = 19411 }, + { url = "https://files.pythonhosted.org/packages/d1/ae/afb1487556e2dc827a17097aac8158a25b433a345386f0e249f6d2694ccb/devtools-0.12.2-py3-none-any.whl", hash = "sha256:c366e3de1df4cdd635f1ad8cbcd3af01a384d7abda71900e68d43b04eb6aaca7", size = 19411, upload-time = "2023-09-03T16:56:59.049Z" }, ] [[package]] name = "distro" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, ] [[package]] -name = "environs" -version = "11.2.1" +name = "execnet" +version = "2.1.2" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "marshmallow" }, - { name = "python-dotenv" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/77/08/2b7d9cacf2b27482c9218ee6762336aa47bdb9d07ee26a136d072a328297/environs-11.2.1.tar.gz", hash = "sha256:e068ae3174cef52ba4b95ead22e639056a02465f616e62323e04ae08e86a75a4", size = 27485 } +sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/21/1e0d8de234e9d0c675ea8fd50f9e7ad66fae32c207bc982f1d14f7c0835b/environs-11.2.1-py3-none-any.whl", hash = "sha256:9d2080cf25807a26fc0d4301e2d7b62c64fbf547540f21e3a30cc02bc5fbe948", size = 12923 }, + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, ] [[package]] name = "executing" version = "2.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488 } +sdist = { url = 
"https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317 }, + { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, ] [[package]] name = "fastjsonschema" version = "2.21.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/b5/23b216d9d985a956623b6bd12d4086b60f0059b27799f23016af04a74ea1/fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de", size = 374130 } +sdist = { url = "https://files.pythonhosted.org/packages/20/b5/23b216d9d985a956623b6bd12d4086b60f0059b27799f23016af04a74ea1/fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de", size = 374130, upload-time = "2025-08-14T18:49:36.666Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/a8/20d0723294217e47de6d9e2e40fd4a9d2f7c4b6ef974babd482a59743694/fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463", size = 24024 }, + { url = "https://files.pythonhosted.org/packages/cb/a8/20d0723294217e47de6d9e2e40fd4a9d2f7c4b6ef974babd482a59743694/fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463", size = 24024, upload-time = "2025-08-14T18:49:34.776Z" }, ] [[package]] name = "fastuuid" version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/98/f3/12481bda4e5b6d3e698fbf525df4443cc7dce746f246b86b6fcb2fba1844/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:73946cb950c8caf65127d4e9a325e2b6be0442a224fd51ba3b6ac44e1912ce34", size = 516386 }, - { url = "https://files.pythonhosted.org/packages/59/19/2fc58a1446e4d72b655648eb0879b04e88ed6fa70d474efcf550f640f6ec/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:12ac85024637586a5b69645e7ed986f7535106ed3013640a393a03e461740cb7", size = 264569 }, - { url = "https://files.pythonhosted.org/packages/78/29/3c74756e5b02c40cfcc8b1d8b5bac4edbd532b55917a6bcc9113550e99d1/fastuuid-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:05a8dde1f395e0c9b4be515b7a521403d1e8349443e7641761af07c7ad1624b1", size = 254366 }, - { url = "https://files.pythonhosted.org/packages/52/96/d761da3fccfa84f0f353ce6e3eb8b7f76b3aa21fd25e1b00a19f9c80a063/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09378a05020e3e4883dfdab438926f31fea15fd17604908f3d39cbeb22a0b4dc", 
size = 278978 }, - { url = "https://files.pythonhosted.org/packages/fc/c2/f84c90167cc7765cb82b3ff7808057608b21c14a38531845d933a4637307/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbb0c4b15d66b435d2538f3827f05e44e2baafcc003dd7d8472dc67807ab8fd8", size = 279692 }, - { url = "https://files.pythonhosted.org/packages/af/7b/4bacd03897b88c12348e7bd77943bac32ccf80ff98100598fcff74f75f2e/fastuuid-0.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd5a7f648d4365b41dbf0e38fe8da4884e57bed4e77c83598e076ac0c93995e7", size = 303384 }, - { url = "https://files.pythonhosted.org/packages/c0/a2/584f2c29641df8bd810d00c1f21d408c12e9ad0c0dafdb8b7b29e5ddf787/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c0a94245afae4d7af8c43b3159d5e3934c53f47140be0be624b96acd672ceb73", size = 460921 }, - { url = "https://files.pythonhosted.org/packages/24/68/c6b77443bb7764c760e211002c8638c0c7cce11cb584927e723215ba1398/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b29e23c97e77c3a9514d70ce343571e469098ac7f5a269320a0f0b3e193ab36", size = 480575 }, - { url = "https://files.pythonhosted.org/packages/5a/87/93f553111b33f9bb83145be12868c3c475bf8ea87c107063d01377cc0e8e/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1e690d48f923c253f28151b3a6b4e335f2b06bf669c68a02665bc150b7839e94", size = 452317 }, - { url = "https://files.pythonhosted.org/packages/9e/8c/a04d486ca55b5abb7eaa65b39df8d891b7b1635b22db2163734dc273579a/fastuuid-0.14.0-cp311-cp311-win32.whl", hash = "sha256:a6f46790d59ab38c6aa0e35c681c0484b50dc0acf9e2679c005d61e019313c24", size = 154804 }, - { url = "https://files.pythonhosted.org/packages/9c/b2/2d40bf00820de94b9280366a122cbaa60090c8cf59e89ac3938cf5d75895/fastuuid-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:e150eab56c95dc9e3fefc234a0eedb342fac433dacc273cd4d150a5b0871e1fa", size = 156099 }, - { url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164 }, - { url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837 }, - { url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370 }, - { url = "https://files.pythonhosted.org/packages/14/dd/5927f0a523d8e6a76b70968e6004966ee7df30322f5fc9b6cdfb0276646a/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c9ec605ace243b6dbe3bd27ebdd5d33b00d8d1d3f580b39fdd15cd96fd71796", size = 277766 }, - { url = "https://files.pythonhosted.org/packages/16/6e/c0fb547eef61293153348f12e0f75a06abb322664b34a1573a7760501336/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:808527f2407f58a76c916d6aa15d58692a4a019fdf8d4c32ac7ff303b7d7af09", size = 278105 }, - { url = 
"https://files.pythonhosted.org/packages/2d/b1/b9c75e03b768f61cf2e84ee193dc18601aeaf89a4684b20f2f0e9f52b62c/fastuuid-0.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fb3c0d7fef6674bbeacdd6dbd386924a7b60b26de849266d1ff6602937675c8", size = 301564 }, - { url = "https://files.pythonhosted.org/packages/fc/fa/f7395fdac07c7a54f18f801744573707321ca0cee082e638e36452355a9d/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab3f5d36e4393e628a4df337c2c039069344db5f4b9d2a3c9cea48284f1dd741", size = 459659 }, - { url = "https://files.pythonhosted.org/packages/66/49/c9fd06a4a0b1f0f048aacb6599e7d96e5d6bc6fa680ed0d46bf111929d1b/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b9a0ca4f03b7e0b01425281ffd44e99d360e15c895f1907ca105854ed85e2057", size = 478430 }, - { url = "https://files.pythonhosted.org/packages/be/9c/909e8c95b494e8e140e8be6165d5fc3f61fdc46198c1554df7b3e1764471/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3acdf655684cc09e60fb7e4cf524e8f42ea760031945aa8086c7eae2eeeabeb8", size = 450894 }, - { url = "https://files.pythonhosted.org/packages/90/eb/d29d17521976e673c55ef7f210d4cdd72091a9ec6755d0fd4710d9b3c871/fastuuid-0.14.0-cp312-cp312-win32.whl", hash = "sha256:9579618be6280700ae36ac42c3efd157049fe4dd40ca49b021280481c78c3176", size = 154374 }, - { url = "https://files.pythonhosted.org/packages/cc/fc/f5c799a6ea6d877faec0472d0b27c079b47c86b1cdc577720a5386483b36/fastuuid-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:d9e4332dc4ba054434a9594cbfaf7823b57993d7d8e7267831c3e059857cf397", size = 156550 }, - { url = "https://files.pythonhosted.org/packages/a5/83/ae12dd39b9a39b55d7f90abb8971f1a5f3c321fd72d5aa83f90dc67fe9ed/fastuuid-0.14.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77a09cb7427e7af74c594e409f7731a0cf887221de2f698e1ca0ebf0f3139021", size = 510720 }, - { url = "https://files.pythonhosted.org/packages/53/b0/a4b03ff5d00f563cc7546b933c28cb3f2a07344b2aec5834e874f7d44143/fastuuid-0.14.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9bd57289daf7b153bfa3e8013446aa144ce5e8c825e9e366d455155ede5ea2dc", size = 262024 }, - { url = "https://files.pythonhosted.org/packages/9c/6d/64aee0a0f6a58eeabadd582e55d0d7d70258ffdd01d093b30c53d668303b/fastuuid-0.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ac60fc860cdf3c3f327374db87ab8e064c86566ca8c49d2e30df15eda1b0c2d5", size = 251679 }, - { url = "https://files.pythonhosted.org/packages/60/f5/a7e9cda8369e4f7919d36552db9b2ae21db7915083bc6336f1b0082c8b2e/fastuuid-0.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab32f74bd56565b186f036e33129da77db8be09178cd2f5206a5d4035fb2a23f", size = 277862 }, - { url = "https://files.pythonhosted.org/packages/f0/d3/8ce11827c783affffd5bd4d6378b28eb6cc6d2ddf41474006b8d62e7448e/fastuuid-0.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e678459cf4addaedd9936bbb038e35b3f6b2061330fd8f2f6a1d80414c0f87", size = 278278 }, - { url = "https://files.pythonhosted.org/packages/a2/51/680fb6352d0bbade04036da46264a8001f74b7484e2fd1f4da9e3db1c666/fastuuid-0.14.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1e3cc56742f76cd25ecb98e4b82a25f978ccffba02e4bdce8aba857b6d85d87b", size = 301788 }, - { url = "https://files.pythonhosted.org/packages/fa/7c/2014b5785bd8ebdab04ec857635ebd84d5ee4950186a577db9eff0fb8ff6/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:cb9a030f609194b679e1660f7e32733b7a0f332d519c5d5a6a0a580991290022", size = 459819 }, - { url = "https://files.pythonhosted.org/packages/01/d2/524d4ceeba9160e7a9bc2ea3e8f4ccf1ad78f3bde34090ca0c51f09a5e91/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:09098762aad4f8da3a888eb9ae01c84430c907a297b97166b8abc07b640f2995", size = 478546 }, - { url = "https://files.pythonhosted.org/packages/bc/17/354d04951ce114bf4afc78e27a18cfbd6ee319ab1829c2d5fb5e94063ac6/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1383fff584fa249b16329a059c68ad45d030d5a4b70fb7c73a08d98fd53bcdab", size = 450921 }, - { url = "https://files.pythonhosted.org/packages/fb/be/d7be8670151d16d88f15bb121c5b66cdb5ea6a0c2a362d0dcf30276ade53/fastuuid-0.14.0-cp313-cp313-win32.whl", hash = "sha256:a0809f8cc5731c066c909047f9a314d5f536c871a7a22e815cc4967c110ac9ad", size = 154559 }, - { url = "https://files.pythonhosted.org/packages/22/1d/5573ef3624ceb7abf4a46073d3554e37191c868abc3aecd5289a72f9810a/fastuuid-0.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:0df14e92e7ad3276327631c9e7cec09e32572ce82089c55cb1bb8df71cf394ed", size = 156539 }, +sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232, upload-time = "2025-10-19T22:19:22.402Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/f3/12481bda4e5b6d3e698fbf525df4443cc7dce746f246b86b6fcb2fba1844/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:73946cb950c8caf65127d4e9a325e2b6be0442a224fd51ba3b6ac44e1912ce34", size = 516386, upload-time = "2025-10-19T22:42:40.176Z" }, + { url = "https://files.pythonhosted.org/packages/59/19/2fc58a1446e4d72b655648eb0879b04e88ed6fa70d474efcf550f640f6ec/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:12ac85024637586a5b69645e7ed986f7535106ed3013640a393a03e461740cb7", size = 264569, upload-time = "2025-10-19T22:25:50.977Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/3c74756e5b02c40cfcc8b1d8b5bac4edbd532b55917a6bcc9113550e99d1/fastuuid-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:05a8dde1f395e0c9b4be515b7a521403d1e8349443e7641761af07c7ad1624b1", size = 254366, upload-time = "2025-10-19T22:29:49.166Z" }, + { url = "https://files.pythonhosted.org/packages/52/96/d761da3fccfa84f0f353ce6e3eb8b7f76b3aa21fd25e1b00a19f9c80a063/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09378a05020e3e4883dfdab438926f31fea15fd17604908f3d39cbeb22a0b4dc", size = 278978, upload-time = "2025-10-19T22:35:41.306Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c2/f84c90167cc7765cb82b3ff7808057608b21c14a38531845d933a4637307/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbb0c4b15d66b435d2538f3827f05e44e2baafcc003dd7d8472dc67807ab8fd8", size = 279692, upload-time = "2025-10-19T22:25:36.997Z" }, + { url = "https://files.pythonhosted.org/packages/af/7b/4bacd03897b88c12348e7bd77943bac32ccf80ff98100598fcff74f75f2e/fastuuid-0.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd5a7f648d4365b41dbf0e38fe8da4884e57bed4e77c83598e076ac0c93995e7", size = 303384, upload-time = "2025-10-19T22:29:46.578Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/a2/584f2c29641df8bd810d00c1f21d408c12e9ad0c0dafdb8b7b29e5ddf787/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c0a94245afae4d7af8c43b3159d5e3934c53f47140be0be624b96acd672ceb73", size = 460921, upload-time = "2025-10-19T22:36:42.006Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/c6b77443bb7764c760e211002c8638c0c7cce11cb584927e723215ba1398/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b29e23c97e77c3a9514d70ce343571e469098ac7f5a269320a0f0b3e193ab36", size = 480575, upload-time = "2025-10-19T22:28:18.975Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/93f553111b33f9bb83145be12868c3c475bf8ea87c107063d01377cc0e8e/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1e690d48f923c253f28151b3a6b4e335f2b06bf669c68a02665bc150b7839e94", size = 452317, upload-time = "2025-10-19T22:25:32.75Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8c/a04d486ca55b5abb7eaa65b39df8d891b7b1635b22db2163734dc273579a/fastuuid-0.14.0-cp311-cp311-win32.whl", hash = "sha256:a6f46790d59ab38c6aa0e35c681c0484b50dc0acf9e2679c005d61e019313c24", size = 154804, upload-time = "2025-10-19T22:24:15.615Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b2/2d40bf00820de94b9280366a122cbaa60090c8cf59e89ac3938cf5d75895/fastuuid-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:e150eab56c95dc9e3fefc234a0eedb342fac433dacc273cd4d150a5b0871e1fa", size = 156099, upload-time = "2025-10-19T22:24:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164, upload-time = "2025-10-19T22:31:45.635Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837, upload-time = "2025-10-19T22:38:38.53Z" }, + { url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370, upload-time = "2025-10-19T22:40:26.07Z" }, + { url = "https://files.pythonhosted.org/packages/14/dd/5927f0a523d8e6a76b70968e6004966ee7df30322f5fc9b6cdfb0276646a/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c9ec605ace243b6dbe3bd27ebdd5d33b00d8d1d3f580b39fdd15cd96fd71796", size = 277766, upload-time = "2025-10-19T22:37:23.779Z" }, + { url = "https://files.pythonhosted.org/packages/16/6e/c0fb547eef61293153348f12e0f75a06abb322664b34a1573a7760501336/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:808527f2407f58a76c916d6aa15d58692a4a019fdf8d4c32ac7ff303b7d7af09", size = 278105, upload-time = "2025-10-19T22:26:56.821Z" }, + { url = "https://files.pythonhosted.org/packages/2d/b1/b9c75e03b768f61cf2e84ee193dc18601aeaf89a4684b20f2f0e9f52b62c/fastuuid-0.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fb3c0d7fef6674bbeacdd6dbd386924a7b60b26de849266d1ff6602937675c8", size = 301564, upload-time = "2025-10-19T22:30:31.604Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/fa/f7395fdac07c7a54f18f801744573707321ca0cee082e638e36452355a9d/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab3f5d36e4393e628a4df337c2c039069344db5f4b9d2a3c9cea48284f1dd741", size = 459659, upload-time = "2025-10-19T22:31:32.341Z" }, + { url = "https://files.pythonhosted.org/packages/66/49/c9fd06a4a0b1f0f048aacb6599e7d96e5d6bc6fa680ed0d46bf111929d1b/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b9a0ca4f03b7e0b01425281ffd44e99d360e15c895f1907ca105854ed85e2057", size = 478430, upload-time = "2025-10-19T22:26:22.962Z" }, + { url = "https://files.pythonhosted.org/packages/be/9c/909e8c95b494e8e140e8be6165d5fc3f61fdc46198c1554df7b3e1764471/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3acdf655684cc09e60fb7e4cf524e8f42ea760031945aa8086c7eae2eeeabeb8", size = 450894, upload-time = "2025-10-19T22:27:01.647Z" }, + { url = "https://files.pythonhosted.org/packages/90/eb/d29d17521976e673c55ef7f210d4cdd72091a9ec6755d0fd4710d9b3c871/fastuuid-0.14.0-cp312-cp312-win32.whl", hash = "sha256:9579618be6280700ae36ac42c3efd157049fe4dd40ca49b021280481c78c3176", size = 154374, upload-time = "2025-10-19T22:29:19.879Z" }, + { url = "https://files.pythonhosted.org/packages/cc/fc/f5c799a6ea6d877faec0472d0b27c079b47c86b1cdc577720a5386483b36/fastuuid-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:d9e4332dc4ba054434a9594cbfaf7823b57993d7d8e7267831c3e059857cf397", size = 156550, upload-time = "2025-10-19T22:27:49.658Z" }, ] [[package]] name = "filelock" version = "3.20.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922 } +sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054 }, + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, ] [[package]] @@ -886,98 +785,66 @@ name = "flatbuffers" version = "25.12.19" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/2d/d2a548598be01649e2d46231d151a6c56d10b964d94043a335ae56ea2d92/flatbuffers-25.12.19-py2.py3-none-any.whl", hash = "sha256:7634f50c427838bb021c2d66a3d1168e9d199b0607e6329399f04846d42e20b4", size = 26661 }, + { url = "https://files.pythonhosted.org/packages/e8/2d/d2a548598be01649e2d46231d151a6c56d10b964d94043a335ae56ea2d92/flatbuffers-25.12.19-py2.py3-none-any.whl", hash = "sha256:7634f50c427838bb021c2d66a3d1168e9d199b0607e6329399f04846d42e20b4", size = 26661, upload-time = "2025-12-19T23:16:13.622Z" }, ] [[package]] name = "fqdn" version = "1.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/30/3e/a80a8c077fd798951169626cde3e239adeba7dab75deb3555716415bd9b0/fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f", size = 6015 } +sdist = { url = "https://files.pythonhosted.org/packages/30/3e/a80a8c077fd798951169626cde3e239adeba7dab75deb3555716415bd9b0/fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f", size = 6015, upload-time = "2021-03-11T07:16:29.08Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014", size = 9121 }, + { url = "https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014", size = 9121, upload-time = "2021-03-11T07:16:28.351Z" }, ] [[package]] name = "frozenlist" version = "1.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912 }, - { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046 }, - { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119 }, - { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067 }, - { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160 }, - { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544 }, - { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797 }, - { url = 
"https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923 }, - { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886 }, - { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731 }, - { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544 }, - { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806 }, - { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382 }, - { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647 }, - { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064 }, - { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937 }, - { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782 }, - { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594 }, - { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448 }, - { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411 }, - { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014 }, - { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909 }, - { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049 }, - { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485 }, - { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619 }, - { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320 }, - { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820 }, - { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518 }, - { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096 }, - { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985 }, - { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591 }, - { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102 }, - { url = 
"https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717 }, - { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651 }, - { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417 }, - { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391 }, - { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048 }, - { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549 }, - { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833 }, - { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363 }, - { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314 }, - { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365 }, - { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763 }, - { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110 }, - { url = 
"https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717 }, - { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628 }, - { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882 }, - { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676 }, - { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235 }, - { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742 }, - { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725 }, - { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533 }, - { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506 }, - { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161 }, - { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676 }, - { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638 }, - { url = 
"https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067 }, - { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101 }, - { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901 }, - { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395 }, - { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659 }, - { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492 }, - { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034 }, - { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749 }, - { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409 }, +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] [[package]] name = "fsspec" version = "2025.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/de/e0/bab50af11c2d75c9c4a2a26a5254573c0bd97cea152254401510950486fa/fsspec-2025.9.0.tar.gz", hash = "sha256:19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19", size = 304847 } +sdist = { url = "https://files.pythonhosted.org/packages/de/e0/bab50af11c2d75c9c4a2a26a5254573c0bd97cea152254401510950486fa/fsspec-2025.9.0.tar.gz", hash = "sha256:19fd429483d25d28b65ec68f9f4adc16c17ea2c7c7bf54ec61360d478fb19c19", size = 304847, upload-time = "2025-09-02T19:10:49.215Z" } wheels 
= [ - { url = "https://files.pythonhosted.org/packages/47/71/70db47e4f6ce3e5c37a607355f80da8860a33226be640226ac52cb05ef2e/fsspec-2025.9.0-py3-none-any.whl", hash = "sha256:530dc2a2af60a414a832059574df4a6e10cce927f6f4a78209390fe38955cfb7", size = 199289 }, + { url = "https://files.pythonhosted.org/packages/47/71/70db47e4f6ce3e5c37a607355f80da8860a33226be640226ac52cb05ef2e/fsspec-2025.9.0-py3-none-any.whl", hash = "sha256:530dc2a2af60a414a832059574df4a6e10cce927f6f4a78209390fe38955cfb7", size = 199289, upload-time = "2025-09-02T19:10:47.708Z" }, ] [[package]] @@ -987,9 +854,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943 } +sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943, upload-time = "2022-05-02T15:47:16.11Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034 }, + { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" }, ] [[package]] @@ -997,34 +864,27 @@ name = "graphrag" version = "2.7.0" source = { editable = "packages/graphrag" } dependencies = [ - { name = "aiofiles" }, - { name = "azure-cosmos" }, { name = "azure-identity" }, { name = "azure-search-documents" }, { name = "azure-storage-blob" }, { name = "blis" }, { name = "devtools" }, - { name = "environs" }, - { name = "fastuuid" }, { name = "graphrag-cache" }, { name = "graphrag-common" }, { name = "graphrag-input" }, + { name = "graphrag-llm" }, { name = "graphrag-storage" }, { name = "graphrag-vectors" }, { name = "graspologic-native" }, { name = "json-repair" }, - { name = "lancedb" }, - { name = "litellm" }, { name = "networkx" }, { name = "nltk" }, { name = "numpy" }, - { name = "openai" }, { name = "pandas" }, { name = "pyarrow" }, { name = "pydantic" }, { name = "spacy" }, { name = "textblob" }, - { name = "tiktoken" }, { name = "tqdm" }, { name = "typer" }, { name = "typing-extensions" }, @@ -1032,34 +892,27 @@ dependencies = [ [package.metadata] requires-dist = [ - { name = "aiofiles", specifier = "~=24.1" }, - { name = "azure-cosmos", specifier = "~=4.9" }, { name = "azure-identity", specifier = "~=1.19" }, { name = "azure-search-documents", specifier = "~=11.5" }, { name = "azure-storage-blob", specifier = "~=12.24" }, { name = "blis", specifier = "~=1.0" }, { name = "devtools", specifier = "~=0.12" }, - { name = "environs", specifier = "~=11.0" }, - { name = "fastuuid", specifier = "~=0.13" }, { name = "graphrag-cache", editable = "packages/graphrag-cache" }, { name = "graphrag-common", editable = "packages/graphrag-common" }, { name = "graphrag-input", editable = "packages/graphrag-input" }, + { name = "graphrag-llm", editable = "packages/graphrag-llm" }, { name = "graphrag-storage", 
editable = "packages/graphrag-storage" }, { name = "graphrag-vectors", editable = "packages/graphrag-vectors" }, { name = "graspologic-native", specifier = "~=1.2" }, { name = "json-repair", specifier = "~=0.30" }, - { name = "lancedb", specifier = "~=0.24.1" }, - { name = "litellm", specifier = "~=1.77" }, { name = "networkx", specifier = "~=3.4" }, { name = "nltk", specifier = "==3.9.1" }, { name = "numpy", specifier = ">=2.1.0" }, - { name = "openai", specifier = "~=1.68" }, { name = "pandas", specifier = ">=2.3.0" }, { name = "pyarrow", specifier = "~=22.0" }, { name = "pydantic", specifier = "~=2.10" }, { name = "spacy", specifier = "~=3.8" }, { name = "textblob", specifier = "~=0.18" }, - { name = "tiktoken", specifier = "~=0.11" }, { name = "tqdm", specifier = "~=4.67" }, { name = "typer", specifier = "~=0.16" }, { name = "typing-extensions", specifier = "~=4.12" }, @@ -1129,6 +982,33 @@ requires-dist = [ { name = "pydantic", specifier = "~=2.10" }, ] +[[package]] +name = "graphrag-llm" +version = "2.7.0" +source = { editable = "packages/graphrag-llm" } +dependencies = [ + { name = "azure-identity" }, + { name = "graphrag-cache" }, + { name = "graphrag-common" }, + { name = "jinja2" }, + { name = "litellm" }, + { name = "nest-asyncio2" }, + { name = "pydantic" }, + { name = "typing-extensions" }, +] + +[package.metadata] +requires-dist = [ + { name = "azure-identity", specifier = "~=1.19.0" }, + { name = "graphrag-cache", editable = "packages/graphrag-cache" }, + { name = "graphrag-common", editable = "packages/graphrag-common" }, + { name = "jinja2", specifier = "~=3.1" }, + { name = "litellm", specifier = "~=1.80" }, + { name = "nest-asyncio2", specifier = "~=1.7" }, + { name = "pydantic", specifier = "~=2.10" }, + { name = "typing-extensions", specifier = "~=4.12" }, +] + [[package]] name = "graphrag-monorepo" version = "0.0.0" @@ -1152,6 +1032,7 @@ dev = [ { name = "pytest-asyncio" }, { name = "pytest-dotenv" }, { name = "pytest-timeout" }, + { name = "pytest-xdist", extra = ["psutil"] }, { name = "ruff" }, { name = "semversioner" }, { name = "update-toml" }, @@ -1177,6 +1058,7 @@ dev = [ { name = "pytest-asyncio", specifier = "~=0.24" }, { name = "pytest-dotenv", specifier = "~=0.5" }, { name = "pytest-timeout", specifier = "~=2.3" }, + { name = "pytest-xdist", extras = ["psutil"], specifier = "~=3.8.0" }, { name = "ruff", specifier = "~=0.8" }, { name = "semversioner", specifier = "~=2.0" }, { name = "update-toml", specifier = "~=0.2" }, @@ -1240,36 +1122,36 @@ requires-dist = [ name = "graspologic-native" version = "1.2.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/25/2d/62b30d89533643ccf4778a18eb023f291b8877b5d85de3342f07b2d363a7/graspologic_native-1.2.5.tar.gz", hash = "sha256:27ea7e01fa44466c0b4cdd678d4561e5d3dc0cb400015683b7ae1386031257a0", size = 2512729 } +sdist = { url = "https://files.pythonhosted.org/packages/25/2d/62b30d89533643ccf4778a18eb023f291b8877b5d85de3342f07b2d363a7/graspologic_native-1.2.5.tar.gz", hash = "sha256:27ea7e01fa44466c0b4cdd678d4561e5d3dc0cb400015683b7ae1386031257a0", size = 2512729, upload-time = "2025-04-02T19:34:22.961Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ae/86/10748f4c474b0c8f6060dd379bb0c4da5d42779244bb13a58656ffb44a03/graspologic_native-1.2.5-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bf05f2e162ae2a2a8d6e8cfccbe3586d1faa0b808159ff950478348df557c61e", size = 648437 }, - { url = 
"https://files.pythonhosted.org/packages/42/cc/b75ea35755340bedda29727e5388390c639ea533f55b9249f5ac3003f656/graspologic_native-1.2.5-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fff06ed49c3875cf351bb09a92ae7cbc169ce92dcc4c3439e28e801f822ae", size = 352044 }, - { url = "https://files.pythonhosted.org/packages/8e/55/15e6e4f18bf249b529ac4cd1522b03f5c9ef9284a2f7bfaa1fd1f96464fe/graspologic_native-1.2.5-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e7e993e7d70fe0d860773fc62812fbb8cb4ef2d11d8661a1f06f8772593915", size = 364644 }, - { url = "https://files.pythonhosted.org/packages/3b/51/21097af79f3d68626539ab829bdbf6cc42933f020e161972927d916e394c/graspologic_native-1.2.5-cp38-abi3-win_amd64.whl", hash = "sha256:c3ef2172d774083d7e2c8e77daccd218571ddeebeb2c1703cebb1a2cc4c56e07", size = 210438 }, + { url = "https://files.pythonhosted.org/packages/ae/86/10748f4c474b0c8f6060dd379bb0c4da5d42779244bb13a58656ffb44a03/graspologic_native-1.2.5-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bf05f2e162ae2a2a8d6e8cfccbe3586d1faa0b808159ff950478348df557c61e", size = 648437, upload-time = "2025-04-02T19:34:16.29Z" }, + { url = "https://files.pythonhosted.org/packages/42/cc/b75ea35755340bedda29727e5388390c639ea533f55b9249f5ac3003f656/graspologic_native-1.2.5-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fff06ed49c3875cf351bb09a92ae7cbc169ce92dcc4c3439e28e801f822ae", size = 352044, upload-time = "2025-04-02T19:34:18.153Z" }, + { url = "https://files.pythonhosted.org/packages/8e/55/15e6e4f18bf249b529ac4cd1522b03f5c9ef9284a2f7bfaa1fd1f96464fe/graspologic_native-1.2.5-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e7e993e7d70fe0d860773fc62812fbb8cb4ef2d11d8661a1f06f8772593915", size = 364644, upload-time = "2025-04-02T19:34:19.486Z" }, + { url = "https://files.pythonhosted.org/packages/3b/51/21097af79f3d68626539ab829bdbf6cc42933f020e161972927d916e394c/graspologic_native-1.2.5-cp38-abi3-win_amd64.whl", hash = "sha256:c3ef2172d774083d7e2c8e77daccd218571ddeebeb2c1703cebb1a2cc4c56e07", size = 210438, upload-time = "2025-04-02T19:34:21.139Z" }, ] [[package]] name = "h11" version = "0.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, ] [[package]] name = "hf-xet" version = "1.1.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97", size = 487910 } +sdist = { url = "https://files.pythonhosted.org/packages/74/31/feeddfce1748c4a233ec1aa5b7396161c07ae1aa9b7bdbc9a72c3c7dd768/hf_xet-1.1.10.tar.gz", hash = "sha256:408aef343800a2102374a883f283ff29068055c111f003ff840733d3b715bb97", size = 487910, upload-time = "2025-09-12T20:10:27.12Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d", size = 2761466 }, - { url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b", size = 2623807 }, - { url = "https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435", size = 3186960 }, - { url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c", size = 3087167 }, - { url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06", size = 3248612 }, - { url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f", size = 3353360 }, - { url = "https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045", size = 2804691 }, + { url = "https://files.pythonhosted.org/packages/f7/a2/343e6d05de96908366bdc0081f2d8607d61200be2ac802769c4284cc65bd/hf_xet-1.1.10-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:686083aca1a6669bc85c21c0563551cbcdaa5cf7876a91f3d074a030b577231d", size = 2761466, upload-time = "2025-09-12T20:10:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/31/f9/6215f948ac8f17566ee27af6430ea72045e0418ce757260248b483f4183b/hf_xet-1.1.10-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:71081925383b66b24eedff3013f8e6bbd41215c3338be4b94ba75fd75b21513b", size = 2623807, upload-time = "2025-09-12T20:10:21.118Z" }, + { url = "https://files.pythonhosted.org/packages/15/07/86397573efefff941e100367bbda0b21496ffcdb34db7ab51912994c32a2/hf_xet-1.1.10-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6bceb6361c80c1cc42b5a7b4e3efd90e64630bcf11224dcac50ef30a47e435", size = 3186960, upload-time = "2025-09-12T20:10:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/01/a7/0b2e242b918cc30e1f91980f3c4b026ff2eedaf1e2ad96933bca164b2869/hf_xet-1.1.10-cp37-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:eae7c1fc8a664e54753ffc235e11427ca61f4b0477d757cc4eb9ae374b69f09c", size = 3087167, upload-time = "2025-09-12T20:10:17.255Z" }, + { url = "https://files.pythonhosted.org/packages/4a/25/3e32ab61cc7145b11eee9d745988e2f0f4fafda81b25980eebf97d8cff15/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0a0005fd08f002180f7a12d4e13b22be277725bc23ed0529f8add5c7a6309c06", size = 3248612, upload-time = "2025-09-12T20:10:24.093Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3d/ab7109e607ed321afaa690f557a9ada6d6d164ec852fd6bf9979665dc3d6/hf_xet-1.1.10-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f900481cf6e362a6c549c61ff77468bd59d6dd082f3170a36acfef2eb6a6793f", size = 3353360, upload-time = "2025-09-12T20:10:25.563Z" }, + { url = "https://files.pythonhosted.org/packages/ee/0e/471f0a21db36e71a2f1752767ad77e92d8cde24e974e03d662931b1305ec/hf_xet-1.1.10-cp37-abi3-win_amd64.whl", hash = "sha256:5f54b19cc347c13235ae7ee98b330c26dd65ef1df47e5316ffb1e87713ca7045", size = 2804691, upload-time = "2025-09-12T20:10:28.433Z" }, ] [[package]] @@ -1280,9 +1162,9 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, ] [[package]] @@ -1295,9 +1177,9 @@ dependencies = [ { name = "httpcore" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] [[package]] @@ -1314,9 +1196,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/10/7e/a0a97de7c73671863ca6b3f61fa12518caf35db37825e43d63a70956738c/huggingface_hub-0.35.3.tar.gz", hash = "sha256:350932eaa5cc6a4747efae85126ee220e4ef1b54e29d31c3b45c5612ddf0b32a", size = 461798 } +sdist = { url = "https://files.pythonhosted.org/packages/10/7e/a0a97de7c73671863ca6b3f61fa12518caf35db37825e43d63a70956738c/huggingface_hub-0.35.3.tar.gz", hash = "sha256:350932eaa5cc6a4747efae85126ee220e4ef1b54e29d31c3b45c5612ddf0b32a", size = 461798, upload-time = "2025-09-29T14:29:58.625Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/a0/651f93d154cb72323358bf2bbae3e642bdb5d2f1bfc874d096f7cb159fa0/huggingface_hub-0.35.3-py3-none-any.whl", hash = "sha256:0e3a01829c19d86d03793e4577816fe3bdfc1602ac62c7fb220d593d351224ba", size = 564262 }, + { url = "https://files.pythonhosted.org/packages/31/a0/651f93d154cb72323358bf2bbae3e642bdb5d2f1bfc874d096f7cb159fa0/huggingface_hub-0.35.3-py3-none-any.whl", hash = "sha256:0e3a01829c19d86d03793e4577816fe3bdfc1602ac62c7fb220d593d351224ba", size = 564262, upload-time = "2025-09-29T14:29:55.813Z" }, ] [[package]] @@ -1326,18 +1208,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyreadline3", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702 } +sdist = { url = "https://files.pythonhosted.org/packages/cc/3f/2c29224acb2e2df4d2046e4c73ee2662023c58ff5b113c4c1adac0886c43/humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc", size = 360702, upload-time = "2021-09-17T21:40:43.31Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794 }, + { url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" }, ] [[package]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] 
[[package]] @@ -1347,18 +1229,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641 } +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656 }, + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, ] [[package]] name = "iniconfig" version = "2.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] [[package]] @@ -1380,9 +1262,9 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/76/11082e338e0daadc89c8ff866185de11daf67d181901038f9e139d109761/ipykernel-6.30.1.tar.gz", hash = "sha256:6abb270161896402e76b91394fcdce5d1be5d45f456671e5080572f8505be39b", size = 166260 } +sdist = { url = "https://files.pythonhosted.org/packages/bb/76/11082e338e0daadc89c8ff866185de11daf67d181901038f9e139d109761/ipykernel-6.30.1.tar.gz", hash = "sha256:6abb270161896402e76b91394fcdce5d1be5d45f456671e5080572f8505be39b", size = 166260, upload-time = "2025-08-04T15:47:35.018Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/c7/b445faca8deb954fe536abebff4ece5b097b923de482b26e78448c89d1dd/ipykernel-6.30.1-py3-none-any.whl", hash = "sha256:aa6b9fb93dca949069d8b85b6c79b2518e32ac583ae9c7d37c51d119e18b3fb4", size = 117484 }, + { url = "https://files.pythonhosted.org/packages/fc/c7/b445faca8deb954fe536abebff4ece5b097b923de482b26e78448c89d1dd/ipykernel-6.30.1-py3-none-any.whl", hash = 
"sha256:aa6b9fb93dca949069d8b85b6c79b2518e32ac583ae9c7d37c51d119e18b3fb4", size = 117484, upload-time = "2025-08-04T15:47:32.622Z" }, ] [[package]] @@ -1402,9 +1284,9 @@ dependencies = [ { name = "traitlets" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz", hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932 } +sdist = { url = "https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz", hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932, upload-time = "2025-09-29T10:55:53.948Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/48/c5/d5e07995077e48220269c28a221e168c91123ad5ceee44d548f54a057fc0/ipython-9.6.0-py3-none-any.whl", hash = "sha256:5f77efafc886d2f023442479b8149e7d86547ad0a979e9da9f045d252f648196", size = 616170 }, + { url = "https://files.pythonhosted.org/packages/48/c5/d5e07995077e48220269c28a221e168c91123ad5ceee44d548f54a057fc0/ipython-9.6.0-py3-none-any.whl", hash = "sha256:5f77efafc886d2f023442479b8149e7d86547ad0a979e9da9f045d252f648196", size = 616170, upload-time = "2025-09-29T10:55:47.676Z" }, ] [[package]] @@ -1414,9 +1296,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393 } +sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074 }, + { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, ] [[package]] @@ -1430,18 +1312,18 @@ dependencies = [ { name = "traitlets" }, { name = "widgetsnbextension" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3e/48/d3dbac45c2814cb73812f98dd6b38bbcc957a4e7bb31d6ea9c03bf94ed87/ipywidgets-8.1.7.tar.gz", hash = "sha256:15f1ac050b9ccbefd45dccfbb2ef6bed0029d8278682d569d71b8dd96bee0376", size = 116721 } +sdist = { url = "https://files.pythonhosted.org/packages/3e/48/d3dbac45c2814cb73812f98dd6b38bbcc957a4e7bb31d6ea9c03bf94ed87/ipywidgets-8.1.7.tar.gz", hash = "sha256:15f1ac050b9ccbefd45dccfbb2ef6bed0029d8278682d569d71b8dd96bee0376", size = 116721, upload-time = "2025-05-05T12:42:03.489Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/58/6a/9166369a2f092bd286d24e6307de555d63616e8ddb373ebad2b5635ca4cd/ipywidgets-8.1.7-py3-none-any.whl", hash = "sha256:764f2602d25471c213919b8a1997df04bef869251db4ca8efba1b76b1bd9f7bb", size = 
139806 }, + { url = "https://files.pythonhosted.org/packages/58/6a/9166369a2f092bd286d24e6307de555d63616e8ddb373ebad2b5635ca4cd/ipywidgets-8.1.7-py3-none-any.whl", hash = "sha256:764f2602d25471c213919b8a1997df04bef869251db4ca8efba1b76b1bd9f7bb", size = 139806, upload-time = "2025-05-05T12:41:56.833Z" }, ] [[package]] name = "isodate" version = "0.7.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705 } +sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320 }, + { url = "https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, ] [[package]] @@ -1451,9 +1333,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "arrow" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/1a/3c8edc664e06e6bd06cce40c6b22da5f1429aa4224d0c590f3be21c91ead/isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9", size = 11649 } +sdist = { url = "https://files.pythonhosted.org/packages/7c/1a/3c8edc664e06e6bd06cce40c6b22da5f1429aa4224d0c590f3be21c91ead/isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9", size = 11649, upload-time = "2020-11-01T11:00:00.312Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/55/e5326141505c5d5e34c5e0935d2908a74e4561eca44108fbfb9c13d2911a/isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042", size = 11321 }, + { url = "https://files.pythonhosted.org/packages/7b/55/e5326141505c5d5e34c5e0935d2908a74e4561eca44108fbfb9c13d2911a/isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042", size = 11321, upload-time = "2020-11-01T10:59:58.02Z" }, ] [[package]] @@ -1463,9 +1345,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "parso" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287 } +sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = 
"sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278 }, + { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, ] [[package]] @@ -1475,93 +1357,78 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] [[package]] name = "jiter" version = "0.11.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503 }, - { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688 }, - { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418 }, - { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423 }, - { url = "https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367 }, - { url = 
"https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335 }, - { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981 }, - { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797 }, - { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597 }, - { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853 }, - { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140 }, - { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311 }, - { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510 }, - { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521 }, - { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214 }, - { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280 }, - { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895 }, - { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421 }, - { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932 }, - { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959 }, - { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187 }, - { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461 }, - { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664 }, - { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520 }, - { url = "https://files.pythonhosted.org/packages/97/c4/d530e514d0f4f29b2b68145e7b389cbc7cac7f9c8c23df43b04d3d10fa3e/jiter-0.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:4441a91b80a80249f9a6452c14b2c24708f139f64de959943dfeaa6cb915e8eb", size = 305021 }, - { url = "https://files.pythonhosted.org/packages/7a/77/796a19c567c5734cbfc736a6f987affc0d5f240af8e12063c0fb93990ffa/jiter-0.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ff85fc6d2a431251ad82dbd1ea953affb5a60376b62e7d6809c5cd058bb39471", size = 314384 }, - { url = "https://files.pythonhosted.org/packages/14/9c/824334de0b037b91b6f3fa9fe5a191c83977c7ec4abe17795d3cb6d174cf/jiter-0.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e86126d64706fd28dfc46f910d496923c6f95b395138c02d0e252947f452bd", size = 337389 }, - { url = "https://files.pythonhosted.org/packages/a2/95/ed4feab69e6cf9b2176ea29d4ef9d01a01db210a3a2c8a31a44ecdc68c38/jiter-0.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad8bd82165961867a10f52010590ce0b7a8c53da5ddd8bbb62fef68c181b921", size = 360519 }, - { url = "https://files.pythonhosted.org/packages/b5/0c/2ad00f38d3e583caba3909d95b7da1c3a7cd82c0aa81ff4317a8016fb581/jiter-0.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b42c2cd74273455ce439fd9528db0c6e84b5623cb74572305bdd9f2f2961d3df", size = 487198 }, - { url = "https://files.pythonhosted.org/packages/ea/8b/919b64cf3499b79bdfba6036da7b0cac5d62d5c75a28fb45bad7819e22f0/jiter-0.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0062dab98172dd0599fcdbf90214d0dcde070b1ff38a00cc1b90e111f071982", size = 377835 }, - { url = 
"https://files.pythonhosted.org/packages/29/7f/8ebe15b6e0a8026b0d286c083b553779b4dd63db35b43a3f171b544de91d/jiter-0.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb948402821bc76d1f6ef0f9e19b816f9b09f8577844ba7140f0b6afe994bc64", size = 347655 }, - { url = "https://files.pythonhosted.org/packages/8e/64/332127cef7e94ac75719dda07b9a472af6158ba819088d87f17f3226a769/jiter-0.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25a5b1110cca7329fd0daf5060faa1234be5c11e988948e4f1a1923b6a457fe1", size = 386135 }, - { url = "https://files.pythonhosted.org/packages/20/c8/557b63527442f84c14774159948262a9d4fabb0d61166f11568f22fc60d2/jiter-0.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bf11807e802a214daf6c485037778843fadd3e2ec29377ae17e0706ec1a25758", size = 516063 }, - { url = "https://files.pythonhosted.org/packages/86/13/4164c819df4a43cdc8047f9a42880f0ceef5afeb22e8b9675c0528ebdccd/jiter-0.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbb57da40631c267861dd0090461222060960012d70fd6e4c799b0f62d0ba166", size = 508139 }, - { url = "https://files.pythonhosted.org/packages/fa/70/6e06929b401b331d41ddb4afb9f91cd1168218e3371972f0afa51c9f3c31/jiter-0.11.0-cp313-cp313-win32.whl", hash = "sha256:8e36924dad32c48d3c5e188d169e71dc6e84d6cb8dedefea089de5739d1d2f80", size = 206369 }, - { url = "https://files.pythonhosted.org/packages/f4/0d/8185b8e15de6dce24f6afae63380e16377dd75686d56007baa4f29723ea1/jiter-0.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:452d13e4fd59698408087235259cebe67d9d49173b4dacb3e8d35ce4acf385d6", size = 202538 }, - { url = "https://files.pythonhosted.org/packages/13/3a/d61707803260d59520721fa326babfae25e9573a88d8b7b9cb54c5423a59/jiter-0.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:089f9df9f69532d1339e83142438668f52c97cd22ee2d1195551c2b1a9e6cf33", size = 313737 }, - { url = "https://files.pythonhosted.org/packages/cd/cc/c9f0eec5d00f2a1da89f6bdfac12b8afdf8d5ad974184863c75060026457/jiter-0.11.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ed1fe69a8c69bf0f2a962d8d706c7b89b50f1332cd6b9fbda014f60bd03a03", size = 346183 }, - { url = "https://files.pythonhosted.org/packages/a6/87/fc632776344e7aabbab05a95a0075476f418c5d29ab0f2eec672b7a1f0ac/jiter-0.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a4d71d7ea6ea8786291423fe209acf6f8d398a0759d03e7f24094acb8ab686ba", size = 204225 }, - { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380 }, +sdist = { url = "https://files.pythonhosted.org/packages/9d/c0/a3bb4cc13aced219dd18191ea66e874266bd8aa7b96744e495e1c733aa2d/jiter-0.11.0.tar.gz", hash = "sha256:1d9637eaf8c1d6a63d6562f2a6e5ab3af946c66037eb1b894e8fad75422266e4", size = 167094, upload-time = "2025-09-15T09:20:38.212Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/55/a69fefeef09c2eaabae44b935a1aa81517e49639c0a0c25d861cb18cd7ac/jiter-0.11.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cb5d9db02979c3f49071fce51a48f4b4e4cf574175fb2b11c7a535fa4867b222", size = 309503, upload-time = "2025-09-15T09:19:08.191Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d5/a6aba9e6551f32f9c127184f398208e4eddb96c59ac065c8a92056089d28/jiter-0.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1dc6a123f3471c4730db7ca8ba75f1bb3dcb6faeb8d46dd781083e7dee88b32d", size = 317688, upload-time = "2025-09-15T09:19:09.918Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f3/5e86f57c1883971cdc8535d0429c2787bf734840a231da30a3be12850562/jiter-0.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09858f8d230f031c7b8e557429102bf050eea29c77ad9c34c8fe253c5329acb7", size = 337418, upload-time = "2025-09-15T09:19:11.078Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/a71d8a24c2a70664970574a8e0b766663f5ef788f7fe1cc20ee0c016d488/jiter-0.11.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbe2196c4a0ce760925a74ab4456bf644748ab0979762139626ad138f6dac72d", size = 361423, upload-time = "2025-09-15T09:19:13.286Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e5/b09076f4e7fd9471b91e16f9f3dc7330b161b738f3b39b2c37054a36e26a/jiter-0.11.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5beb56d22b63647bafd0b74979216fdee80c580c0c63410be8c11053860ffd09", size = 486367, upload-time = "2025-09-15T09:19:14.546Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/98cb3a36f5e62f80cd860f0179f948d9eab5a316d55d3e1bab98d9767af5/jiter-0.11.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97025d09ef549795d8dc720a824312cee3253c890ac73c621721ddfc75066789", size = 376335, upload-time = "2025-09-15T09:19:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d8/ec74886497ea393c29dbd7651ddecc1899e86404a6b1f84a3ddab0ab59fd/jiter-0.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d50880a6da65d8c23a2cf53c412847d9757e74cc9a3b95c5704a1d1a24667347", size = 348981, upload-time = "2025-09-15T09:19:17.568Z" }, + { url = "https://files.pythonhosted.org/packages/24/93/d22ad7fa3b86ade66c86153ceea73094fc2af8b20c59cb7fceab9fea4704/jiter-0.11.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:452d80a1c86c095a242007bd9fc5d21b8a8442307193378f891cb8727e469648", size = 385797, upload-time = "2025-09-15T09:19:19.121Z" }, + { url = "https://files.pythonhosted.org/packages/c8/bd/e25ff4a4df226e9b885f7cb01ee4b9dc74e3000e612d6f723860d71a1f34/jiter-0.11.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e84e58198d4894668eec2da660ffff60e0f3e60afa790ecc50cb12b0e02ca1d4", size = 516597, upload-time = "2025-09-15T09:19:20.301Z" }, + { url = "https://files.pythonhosted.org/packages/be/fb/beda613db7d93ffa2fdd2683f90f2f5dce8daf4bc2d0d2829e7de35308c6/jiter-0.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df64edcfc5dd5279a791eea52aa113d432c933119a025b0b5739f90d2e4e75f1", size = 508853, upload-time = "2025-09-15T09:19:22.075Z" }, + { url = "https://files.pythonhosted.org/packages/20/64/c5b0d93490634e41e38e2a15de5d54fdbd2c9f64a19abb0f95305b63373c/jiter-0.11.0-cp311-cp311-win32.whl", hash = "sha256:144fc21337d21b1d048f7f44bf70881e1586401d405ed3a98c95a114a9994982", size = 205140, upload-time = "2025-09-15T09:19:23.351Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e6/c347c0e6f5796e97d4356b7e5ff0ce336498b7f4ef848fae621a56f1ccf3/jiter-0.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:b0f32e644d241293b892b1a6dd8f0b9cc029bfd94c97376b2681c36548aabab7", size = 204311, upload-time = "2025-09-15T09:19:24.591Z" }, + { url = "https://files.pythonhosted.org/packages/ba/b5/3009b112b8f673e568ef79af9863d8309a15f0a8cdcc06ed6092051f377e/jiter-0.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:2fb7b377688cc3850bbe5c192a6bd493562a0bc50cbc8b047316428fbae00ada", size = 305510, upload-time = "2025-09-15T09:19:25.893Z" }, + { url = "https://files.pythonhosted.org/packages/fe/82/15514244e03b9e71e086bbe2a6de3e4616b48f07d5f834200c873956fb8c/jiter-0.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a1b7cbe3f25bd0d8abb468ba4302a5d45617ee61b2a7a638f63fee1dc086be99", size = 316521, upload-time = "2025-09-15T09:19:27.525Z" }, + { url = "https://files.pythonhosted.org/packages/92/94/7a2e905f40ad2d6d660e00b68d818f9e29fb87ffe82774f06191e93cbe4a/jiter-0.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a7f0ec81d5b7588c5cade1eb1925b91436ae6726dc2df2348524aeabad5de6", size = 338214, upload-time = "2025-09-15T09:19:28.727Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9c/5791ed5bdc76f12110158d3316a7a3ec0b1413d018b41c5ed399549d3ad5/jiter-0.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07630bb46ea2a6b9c6ed986c6e17e35b26148cce2c535454b26ee3f0e8dcaba1", size = 361280, upload-time = "2025-09-15T09:19:30.013Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7f/b7d82d77ff0d2cb06424141000176b53a9e6b16a1125525bb51ea4990c2e/jiter-0.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7764f27d28cd4a9cbc61704dfcd80c903ce3aad106a37902d3270cd6673d17f4", size = 487895, upload-time = "2025-09-15T09:19:31.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/44/10a1475d46f1fc1fd5cc2e82c58e7bca0ce5852208e0fa5df2f949353321/jiter-0.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4a6c4a737d486f77f842aeb22807edecb4a9417e6700c7b981e16d34ba7c72", size = 378421, upload-time = "2025-09-15T09:19:32.746Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5f/0dc34563d8164d31d07bc09d141d3da08157a68dcd1f9b886fa4e917805b/jiter-0.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf408d2a0abd919b60de8c2e7bc5eeab72d4dafd18784152acc7c9adc3291591", size = 347932, upload-time = "2025-09-15T09:19:34.612Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/b68f32a4fcb7b4a682b37c73a0e5dae32180140cd1caf11aef6ad40ddbf2/jiter-0.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cdef53eda7d18e799625023e1e250dbc18fbc275153039b873ec74d7e8883e09", size = 386959, upload-time = "2025-09-15T09:19:35.994Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/c08c92e713b6e28972a846a81ce374883dac2f78ec6f39a0dad9f2339c3a/jiter-0.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:53933a38ef7b551dd9c7f1064f9d7bb235bb3168d0fa5f14f0798d1b7ea0d9c5", size = 517187, upload-time = "2025-09-15T09:19:37.426Z" }, + { url = "https://files.pythonhosted.org/packages/89/b5/4a283bec43b15aad54fcae18d951f06a2ec3f78db5708d3b59a48e9c3fbd/jiter-0.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11840d2324c9ab5162fc1abba23bc922124fedcff0d7b7f85fffa291e2f69206", size = 509461, upload-time = "2025-09-15T09:19:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/34/a5/f8bad793010534ea73c985caaeef8cc22dfb1fedb15220ecdf15c623c07a/jiter-0.11.0-cp312-cp312-win32.whl", hash = "sha256:4f01a744d24a5f2bb4a11657a1b27b61dc038ae2e674621a74020406e08f749b", size = 206664, upload-time = "2025-09-15T09:19:40.096Z" }, + { url = "https://files.pythonhosted.org/packages/ed/42/5823ec2b1469395a160b4bf5f14326b4a098f3b6898fbd327366789fa5d3/jiter-0.11.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:29fff31190ab3a26de026da2f187814f4b9c6695361e20a9ac2123e4d4378a4c", size = 203520, upload-time = "2025-09-15T09:19:41.798Z" }, + { url = "https://files.pythonhosted.org/packages/70/f3/ce100253c80063a7b8b406e1d1562657fd4b9b4e1b562db40e68645342fb/jiter-0.11.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:902b43386c04739229076bd1c4c69de5d115553d982ab442a8ae82947c72ede7", size = 336380, upload-time = "2025-09-15T09:20:36.867Z" }, ] [[package]] name = "joblib" version = "1.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55", size = 331077 } +sdist = { url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55", size = 331077, upload-time = "2025-08-27T12:15:46.575Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396 }, + { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396, upload-time = "2025-08-27T12:15:45.188Z" }, ] [[package]] name = "json-repair" version = "0.52.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/63/2c3c3c8cc1c28a0a20a9ab0eff5439c989ce3cc5956d8a4c7cf1eae0a06e/json_repair-0.52.0.tar.gz", hash = "sha256:0eee59cb3145b462b0734d4cf3246b797686caa669d52eee8dd30e09ea6d7876", size = 35384 } +sdist = { url = "https://files.pythonhosted.org/packages/f3/63/2c3c3c8cc1c28a0a20a9ab0eff5439c989ce3cc5956d8a4c7cf1eae0a06e/json_repair-0.52.0.tar.gz", hash = "sha256:0eee59cb3145b462b0734d4cf3246b797686caa669d52eee8dd30e09ea6d7876", size = 35384, upload-time = "2025-10-05T17:18:12.387Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c6/7f/3a4e456da9a0f9ac54d9842ed51e96960826a98456f0826a9b3e808713c4/json_repair-0.52.0-py3-none-any.whl", hash = "sha256:c783069906a456f62e2a553fbef32a420a4745ff943e2014411728edcc7bf60a", size = 26350 }, + { url = "https://files.pythonhosted.org/packages/c6/7f/3a4e456da9a0f9ac54d9842ed51e96960826a98456f0826a9b3e808713c4/json_repair-0.52.0-py3-none-any.whl", hash = "sha256:c783069906a456f62e2a553fbef32a420a4745ff943e2014411728edcc7bf60a", size = 26350, upload-time = "2025-10-05T17:18:10.859Z" }, ] [[package]] name = "json5" version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/12/ae/929aee9619e9eba9015207a9d2c1c54db18311da7eb4dcf6d41ad6f0eb67/json5-0.12.1.tar.gz", hash = "sha256:b2743e77b3242f8d03c143dd975a6ec7c52e2f2afe76ed934e53503dd4ad4990", size = 52191 } +sdist = { url = "https://files.pythonhosted.org/packages/12/ae/929aee9619e9eba9015207a9d2c1c54db18311da7eb4dcf6d41ad6f0eb67/json5-0.12.1.tar.gz", hash = "sha256:b2743e77b3242f8d03c143dd975a6ec7c52e2f2afe76ed934e53503dd4ad4990", size = 52191, upload-time = "2025-08-12T19:47:42.583Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/85/e2/05328bd2621be49a6fed9e3030b1e51a2d04537d3f816d211b9cc53c5262/json5-0.12.1-py3-none-any.whl", hash = "sha256:d9c9b3bc34a5f54d43c35e11ef7cb87d8bdd098c6ace87117a7b7e83e705c1d5", size = 36119 }, + { url = "https://files.pythonhosted.org/packages/85/e2/05328bd2621be49a6fed9e3030b1e51a2d04537d3f816d211b9cc53c5262/json5-0.12.1-py3-none-any.whl", hash = "sha256:d9c9b3bc34a5f54d43c35e11ef7cb87d8bdd098c6ace87117a7b7e83e705c1d5", size = 36119, upload-time = "2025-08-12T19:47:41.131Z" }, ] [[package]] name = "jsonpointer" version = "3.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114 } +sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595 }, + { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, ] [[package]] @@ -1574,9 +1441,9 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342 } +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040 }, + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, ] [package.optional-dependencies] @@ -1599,9 +1466,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855 } +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = 
"sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437 }, + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] [[package]] @@ -1616,9 +1483,9 @@ dependencies = [ { name = "nbconvert" }, { name = "notebook" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/f3/af28ea964ab8bc1e472dba2e82627d36d470c51f5cd38c37502eeffaa25e/jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a", size = 5714959 } +sdist = { url = "https://files.pythonhosted.org/packages/58/f3/af28ea964ab8bc1e472dba2e82627d36d470c51f5cd38c37502eeffaa25e/jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a", size = 5714959, upload-time = "2024-08-30T07:15:48.299Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/64/285f20a31679bf547b75602702f7800e74dbabae36ef324f716c02804753/jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83", size = 2657 }, + { url = "https://files.pythonhosted.org/packages/38/64/285f20a31679bf547b75602702f7800e74dbabae36ef324f716c02804753/jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83", size = 2657, upload-time = "2024-08-30T07:15:47.045Z" }, ] [[package]] @@ -1632,9 +1499,9 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019 } +sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019, upload-time = "2024-09-17T10:44:17.613Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105 }, + { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105, upload-time = "2024-09-17T10:44:15.218Z" }, ] [[package]] @@ -1651,9 +1518,9 @@ dependencies = [ { name = "pyzmq" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bd/2d/e2fd31e2fc41c14e2bcb6c976ab732597e907523f6b2420305f9fc7fdbdb/jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539", size = 34363 } +sdist = { url = 
"https://files.pythonhosted.org/packages/bd/2d/e2fd31e2fc41c14e2bcb6c976ab732597e907523f6b2420305f9fc7fdbdb/jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539", size = 34363, upload-time = "2023-03-06T14:13:31.02Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/77/71d78d58f15c22db16328a476426f7ac4a60d3a5a7ba3b9627ee2f7903d4/jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485", size = 24510 }, + { url = "https://files.pythonhosted.org/packages/ca/77/71d78d58f15c22db16328a476426f7ac4a60d3a5a7ba3b9627ee2f7903d4/jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485", size = 24510, upload-time = "2023-03-06T14:13:28.229Z" }, ] [[package]] @@ -1665,9 +1532,9 @@ dependencies = [ { name = "pywin32", marker = "platform_python_implementation != 'PyPy' and sys_platform == 'win32'" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/99/1b/72906d554acfeb588332eaaa6f61577705e9ec752ddb486f302dafa292d9/jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941", size = 88923 } +sdist = { url = "https://files.pythonhosted.org/packages/99/1b/72906d554acfeb588332eaaa6f61577705e9ec752ddb486f302dafa292d9/jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941", size = 88923, upload-time = "2025-05-27T07:38:16.655Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/57/6bffd4b20b88da3800c5d691e0337761576ee688eb01299eae865689d2df/jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0", size = 28880 }, + { url = "https://files.pythonhosted.org/packages/2f/57/6bffd4b20b88da3800c5d691e0337761576ee688eb01299eae865689d2df/jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0", size = 28880, upload-time = "2025-05-27T07:38:15.137Z" }, ] [[package]] @@ -1684,9 +1551,9 @@ dependencies = [ { name = "rfc3986-validator" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9d/c3/306d090461e4cf3cd91eceaff84bede12a8e52cd821c2d20c9a4fd728385/jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b", size = 62196 } +sdist = { url = "https://files.pythonhosted.org/packages/9d/c3/306d090461e4cf3cd91eceaff84bede12a8e52cd821c2d20c9a4fd728385/jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b", size = 62196, upload-time = "2025-02-03T17:23:41.485Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/48/577993f1f99c552f18a0428731a755e06171f9902fa118c379eb7c04ea22/jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb", size = 19430 }, + { url = "https://files.pythonhosted.org/packages/e2/48/577993f1f99c552f18a0428731a755e06171f9902fa118c379eb7c04ea22/jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb", size = 19430, upload-time = "2025-02-03T17:23:38.643Z" }, ] [[package]] @@ -1696,9 +1563,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jupyter-server" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/eb/5a/9066c9f8e94ee517133cd98dba393459a16cd48bba71a82f16a65415206c/jupyter_lsp-2.3.0.tar.gz", hash = "sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245", size = 54823 } +sdist = { url = "https://files.pythonhosted.org/packages/eb/5a/9066c9f8e94ee517133cd98dba393459a16cd48bba71a82f16a65415206c/jupyter_lsp-2.3.0.tar.gz", hash = "sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245", size = 54823, upload-time = "2025-08-27T17:47:34.671Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1a/60/1f6cee0c46263de1173894f0fafcb3475ded276c472c14d25e0280c18d6d/jupyter_lsp-2.3.0-py3-none-any.whl", hash = "sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f", size = 76687 }, + { url = "https://files.pythonhosted.org/packages/1a/60/1f6cee0c46263de1173894f0fafcb3475ded276c472c14d25e0280c18d6d/jupyter_lsp-2.3.0-py3-none-any.whl", hash = "sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f", size = 76687, upload-time = "2025-08-27T17:47:33.15Z" }, ] [[package]] @@ -1726,9 +1593,9 @@ dependencies = [ { name = "traitlets" }, { name = "websocket-client" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/ac/e040ec363d7b6b1f11304cc9f209dac4517ece5d5e01821366b924a64a50/jupyter_server-2.17.0.tar.gz", hash = "sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5", size = 731949 } +sdist = { url = "https://files.pythonhosted.org/packages/5b/ac/e040ec363d7b6b1f11304cc9f209dac4517ece5d5e01821366b924a64a50/jupyter_server-2.17.0.tar.gz", hash = "sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5", size = 731949, upload-time = "2025-08-21T14:42:54.042Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/80/a24767e6ca280f5a49525d987bf3e4d7552bf67c8be07e8ccf20271f8568/jupyter_server-2.17.0-py3-none-any.whl", hash = "sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f", size = 388221 }, + { url = "https://files.pythonhosted.org/packages/92/80/a24767e6ca280f5a49525d987bf3e4d7552bf67c8be07e8ccf20271f8568/jupyter_server-2.17.0-py3-none-any.whl", hash = "sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f", size = 388221, upload-time = "2025-08-21T14:42:52.034Z" }, ] [[package]] @@ -1739,9 +1606,9 @@ dependencies = [ { name = "pywinpty", marker = "os_name == 'nt'" }, { name = "terminado" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/d5/562469734f476159e99a55426d697cbf8e7eb5efe89fb0e0b4f83a3d3459/jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269", size = 31430 } +sdist = { url = "https://files.pythonhosted.org/packages/fc/d5/562469734f476159e99a55426d697cbf8e7eb5efe89fb0e0b4f83a3d3459/jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269", size = 31430, upload-time = "2024-03-12T14:37:03.049Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/2d/2b32cdbe8d2a602f697a649798554e4f072115438e92249624e532e8aca6/jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa", size = 13656 }, + { url = "https://files.pythonhosted.org/packages/07/2d/2b32cdbe8d2a602f697a649798554e4f072115438e92249624e532e8aca6/jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa", size = 13656, 
upload-time = "2024-03-12T14:37:00.708Z" }, ] [[package]] @@ -1763,18 +1630,18 @@ dependencies = [ { name = "tornado" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/b2/7dad2d0049a904d17c070226a4f78f81905f93bfe09503722d210ccf9335/jupyterlab-4.4.9.tar.gz", hash = "sha256:ea55aca8269909016d5fde2dc09b97128bc931230183fe7e2920ede5154ad9c2", size = 22966654 } +sdist = { url = "https://files.pythonhosted.org/packages/45/b2/7dad2d0049a904d17c070226a4f78f81905f93bfe09503722d210ccf9335/jupyterlab-4.4.9.tar.gz", hash = "sha256:ea55aca8269909016d5fde2dc09b97128bc931230183fe7e2920ede5154ad9c2", size = 22966654, upload-time = "2025-09-26T17:28:20.158Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/fd/ac0979ebd1b1975c266c99b96930b0a66609c3f6e5d76979ca6eb3073896/jupyterlab-4.4.9-py3-none-any.whl", hash = "sha256:394c902827350c017430a8370b9f40c03c098773084bc53930145c146d3d2cb2", size = 12292552 }, + { url = "https://files.pythonhosted.org/packages/1f/fd/ac0979ebd1b1975c266c99b96930b0a66609c3f6e5d76979ca6eb3073896/jupyterlab-4.4.9-py3-none-any.whl", hash = "sha256:394c902827350c017430a8370b9f40c03c098773084bc53930145c146d3d2cb2", size = 12292552, upload-time = "2025-09-26T17:28:15.663Z" }, ] [[package]] name = "jupyterlab-pygments" version = "0.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900 } +sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900, upload-time = "2023-11-23T09:26:37.44Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884 }, + { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884, upload-time = "2023-11-23T09:26:34.325Z" }, ] [[package]] @@ -1790,18 +1657,18 @@ dependencies = [ { name = "packaging" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0a/c9/a883ce65eb27905ce77ace410d83587c82ea64dc85a48d1f7ed52bcfa68d/jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4", size = 76173 } +sdist = { url = "https://files.pythonhosted.org/packages/0a/c9/a883ce65eb27905ce77ace410d83587c82ea64dc85a48d1f7ed52bcfa68d/jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4", size = 76173, upload-time = "2024-07-16T17:02:04.149Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/09/2032e7d15c544a0e3cd831c51d77a8ca57f7555b2e1b2922142eddb02a84/jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4", size = 59700 }, + { url = 
"https://files.pythonhosted.org/packages/54/09/2032e7d15c544a0e3cd831c51d77a8ca57f7555b2e1b2922142eddb02a84/jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4", size = 59700, upload-time = "2024-07-16T17:02:01.115Z" }, ] [[package]] name = "jupyterlab-widgets" version = "3.0.15" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b9/7d/160595ca88ee87ac6ba95d82177d29ec60aaa63821d3077babb22ce031a5/jupyterlab_widgets-3.0.15.tar.gz", hash = "sha256:2920888a0c2922351a9202817957a68c07d99673504d6cd37345299e971bb08b", size = 213149 } +sdist = { url = "https://files.pythonhosted.org/packages/b9/7d/160595ca88ee87ac6ba95d82177d29ec60aaa63821d3077babb22ce031a5/jupyterlab_widgets-3.0.15.tar.gz", hash = "sha256:2920888a0c2922351a9202817957a68c07d99673504d6cd37345299e971bb08b", size = 213149, upload-time = "2025-05-05T12:32:31.004Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/6a/ca128561b22b60bd5a0c4ea26649e68c8556b82bc70a0c396eebc977fe86/jupyterlab_widgets-3.0.15-py3-none-any.whl", hash = "sha256:d59023d7d7ef71400d51e6fee9a88867f6e65e10a4201605d2d7f3e8f012a31c", size = 216571 }, + { url = "https://files.pythonhosted.org/packages/43/6a/ca128561b22b60bd5a0c4ea26649e68c8556b82bc70a0c396eebc977fe86/jupyterlab_widgets-3.0.15-py3-none-any.whl", hash = "sha256:d59023d7d7ef71400d51e6fee9a88867f6e65e10a4201605d2d7f3e8f012a31c", size = 216571, upload-time = "2025-05-05T12:32:29.534Z" }, ] [[package]] @@ -1815,9 +1682,9 @@ dependencies = [ { name = "packaging" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/15/14/41faf71e168fcc6c48268f0fc67ba0d6acf6ee4e2c5c785c2bccb967c29d/jupytext-1.17.3.tar.gz", hash = "sha256:8b6dae76d63c95cad47b493c38f0d9c74491fb621dcd0980abfcac4c8f168679", size = 3753151 } +sdist = { url = "https://files.pythonhosted.org/packages/15/14/41faf71e168fcc6c48268f0fc67ba0d6acf6ee4e2c5c785c2bccb967c29d/jupytext-1.17.3.tar.gz", hash = "sha256:8b6dae76d63c95cad47b493c38f0d9c74491fb621dcd0980abfcac4c8f168679", size = 3753151, upload-time = "2025-08-28T18:30:51.117Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/86/751ec86adb66104d15e650b704f89dddd64ba29283178b9651b9bc84b624/jupytext-1.17.3-py3-none-any.whl", hash = "sha256:09b0a94cd904416e823a5ba9f41bd181031215b6fc682d2b5c18e68354feb17c", size = 166548 }, + { url = "https://files.pythonhosted.org/packages/36/86/751ec86adb66104d15e650b704f89dddd64ba29283178b9651b9bc84b624/jupytext-1.17.3-py3-none-any.whl", hash = "sha256:09b0a94cd904416e823a5ba9f41bd181031215b6fc682d2b5c18e68354feb17c", size = 166548, upload-time = "2025-08-28T18:30:47.733Z" }, ] [[package]] @@ -1834,13 +1701,13 @@ dependencies = [ { name = "tqdm" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/4a/cbdb6b7a8ca621282c3d9dedae00b372c09b430c69fc0ac149b5b9092b6c/lancedb-0.24.1-cp39-abi3-macosx_10_15_x86_64.whl", hash = "sha256:ae5f028920222ad325521fb447558e274eb92dfd7c189f5875dc3bcc7de07ea6", size = 32792946 }, - { url = "https://files.pythonhosted.org/packages/71/90/7c5218b5d81382901680bb365bb55f92fefa28434c049ec6236be73b7ac1/lancedb-0.24.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:baf1eba0e2a8762753dba466e92792a4a21ec504612125ec1d8edd6c15b17eba", size = 30290214 }, - { url = "https://files.pythonhosted.org/packages/e1/01/b184e8f1e94e27b9297778dfde65259a94994138d7d4330334bfdf5756e1/lancedb-0.24.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:dc20c56936d8198330a5dee2f1a17dc1f2145a7b48f81bc32193ca16f3907f3b", size = 31147217 }, - { url = "https://files.pythonhosted.org/packages/4e/02/7e67ea8e49757e42251df4e665699fe4d0962f336e3d113ebff84f22bee9/lancedb-0.24.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2509810c743e094d2d900fdc7b0f2bc3cc52a970ecdfb5d404c22b8b8da14cc", size = 34315305 }, - { url = "https://files.pythonhosted.org/packages/46/b9/770c17793062dacaf52c5641af706cffca6ef803fbe80422d7948fc4a0cb/lancedb-0.24.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:2fbbdf6a6e6189fc3d026677a303a1b7e0bdfe9b690cfee93c586f6b76eb10ba", size = 31157887 }, - { url = "https://files.pythonhosted.org/packages/9c/ef/f896a8cabf99bc87e8bdc49df0bd08db09a86e8f333312c15375da21921f/lancedb-0.24.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:e8ef48eaa8c6c2093f40cbae4968c1fa9126934022b4d6462c5a019688731597", size = 34354984 }, - { url = "https://files.pythonhosted.org/packages/b9/a2/0ab0979ac987313e2dd9b52fddb98eae9cb048e0faebca7a5f9e0a352ea5/lancedb-0.24.1-cp39-abi3-win_amd64.whl", hash = "sha256:091d1757776fd7a0d7adbc5d507f4356e9f479c38a0446009724d8e52d66cbb3", size = 36228285 }, + { url = "https://files.pythonhosted.org/packages/a2/4a/cbdb6b7a8ca621282c3d9dedae00b372c09b430c69fc0ac149b5b9092b6c/lancedb-0.24.1-cp39-abi3-macosx_10_15_x86_64.whl", hash = "sha256:ae5f028920222ad325521fb447558e274eb92dfd7c189f5875dc3bcc7de07ea6", size = 32792946, upload-time = "2025-07-10T22:21:44.578Z" }, + { url = "https://files.pythonhosted.org/packages/71/90/7c5218b5d81382901680bb365bb55f92fefa28434c049ec6236be73b7ac1/lancedb-0.24.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:baf1eba0e2a8762753dba466e92792a4a21ec504612125ec1d8edd6c15b17eba", size = 30290214, upload-time = "2025-07-10T22:25:47.422Z" }, + { url = "https://files.pythonhosted.org/packages/e1/01/b184e8f1e94e27b9297778dfde65259a94994138d7d4330334bfdf5756e1/lancedb-0.24.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc20c56936d8198330a5dee2f1a17dc1f2145a7b48f81bc32193ca16f3907f3b", size = 31147217, upload-time = "2025-07-10T21:55:21.461Z" }, + { url = "https://files.pythonhosted.org/packages/4e/02/7e67ea8e49757e42251df4e665699fe4d0962f336e3d113ebff84f22bee9/lancedb-0.24.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2509810c743e094d2d900fdc7b0f2bc3cc52a970ecdfb5d404c22b8b8da14cc", size = 34315305, upload-time = "2025-07-10T21:59:31.539Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/770c17793062dacaf52c5641af706cffca6ef803fbe80422d7948fc4a0cb/lancedb-0.24.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:2fbbdf6a6e6189fc3d026677a303a1b7e0bdfe9b690cfee93c586f6b76eb10ba", size = 31157887, upload-time = "2025-07-10T21:55:33.138Z" }, + { url = "https://files.pythonhosted.org/packages/9c/ef/f896a8cabf99bc87e8bdc49df0bd08db09a86e8f333312c15375da21921f/lancedb-0.24.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:e8ef48eaa8c6c2093f40cbae4968c1fa9126934022b4d6462c5a019688731597", size = 34354984, upload-time = "2025-07-10T22:00:26.294Z" }, + { url = "https://files.pythonhosted.org/packages/b9/a2/0ab0979ac987313e2dd9b52fddb98eae9cb048e0faebca7a5f9e0a352ea5/lancedb-0.24.1-cp39-abi3-win_amd64.whl", hash = "sha256:091d1757776fd7a0d7adbc5d507f4356e9f479c38a0446009724d8e52d66cbb3", size = 36228285, upload-time = "2025-07-10T22:16:45.817Z" }, ] [[package]] @@ -1850,9 +1717,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "language-data" }, 
] -sdist = { url = "https://files.pythonhosted.org/packages/3a/7a/5a97e327063409a5caa21541e6d08ae4a0f2da328447e9f2c7b39e179226/langcodes-3.5.0.tar.gz", hash = "sha256:1eef8168d07e51e131a2497ffecad4b663f6208e7c3ae3b8dc15c51734a6f801", size = 191030 } +sdist = { url = "https://files.pythonhosted.org/packages/3a/7a/5a97e327063409a5caa21541e6d08ae4a0f2da328447e9f2c7b39e179226/langcodes-3.5.0.tar.gz", hash = "sha256:1eef8168d07e51e131a2497ffecad4b663f6208e7c3ae3b8dc15c51734a6f801", size = 191030, upload-time = "2024-11-19T10:23:45.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/6b/068c2ea7a712bf805c62445bd9e9c06d7340358ef2824150eceac027444b/langcodes-3.5.0-py3-none-any.whl", hash = "sha256:853c69d1a35e0e13da2f427bb68fb2fa4a8f4fb899e0c62ad8df8d073dcfed33", size = 182974 }, + { url = "https://files.pythonhosted.org/packages/c3/6b/068c2ea7a712bf805c62445bd9e9c06d7340358ef2824150eceac027444b/langcodes-3.5.0-py3-none-any.whl", hash = "sha256:853c69d1a35e0e13da2f427bb68fb2fa4a8f4fb899e0c62ad8df8d073dcfed33", size = 182974, upload-time = "2024-11-19T10:23:42.824Z" }, ] [[package]] @@ -1862,23 +1729,23 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "marisa-trie" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/ce/3f144716a9f2cbf42aa86ebc8b085a184be25c80aa453eea17c294d239c1/language_data-1.3.0.tar.gz", hash = "sha256:7600ef8aa39555145d06c89f0c324bf7dab834ea0b0a439d8243762e3ebad7ec", size = 5129310 } +sdist = { url = "https://files.pythonhosted.org/packages/dd/ce/3f144716a9f2cbf42aa86ebc8b085a184be25c80aa453eea17c294d239c1/language_data-1.3.0.tar.gz", hash = "sha256:7600ef8aa39555145d06c89f0c324bf7dab834ea0b0a439d8243762e3ebad7ec", size = 5129310, upload-time = "2024-11-19T10:21:37.912Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/e9/5a5ffd9b286db82be70d677d0a91e4d58f7912bb8dd026ddeeb4abe70679/language_data-1.3.0-py3-none-any.whl", hash = "sha256:e2ee943551b5ae5f89cd0e801d1fc3835bb0ef5b7e9c3a4e8e17b2b214548fbf", size = 5385760 }, + { url = "https://files.pythonhosted.org/packages/5d/e9/5a5ffd9b286db82be70d677d0a91e4d58f7912bb8dd026ddeeb4abe70679/language_data-1.3.0-py3-none-any.whl", hash = "sha256:e2ee943551b5ae5f89cd0e801d1fc3835bb0ef5b7e9c3a4e8e17b2b214548fbf", size = 5385760, upload-time = "2024-11-19T10:21:36.005Z" }, ] [[package]] name = "lark" version = "1.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/37/a13baf0135f348af608c667633cbe5d13aa2c5c15a56ae9ad3e6cba45ae3/lark-1.3.0.tar.gz", hash = "sha256:9a3839d0ca5e1faf7cfa3460e420e859b66bcbde05b634e73c369c8244c5fa48", size = 259551 } +sdist = { url = "https://files.pythonhosted.org/packages/1d/37/a13baf0135f348af608c667633cbe5d13aa2c5c15a56ae9ad3e6cba45ae3/lark-1.3.0.tar.gz", hash = "sha256:9a3839d0ca5e1faf7cfa3460e420e859b66bcbde05b634e73c369c8244c5fa48", size = 259551, upload-time = "2025-09-22T13:45:05.072Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/3e/1c6b43277de64fc3c0333b0e72ab7b52ddaaea205210d60d9b9f83c3d0c7/lark-1.3.0-py3-none-any.whl", hash = "sha256:80661f261fb2584a9828a097a2432efd575af27d20be0fd35d17f0fe37253831", size = 113002 }, + { url = "https://files.pythonhosted.org/packages/a8/3e/1c6b43277de64fc3c0333b0e72ab7b52ddaaea205210d60d9b9f83c3d0c7/lark-1.3.0-py3-none-any.whl", hash = "sha256:80661f261fb2584a9828a097a2432efd575af27d20be0fd35d17f0fe37253831", size = 113002, upload-time = "2025-09-22T13:45:03.747Z" }, ] [[package]] name = "litellm" 
-version = "1.77.7" +version = "1.80.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -1894,9 +1761,9 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/4b/4e9a204462687ca3796cc0fdaefbd624d7b2216edd4ad243d60a3b95127e/litellm-1.77.7.tar.gz", hash = "sha256:e3398fb2575b98726e787c0a1481daed5938d58cafdcd96fbca80c312221af3e", size = 10401706 } +sdist = { url = "https://files.pythonhosted.org/packages/bd/8c/48d533affdbc6d485b7ad4221cd3b40b8c12f9f5568edfe0be0b11e7b945/litellm-1.80.0.tar.gz", hash = "sha256:eeac733eb6b226f9e5fb020f72fe13a32b3354b001dc62bcf1bc4d9b526d6231", size = 11591976, upload-time = "2025-11-16T00:03:51.812Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/50/53df2244d4aca2af73d2f2c6ad21c731cf24bd0dbe89d896184a1eaa874f/litellm-1.77.7-py3-none-any.whl", hash = "sha256:1b3a1b17bd521a0ad25226fb62a912602c803922aabb4a16adf83834673be574", size = 9223061 }, + { url = "https://files.pythonhosted.org/packages/ea/53/aa31e4d057b3746b3c323ca993003d6cf15ef987e7fe7ceb53681695ae87/litellm-1.80.0-py3-none-any.whl", hash = "sha256:fd0009758f4772257048d74bf79bb64318859adb4ea49a8b66fdbc718cd80b6e", size = 10492975, upload-time = "2025-11-16T00:03:49.182Z" }, ] [[package]] @@ -1909,61 +1776,45 @@ dependencies = [ { name = "onnxruntime" }, { name = "python-dotenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/f3/3d1dcdd7b9c41d589f5cff252d32ed91cdf86ba84391cfc81d9d8773571d/magika-0.6.3.tar.gz", hash = "sha256:7cc52aa7359af861957043e2bf7265ed4741067251c104532765cd668c0c0cb1", size = 3042784 } +sdist = { url = "https://files.pythonhosted.org/packages/a3/f3/3d1dcdd7b9c41d589f5cff252d32ed91cdf86ba84391cfc81d9d8773571d/magika-0.6.3.tar.gz", hash = "sha256:7cc52aa7359af861957043e2bf7265ed4741067251c104532765cd668c0c0cb1", size = 3042784, upload-time = "2025-10-30T15:22:34.499Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/e4/35c323beb3280482c94299d61626116856ac2d4ec16ecef50afc4fdd4291/magika-0.6.3-py3-none-any.whl", hash = "sha256:eda443d08006ee495e02083b32e51b98cb3696ab595a7d13900d8e2ef506ec9d", size = 2969474 }, - { url = "https://files.pythonhosted.org/packages/25/8f/132b0d7cd51c02c39fd52658a5896276c30c8cc2fd453270b19db8c40f7e/magika-0.6.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:86901e64b05dde5faff408c9b8245495b2e1fd4c226e3393d3d2a3fee65c504b", size = 13358841 }, - { url = "https://files.pythonhosted.org/packages/c4/03/5ed859be502903a68b7b393b17ae0283bf34195cfcca79ce2dc25b9290e7/magika-0.6.3-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:3d9661eedbdf445ac9567e97e7ceefb93545d77a6a32858139ea966b5806fb64", size = 15367335 }, - { url = "https://files.pythonhosted.org/packages/7b/9e/f8ee7d644affa3b80efdd623a3d75865c8f058f3950cb87fb0c48e3559bc/magika-0.6.3-py3-none-win_amd64.whl", hash = "sha256:e57f75674447b20cab4db928ae58ab264d7d8582b55183a0b876711c2b2787f3", size = 12692831 }, + { url = "https://files.pythonhosted.org/packages/a2/e4/35c323beb3280482c94299d61626116856ac2d4ec16ecef50afc4fdd4291/magika-0.6.3-py3-none-any.whl", hash = "sha256:eda443d08006ee495e02083b32e51b98cb3696ab595a7d13900d8e2ef506ec9d", size = 2969474, upload-time = "2025-10-30T15:22:25.298Z" }, + { url = "https://files.pythonhosted.org/packages/25/8f/132b0d7cd51c02c39fd52658a5896276c30c8cc2fd453270b19db8c40f7e/magika-0.6.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:86901e64b05dde5faff408c9b8245495b2e1fd4c226e3393d3d2a3fee65c504b", 
size = 13358841, upload-time = "2025-10-30T15:22:27.413Z" }, + { url = "https://files.pythonhosted.org/packages/c4/03/5ed859be502903a68b7b393b17ae0283bf34195cfcca79ce2dc25b9290e7/magika-0.6.3-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:3d9661eedbdf445ac9567e97e7ceefb93545d77a6a32858139ea966b5806fb64", size = 15367335, upload-time = "2025-10-30T15:22:29.907Z" }, + { url = "https://files.pythonhosted.org/packages/7b/9e/f8ee7d644affa3b80efdd623a3d75865c8f058f3950cb87fb0c48e3559bc/magika-0.6.3-py3-none-win_amd64.whl", hash = "sha256:e57f75674447b20cab4db928ae58ab264d7d8582b55183a0b876711c2b2787f3", size = 12692831, upload-time = "2025-10-30T15:22:32.063Z" }, ] [[package]] name = "marisa-trie" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c5/e3/c9066e74076b90f9701ccd23d6a0b8c1d583feefdec576dc3e1bb093c50d/marisa_trie-1.3.1.tar.gz", hash = "sha256:97107fd12f30e4f8fea97790343a2d2d9a79d93697fe14e1b6f6363c984ff85b", size = 212454 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/bf/2f1fe6c9fcd2b509c6dfaaf26e35128947d6d3718d0b39510903c55b7bed/marisa_trie-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ef045f694ef66079b4e00c4c9063a00183d6af7d1ff643de6ea5c3b0d9af01b", size = 174027 }, - { url = "https://files.pythonhosted.org/packages/a9/5a/de7936d58ed0de847180cee2b95143d420223c5ade0c093d55113f628237/marisa_trie-1.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cbd28f95d5f30d9a7af6130869568e75bfd7ef2e0adfb1480f1f44480f5d3603", size = 158478 }, - { url = "https://files.pythonhosted.org/packages/48/cc/80611aadefcd0bcf8cd1795cb4643bb27213319a221ba04fe071da0b75cd/marisa_trie-1.3.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b173ec46d521308f7c97d96d6e05cf2088e0548f82544ec9a8656af65593304d", size = 1257535 }, - { url = "https://files.pythonhosted.org/packages/36/89/c4eeefb956318047036e6bdc572b6112b2059d595e85961267a90aa40458/marisa_trie-1.3.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:954fef9185f8a79441b4e433695116636bf66402945cfee404f8983bafa59788", size = 1275566 }, - { url = "https://files.pythonhosted.org/packages/c4/63/d775a2fdfc4b555120381cd2aa6dff1845576bc14fb13796ae1b1e8dbaf7/marisa_trie-1.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ca644534f15f85bba14c412afc17de07531e79a766ce85b8dbf3f8b6e7758f20", size = 2199831 }, - { url = "https://files.pythonhosted.org/packages/50/aa/e5053927dc3cac77acc9b27f6f87e75c880f5d3d5eac9111fe13b1d8bf6f/marisa_trie-1.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3834304fdeaa1c9b73596ad5a6c01a44fc19c13c115194704b85f7fbdf0a7b8e", size = 2283830 }, - { url = "https://files.pythonhosted.org/packages/71/3e/e314906d0de5b1a44780a23c79bb62a9aafd876e2a4e80fb34f58c721da4/marisa_trie-1.3.1-cp311-cp311-win32.whl", hash = "sha256:70b4c96f9119cfeb4dc6a0cf4afc9f92f0b002cde225bcd910915d976c78e66a", size = 117335 }, - { url = "https://files.pythonhosted.org/packages/b0/2b/85623566621135de3d57497811f94679b4fb2a8f16148ef67133c2abab7a/marisa_trie-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:986eaf35a7f63c878280609ecd37edf8a074f7601c199acfec81d03f1ee9a39a", size = 143985 }, - { url = "https://files.pythonhosted.org/packages/3f/40/ee7ea61b88d62d2189b5c4a27bc0fc8d9c32f8b8dc6daf1c93a7b7ad34ac/marisa_trie-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b7c1e7fa6c3b855e8cfbabf38454d7decbaba1c567d0cd58880d033c6b363bd", size = 173454 }, - { url = 
"https://files.pythonhosted.org/packages/9c/fc/58635811586898041004b2197a085253706ede211324a53ec01612a50e20/marisa_trie-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c12b44c190deb0d67655021da1f2d0a7d61a257bf844101cf982e68ed344f28d", size = 155305 }, - { url = "https://files.pythonhosted.org/packages/fe/98/88ca0c98d37034a3237acaf461d210cbcfeb6687929e5ba0e354971fa3ed/marisa_trie-1.3.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9688c7b45f744366a4ef661e399f24636ebe440d315ab35d768676c59c613186", size = 1244834 }, - { url = "https://files.pythonhosted.org/packages/f3/5f/93b3e3607ccd693a768eafee60829cd14ea1810b75aa48e8b20e27b332c4/marisa_trie-1.3.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99a00cab4cf9643a87977c87a5c8961aa44fff8d5dd46e00250135f686e7dedf", size = 1265148 }, - { url = "https://files.pythonhosted.org/packages/db/6e/051d7d25c7fb2b3df605c8bd782513ebbb33fddf3bae6cf46cf268cca89f/marisa_trie-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:83efc045fc58ca04c91a96c9b894d8a19ac6553677a76f96df01ff9f0405f53d", size = 2172726 }, - { url = "https://files.pythonhosted.org/packages/58/da/244d9d4e414ce6c73124cba4cc293dd140bf3b04ca18dec64c2775cca951/marisa_trie-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0b9816ab993001a7854b02a7daec228892f35bd5ab0ac493bacbd1b80baec9f1", size = 2256104 }, - { url = "https://files.pythonhosted.org/packages/c4/f1/1a36ecd7da6668685a7753522af89a19928ffc80f1cc1dbc301af216f011/marisa_trie-1.3.1-cp312-cp312-win32.whl", hash = "sha256:c785fd6dae9daa6825734b7b494cdac972f958be1f9cb3fb1f32be8598d2b936", size = 115624 }, - { url = "https://files.pythonhosted.org/packages/35/b2/aabd1c9f1c102aa31d66633ed5328c447be166e0a703f9723e682478fd83/marisa_trie-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:9868b7a8e0f648d09ffe25ac29511e6e208cc5fb0d156c295385f9d5dc2a138e", size = 138562 }, - { url = "https://files.pythonhosted.org/packages/46/a2/8331b995c1b3eee83aa745f4a6502d737ec523d5955a48f167d4177db105/marisa_trie-1.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9de573d933db4753a50af891bcb3ffbfe14e200406214c223aa5dfe2163f316d", size = 172272 }, - { url = "https://files.pythonhosted.org/packages/97/b8/7b9681b5c0ea1bb950f907a4e3919eb7f7b7b3febafaae346f3b3f199f6f/marisa_trie-1.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f4bae4f920f2a1082eaf766c1883df7da84abdf333bafa15b8717c10416a615e", size = 154671 }, - { url = "https://files.pythonhosted.org/packages/ca/16/929c1f83fdcff13f8d08500f434aaa18c21c8168d16cf81585d69085e980/marisa_trie-1.3.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9f2b97fcfd5e2dbb0090d0664023872dcde990df0b545eca8d0ce95795a409", size = 1238754 }, - { url = "https://files.pythonhosted.org/packages/0f/0a/b0e04d3ef91a87d4c7ea0b66c004fdfc6e65c9ed83edaebecfb482dfe0ed/marisa_trie-1.3.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecdb19d33b26738a32602ef432b06cc6deeca4b498ce67ba8e5e39c8a7c19745", size = 1262653 }, - { url = "https://files.pythonhosted.org/packages/de/1f/0ecf610ddc9a209ee63116baabb47584d5b8ecd01610091a593d9429537e/marisa_trie-1.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a7416f1a084eb889c5792c57317875aeaa86abfe0bdc6f167712cebcec1d36ee", size = 2172399 }, - { url = "https://files.pythonhosted.org/packages/ac/74/6b47deff3b3920449c135b9187c80f0d656adcdc5d41463745a61b012ea1/marisa_trie-1.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:ee428575377e29c636f2b4b3b0488875dcea310c6c5b3412ec4ef997f7bb37cc", size = 2255138 }, - { url = "https://files.pythonhosted.org/packages/bd/fa/3dbcbe93dfaa626a5b3e741e7bcf3d7389aa5777175213bd8d9a9d3c992d/marisa_trie-1.3.1-cp313-cp313-win32.whl", hash = "sha256:d0f87bdf660f01e88ab3a507955697b2e3284065afa0b94fc9e77d6ad153ed5e", size = 115391 }, - { url = "https://files.pythonhosted.org/packages/3b/ce/ddfab303646b21aef07ff9dbc83fba92e5d493f49d3bc03d899ffd45c86f/marisa_trie-1.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:a83f5f7ae3494e0cc25211296252b1b86901c788ed82c83adda19d0c98f828d6", size = 139130 }, - { url = "https://files.pythonhosted.org/packages/5a/1e/734b618048ad05c50cb1673ce2c6e836dc38ddeeeb011ed1804af07327a4/marisa_trie-1.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a850b151bd1e3a5d9afef113adc22727d696603659d575d7e84f994bd8d04bf1", size = 175131 }, - { url = "https://files.pythonhosted.org/packages/d3/78/c7051147cc918cb8ff4a2920e11a9b17d9dcb4d8fc122122694b486e2bfe/marisa_trie-1.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9dc61fb8f8993589544f6df268229c6cf0a56ad4ed3e8585a9cd23c5ad79527b", size = 163094 }, - { url = "https://files.pythonhosted.org/packages/ee/b8/3b904178d7878319aacaabae5131c1f281519aaac0f8c68c8ed312912ccf/marisa_trie-1.3.1-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4bd41a6e73c0d0adafe4de449b6d35530a4ce6a836a6ee839baf117785ecfd7", size = 1279812 }, - { url = "https://files.pythonhosted.org/packages/fb/bf/e77a1284247b980560b4104bbdd5d06ed2c2ae3d56ab954f97293b6dbbcd/marisa_trie-1.3.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8c8b2386d2d22c57880ed20a913ceca86363765623175671137484a7d223f07a", size = 1285690 }, - { url = "https://files.pythonhosted.org/packages/48/82/f6f10db5ec72de2642499f3a6e4e8607bbd2cfb28269ea08d0d8ddac3313/marisa_trie-1.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c56001badaf1779afae5c24b7ab85938644ab8ef3c5fd438ab5d49621b84482", size = 2197943 }, - { url = "https://files.pythonhosted.org/packages/2a/d0/74b6c3011b1ebf4a8131430156b14c3af694082cf34c392fff766096fd4b/marisa_trie-1.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83a3748088d117a9b15d8981c947df9e4f56eb2e4b5456ae34fe1f83666c9185", size = 2280132 }, - { url = "https://files.pythonhosted.org/packages/28/b2/b8b0cb738fa3ab07309ed92025c6e1b278f84c7255e976921a52b30d8d1b/marisa_trie-1.3.1-cp313-cp313t-win32.whl", hash = "sha256:137010598d8cebc53dbfb7caf59bde96c33a6af555e3e1bdbf30269b6a157e1e", size = 126446 }, - { url = "https://files.pythonhosted.org/packages/b6/c6/2381648d0c946556ef51c673397cea40712d945444ceed0a0a0b51a174d2/marisa_trie-1.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:ec633e108f277f2b7f4671d933a909f39bba549910bf103e2940b87a14da2783", size = 153885 }, +sdist = { url = "https://files.pythonhosted.org/packages/c5/e3/c9066e74076b90f9701ccd23d6a0b8c1d583feefdec576dc3e1bb093c50d/marisa_trie-1.3.1.tar.gz", hash = "sha256:97107fd12f30e4f8fea97790343a2d2d9a79d93697fe14e1b6f6363c984ff85b", size = 212454, upload-time = "2025-08-26T15:13:18.401Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/bf/2f1fe6c9fcd2b509c6dfaaf26e35128947d6d3718d0b39510903c55b7bed/marisa_trie-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ef045f694ef66079b4e00c4c9063a00183d6af7d1ff643de6ea5c3b0d9af01b", size = 174027, upload-time = "2025-08-26T15:12:01.434Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/5a/de7936d58ed0de847180cee2b95143d420223c5ade0c093d55113f628237/marisa_trie-1.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cbd28f95d5f30d9a7af6130869568e75bfd7ef2e0adfb1480f1f44480f5d3603", size = 158478, upload-time = "2025-08-26T15:12:02.429Z" }, + { url = "https://files.pythonhosted.org/packages/48/cc/80611aadefcd0bcf8cd1795cb4643bb27213319a221ba04fe071da0b75cd/marisa_trie-1.3.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b173ec46d521308f7c97d96d6e05cf2088e0548f82544ec9a8656af65593304d", size = 1257535, upload-time = "2025-08-26T15:12:04.271Z" }, + { url = "https://files.pythonhosted.org/packages/36/89/c4eeefb956318047036e6bdc572b6112b2059d595e85961267a90aa40458/marisa_trie-1.3.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:954fef9185f8a79441b4e433695116636bf66402945cfee404f8983bafa59788", size = 1275566, upload-time = "2025-08-26T15:12:05.874Z" }, + { url = "https://files.pythonhosted.org/packages/c4/63/d775a2fdfc4b555120381cd2aa6dff1845576bc14fb13796ae1b1e8dbaf7/marisa_trie-1.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ca644534f15f85bba14c412afc17de07531e79a766ce85b8dbf3f8b6e7758f20", size = 2199831, upload-time = "2025-08-26T15:12:07.175Z" }, + { url = "https://files.pythonhosted.org/packages/50/aa/e5053927dc3cac77acc9b27f6f87e75c880f5d3d5eac9111fe13b1d8bf6f/marisa_trie-1.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3834304fdeaa1c9b73596ad5a6c01a44fc19c13c115194704b85f7fbdf0a7b8e", size = 2283830, upload-time = "2025-08-26T15:12:08.319Z" }, + { url = "https://files.pythonhosted.org/packages/71/3e/e314906d0de5b1a44780a23c79bb62a9aafd876e2a4e80fb34f58c721da4/marisa_trie-1.3.1-cp311-cp311-win32.whl", hash = "sha256:70b4c96f9119cfeb4dc6a0cf4afc9f92f0b002cde225bcd910915d976c78e66a", size = 117335, upload-time = "2025-08-26T15:12:09.776Z" }, + { url = "https://files.pythonhosted.org/packages/b0/2b/85623566621135de3d57497811f94679b4fb2a8f16148ef67133c2abab7a/marisa_trie-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:986eaf35a7f63c878280609ecd37edf8a074f7601c199acfec81d03f1ee9a39a", size = 143985, upload-time = "2025-08-26T15:12:10.988Z" }, + { url = "https://files.pythonhosted.org/packages/3f/40/ee7ea61b88d62d2189b5c4a27bc0fc8d9c32f8b8dc6daf1c93a7b7ad34ac/marisa_trie-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b7c1e7fa6c3b855e8cfbabf38454d7decbaba1c567d0cd58880d033c6b363bd", size = 173454, upload-time = "2025-08-26T15:12:12.13Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/58635811586898041004b2197a085253706ede211324a53ec01612a50e20/marisa_trie-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c12b44c190deb0d67655021da1f2d0a7d61a257bf844101cf982e68ed344f28d", size = 155305, upload-time = "2025-08-26T15:12:13.374Z" }, + { url = "https://files.pythonhosted.org/packages/fe/98/88ca0c98d37034a3237acaf461d210cbcfeb6687929e5ba0e354971fa3ed/marisa_trie-1.3.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9688c7b45f744366a4ef661e399f24636ebe440d315ab35d768676c59c613186", size = 1244834, upload-time = "2025-08-26T15:12:14.795Z" }, + { url = "https://files.pythonhosted.org/packages/f3/5f/93b3e3607ccd693a768eafee60829cd14ea1810b75aa48e8b20e27b332c4/marisa_trie-1.3.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99a00cab4cf9643a87977c87a5c8961aa44fff8d5dd46e00250135f686e7dedf", size = 1265148, upload-time = "2025-08-26T15:12:16.229Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/6e/051d7d25c7fb2b3df605c8bd782513ebbb33fddf3bae6cf46cf268cca89f/marisa_trie-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:83efc045fc58ca04c91a96c9b894d8a19ac6553677a76f96df01ff9f0405f53d", size = 2172726, upload-time = "2025-08-26T15:12:18.467Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/244d9d4e414ce6c73124cba4cc293dd140bf3b04ca18dec64c2775cca951/marisa_trie-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0b9816ab993001a7854b02a7daec228892f35bd5ab0ac493bacbd1b80baec9f1", size = 2256104, upload-time = "2025-08-26T15:12:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f1/1a36ecd7da6668685a7753522af89a19928ffc80f1cc1dbc301af216f011/marisa_trie-1.3.1-cp312-cp312-win32.whl", hash = "sha256:c785fd6dae9daa6825734b7b494cdac972f958be1f9cb3fb1f32be8598d2b936", size = 115624, upload-time = "2025-08-26T15:12:21.233Z" }, + { url = "https://files.pythonhosted.org/packages/35/b2/aabd1c9f1c102aa31d66633ed5328c447be166e0a703f9723e682478fd83/marisa_trie-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:9868b7a8e0f648d09ffe25ac29511e6e208cc5fb0d156c295385f9d5dc2a138e", size = 138562, upload-time = "2025-08-26T15:12:22.632Z" }, ] [[package]] name = "markdown" version = "3.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8d/37/02347f6d6d8279247a5837082ebc26fc0d5aaeaf75aa013fcbb433c777ab/markdown-3.9.tar.gz", hash = "sha256:d2900fe1782bd33bdbbd56859defef70c2e78fc46668f8eb9df3128138f2cb6a", size = 364585 } +sdist = { url = "https://files.pythonhosted.org/packages/8d/37/02347f6d6d8279247a5837082ebc26fc0d5aaeaf75aa013fcbb433c777ab/markdown-3.9.tar.gz", hash = "sha256:d2900fe1782bd33bdbbd56859defef70c2e78fc46668f8eb9df3128138f2cb6a", size = 364585, upload-time = "2025-09-04T20:25:22.885Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/ae/44c4a6a4cbb496d93c6257954260fe3a6e91b7bed2240e5dad2a717f5111/markdown-3.9-py3-none-any.whl", hash = "sha256:9f4d91ed810864ea88a6f32c07ba8bee1346c0cc1f6b1f9f6c822f2a9667d280", size = 107441 }, + { url = "https://files.pythonhosted.org/packages/70/ae/44c4a6a4cbb496d93c6257954260fe3a6e91b7bed2240e5dad2a717f5111/markdown-3.9-py3-none-any.whl", hash = "sha256:9f4d91ed810864ea88a6f32c07ba8bee1346c0cc1f6b1f9f6c822f2a9667d280", size = 107441, upload-time = "2025-09-04T20:25:21.784Z" }, ] [[package]] @@ -1973,9 +1824,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mdurl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070 } +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321 }, + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = 
"sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] [[package]] @@ -1986,9 +1837,9 @@ dependencies = [ { name = "beautifulsoup4" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3f/bc/c8c8eea5335341306b0fa7e1cb33c5e1c8d24ef70ddd684da65f41c49c92/markdownify-1.2.2.tar.gz", hash = "sha256:b274f1b5943180b031b699b199cbaeb1e2ac938b75851849a31fd0c3d6603d09", size = 18816 } +sdist = { url = "https://files.pythonhosted.org/packages/3f/bc/c8c8eea5335341306b0fa7e1cb33c5e1c8d24ef70ddd684da65f41c49c92/markdownify-1.2.2.tar.gz", hash = "sha256:b274f1b5943180b031b699b199cbaeb1e2ac938b75851849a31fd0c3d6603d09", size = 18816, upload-time = "2025-11-16T19:21:18.565Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ce/f1e3e9d959db134cedf06825fae8d5b294bd368aacdd0831a3975b7c4d55/markdownify-1.2.2-py3-none-any.whl", hash = "sha256:3f02d3cc52714084d6e589f70397b6fc9f2f3a8531481bf35e8cc39f975e186a", size = 15724 }, + { url = "https://files.pythonhosted.org/packages/43/ce/f1e3e9d959db134cedf06825fae8d5b294bd368aacdd0831a3975b7c4d55/markdownify-1.2.2-py3-none-any.whl", hash = "sha256:3f02d3cc52714084d6e589f70397b6fc9f2f3a8531481bf35e8cc39f975e186a", size = 15724, upload-time = "2025-11-16T19:21:17.622Z" }, ] [[package]] @@ -2004,70 +1855,39 @@ dependencies = [ { name = "onnxruntime", marker = "sys_platform == 'win32'" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/4d/06567465c1886c2ea47bac24eab0c96bb6b4ecea47224323409dc9cbb614/markitdown-0.1.4.tar.gz", hash = "sha256:e72a481d1a50c82ff744e85e3289f79a940c5d0ad5ffa2b37c33de814c195bb1", size = 39951 } +sdist = { url = "https://files.pythonhosted.org/packages/f3/4d/06567465c1886c2ea47bac24eab0c96bb6b4ecea47224323409dc9cbb614/markitdown-0.1.4.tar.gz", hash = "sha256:e72a481d1a50c82ff744e85e3289f79a940c5d0ad5ffa2b37c33de814c195bb1", size = 39951, upload-time = "2025-12-01T18:20:30.937Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/b3/6138d2b23d5b534b0fa3736987a2e11bcef5419cbc9286c8afd229d21558/markitdown-0.1.4-py3-none-any.whl", hash = "sha256:d7f3805716b22545f693d355e28e89584226c0614b3b80b7c4a3f825f068492d", size = 58314 }, + { url = "https://files.pythonhosted.org/packages/9f/b3/6138d2b23d5b534b0fa3736987a2e11bcef5419cbc9286c8afd229d21558/markitdown-0.1.4-py3-none-any.whl", hash = "sha256:d7f3805716b22545f693d355e28e89584226c0614b3b80b7c4a3f825f068492d", size = 58314, upload-time = "2025-12-01T18:20:32.345Z" }, ] [[package]] name = "markupsafe" version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631 }, - { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058 }, - { url = 
"https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287 }, - { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940 }, - { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887 }, - { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692 }, - { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471 }, - { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923 }, - { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572 }, - { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077 }, - { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876 }, - { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615 }, - { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020 }, - { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332 }, - { url = 
"https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947 }, - { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962 }, - { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760 }, - { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529 }, - { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015 }, - { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540 }, - { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105 }, - { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906 }, - { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622 }, - { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029 }, - { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374 }, - { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980 }, - { url = 
"https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990 }, - { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784 }, - { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588 }, - { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041 }, - { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543 }, - { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113 }, - { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911 }, - { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658 }, - { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066 }, - { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639 }, - { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569 }, - { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284 }, - { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801 }, - { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769 }, - { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642 }, - { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612 }, - { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200 }, - { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973 }, -] - -[[package]] -name = "marshmallow" -version = "4.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/ff/8f092fe402ef12aa71b7f4ceba0c557ce4d5876a9cf421e01a67b7210560/marshmallow-4.0.1.tar.gz", hash = "sha256:e1d860bd262737cb2d34e1541b84cb52c32c72c9474e3fe6f30f137ef8b0d97f", size = 220453 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/18/297efc62b3539b9cd379fc49be3740a02e4c8a43e486f50322cfe0b9568a/marshmallow-4.0.1-py3-none-any.whl", hash = "sha256:72f14ef346f81269dbddee891bac547dda1501e9e08b6a809756ea3dbb7936a1", size = 48414 }, +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash 
= "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, ] [[package]] @@ -2077,9 +1897,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159 } +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159, upload-time = "2024-04-15T13:44:44.803Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899 }, + { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = 
"2024-04-15T13:44:43.265Z" }, ] [[package]] @@ -2089,36 +1909,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markdown-it-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655 } +sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205 }, + { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" }, ] [[package]] name = "mdurl" version = "0.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] [[package]] name = "mergedeep" version = "1.3.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661 } +sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661, upload-time = "2021-02-05T18:55:30.623Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354 }, + { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = 
"sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" }, ] [[package]] name = "mistune" version = "3.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/02/a7fb8b21d4d55ac93cdcde9d3638da5dd0ebdd3a4fed76c7725e10b81cbe/mistune-3.1.4.tar.gz", hash = "sha256:b5a7f801d389f724ec702840c11d8fc48f2b33519102fc7ee739e8177b672164", size = 94588 } +sdist = { url = "https://files.pythonhosted.org/packages/d7/02/a7fb8b21d4d55ac93cdcde9d3638da5dd0ebdd3a4fed76c7725e10b81cbe/mistune-3.1.4.tar.gz", hash = "sha256:b5a7f801d389f724ec702840c11d8fc48f2b33519102fc7ee739e8177b672164", size = 94588, upload-time = "2025-08-29T07:20:43.594Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/f0/8282d9641415e9e33df173516226b404d367a0fc55e1a60424a152913abc/mistune-3.1.4-py3-none-any.whl", hash = "sha256:93691da911e5d9d2e23bc54472892aff676df27a75274962ff9edc210364266d", size = 53481 }, + { url = "https://files.pythonhosted.org/packages/7a/f0/8282d9641415e9e33df173516226b404d367a0fc55e1a60424a152913abc/mistune-3.1.4-py3-none-any.whl", hash = "sha256:93691da911e5d9d2e23bc54472892aff676df27a75274962ff9edc210364266d", size = 53481, upload-time = "2025-08-29T07:20:42.218Z" }, ] [[package]] @@ -2140,9 +1960,9 @@ dependencies = [ { name = "pyyaml-env-tag" }, { name = "watchdog" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159 } +sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159, upload-time = "2024-08-30T12:24:06.899Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451 }, + { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451, upload-time = "2024-08-30T12:24:05.054Z" }, ] [[package]] @@ -2152,9 +1972,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mkdocs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1d/52/8243589d294cf6091c1145896915fe50feea0e91d64d843942d0175770c2/mkdocs-exclude-search-0.6.6.tar.gz", hash = "sha256:3cdff1b9afdc1b227019cd1e124f401453235b92153d60c0e5e651a76be4f044", size = 9501 } +sdist = { url = "https://files.pythonhosted.org/packages/1d/52/8243589d294cf6091c1145896915fe50feea0e91d64d843942d0175770c2/mkdocs-exclude-search-0.6.6.tar.gz", hash = "sha256:3cdff1b9afdc1b227019cd1e124f401453235b92153d60c0e5e651a76be4f044", size = 9501, upload-time = "2023-12-03T22:58:21.259Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/ef/9af45ffb1bdba684a0694922abae0bb771e9777aba005933f838b7f1bcea/mkdocs_exclude_search-0.6.6-py3-none-any.whl", hash = "sha256:2b4b941d1689808db533fe4a6afba75ce76c9bab8b21d4e31efc05fd8c4e0a4f", size = 7821 }, + { url = 
"https://files.pythonhosted.org/packages/3b/ef/9af45ffb1bdba684a0694922abae0bb771e9777aba005933f838b7f1bcea/mkdocs_exclude_search-0.6.6-py3-none-any.whl", hash = "sha256:2b4b941d1689808db533fe4a6afba75ce76c9bab8b21d4e31efc05fd8c4e0a4f", size = 7821, upload-time = "2023-12-03T22:58:19.355Z" }, ] [[package]] @@ -2166,9 +1986,9 @@ dependencies = [ { name = "platformdirs" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239 } +sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239, upload-time = "2023-11-20T17:51:09.981Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521 }, + { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521, upload-time = "2023-11-20T17:51:08.587Z" }, ] [[package]] @@ -2183,9 +2003,9 @@ dependencies = [ { name = "nbconvert" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6c/23/6ffb8d2fd2117aa860a04c6fe2510b21bc3c3c085907ffdd851caba53152/mkdocs_jupyter-0.25.1.tar.gz", hash = "sha256:0e9272ff4947e0ec683c92423a4bfb42a26477c103ab1a6ab8277e2dcc8f7afe", size = 1626747 } +sdist = { url = "https://files.pythonhosted.org/packages/6c/23/6ffb8d2fd2117aa860a04c6fe2510b21bc3c3c085907ffdd851caba53152/mkdocs_jupyter-0.25.1.tar.gz", hash = "sha256:0e9272ff4947e0ec683c92423a4bfb42a26477c103ab1a6ab8277e2dcc8f7afe", size = 1626747, upload-time = "2024-10-15T14:56:32.373Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/37/5f1fd5c3f6954b3256f8126275e62af493b96fb6aef6c0dbc4ee326032ad/mkdocs_jupyter-0.25.1-py3-none-any.whl", hash = "sha256:3f679a857609885d322880e72533ef5255561bbfdb13cfee2a1e92ef4d4ad8d8", size = 1456197 }, + { url = "https://files.pythonhosted.org/packages/08/37/5f1fd5c3f6954b3256f8126275e62af493b96fb6aef6c0dbc4ee326032ad/mkdocs_jupyter-0.25.1-py3-none-any.whl", hash = "sha256:3f679a857609885d322880e72533ef5255561bbfdb13cfee2a1e92ef4d4ad8d8", size = 1456197, upload-time = "2024-10-15T14:56:29.854Z" }, ] [[package]] @@ -2205,18 +2025,18 @@ dependencies = [ { name = "pymdown-extensions" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/d5/ab83ca9aa314954b0a9e8849780bdd01866a3cfcb15ffb7e3a61ca06ff0b/mkdocs_material-9.6.21.tar.gz", hash = "sha256:b01aa6d2731322438056f360f0e623d3faae981f8f2d8c68b1b973f4f2657870", size = 4043097 } +sdist = { url = "https://files.pythonhosted.org/packages/ff/d5/ab83ca9aa314954b0a9e8849780bdd01866a3cfcb15ffb7e3a61ca06ff0b/mkdocs_material-9.6.21.tar.gz", hash = "sha256:b01aa6d2731322438056f360f0e623d3faae981f8f2d8c68b1b973f4f2657870", size = 4043097, upload-time = "2025-09-30T19:11:27.517Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/cf/4f/98681c2030375fe9b057dbfb9008b68f46c07dddf583f4df09bf8075e37f/mkdocs_material-9.6.21-py3-none-any.whl", hash = "sha256:aa6a5ab6fb4f6d381588ac51da8782a4d3757cb3d1b174f81a2ec126e1f22c92", size = 9203097 }, + { url = "https://files.pythonhosted.org/packages/cf/4f/98681c2030375fe9b057dbfb9008b68f46c07dddf583f4df09bf8075e37f/mkdocs_material-9.6.21-py3-none-any.whl", hash = "sha256:aa6a5ab6fb4f6d381588ac51da8782a4d3757cb3d1b174f81a2ec126e1f22c92", size = 9203097, upload-time = "2025-09-30T19:11:24.063Z" }, ] [[package]] name = "mkdocs-material-extensions" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/79/9b/9b4c96d6593b2a541e1cb8b34899a6d021d208bb357042823d4d2cabdbe7/mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443", size = 11847 } +sdist = { url = "https://files.pythonhosted.org/packages/79/9b/9b4c96d6593b2a541e1cb8b34899a6d021d208bb357042823d4d2cabdbe7/mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443", size = 11847, upload-time = "2023-11-22T19:09:45.208Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/54/662a4743aa81d9582ee9339d4ffa3c8fd40a4965e033d77b9da9774d3960/mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31", size = 8728 }, + { url = "https://files.pythonhosted.org/packages/5b/54/662a4743aa81d9582ee9339d4ffa3c8fd40a4965e033d77b9da9774d3960/mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31", size = 8728, upload-time = "2023-11-22T19:09:43.465Z" }, ] [[package]] @@ -2227,18 +2047,18 @@ dependencies = [ { name = "markdown" }, { name = "typer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/91/1a/b2ac21a04c8e487a1fccc3982f9d91319b83a64c3fc3dc51d89658f43b57/mkdocs_typer-0.0.3.tar.gz", hash = "sha256:4dd37f024190a82aaf0f6c984faafb15167d34eab7e29a6a85e61362423a4eb7", size = 11381 } +sdist = { url = "https://files.pythonhosted.org/packages/91/1a/b2ac21a04c8e487a1fccc3982f9d91319b83a64c3fc3dc51d89658f43b57/mkdocs_typer-0.0.3.tar.gz", hash = "sha256:4dd37f024190a82aaf0f6c984faafb15167d34eab7e29a6a85e61362423a4eb7", size = 11381, upload-time = "2023-06-21T16:33:39.93Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/34/4d6722b7cdb5e37474272205df6f2080ad01aff74570820a83dedb314f1b/mkdocs_typer-0.0.3-py3-none-any.whl", hash = "sha256:b2a9a44da590a7100114fde4de9123fedfea692d229379984db20ee3b3f12d7c", size = 11564 }, + { url = "https://files.pythonhosted.org/packages/38/34/4d6722b7cdb5e37474272205df6f2080ad01aff74570820a83dedb314f1b/mkdocs_typer-0.0.3-py3-none-any.whl", hash = "sha256:b2a9a44da590a7100114fde4de9123fedfea692d229379984db20ee3b3f12d7c", size = 11564, upload-time = "2023-06-21T16:33:38.597Z" }, ] [[package]] name = "mpmath" version = "1.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106 } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = 
"sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198 }, + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, ] [[package]] @@ -2250,9 +2070,9 @@ dependencies = [ { name = "pyjwt", extra = ["crypto"] }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961 } +sdist = { url = "https://files.pythonhosted.org/packages/cf/0e/c857c46d653e104019a84f22d4494f2119b4fe9f896c92b4b864b3b045cc/msal-1.34.0.tar.gz", hash = "sha256:76ba83b716ea5a6d75b0279c0ac353a0e05b820ca1f6682c0eb7f45190c43c2f", size = 153961, upload-time = "2025-09-22T23:05:48.989Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987 }, + { url = "https://files.pythonhosted.org/packages/c2/dc/18d48843499e278538890dc709e9ee3dea8375f8be8e82682851df1b48b5/msal-1.34.0-py3-none-any.whl", hash = "sha256:f669b1644e4950115da7a176441b0e13ec2975c29528d8b9e81316023676d6e1", size = 116987, upload-time = "2025-09-22T23:05:47.294Z" }, ] [[package]] @@ -2262,119 +2082,76 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "msal" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315 } +sdist = { url = "https://files.pythonhosted.org/packages/01/99/5d239b6156eddf761a636bded1118414d161bd6b7b37a9335549ed159396/msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4", size = 23315, upload-time = "2025-03-14T23:51:03.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583 }, + { url = "https://files.pythonhosted.org/packages/5e/75/bd9b7bb966668920f06b200e84454c8f3566b102183bc55c5473d96cb2b9/msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca", size = 20583, upload-time = "2025-03-14T23:51:03.016Z" }, ] [[package]] name = "multidict" version = "6.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604 }, - { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715 }, - { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332 }, - { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212 }, - { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671 }, - { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491 }, - { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322 }, - { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694 }, - { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715 }, - { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189 }, - { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845 }, - { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374 }, - { url = 
"https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345 }, - { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940 }, - { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229 }, - { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308 }, - { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037 }, - { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023 }, - { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877 }, - { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467 }, - { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834 }, - { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545 }, - { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305 }, - { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363 }, - { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375 }, - { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346 }, - { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107 }, - { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592 }, - { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024 }, - { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484 }, - { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579 }, - { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654 }, - { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511 }, - { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895 }, - { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073 }, - { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226 }, - { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135 }, - { url = 
"https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117 }, - { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472 }, - { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342 }, - { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082 }, - { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704 }, - { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355 }, - { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259 }, - { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903 }, - { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365 }, - { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062 }, - { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683 }, - { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254 }, - { url = 
"https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967 }, - { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085 }, - { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713 }, - { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915 }, - { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077 }, - { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114 }, - { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442 }, - { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885 }, - { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588 }, - { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966 }, - { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618 }, - { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539 }, - { url = 
"https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345 }, - { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934 }, - { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243 }, - { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878 }, - { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452 }, - { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312 }, - { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935 }, - { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385 }, - { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777 }, - { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104 }, - { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503 }, - { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317 }, +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = 
"2025-10-06T14:52:30.657Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] [[package]] name = "murmurhash" version = "1.0.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/e9/02efbc6dfc2dd2085da3daacf9a8c17e8356019eceaedbfa21555e32d2af/murmurhash-1.0.13.tar.gz", hash = "sha256:737246d41ee00ff74b07b0bd1f0888be304d203ce668e642c86aa64ede30f8b7", size = 13258 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/d1/9d13a02d9c8bfff10b1f68d19df206eaf2a8011defeccf7eb05ea0b8c54e/murmurhash-1.0.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b20d168370bc3ce82920121b78ab35ae244070a9b18798f4a2e8678fa03bd7e0", size = 26410 }, - { url = "https://files.pythonhosted.org/packages/14/b0/3ee762e98cf9a8c2df9c8b377c326f3dd4495066d4eace9066fca46eba7a/murmurhash-1.0.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cef667d2e83bdceea3bc20c586c491fa442662ace1aea66ff5e3a18bb38268d8", size = 26679 }, - { url = "https://files.pythonhosted.org/packages/39/06/24618f79cd5aac48490932e50263bddfd1ea90f7123d49bfe806a5982675/murmurhash-1.0.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507148e50929ba1fce36898808573b9f81c763d5676f3fc6e4e832ff56b66992", size = 125970 }, - { url = "https://files.pythonhosted.org/packages/e8/09/0e7afce0a422692506c85474a26fb3a03c1971b2b5f7e7745276c4b3de7f/murmurhash-1.0.13-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d50f6173d266ad165beb8bca6101d824217fc9279f9e9981f4c0245c1e7ee6", size = 123390 }, - { url = "https://files.pythonhosted.org/packages/22/4c/c98f579b1a951b2bcc722a35270a2eec105c1e21585c9b314a02079e3c4d/murmurhash-1.0.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:0f272e15a84a8ae5f8b4bc0a68f9f47be38518ddffc72405791178058e9d019a", size = 124007 }, - { url = "https://files.pythonhosted.org/packages/df/f8/1b0dcebc8df8e091341617102b5b3b97deb6435f345b84f75382c290ec2c/murmurhash-1.0.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9423e0b0964ed1013a06c970199538c7ef9ca28c0be54798c0f1473a6591761", size = 123705 }, - { url = "https://files.pythonhosted.org/packages/79/17/f2a38558e150a0669d843f75e128afb83c1a67af41885ea2acb940e18e2a/murmurhash-1.0.13-cp311-cp311-win_amd64.whl", hash = "sha256:83b81e7084b696df3d853f2c78e0c9bda6b285d643f923f1a6fa9ab145d705c5", size = 24572 }, - { url = "https://files.pythonhosted.org/packages/e1/53/56ce2d8d4b9ab89557cb1d00ffce346b80a2eb2d8c7944015e5c83eacdec/murmurhash-1.0.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbe882e46cb3f86e092d8a1dd7a5a1c992da1ae3b39f7dd4507b6ce33dae7f92", size = 26859 }, - { url = "https://files.pythonhosted.org/packages/f8/85/3a0ad54a61257c31496545ae6861515d640316f93681d1dd917e7be06634/murmurhash-1.0.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52a33a12ecedc432493692c207c784b06b6427ffaa897fc90b7a76e65846478d", size = 26900 }, - { url = "https://files.pythonhosted.org/packages/d0/cd/6651de26744b50ff11c79f0c0d41244db039625de53c0467a7a52876b2d8/murmurhash-1.0.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:950403a7f0dc2d9c8d0710f07c296f2daab66299d9677d6c65d6b6fa2cb30aaa", size = 131367 }, - { url = "https://files.pythonhosted.org/packages/50/6c/01ded95ddce33811c9766cae4ce32e0a54288da1d909ee2bcaa6ed13b9f1/murmurhash-1.0.13-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fde9fb5d2c106d86ff3ef2e4a9a69c2a8d23ba46e28c6b30034dc58421bc107b", size = 128943 }, - { url = "https://files.pythonhosted.org/packages/ab/27/e539a9622d7bea3ae22706c1eb80d4af80f9dddd93b54d151955c2ae4011/murmurhash-1.0.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3aa55d62773745616e1ab19345dece122f6e6d09224f7be939cc5b4c513c8473", size = 129108 }, - { url = "https://files.pythonhosted.org/packages/7a/84/18af5662e07d06839ad4db18ce026e6f8ef850d7b0ba92817b28dad28ba6/murmurhash-1.0.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:060dfef1b405cf02c450f182fb629f76ebe7f79657cced2db5054bc29b34938b", size = 129175 }, - { url = "https://files.pythonhosted.org/packages/fe/8d/b01d3ee1f1cf3957250223b7c6ce35454f38fbf4abe236bf04a3f769341d/murmurhash-1.0.13-cp312-cp312-win_amd64.whl", hash = "sha256:a8e79627d44a6e20a6487effc30bfe1c74754c13d179106e68cc6d07941b022c", size = 24869 }, - { url = "https://files.pythonhosted.org/packages/00/b4/8919dfdc4a131ad38a57b2c5de69f4bd74538bf546637ee59ebaebe6e5a4/murmurhash-1.0.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8a7f8befd901379b6dc57a9e49c5188454113747ad6aa8cdd951a6048e10790", size = 26852 }, - { url = "https://files.pythonhosted.org/packages/b4/32/ce78bef5d6101568bcb12f5bb5103fabcbe23723ec52e76ff66132d5dbb7/murmurhash-1.0.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f741aab86007510199193eee4f87c5ece92bc5a6ca7d0fe0d27335c1203dface", size = 26900 }, - { url = "https://files.pythonhosted.org/packages/0c/4c/0f47c0b4f6b31a1de84d65f9573832c78cd47b4b8ce25ab5596a8238d150/murmurhash-1.0.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82614f18fa6d9d83da6bb0918f3789a3e1555d0ce12c2548153e97f79b29cfc9", size = 130033 }, - { url = 
"https://files.pythonhosted.org/packages/e0/cb/e47233e32fb792dcc9fb18a2cf65f795d47179b29c2b4a2034689f14c707/murmurhash-1.0.13-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91f22a48b9454712e0690aa0b76cf0156a5d5a083d23ec7e209cfaeef28f56ff", size = 130619 }, - { url = "https://files.pythonhosted.org/packages/8f/f1/f89911bf304ba5d385ccd346cc7fbb1c1450a24f093b592c3bfe87768467/murmurhash-1.0.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c4bc7938627b8fcb3d598fe6657cc96d1e31f4eba6a871b523c1512ab6dacb3e", size = 127643 }, - { url = "https://files.pythonhosted.org/packages/a4/24/262229221f6840c1a04a46051075e99675e591571abcca6b9a8b6aa1602b/murmurhash-1.0.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58a61f1fc840f9ef704e638c39b8517bab1d21f1a9dbb6ba3ec53e41360e44ec", size = 127981 }, - { url = "https://files.pythonhosted.org/packages/18/25/addbc1d28f83252732ac3e57334d42f093890b4c2cce483ba01a42bc607c/murmurhash-1.0.13-cp313-cp313-win_amd64.whl", hash = "sha256:c451a22f14c2f40e7abaea521ee24fa0e46fbec480c4304c25c946cdb6e81883", size = 24880 }, +sdist = { url = "https://files.pythonhosted.org/packages/54/e9/02efbc6dfc2dd2085da3daacf9a8c17e8356019eceaedbfa21555e32d2af/murmurhash-1.0.13.tar.gz", hash = "sha256:737246d41ee00ff74b07b0bd1f0888be304d203ce668e642c86aa64ede30f8b7", size = 13258, upload-time = "2025-05-22T12:35:57.019Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/d1/9d13a02d9c8bfff10b1f68d19df206eaf2a8011defeccf7eb05ea0b8c54e/murmurhash-1.0.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b20d168370bc3ce82920121b78ab35ae244070a9b18798f4a2e8678fa03bd7e0", size = 26410, upload-time = "2025-05-22T12:35:20.786Z" }, + { url = "https://files.pythonhosted.org/packages/14/b0/3ee762e98cf9a8c2df9c8b377c326f3dd4495066d4eace9066fca46eba7a/murmurhash-1.0.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cef667d2e83bdceea3bc20c586c491fa442662ace1aea66ff5e3a18bb38268d8", size = 26679, upload-time = "2025-05-22T12:35:21.808Z" }, + { url = "https://files.pythonhosted.org/packages/39/06/24618f79cd5aac48490932e50263bddfd1ea90f7123d49bfe806a5982675/murmurhash-1.0.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507148e50929ba1fce36898808573b9f81c763d5676f3fc6e4e832ff56b66992", size = 125970, upload-time = "2025-05-22T12:35:23.222Z" }, + { url = "https://files.pythonhosted.org/packages/e8/09/0e7afce0a422692506c85474a26fb3a03c1971b2b5f7e7745276c4b3de7f/murmurhash-1.0.13-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d50f6173d266ad165beb8bca6101d824217fc9279f9e9981f4c0245c1e7ee6", size = 123390, upload-time = "2025-05-22T12:35:24.303Z" }, + { url = "https://files.pythonhosted.org/packages/22/4c/c98f579b1a951b2bcc722a35270a2eec105c1e21585c9b314a02079e3c4d/murmurhash-1.0.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0f272e15a84a8ae5f8b4bc0a68f9f47be38518ddffc72405791178058e9d019a", size = 124007, upload-time = "2025-05-22T12:35:25.446Z" }, + { url = "https://files.pythonhosted.org/packages/df/f8/1b0dcebc8df8e091341617102b5b3b97deb6435f345b84f75382c290ec2c/murmurhash-1.0.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9423e0b0964ed1013a06c970199538c7ef9ca28c0be54798c0f1473a6591761", size = 123705, upload-time = "2025-05-22T12:35:26.709Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/17/f2a38558e150a0669d843f75e128afb83c1a67af41885ea2acb940e18e2a/murmurhash-1.0.13-cp311-cp311-win_amd64.whl", hash = "sha256:83b81e7084b696df3d853f2c78e0c9bda6b285d643f923f1a6fa9ab145d705c5", size = 24572, upload-time = "2025-05-22T12:35:30.38Z" }, + { url = "https://files.pythonhosted.org/packages/e1/53/56ce2d8d4b9ab89557cb1d00ffce346b80a2eb2d8c7944015e5c83eacdec/murmurhash-1.0.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbe882e46cb3f86e092d8a1dd7a5a1c992da1ae3b39f7dd4507b6ce33dae7f92", size = 26859, upload-time = "2025-05-22T12:35:31.815Z" }, + { url = "https://files.pythonhosted.org/packages/f8/85/3a0ad54a61257c31496545ae6861515d640316f93681d1dd917e7be06634/murmurhash-1.0.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52a33a12ecedc432493692c207c784b06b6427ffaa897fc90b7a76e65846478d", size = 26900, upload-time = "2025-05-22T12:35:34.267Z" }, + { url = "https://files.pythonhosted.org/packages/d0/cd/6651de26744b50ff11c79f0c0d41244db039625de53c0467a7a52876b2d8/murmurhash-1.0.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:950403a7f0dc2d9c8d0710f07c296f2daab66299d9677d6c65d6b6fa2cb30aaa", size = 131367, upload-time = "2025-05-22T12:35:35.258Z" }, + { url = "https://files.pythonhosted.org/packages/50/6c/01ded95ddce33811c9766cae4ce32e0a54288da1d909ee2bcaa6ed13b9f1/murmurhash-1.0.13-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fde9fb5d2c106d86ff3ef2e4a9a69c2a8d23ba46e28c6b30034dc58421bc107b", size = 128943, upload-time = "2025-05-22T12:35:36.358Z" }, + { url = "https://files.pythonhosted.org/packages/ab/27/e539a9622d7bea3ae22706c1eb80d4af80f9dddd93b54d151955c2ae4011/murmurhash-1.0.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3aa55d62773745616e1ab19345dece122f6e6d09224f7be939cc5b4c513c8473", size = 129108, upload-time = "2025-05-22T12:35:37.864Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/18af5662e07d06839ad4db18ce026e6f8ef850d7b0ba92817b28dad28ba6/murmurhash-1.0.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:060dfef1b405cf02c450f182fb629f76ebe7f79657cced2db5054bc29b34938b", size = 129175, upload-time = "2025-05-22T12:35:38.928Z" }, + { url = "https://files.pythonhosted.org/packages/fe/8d/b01d3ee1f1cf3957250223b7c6ce35454f38fbf4abe236bf04a3f769341d/murmurhash-1.0.13-cp312-cp312-win_amd64.whl", hash = "sha256:a8e79627d44a6e20a6487effc30bfe1c74754c13d179106e68cc6d07941b022c", size = 24869, upload-time = "2025-05-22T12:35:40.035Z" }, ] [[package]] @@ -2387,9 +2164,9 @@ dependencies = [ { name = "nbformat" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/87/66/7ffd18d58eae90d5721f9f39212327695b749e23ad44b3881744eaf4d9e8/nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193", size = 62424 } +sdist = { url = "https://files.pythonhosted.org/packages/87/66/7ffd18d58eae90d5721f9f39212327695b749e23ad44b3881744eaf4d9e8/nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193", size = 62424, upload-time = "2024-12-19T10:32:27.164Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/6d/e7fa07f03a4a7b221d94b4d586edb754a9b0dc3c9e2c93353e9fa4e0d117/nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d", size = 25434 }, + { url = 
"https://files.pythonhosted.org/packages/34/6d/e7fa07f03a4a7b221d94b4d586edb754a9b0dc3c9e2c93353e9fa4e0d117/nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d", size = 25434, upload-time = "2024-12-19T10:32:24.139Z" }, ] [[package]] @@ -2412,9 +2189,9 @@ dependencies = [ { name = "pygments" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715 } +sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525 }, + { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" }, ] [[package]] @@ -2427,27 +2204,36 @@ dependencies = [ { name = "jupyter-core" }, { name = "traitlets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749 } +sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454 }, + { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" }, ] [[package]] name = "nest-asyncio" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418 } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = 
"sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, +] + +[[package]] +name = "nest-asyncio2" +version = "1.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/eb/ecf8bbf9d22a4e8f7be1628336fe0202da7660790053aa28abeb6c15eb14/nest_asyncio2-1.7.1.tar.gz", hash = "sha256:a1fe5bbbd20894dcceb1842322d74992c5834d5ab692af2c4f59a9a4fcf75fe8", size = 13797, upload-time = "2025-11-20T20:46:07.085Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195 }, + { url = "https://files.pythonhosted.org/packages/8c/48/c1f1ddcfd04bba60470235c2f83733ecff43ebe068dc7715aab60bc92ad8/nest_asyncio2-1.7.1-py3-none-any.whl", hash = "sha256:f83bc1744c3cfa7d47fd29431e5e168db6cb76eda1bb20108955c32f60d7eddf", size = 7504, upload-time = "2025-11-20T20:46:05.704Z" }, ] [[package]] name = "networkx" version = "3.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037", size = 2471065 } +sdist = { url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037", size = 2471065, upload-time = "2025-05-29T11:35:07.804Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec", size = 2034406 }, + { url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec", size = 2034406, upload-time = "2025-05-29T11:35:04.961Z" }, ] [[package]] @@ -2460,18 +2246,18 @@ dependencies = [ { name = "regex" }, { name = "tqdm" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691 } +sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691, upload-time = "2024-08-18T19:48:37.769Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442 }, + { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442, upload-time = "2024-08-18T19:48:21.909Z" }, ] [[package]] name = "nodeenv" version = "1.9.1" source = { registry = "https://pypi.org/simple" 
} -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, ] [[package]] @@ -2485,9 +2271,9 @@ dependencies = [ { name = "notebook-shim" }, { name = "tornado" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/09/f6f64ba156842ef68d3ea763fa171a2f7e7224f200a15dd4af5b83c34756/notebook-7.4.7.tar.gz", hash = "sha256:3f0a04027dfcee8a876de48fba13ab77ec8c12f72f848a222ed7f5081b9e342a", size = 13937702 } +sdist = { url = "https://files.pythonhosted.org/packages/04/09/f6f64ba156842ef68d3ea763fa171a2f7e7224f200a15dd4af5b83c34756/notebook-7.4.7.tar.gz", hash = "sha256:3f0a04027dfcee8a876de48fba13ab77ec8c12f72f848a222ed7f5081b9e342a", size = 13937702, upload-time = "2025-09-27T08:00:22.536Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/d7/06d13087e20388926e7423d2489e728d2e59f2453039cdb0574a7c070e76/notebook-7.4.7-py3-none-any.whl", hash = "sha256:362b7c95527f7dd3c4c84d410b782872fd9c734fb2524c11dd92758527b6eda6", size = 14342894 }, + { url = "https://files.pythonhosted.org/packages/6c/d7/06d13087e20388926e7423d2489e728d2e59f2453039cdb0574a7c070e76/notebook-7.4.7-py3-none-any.whl", hash = "sha256:362b7c95527f7dd3c4c84d410b782872fd9c734fb2524c11dd92758527b6eda6", size = 14342894, upload-time = "2025-09-27T08:00:18.496Z" }, ] [[package]] @@ -2497,68 +2283,46 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jupyter-server" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/54/d2/92fa3243712b9a3e8bafaf60aac366da1cada3639ca767ff4b5b3654ec28/notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb", size = 13167 } +sdist = { url = "https://files.pythonhosted.org/packages/54/d2/92fa3243712b9a3e8bafaf60aac366da1cada3639ca767ff4b5b3654ec28/notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb", size = 13167, upload-time = "2024-02-14T23:35:18.353Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/33/bd5b9137445ea4b680023eb0469b2bb969d61303dedb2aac6560ff3d14a1/notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef", size = 13307 }, + { url = "https://files.pythonhosted.org/packages/f9/33/bd5b9137445ea4b680023eb0469b2bb969d61303dedb2aac6560ff3d14a1/notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef", size = 13307, upload-time = "2024-02-14T23:35:16.286Z" }, ] [[package]] 
name = "numpy" version = "2.3.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/77/84dd1d2e34d7e2792a236ba180b5e8fcc1e3e414e761ce0253f63d7f572e/numpy-2.3.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de5672f4a7b200c15a4127042170a694d4df43c992948f5e1af57f0174beed10", size = 17034641 }, - { url = "https://files.pythonhosted.org/packages/2a/ea/25e26fa5837106cde46ae7d0b667e20f69cbbc0efd64cba8221411ab26ae/numpy-2.3.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:acfd89508504a19ed06ef963ad544ec6664518c863436306153e13e94605c218", size = 12528324 }, - { url = "https://files.pythonhosted.org/packages/4d/1a/e85f0eea4cf03d6a0228f5c0256b53f2df4bc794706e7df019fc622e47f1/numpy-2.3.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:ffe22d2b05504f786c867c8395de703937f934272eb67586817b46188b4ded6d", size = 5356872 }, - { url = "https://files.pythonhosted.org/packages/5c/bb/35ef04afd567f4c989c2060cde39211e4ac5357155c1833bcd1166055c61/numpy-2.3.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:872a5cf366aec6bb1147336480fef14c9164b154aeb6542327de4970282cd2f5", size = 6893148 }, - { url = "https://files.pythonhosted.org/packages/f2/2b/05bbeb06e2dff5eab512dfc678b1cc5ee94d8ac5956a0885c64b6b26252b/numpy-2.3.5-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3095bdb8dd297e5920b010e96134ed91d852d81d490e787beca7e35ae1d89cf7", size = 14557282 }, - { url = "https://files.pythonhosted.org/packages/65/fb/2b23769462b34398d9326081fad5655198fcf18966fcb1f1e49db44fbf31/numpy-2.3.5-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cba086a43d54ca804ce711b2a940b16e452807acebe7852ff327f1ecd49b0d4", size = 16897903 }, - { url = "https://files.pythonhosted.org/packages/ac/14/085f4cf05fc3f1e8aa95e85404e984ffca9b2275a5dc2b1aae18a67538b8/numpy-2.3.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6cf9b429b21df6b99f4dee7a1218b8b7ffbbe7df8764dc0bd60ce8a0708fed1e", size = 16341672 }, - { url = "https://files.pythonhosted.org/packages/6f/3b/1f73994904142b2aa290449b3bb99772477b5fd94d787093e4f24f5af763/numpy-2.3.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:396084a36abdb603546b119d96528c2f6263921c50df3c8fd7cb28873a237748", size = 18838896 }, - { url = "https://files.pythonhosted.org/packages/cd/b9/cf6649b2124f288309ffc353070792caf42ad69047dcc60da85ee85fea58/numpy-2.3.5-cp311-cp311-win32.whl", hash = "sha256:b0c7088a73aef3d687c4deef8452a3ac7c1be4e29ed8bf3b366c8111128ac60c", size = 6563608 }, - { url = "https://files.pythonhosted.org/packages/aa/44/9fe81ae1dcc29c531843852e2874080dc441338574ccc4306b39e2ff6e59/numpy-2.3.5-cp311-cp311-win_amd64.whl", hash = "sha256:a414504bef8945eae5f2d7cb7be2d4af77c5d1cb5e20b296c2c25b61dff2900c", size = 13078442 }, - { url = "https://files.pythonhosted.org/packages/6d/a7/f99a41553d2da82a20a2f22e93c94f928e4490bb447c9ff3c4ff230581d3/numpy-2.3.5-cp311-cp311-win_arm64.whl", hash = "sha256:0cd00b7b36e35398fa2d16af7b907b65304ef8bb4817a550e06e5012929830fa", size = 10458555 }, - { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873 }, - { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838 }, - { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378 }, - { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559 }, - { url = "https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702 }, - { url = "https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size = 16606086 }, - { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985 }, - { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976 }, - { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274 }, - { url = "https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922 }, - { url = "https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667 }, - { url = "https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251 }, - { url = "https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652 }, - { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = 
"sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172 }, - { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990 }, - { url = "https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902 }, - { url = "https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430 }, - { url = "https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551 }, - { url = "https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275 }, - { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637 }, - { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090 }, - { url = "https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710 }, - { url = "https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292 }, - { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897 }, - { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391 }, - { url = "https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275 }, - { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855 }, - { url = "https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359 }, - { url = "https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374 }, - { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size = 18594587 }, - { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940 }, - { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341 }, - { url = "https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507 }, - { url = "https://files.pythonhosted.org/packages/c6/65/f9dea8e109371ade9c782b4e4756a82edf9d3366bca495d84d79859a0b79/numpy-2.3.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f0963b55cdd70fad460fa4c1341f12f976bb26cb66021a5580329bd498988310", size = 16910689 }, - { url = "https://files.pythonhosted.org/packages/00/4f/edb00032a8fb92ec0a679d3830368355da91a69cab6f3e9c21b64d0bb986/numpy-2.3.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f4255143f5160d0de972d28c8f9665d882b5f61309d8362fdd3e103cf7bf010c", size = 12457053 }, - { url = "https://files.pythonhosted.org/packages/16/a4/e8a53b5abd500a63836a29ebe145fc1ab1f2eefe1cfe59276020373ae0aa/numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:a4b9159734b326535f4dd01d947f919c6eefd2d9827466a696c44ced82dfbc18", size = 5285635 }, - { url = "https://files.pythonhosted.org/packages/a3/2f/37eeb9014d9c8b3e9c55bc599c68263ca44fdbc12a93e45a21d1d56df737/numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2feae0d2c91d46e59fcd62784a3a83b3fb677fead592ce51b5a6fbb4f95965ff", size = 6801770 }, - { url = "https://files.pythonhosted.org/packages/7d/e4/68d2f474df2cb671b2b6c2986a02e520671295647dad82484cde80ca427b/numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ffac52f28a7849ad7576293c0cb7b9f08304e8f7d738a8cb8a90ec4c55a998eb", size = 14391768 }, - { url = "https://files.pythonhosted.org/packages/b8/50/94ccd8a2b141cb50651fddd4f6a48874acb3c91c8f0842b08a6afc4b0b21/numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63c0e9e7eea69588479ebf4a8a270d5ac22763cc5854e9a7eae952a3908103f7", size = 16729263 }, - { url = 
"https://files.pythonhosted.org/packages/2d/ee/346fa473e666fe14c52fcdd19ec2424157290a032d4c41f98127bfb31ac7/numpy-2.3.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f16417ec91f12f814b10bafe79ef77e70113a2f5f7018640e7425ff979253425", size = 12967213 }, +sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/77/84dd1d2e34d7e2792a236ba180b5e8fcc1e3e414e761ce0253f63d7f572e/numpy-2.3.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de5672f4a7b200c15a4127042170a694d4df43c992948f5e1af57f0174beed10", size = 17034641, upload-time = "2025-11-16T22:49:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/2a/ea/25e26fa5837106cde46ae7d0b667e20f69cbbc0efd64cba8221411ab26ae/numpy-2.3.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:acfd89508504a19ed06ef963ad544ec6664518c863436306153e13e94605c218", size = 12528324, upload-time = "2025-11-16T22:49:22.582Z" }, + { url = "https://files.pythonhosted.org/packages/4d/1a/e85f0eea4cf03d6a0228f5c0256b53f2df4bc794706e7df019fc622e47f1/numpy-2.3.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:ffe22d2b05504f786c867c8395de703937f934272eb67586817b46188b4ded6d", size = 5356872, upload-time = "2025-11-16T22:49:25.408Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bb/35ef04afd567f4c989c2060cde39211e4ac5357155c1833bcd1166055c61/numpy-2.3.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:872a5cf366aec6bb1147336480fef14c9164b154aeb6542327de4970282cd2f5", size = 6893148, upload-time = "2025-11-16T22:49:27.549Z" }, + { url = "https://files.pythonhosted.org/packages/f2/2b/05bbeb06e2dff5eab512dfc678b1cc5ee94d8ac5956a0885c64b6b26252b/numpy-2.3.5-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3095bdb8dd297e5920b010e96134ed91d852d81d490e787beca7e35ae1d89cf7", size = 14557282, upload-time = "2025-11-16T22:49:30.964Z" }, + { url = "https://files.pythonhosted.org/packages/65/fb/2b23769462b34398d9326081fad5655198fcf18966fcb1f1e49db44fbf31/numpy-2.3.5-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cba086a43d54ca804ce711b2a940b16e452807acebe7852ff327f1ecd49b0d4", size = 16897903, upload-time = "2025-11-16T22:49:34.191Z" }, + { url = "https://files.pythonhosted.org/packages/ac/14/085f4cf05fc3f1e8aa95e85404e984ffca9b2275a5dc2b1aae18a67538b8/numpy-2.3.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6cf9b429b21df6b99f4dee7a1218b8b7ffbbe7df8764dc0bd60ce8a0708fed1e", size = 16341672, upload-time = "2025-11-16T22:49:37.2Z" }, + { url = "https://files.pythonhosted.org/packages/6f/3b/1f73994904142b2aa290449b3bb99772477b5fd94d787093e4f24f5af763/numpy-2.3.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:396084a36abdb603546b119d96528c2f6263921c50df3c8fd7cb28873a237748", size = 18838896, upload-time = "2025-11-16T22:49:39.727Z" }, + { url = "https://files.pythonhosted.org/packages/cd/b9/cf6649b2124f288309ffc353070792caf42ad69047dcc60da85ee85fea58/numpy-2.3.5-cp311-cp311-win32.whl", hash = "sha256:b0c7088a73aef3d687c4deef8452a3ac7c1be4e29ed8bf3b366c8111128ac60c", size = 6563608, upload-time = "2025-11-16T22:49:42.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/44/9fe81ae1dcc29c531843852e2874080dc441338574ccc4306b39e2ff6e59/numpy-2.3.5-cp311-cp311-win_amd64.whl", hash = 
"sha256:a414504bef8945eae5f2d7cb7be2d4af77c5d1cb5e20b296c2c25b61dff2900c", size = 13078442, upload-time = "2025-11-16T22:49:43.99Z" }, + { url = "https://files.pythonhosted.org/packages/6d/a7/f99a41553d2da82a20a2f22e93c94f928e4490bb447c9ff3c4ff230581d3/numpy-2.3.5-cp311-cp311-win_arm64.whl", hash = "sha256:0cd00b7b36e35398fa2d16af7b907b65304ef8bb4817a550e06e5012929830fa", size = 10458555, upload-time = "2025-11-16T22:49:47.092Z" }, + { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873, upload-time = "2025-11-16T22:49:49.84Z" }, + { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838, upload-time = "2025-11-16T22:49:52.863Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378, upload-time = "2025-11-16T22:49:55.055Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559, upload-time = "2025-11-16T22:49:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702, upload-time = "2025-11-16T22:49:59.632Z" }, + { url = "https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size = 16606086, upload-time = "2025-11-16T22:50:02.127Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985, upload-time = "2025-11-16T22:50:04.536Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976, upload-time = "2025-11-16T22:50:07.557Z" }, + { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274, upload-time = "2025-11-16T22:50:10.746Z" }, + { url = "https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922, upload-time = 
"2025-11-16T22:50:12.811Z" }, + { url = "https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667, upload-time = "2025-11-16T22:50:16.16Z" }, + { url = "https://files.pythonhosted.org/packages/c6/65/f9dea8e109371ade9c782b4e4756a82edf9d3366bca495d84d79859a0b79/numpy-2.3.5-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f0963b55cdd70fad460fa4c1341f12f976bb26cb66021a5580329bd498988310", size = 16910689, upload-time = "2025-11-16T22:52:23.247Z" }, + { url = "https://files.pythonhosted.org/packages/00/4f/edb00032a8fb92ec0a679d3830368355da91a69cab6f3e9c21b64d0bb986/numpy-2.3.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f4255143f5160d0de972d28c8f9665d882b5f61309d8362fdd3e103cf7bf010c", size = 12457053, upload-time = "2025-11-16T22:52:26.367Z" }, + { url = "https://files.pythonhosted.org/packages/16/a4/e8a53b5abd500a63836a29ebe145fc1ab1f2eefe1cfe59276020373ae0aa/numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:a4b9159734b326535f4dd01d947f919c6eefd2d9827466a696c44ced82dfbc18", size = 5285635, upload-time = "2025-11-16T22:52:29.266Z" }, + { url = "https://files.pythonhosted.org/packages/a3/2f/37eeb9014d9c8b3e9c55bc599c68263ca44fdbc12a93e45a21d1d56df737/numpy-2.3.5-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2feae0d2c91d46e59fcd62784a3a83b3fb677fead592ce51b5a6fbb4f95965ff", size = 6801770, upload-time = "2025-11-16T22:52:31.421Z" }, + { url = "https://files.pythonhosted.org/packages/7d/e4/68d2f474df2cb671b2b6c2986a02e520671295647dad82484cde80ca427b/numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ffac52f28a7849ad7576293c0cb7b9f08304e8f7d738a8cb8a90ec4c55a998eb", size = 14391768, upload-time = "2025-11-16T22:52:33.593Z" }, + { url = "https://files.pythonhosted.org/packages/b8/50/94ccd8a2b141cb50651fddd4f6a48874acb3c91c8f0842b08a6afc4b0b21/numpy-2.3.5-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63c0e9e7eea69588479ebf4a8a270d5ac22763cc5854e9a7eae952a3908103f7", size = 16729263, upload-time = "2025-11-16T22:52:36.369Z" }, + { url = "https://files.pythonhosted.org/packages/2d/ee/346fa473e666fe14c52fcdd19ec2424157290a032d4c41f98127bfb31ac7/numpy-2.3.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f16417ec91f12f814b10bafe79ef77e70113a2f5f7018640e7425ff979253425", size = 12967213, upload-time = "2025-11-16T22:52:39.38Z" }, ] [[package]] @@ -2574,22 +2338,16 @@ dependencies = [ { name = "sympy" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/95/8d/2634e2959b34aa8a0037989f4229e9abcfa484e9c228f99633b3241768a6/onnxruntime-1.20.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:06bfbf02ca9ab5f28946e0f912a562a5f005301d0c419283dc57b3ed7969bb7b", size = 30998725 }, - { url = "https://files.pythonhosted.org/packages/a5/da/c44bf9bd66cd6d9018a921f053f28d819445c4d84b4dd4777271b0fe52a2/onnxruntime-1.20.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6243e34d74423bdd1edf0ae9596dd61023b260f546ee17d701723915f06a9f7", size = 11955227 }, - { url = "https://files.pythonhosted.org/packages/11/ac/4120dfb74c8e45cce1c664fc7f7ce010edd587ba67ac41489f7432eb9381/onnxruntime-1.20.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5eec64c0269dcdb8d9a9a53dc4d64f87b9e0c19801d9321246a53b7eb5a7d1bc", size = 13331703 
}, - { url = "https://files.pythonhosted.org/packages/12/f1/cefacac137f7bb7bfba57c50c478150fcd3c54aca72762ac2c05ce0532c1/onnxruntime-1.20.1-cp311-cp311-win32.whl", hash = "sha256:a19bc6e8c70e2485a1725b3d517a2319603acc14c1f1a017dda0afe6d4665b41", size = 9813977 }, - { url = "https://files.pythonhosted.org/packages/2c/2d/2d4d202c0bcfb3a4cc2b171abb9328672d7f91d7af9ea52572722c6d8d96/onnxruntime-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:8508887eb1c5f9537a4071768723ec7c30c28eb2518a00d0adcd32c89dea3221", size = 11329895 }, - { url = "https://files.pythonhosted.org/packages/e5/39/9335e0874f68f7d27103cbffc0e235e32e26759202df6085716375c078bb/onnxruntime-1.20.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:22b0655e2bf4f2161d52706e31f517a0e54939dc393e92577df51808a7edc8c9", size = 31007580 }, - { url = "https://files.pythonhosted.org/packages/c5/9d/a42a84e10f1744dd27c6f2f9280cc3fb98f869dd19b7cd042e391ee2ab61/onnxruntime-1.20.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f56e898815963d6dc4ee1c35fc6c36506466eff6d16f3cb9848cea4e8c8172", size = 11952833 }, - { url = "https://files.pythonhosted.org/packages/47/42/2f71f5680834688a9c81becbe5c5bb996fd33eaed5c66ae0606c3b1d6a02/onnxruntime-1.20.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb71a814f66517a65628c9e4a2bb530a6edd2cd5d87ffa0af0f6f773a027d99e", size = 13333903 }, - { url = "https://files.pythonhosted.org/packages/c8/f1/aabfdf91d013320aa2fc46cf43c88ca0182860ff15df872b4552254a9680/onnxruntime-1.20.1-cp312-cp312-win32.whl", hash = "sha256:bd386cc9ee5f686ee8a75ba74037750aca55183085bf1941da8efcfe12d5b120", size = 9814562 }, - { url = "https://files.pythonhosted.org/packages/dd/80/76979e0b744307d488c79e41051117634b956612cc731f1028eb17ee7294/onnxruntime-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:19c2d843eb074f385e8bbb753a40df780511061a63f9def1b216bf53860223fb", size = 11331482 }, - { url = "https://files.pythonhosted.org/packages/f7/71/c5d980ac4189589267a06f758bd6c5667d07e55656bed6c6c0580733ad07/onnxruntime-1.20.1-cp313-cp313-macosx_13_0_universal2.whl", hash = "sha256:cc01437a32d0042b606f462245c8bbae269e5442797f6213e36ce61d5abdd8cc", size = 31007574 }, - { url = "https://files.pythonhosted.org/packages/81/0d/13bbd9489be2a6944f4a940084bfe388f1100472f38c07080a46fbd4ab96/onnxruntime-1.20.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb44b08e017a648924dbe91b82d89b0c105b1adcfe31e90d1dc06b8677ad37be", size = 11951459 }, - { url = "https://files.pythonhosted.org/packages/c0/ea/4454ae122874fd52bbb8a961262de81c5f932edeb1b72217f594c700d6ef/onnxruntime-1.20.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bda6aebdf7917c1d811f21d41633df00c58aff2bef2f598f69289c1f1dabc4b3", size = 13331620 }, - { url = "https://files.pythonhosted.org/packages/d8/e0/50db43188ca1c945decaa8fc2a024c33446d31afed40149897d4f9de505f/onnxruntime-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:d30367df7e70f1d9fc5a6a68106f5961686d39b54d3221f760085524e8d38e16", size = 11331758 }, - { url = "https://files.pythonhosted.org/packages/d8/55/3821c5fd60b52a6c82a00bba18531793c93c4addfe64fbf061e235c5617a/onnxruntime-1.20.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9158465745423b2b5d97ed25aa7740c7d38d2993ee2e5c3bfacb0c4145c49d8", size = 11950342 }, - { url = 
"https://files.pythonhosted.org/packages/14/56/fd990ca222cef4f9f4a9400567b9a15b220dee2eafffb16b2adbc55c8281/onnxruntime-1.20.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0df6f2df83d61f46e842dbcde610ede27218947c33e994545a22333491e72a3b", size = 13337040 }, + { url = "https://files.pythonhosted.org/packages/95/8d/2634e2959b34aa8a0037989f4229e9abcfa484e9c228f99633b3241768a6/onnxruntime-1.20.1-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:06bfbf02ca9ab5f28946e0f912a562a5f005301d0c419283dc57b3ed7969bb7b", size = 30998725, upload-time = "2024-11-21T00:48:51.013Z" }, + { url = "https://files.pythonhosted.org/packages/a5/da/c44bf9bd66cd6d9018a921f053f28d819445c4d84b4dd4777271b0fe52a2/onnxruntime-1.20.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f6243e34d74423bdd1edf0ae9596dd61023b260f546ee17d701723915f06a9f7", size = 11955227, upload-time = "2024-11-21T00:48:54.556Z" }, + { url = "https://files.pythonhosted.org/packages/11/ac/4120dfb74c8e45cce1c664fc7f7ce010edd587ba67ac41489f7432eb9381/onnxruntime-1.20.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5eec64c0269dcdb8d9a9a53dc4d64f87b9e0c19801d9321246a53b7eb5a7d1bc", size = 13331703, upload-time = "2024-11-21T00:48:57.97Z" }, + { url = "https://files.pythonhosted.org/packages/12/f1/cefacac137f7bb7bfba57c50c478150fcd3c54aca72762ac2c05ce0532c1/onnxruntime-1.20.1-cp311-cp311-win32.whl", hash = "sha256:a19bc6e8c70e2485a1725b3d517a2319603acc14c1f1a017dda0afe6d4665b41", size = 9813977, upload-time = "2024-11-21T00:49:00.519Z" }, + { url = "https://files.pythonhosted.org/packages/2c/2d/2d4d202c0bcfb3a4cc2b171abb9328672d7f91d7af9ea52572722c6d8d96/onnxruntime-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:8508887eb1c5f9537a4071768723ec7c30c28eb2518a00d0adcd32c89dea3221", size = 11329895, upload-time = "2024-11-21T00:49:03.845Z" }, + { url = "https://files.pythonhosted.org/packages/e5/39/9335e0874f68f7d27103cbffc0e235e32e26759202df6085716375c078bb/onnxruntime-1.20.1-cp312-cp312-macosx_13_0_universal2.whl", hash = "sha256:22b0655e2bf4f2161d52706e31f517a0e54939dc393e92577df51808a7edc8c9", size = 31007580, upload-time = "2024-11-21T00:49:07.029Z" }, + { url = "https://files.pythonhosted.org/packages/c5/9d/a42a84e10f1744dd27c6f2f9280cc3fb98f869dd19b7cd042e391ee2ab61/onnxruntime-1.20.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f56e898815963d6dc4ee1c35fc6c36506466eff6d16f3cb9848cea4e8c8172", size = 11952833, upload-time = "2024-11-21T00:49:10.563Z" }, + { url = "https://files.pythonhosted.org/packages/47/42/2f71f5680834688a9c81becbe5c5bb996fd33eaed5c66ae0606c3b1d6a02/onnxruntime-1.20.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bb71a814f66517a65628c9e4a2bb530a6edd2cd5d87ffa0af0f6f773a027d99e", size = 13333903, upload-time = "2024-11-21T00:49:12.984Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f1/aabfdf91d013320aa2fc46cf43c88ca0182860ff15df872b4552254a9680/onnxruntime-1.20.1-cp312-cp312-win32.whl", hash = "sha256:bd386cc9ee5f686ee8a75ba74037750aca55183085bf1941da8efcfe12d5b120", size = 9814562, upload-time = "2024-11-21T00:49:15.453Z" }, + { url = "https://files.pythonhosted.org/packages/dd/80/76979e0b744307d488c79e41051117634b956612cc731f1028eb17ee7294/onnxruntime-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:19c2d843eb074f385e8bbb753a40df780511061a63f9def1b216bf53860223fb", size = 11331482, upload-time = "2024-11-21T00:49:19.412Z" }, ] [[package]] @@ -2606,36 
+2364,36 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/a1/a303104dc55fc546a3f6914c842d3da471c64eec92043aef8f652eb6c524/openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869", size = 564133 } +sdist = { url = "https://files.pythonhosted.org/packages/c6/a1/a303104dc55fc546a3f6914c842d3da471c64eec92043aef8f652eb6c524/openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869", size = 564133, upload-time = "2025-09-24T13:00:53.075Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/2a/7dd3d207ec669cacc1f186fd856a0f61dbc255d24f6fdc1a6715d6051b0f/openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315", size = 948627 }, + { url = "https://files.pythonhosted.org/packages/1d/2a/7dd3d207ec669cacc1f186fd856a0f61dbc255d24f6fdc1a6715d6051b0f/openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315", size = 948627, upload-time = "2025-09-24T13:00:50.754Z" }, ] [[package]] name = "overrides" version = "7.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812 } +sdist = { url = "https://files.pythonhosted.org/packages/36/86/b585f53236dec60aba864e050778b25045f857e17f6e5ea0ae95fe80edd2/overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a", size = 22812, upload-time = "2024-01-27T21:01:33.423Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832 }, + { url = "https://files.pythonhosted.org/packages/2c/ab/fc8290c6a4c722e5514d80f62b2dc4c4df1a68a41d1364e625c35990fcf3/overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49", size = 17832, upload-time = "2024-01-27T21:01:31.393Z" }, ] [[package]] name = "packaging" version = "25.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, 
] [[package]] name = "paginate" version = "0.5.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ec/46/68dde5b6bc00c1296ec6466ab27dddede6aec9af1b99090e1107091b3b84/paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945", size = 19252 } +sdist = { url = "https://files.pythonhosted.org/packages/ec/46/68dde5b6bc00c1296ec6466ab27dddede6aec9af1b99090e1107091b3b84/paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945", size = 19252, upload-time = "2024-08-25T14:17:24.139Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/90/96/04b8e52da071d28f5e21a805b19cb9390aa17a47462ac87f5e2696b9566d/paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591", size = 13746 }, + { url = "https://files.pythonhosted.org/packages/90/96/04b8e52da071d28f5e21a805b19cb9390aa17a47462ac87f5e2696b9566d/paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591", size = 13746, upload-time = "2024-08-25T14:17:22.55Z" }, ] [[package]] @@ -2648,35 +2406,22 @@ dependencies = [ { name = "pytz" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790 }, - { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831 }, - { url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267 }, - { url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281 }, - { url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453 }, - { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361 }, - { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702 }, - { url = 
"https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846 }, - { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618 }, - { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212 }, - { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693 }, - { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002 }, - { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971 }, - { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722 }, - { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671 }, - { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807 }, - { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872 }, - { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371 }, - { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333 }, - { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120 }, - { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991 }, - { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227 }, - { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056 }, - { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189 }, - { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912 }, - { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160 }, - { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233 }, +sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" }, + { url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281, upload-time = "2025-09-29T23:18:56.834Z" }, + { url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" }, + { url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" }, + { url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, + { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, + { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, + { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, + { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, + { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, ] [[package]] @@ -2687,45 +2432,45 @@ dependencies = [ { name = "numpy" }, { name = "types-pytz" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1b/3b/32be58a125db39d0b5f62cc93795f32b5bb2915bd5c4a46f0e35171985e2/pandas_stubs-2.3.2.250926.tar.gz", 
hash = "sha256:c64b9932760ceefb96a3222b953e6a251321a9832a28548be6506df473a66406", size = 102147 } +sdist = { url = "https://files.pythonhosted.org/packages/1b/3b/32be58a125db39d0b5f62cc93795f32b5bb2915bd5c4a46f0e35171985e2/pandas_stubs-2.3.2.250926.tar.gz", hash = "sha256:c64b9932760ceefb96a3222b953e6a251321a9832a28548be6506df473a66406", size = 102147, upload-time = "2025-09-26T19:50:39.522Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/96/1e4a035eaf4dce9610aac6e43026d0c6baa05773daf6d21e635a4fe19e21/pandas_stubs-2.3.2.250926-py3-none-any.whl", hash = "sha256:81121818453dcfe00f45c852f4dceee043640b813830f6e7bd084a4ef7ff7270", size = 159995 }, + { url = "https://files.pythonhosted.org/packages/40/96/1e4a035eaf4dce9610aac6e43026d0c6baa05773daf6d21e635a4fe19e21/pandas_stubs-2.3.2.250926-py3-none-any.whl", hash = "sha256:81121818453dcfe00f45c852f4dceee043640b813830f6e7bd084a4ef7ff7270", size = 159995, upload-time = "2025-09-26T19:50:38.241Z" }, ] [[package]] name = "pandocfilters" version = "1.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454 } +sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454, upload-time = "2024-01-18T20:08:13.726Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663 }, + { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663, upload-time = "2024-01-18T20:08:11.28Z" }, ] [[package]] name = "parso" version = "0.8.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205 } +sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668 }, + { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" }, ] [[package]] name = "pastel" version = "0.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/76/f1/4594f5e0fcddb6953e5b8fe00da8c317b8b41b547e2b3ae2da7512943c62/pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d", size = 7555 } +sdist = { url = "https://files.pythonhosted.org/packages/76/f1/4594f5e0fcddb6953e5b8fe00da8c317b8b41b547e2b3ae2da7512943c62/pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d", size = 7555, upload-time = "2020-09-16T19:21:12.43Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/18/a8444036c6dd65ba3624c63b734d3ba95ba63ace513078e1580590075d21/pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364", size = 5955 }, + { url = "https://files.pythonhosted.org/packages/aa/18/a8444036c6dd65ba3624c63b734d3ba95ba63ace513078e1580590075d21/pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364", size = 5955, upload-time = "2020-09-16T19:21:11.409Z" }, ] [[package]] name = "pathspec" version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, ] [[package]] @@ -2735,27 +2480,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "ptyprocess", marker = "sys_platform != 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450 } +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772 }, + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, ] [[package]] name = "platformdirs" 
version = "4.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632 } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651 }, + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, ] [[package]] name = "pluggy" version = "1.6.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] [[package]] @@ -2766,9 +2511,9 @@ dependencies = [ { name = "pastel" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a5/f2/273fe54a78dc5c6c8dd63db71f5a6ceb95e4648516b5aeaeff4bde804e44/poethepoet-0.37.0.tar.gz", hash = "sha256:73edf458707c674a079baa46802e21455bda3a7f82a408e58c31b9f4fe8e933d", size = 68570 } +sdist = { url = "https://files.pythonhosted.org/packages/a5/f2/273fe54a78dc5c6c8dd63db71f5a6ceb95e4648516b5aeaeff4bde804e44/poethepoet-0.37.0.tar.gz", hash = "sha256:73edf458707c674a079baa46802e21455bda3a7f82a408e58c31b9f4fe8e933d", size = 68570, upload-time = "2025-08-11T18:00:29.103Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/1b/5337af1a6a478d25a3e3c56b9b4b42b0a160314e02f4a0498d5322c8dac4/poethepoet-0.37.0-py3-none-any.whl", hash = "sha256:861790276315abcc8df1b4bd60e28c3d48a06db273edd3092f3c94e1a46e5e22", size = 90062 }, + { url = "https://files.pythonhosted.org/packages/92/1b/5337af1a6a478d25a3e3c56b9b4b42b0a160314e02f4a0498d5322c8dac4/poethepoet-0.37.0-py3-none-any.whl", hash = "sha256:861790276315abcc8df1b4bd60e28c3d48a06db273edd3092f3c94e1a46e5e22", size = 90062, upload-time = "2025-08-11T18:00:27.595Z" }, ] 
[[package]] @@ -2779,38 +2524,31 @@ dependencies = [ { name = "cymem" }, { name = "murmurhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/3a/db814f67a05b6d7f9c15d38edef5ec9b21415710705b393883de92aee5ef/preshed-3.0.10.tar.gz", hash = "sha256:5a5c8e685e941f4ffec97f1fbf32694b8107858891a4bc34107fac981d8296ff", size = 15039 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/08/99/c3709638f687da339504d1daeca48604cadb338bf3556a1484d1f0cd95e6/preshed-3.0.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d96c4fe2b41c1cdcc8c4fc1fdb10f922a6095c0430a3ebe361fe62c78902d068", size = 131486 }, - { url = "https://files.pythonhosted.org/packages/e0/27/0fd36b63caa8bbf57b31a121d9565d385bbd7521771d4eb93e17d326873d/preshed-3.0.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cb01ea930b96f3301526a2ab26f41347d07555e4378c4144c6b7645074f2ebb0", size = 127938 }, - { url = "https://files.pythonhosted.org/packages/90/54/6a876d9cc8d401a9c1fb6bb8ca5a31b3664d0bcb888a9016258a1ae17344/preshed-3.0.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dd1f0a7b7d150e229d073fd4fe94f72610cae992e907cee74687c4695873a98", size = 842263 }, - { url = "https://files.pythonhosted.org/packages/1c/7d/ff19f74d15ee587905bafa3582883cfe2f72b574e6d691ee64dc690dc276/preshed-3.0.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fd7b350c280137f324cd447afbf6ba9a849af0e8898850046ac6f34010e08bd", size = 842913 }, - { url = "https://files.pythonhosted.org/packages/f1/3a/1c345a26463345557705b61965e1e0a732cc0e9c6dfd4787845dbfa50b4a/preshed-3.0.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf6a5fdc89ad06079aa6ee63621e417d4f4cf2a3d8b63c72728baad35a9ff641", size = 820548 }, - { url = "https://files.pythonhosted.org/packages/7f/6b/71f25e2b7a23dba168f43edfae0bb508552dbef89114ce65c73f2ea7172f/preshed-3.0.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b4c29a7bd66985808ad181c9ad05205a6aa7400cd0f98426acd7bc86588b93f8", size = 840379 }, - { url = "https://files.pythonhosted.org/packages/3a/86/d8f32b0b31a36ee8770a9b1a95321430e364cd0ba4bfebb7348aed2f198d/preshed-3.0.10-cp311-cp311-win_amd64.whl", hash = "sha256:1367c1fd6f44296305315d4e1c3fe3171787d4d01c1008a76bc9466bd79c3249", size = 117655 }, - { url = "https://files.pythonhosted.org/packages/c3/14/322a4f58bc25991a87f216acb1351800739b0794185d27508ee86c35f382/preshed-3.0.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6e9c46933d55c8898c8f7a6019a8062cd87ef257b075ada2dd5d1e57810189ea", size = 131367 }, - { url = "https://files.pythonhosted.org/packages/38/80/67507653c35620cace913f617df6d6f658b87e8da83087b851557d65dd86/preshed-3.0.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c4ebc4f8ef0114d55f2ffdce4965378129c7453d0203664aeeb03055572d9e4", size = 126535 }, - { url = "https://files.pythonhosted.org/packages/db/b1/ab4f811aeaf20af0fa47148c1c54b62d7e8120d59025bd0a3f773bb67725/preshed-3.0.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab5ab4c6dfd3746fb4328e7fbeb2a0544416b872db02903bfac18e6f5cd412f", size = 864907 }, - { url = "https://files.pythonhosted.org/packages/fb/db/fe37c1f99cfb26805dd89381ddd54901307feceb267332eaaca228e9f9c1/preshed-3.0.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40586fd96ae3974c552a7cd78781b6844ecb1559ee7556586f487058cf13dd96", size = 869329 }, - { url = 
"https://files.pythonhosted.org/packages/a7/fd/efb6a6233d1cd969966f3f65bdd8e662579c3d83114e5c356cec1927b1f7/preshed-3.0.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a606c24cda931306b98e0edfafed3309bffcf8d6ecfe07804db26024c4f03cd6", size = 846829 }, - { url = "https://files.pythonhosted.org/packages/14/49/0e4ce5db3bf86b081abb08a404fb37b7c2dbfd7a73ec6c0bc71b650307eb/preshed-3.0.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:394015566f9354738be903447039e8dbc6d93ba5adf091af694eb03c4e726b1e", size = 874008 }, - { url = "https://files.pythonhosted.org/packages/6f/17/76d6593fc2d055d4e413b68a8c87b70aa9b7697d4972cb8062559edcf6e9/preshed-3.0.10-cp312-cp312-win_amd64.whl", hash = "sha256:fd7e38225937e580420c84d1996dde9b4f726aacd9405093455c3a2fa60fede5", size = 116701 }, - { url = "https://files.pythonhosted.org/packages/bf/5e/87671bc58c4f6c8cf0a5601ccd74b8bb50281ff28aa4ab3e3cad5cd9d06a/preshed-3.0.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:23e6e0581a517597f3f76bc24a4cdb0ba5509933d4f61c34fca49649dd71edf9", size = 129184 }, - { url = "https://files.pythonhosted.org/packages/92/69/b3969a3c95778def5bf5126484a1f7d2ad324d1040077f55f56e027d8ea4/preshed-3.0.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:574e6d6056981540310ff181b47a2912f4bddc91bcace3c7a9c6726eafda24ca", size = 124258 }, - { url = "https://files.pythonhosted.org/packages/32/df/6e828ec4565bf33bd4803a3eb3b1102830b739143e5d6c132bf7181a58ec/preshed-3.0.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd658dd73e853d1bb5597976a407feafa681b9d6155bc9bc7b4c2acc2a6ee96", size = 825445 }, - { url = "https://files.pythonhosted.org/packages/05/3d/478b585f304920e51f328c9231e22f30dc64baa68e079e08a46ab72be738/preshed-3.0.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b95396046328ffb461a68859ce2141aca4815b8624167832d28ced70d541626", size = 831690 }, - { url = "https://files.pythonhosted.org/packages/c3/65/938f21f77227e8d398d46fb10b9d1b3467be859468ce8db138fc3d50589c/preshed-3.0.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3e6728b2028bbe79565eb6cf676b5bae5ce1f9cc56e4bf99bb28ce576f88054d", size = 808593 }, - { url = "https://files.pythonhosted.org/packages/6c/1c/2a3961fc88bc72300ff7e4ca54689bda90d2d77cc994167cc09a310480b6/preshed-3.0.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c4ef96cb28bf5f08de9c070143113e168efccbb68fd4961e7d445f734c051a97", size = 837333 }, - { url = "https://files.pythonhosted.org/packages/fa/8c/d3e30f80b2ef21f267f09f0b7d18995adccc928ede5b73ea3fe54e1303f4/preshed-3.0.10-cp313-cp313-win_amd64.whl", hash = "sha256:97e0e2edfd25a7dfba799b49b3c5cc248ad0318a76edd9d5fd2c82aa3d5c64ed", size = 115769 }, +sdist = { url = "https://files.pythonhosted.org/packages/4d/3a/db814f67a05b6d7f9c15d38edef5ec9b21415710705b393883de92aee5ef/preshed-3.0.10.tar.gz", hash = "sha256:5a5c8e685e941f4ffec97f1fbf32694b8107858891a4bc34107fac981d8296ff", size = 15039, upload-time = "2025-05-26T15:18:33.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/99/c3709638f687da339504d1daeca48604cadb338bf3556a1484d1f0cd95e6/preshed-3.0.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d96c4fe2b41c1cdcc8c4fc1fdb10f922a6095c0430a3ebe361fe62c78902d068", size = 131486, upload-time = "2025-05-26T15:17:52.231Z" }, + { url = "https://files.pythonhosted.org/packages/e0/27/0fd36b63caa8bbf57b31a121d9565d385bbd7521771d4eb93e17d326873d/preshed-3.0.10-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:cb01ea930b96f3301526a2ab26f41347d07555e4378c4144c6b7645074f2ebb0", size = 127938, upload-time = "2025-05-26T15:17:54.19Z" }, + { url = "https://files.pythonhosted.org/packages/90/54/6a876d9cc8d401a9c1fb6bb8ca5a31b3664d0bcb888a9016258a1ae17344/preshed-3.0.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dd1f0a7b7d150e229d073fd4fe94f72610cae992e907cee74687c4695873a98", size = 842263, upload-time = "2025-05-26T15:17:55.398Z" }, + { url = "https://files.pythonhosted.org/packages/1c/7d/ff19f74d15ee587905bafa3582883cfe2f72b574e6d691ee64dc690dc276/preshed-3.0.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fd7b350c280137f324cd447afbf6ba9a849af0e8898850046ac6f34010e08bd", size = 842913, upload-time = "2025-05-26T15:17:56.687Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3a/1c345a26463345557705b61965e1e0a732cc0e9c6dfd4787845dbfa50b4a/preshed-3.0.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf6a5fdc89ad06079aa6ee63621e417d4f4cf2a3d8b63c72728baad35a9ff641", size = 820548, upload-time = "2025-05-26T15:17:58.057Z" }, + { url = "https://files.pythonhosted.org/packages/7f/6b/71f25e2b7a23dba168f43edfae0bb508552dbef89114ce65c73f2ea7172f/preshed-3.0.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b4c29a7bd66985808ad181c9ad05205a6aa7400cd0f98426acd7bc86588b93f8", size = 840379, upload-time = "2025-05-26T15:17:59.565Z" }, + { url = "https://files.pythonhosted.org/packages/3a/86/d8f32b0b31a36ee8770a9b1a95321430e364cd0ba4bfebb7348aed2f198d/preshed-3.0.10-cp311-cp311-win_amd64.whl", hash = "sha256:1367c1fd6f44296305315d4e1c3fe3171787d4d01c1008a76bc9466bd79c3249", size = 117655, upload-time = "2025-05-26T15:18:00.836Z" }, + { url = "https://files.pythonhosted.org/packages/c3/14/322a4f58bc25991a87f216acb1351800739b0794185d27508ee86c35f382/preshed-3.0.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6e9c46933d55c8898c8f7a6019a8062cd87ef257b075ada2dd5d1e57810189ea", size = 131367, upload-time = "2025-05-26T15:18:02.408Z" }, + { url = "https://files.pythonhosted.org/packages/38/80/67507653c35620cace913f617df6d6f658b87e8da83087b851557d65dd86/preshed-3.0.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c4ebc4f8ef0114d55f2ffdce4965378129c7453d0203664aeeb03055572d9e4", size = 126535, upload-time = "2025-05-26T15:18:03.589Z" }, + { url = "https://files.pythonhosted.org/packages/db/b1/ab4f811aeaf20af0fa47148c1c54b62d7e8120d59025bd0a3f773bb67725/preshed-3.0.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab5ab4c6dfd3746fb4328e7fbeb2a0544416b872db02903bfac18e6f5cd412f", size = 864907, upload-time = "2025-05-26T15:18:04.794Z" }, + { url = "https://files.pythonhosted.org/packages/fb/db/fe37c1f99cfb26805dd89381ddd54901307feceb267332eaaca228e9f9c1/preshed-3.0.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40586fd96ae3974c552a7cd78781b6844ecb1559ee7556586f487058cf13dd96", size = 869329, upload-time = "2025-05-26T15:18:06.353Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fd/efb6a6233d1cd969966f3f65bdd8e662579c3d83114e5c356cec1927b1f7/preshed-3.0.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a606c24cda931306b98e0edfafed3309bffcf8d6ecfe07804db26024c4f03cd6", size = 846829, upload-time = "2025-05-26T15:18:07.716Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/49/0e4ce5db3bf86b081abb08a404fb37b7c2dbfd7a73ec6c0bc71b650307eb/preshed-3.0.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:394015566f9354738be903447039e8dbc6d93ba5adf091af694eb03c4e726b1e", size = 874008, upload-time = "2025-05-26T15:18:09.364Z" }, + { url = "https://files.pythonhosted.org/packages/6f/17/76d6593fc2d055d4e413b68a8c87b70aa9b7697d4972cb8062559edcf6e9/preshed-3.0.10-cp312-cp312-win_amd64.whl", hash = "sha256:fd7e38225937e580420c84d1996dde9b4f726aacd9405093455c3a2fa60fede5", size = 116701, upload-time = "2025-05-26T15:18:11.905Z" }, ] [[package]] name = "prometheus-client" version = "0.23.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481 } +sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145 }, + { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, ] [[package]] @@ -2820,172 +2558,128 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wcwidth" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198 } +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431 }, + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, ] [[package]] name = "propcache" version = "0.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208 }, - { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777 }, - { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647 }, - { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929 }, - { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778 }, - { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144 }, - { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030 }, - { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252 }, - { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064 }, - { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429 }, - { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727 }, - { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097 }, - { url = 
"https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084 }, - { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637 }, - { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064 }, - { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061 }, - { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037 }, - { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324 }, - { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505 }, - { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242 }, - { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474 }, - { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575 }, - { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736 }, - { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019 }, - { url = 
"https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376 }, - { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988 }, - { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615 }, - { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066 }, - { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655 }, - { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789 }, - { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750 }, - { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780 }, - { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308 }, - { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182 }, - { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215 }, - { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112 }, - { url = 
"https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442 }, - { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398 }, - { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920 }, - { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748 }, - { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877 }, - { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437 }, - { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586 }, - { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790 }, - { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158 }, - { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451 }, - { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374 }, - { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396 }, - { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950 }, - { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856 }, - { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420 }, - { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254 }, - { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205 }, - { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873 }, - { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739 }, - { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514 }, - { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781 }, - { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396 }, - { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897 }, - { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789 }, - { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305 }, +sdist = { url = 
"https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, 
upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time 
= "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] [[package]] name = "protobuf" version = "6.33.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296 } +sdist = { url = 
"https://files.pythonhosted.org/packages/34/44/e49ecff446afeec9d1a66d6bbf9adc21e3c7cea7803a920ca3773379d4f6/protobuf-6.33.2.tar.gz", hash = "sha256:56dc370c91fbb8ac85bc13582c9e373569668a290aa2e66a590c2a0d35ddb9e4", size = 444296, upload-time = "2025-12-06T00:17:53.311Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603 }, - { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930 }, - { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621 }, - { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460 }, - { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168 }, - { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270 }, - { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501 }, + { url = "https://files.pythonhosted.org/packages/bc/91/1e3a34881a88697a7354ffd177e8746e97a722e5e8db101544b47e84afb1/protobuf-6.33.2-cp310-abi3-win32.whl", hash = "sha256:87eb388bd2d0f78febd8f4c8779c79247b26a5befad525008e49a6955787ff3d", size = 425603, upload-time = "2025-12-06T00:17:41.114Z" }, + { url = "https://files.pythonhosted.org/packages/64/20/4d50191997e917ae13ad0a235c8b42d8c1ab9c3e6fd455ca16d416944355/protobuf-6.33.2-cp310-abi3-win_amd64.whl", hash = "sha256:fc2a0e8b05b180e5fc0dd1559fe8ebdae21a27e81ac77728fb6c42b12c7419b4", size = 436930, upload-time = "2025-12-06T00:17:43.278Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ca/7e485da88ba45c920fb3f50ae78de29ab925d9e54ef0de678306abfbb497/protobuf-6.33.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d9b19771ca75935b3a4422957bc518b0cecb978b31d1dd12037b088f6bcc0e43", size = 427621, upload-time = "2025-12-06T00:17:44.445Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4f/f743761e41d3b2b2566748eb76bbff2b43e14d5fcab694f494a16458b05f/protobuf-6.33.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5d3b5625192214066d99b2b605f5783483575656784de223f00a8d00754fc0e", size = 324460, upload-time = "2025-12-06T00:17:45.678Z" }, + { url = "https://files.pythonhosted.org/packages/b1/fa/26468d00a92824020f6f2090d827078c09c9c587e34cbfd2d0c7911221f8/protobuf-6.33.2-cp39-abi3-manylinux2014_s390x.whl", hash = 
"sha256:8cd7640aee0b7828b6d03ae518b5b4806fdfc1afe8de82f79c3454f8aef29872", size = 339168, upload-time = "2025-12-06T00:17:46.813Z" }, + { url = "https://files.pythonhosted.org/packages/56/13/333b8f421738f149d4fe5e49553bc2a2ab75235486259f689b4b91f96cec/protobuf-6.33.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:1f8017c48c07ec5859106533b682260ba3d7c5567b1ca1f24297ce03384d1b4f", size = 323270, upload-time = "2025-12-06T00:17:48.253Z" }, + { url = "https://files.pythonhosted.org/packages/0e/15/4f02896cc3df04fc465010a4c6a0cd89810f54617a32a70ef531ed75d61c/protobuf-6.33.2-py3-none-any.whl", hash = "sha256:7636aad9bb01768870266de5dc009de2d1b936771b38a793f73cbbf279c91c5c", size = 170501, upload-time = "2025-12-06T00:17:52.211Z" }, ] [[package]] name = "psutil" version = "7.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/31/4723d756b59344b643542936e37a31d1d3204bcdc42a7daa8ee9eb06fb50/psutil-7.1.0.tar.gz", hash = "sha256:655708b3c069387c8b77b072fc429a57d0e214221d01c0a772df7dfedcb3bcd2", size = 497660 } +sdist = { url = "https://files.pythonhosted.org/packages/b3/31/4723d756b59344b643542936e37a31d1d3204bcdc42a7daa8ee9eb06fb50/psutil-7.1.0.tar.gz", hash = "sha256:655708b3c069387c8b77b072fc429a57d0e214221d01c0a772df7dfedcb3bcd2", size = 497660, upload-time = "2025-09-17T20:14:52.902Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/62/ce4051019ee20ce0ed74432dd73a5bb087a6704284a470bb8adff69a0932/psutil-7.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76168cef4397494250e9f4e73eb3752b146de1dd950040b29186d0cce1d5ca13", size = 245242 }, - { url = "https://files.pythonhosted.org/packages/38/61/f76959fba841bf5b61123fbf4b650886dc4094c6858008b5bf73d9057216/psutil-7.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:5d007560c8c372efdff9e4579c2846d71de737e4605f611437255e81efcca2c5", size = 246682 }, - { url = "https://files.pythonhosted.org/packages/88/7a/37c99d2e77ec30d63398ffa6a660450b8a62517cabe44b3e9bae97696e8d/psutil-7.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22e4454970b32472ce7deaa45d045b34d3648ce478e26a04c7e858a0a6e75ff3", size = 287994 }, - { url = "https://files.pythonhosted.org/packages/9d/de/04c8c61232f7244aa0a4b9a9fbd63a89d5aeaf94b2fc9d1d16e2faa5cbb0/psutil-7.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70e113920d51e89f212dd7be06219a9b88014e63a4cec69b684c327bc474e3", size = 291163 }, - { url = "https://files.pythonhosted.org/packages/f4/58/c4f976234bf6d4737bc8c02a81192f045c307b72cf39c9e5c5a2d78927f6/psutil-7.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d4a113425c037300de3ac8b331637293da9be9713855c4fc9d2d97436d7259d", size = 293625 }, - { url = "https://files.pythonhosted.org/packages/79/87/157c8e7959ec39ced1b11cc93c730c4fb7f9d408569a6c59dbd92ceb35db/psutil-7.1.0-cp37-abi3-win32.whl", hash = "sha256:09ad740870c8d219ed8daae0ad3b726d3bf9a028a198e7f3080f6a1888b99bca", size = 244812 }, - { url = "https://files.pythonhosted.org/packages/bf/e9/b44c4f697276a7a95b8e94d0e320a7bf7f3318521b23de69035540b39838/psutil-7.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:57f5e987c36d3146c0dd2528cd42151cf96cd359b9d67cfff836995cc5df9a3d", size = 247965 }, - { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = 
"sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971 }, + { url = "https://files.pythonhosted.org/packages/46/62/ce4051019ee20ce0ed74432dd73a5bb087a6704284a470bb8adff69a0932/psutil-7.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:76168cef4397494250e9f4e73eb3752b146de1dd950040b29186d0cce1d5ca13", size = 245242, upload-time = "2025-09-17T20:14:56.126Z" }, + { url = "https://files.pythonhosted.org/packages/38/61/f76959fba841bf5b61123fbf4b650886dc4094c6858008b5bf73d9057216/psutil-7.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:5d007560c8c372efdff9e4579c2846d71de737e4605f611437255e81efcca2c5", size = 246682, upload-time = "2025-09-17T20:14:58.25Z" }, + { url = "https://files.pythonhosted.org/packages/88/7a/37c99d2e77ec30d63398ffa6a660450b8a62517cabe44b3e9bae97696e8d/psutil-7.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22e4454970b32472ce7deaa45d045b34d3648ce478e26a04c7e858a0a6e75ff3", size = 287994, upload-time = "2025-09-17T20:14:59.901Z" }, + { url = "https://files.pythonhosted.org/packages/9d/de/04c8c61232f7244aa0a4b9a9fbd63a89d5aeaf94b2fc9d1d16e2faa5cbb0/psutil-7.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70e113920d51e89f212dd7be06219a9b88014e63a4cec69b684c327bc474e3", size = 291163, upload-time = "2025-09-17T20:15:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/58/c4f976234bf6d4737bc8c02a81192f045c307b72cf39c9e5c5a2d78927f6/psutil-7.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d4a113425c037300de3ac8b331637293da9be9713855c4fc9d2d97436d7259d", size = 293625, upload-time = "2025-09-17T20:15:04.492Z" }, + { url = "https://files.pythonhosted.org/packages/79/87/157c8e7959ec39ced1b11cc93c730c4fb7f9d408569a6c59dbd92ceb35db/psutil-7.1.0-cp37-abi3-win32.whl", hash = "sha256:09ad740870c8d219ed8daae0ad3b726d3bf9a028a198e7f3080f6a1888b99bca", size = 244812, upload-time = "2025-09-17T20:15:07.462Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/b44c4f697276a7a95b8e94d0e320a7bf7f3318521b23de69035540b39838/psutil-7.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:57f5e987c36d3146c0dd2528cd42151cf96cd359b9d67cfff836995cc5df9a3d", size = 247965, upload-time = "2025-09-17T20:15:09.673Z" }, + { url = "https://files.pythonhosted.org/packages/26/65/1070a6e3c036f39142c2820c4b52e9243246fcfc3f96239ac84472ba361e/psutil-7.1.0-cp37-abi3-win_arm64.whl", hash = "sha256:6937cb68133e7c97b6cc9649a570c9a18ba0efebed46d8c5dae4c07fa1b67a07", size = 244971, upload-time = "2025-09-17T20:15:12.262Z" }, ] [[package]] name = "ptyprocess" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762 } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993 }, + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, ] [[package]] name = "pure-eval" version = "0.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752 } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842 }, + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, ] [[package]] name = "pyarrow" version = "22.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/b7/18f611a8cdc43417f9394a3ccd3eace2f32183c08b9eddc3d17681819f37/pyarrow-22.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:3e294c5eadfb93d78b0763e859a0c16d4051fc1c5231ae8956d61cb0b5666f5a", size = 34272022 }, - { url = "https://files.pythonhosted.org/packages/26/5c/f259e2526c67eb4b9e511741b19870a02363a47a35edbebc55c3178db22d/pyarrow-22.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:69763ab2445f632d90b504a815a2a033f74332997052b721002298ed6de40f2e", size = 35995834 }, - { url = "https://files.pythonhosted.org/packages/50/8d/281f0f9b9376d4b7f146913b26fac0aa2829cd1ee7e997f53a27411bbb92/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b41f37cabfe2463232684de44bad753d6be08a7a072f6a83447eeaf0e4d2a215", size = 45030348 }, - { url = "https://files.pythonhosted.org/packages/f5/e5/53c0a1c428f0976bf22f513d79c73000926cb00b9c138d8e02daf2102e18/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:35ad0f0378c9359b3f297299c3309778bb03b8612f987399a0333a560b43862d", size = 47699480 }, - { url = "https://files.pythonhosted.org/packages/95/e1/9dbe4c465c3365959d183e6345d0a8d1dc5b02ca3f8db4760b3bc834cf25/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8382ad21458075c2e66a82a29d650f963ce51c7708c7c0ff313a8c206c4fd5e8", size = 48011148 }, - { url = "https://files.pythonhosted.org/packages/c5/b4/7caf5d21930061444c3cf4fa7535c82faf5263e22ce43af7c2759ceb5b8b/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:1a812a5b727bc09c3d7ea072c4eebf657c2f7066155506ba31ebf4792f88f016", size = 50276964 }, - { url = "https://files.pythonhosted.org/packages/ae/f3/cec89bd99fa3abf826f14d4e53d3d11340ce6f6af4d14bdcd54cd83b6576/pyarrow-22.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ec5d40dd494882704fb876c16fa7261a69791e784ae34e6b5992e977bd2e238c", size = 28106517 }, - { url = "https://files.pythonhosted.org/packages/af/63/ba23862d69652f85b615ca14ad14f3bcfc5bf1b99ef3f0cd04ff93fdad5a/pyarrow-22.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bea79263d55c24a32b0d79c00a1c58bb2ee5f0757ed95656b01c0fb310c5af3d", size = 34211578 }, - { url = "https://files.pythonhosted.org/packages/b1/d0/f9ad86fe809efd2bcc8be32032fa72e8b0d112b01ae56a053006376c5930/pyarrow-22.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:12fe549c9b10ac98c91cf791d2945e878875d95508e1a5d14091a7aaa66d9cf8", size = 35989906 }, - { url = "https://files.pythonhosted.org/packages/b4/a8/f910afcb14630e64d673f15904ec27dd31f1e009b77033c365c84e8c1e1d/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:334f900ff08ce0423407af97e6c26ad5d4e3b0763645559ece6fbf3747d6a8f5", size = 45021677 }, - { url = "https://files.pythonhosted.org/packages/13/95/aec81f781c75cd10554dc17a25849c720d54feafb6f7847690478dcf5ef8/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c6c791b09c57ed76a18b03f2631753a4960eefbbca80f846da8baefc6491fcfe", size = 47726315 }, - { url = "https://files.pythonhosted.org/packages/bb/d4/74ac9f7a54cfde12ee42734ea25d5a3c9a45db78f9def949307a92720d37/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c3200cb41cdbc65156e5f8c908d739b0dfed57e890329413da2748d1a2cd1a4e", size = 47990906 }, - { url = "https://files.pythonhosted.org/packages/2e/71/fedf2499bf7a95062eafc989ace56572f3343432570e1c54e6599d5b88da/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ac93252226cf288753d8b46280f4edf3433bf9508b6977f8dd8526b521a1bbb9", size = 50306783 }, - { url = "https://files.pythonhosted.org/packages/68/ed/b202abd5a5b78f519722f3d29063dda03c114711093c1995a33b8e2e0f4b/pyarrow-22.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:44729980b6c50a5f2bfcc2668d36c569ce17f8b17bccaf470c4313dcbbf13c9d", size = 27972883 }, - { url = "https://files.pythonhosted.org/packages/a6/d6/d0fac16a2963002fc22c8fa75180a838737203d558f0ed3b564c4a54eef5/pyarrow-22.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e6e95176209257803a8b3d0394f21604e796dadb643d2f7ca21b66c9c0b30c9a", size = 34204629 }, - { url = "https://files.pythonhosted.org/packages/c6/9c/1d6357347fbae062ad3f17082f9ebc29cc733321e892c0d2085f42a2212b/pyarrow-22.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:001ea83a58024818826a9e3f89bf9310a114f7e26dfe404a4c32686f97bd7901", size = 35985783 }, - { url = "https://files.pythonhosted.org/packages/ff/c0/782344c2ce58afbea010150df07e3a2f5fdad299cd631697ae7bd3bac6e3/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:ce20fe000754f477c8a9125543f1936ea5b8867c5406757c224d745ed033e691", size = 45020999 }, - { url = "https://files.pythonhosted.org/packages/1b/8b/5362443737a5307a7b67c1017c42cd104213189b4970bf607e05faf9c525/pyarrow-22.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e0a15757fccb38c410947df156f9749ae4a3c89b2393741a50521f39a8cf202a", size = 47724601 }, - { url = "https://files.pythonhosted.org/packages/69/4d/76e567a4fc2e190ee6072967cb4672b7d9249ac59ae65af2d7e3047afa3b/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:cedb9dd9358e4ea1d9bce3665ce0797f6adf97ff142c8e25b46ba9cdd508e9b6", size = 48001050 }, - { url = "https://files.pythonhosted.org/packages/01/5e/5653f0535d2a1aef8223cee9d92944cb6bccfee5cf1cd3f462d7cb022790/pyarrow-22.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:252be4a05f9d9185bb8c18e83764ebcfea7185076c07a7a662253af3a8c07941", size = 50307877 }, - { url = "https://files.pythonhosted.org/packages/2d/f8/1d0bd75bf9328a3b826e24a16e5517cd7f9fbf8d34a3184a4566ef5a7f29/pyarrow-22.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:a4893d31e5ef780b6edcaf63122df0f8d321088bb0dee4c8c06eccb1ca28d145", size = 27977099 }, - { url = "https://files.pythonhosted.org/packages/90/81/db56870c997805bf2b0f6eeeb2d68458bf4654652dccdcf1bf7a42d80903/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:f7fe3dbe871294ba70d789be16b6e7e52b418311e166e0e3cba9522f0f437fb1", size = 34336685 }, - { url = "https://files.pythonhosted.org/packages/1c/98/0727947f199aba8a120f47dfc229eeb05df15bcd7a6f1b669e9f882afc58/pyarrow-22.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ba95112d15fd4f1105fb2402c4eab9068f0554435e9b7085924bcfaac2cc306f", size = 36032158 }, - { url = "https://files.pythonhosted.org/packages/96/b4/9babdef9c01720a0785945c7cf550e4acd0ebcd7bdd2e6f0aa7981fa85e2/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:c064e28361c05d72eed8e744c9605cbd6d2bb7481a511c74071fd9b24bc65d7d", size = 44892060 }, - { url = "https://files.pythonhosted.org/packages/f8/ca/2f8804edd6279f78a37062d813de3f16f29183874447ef6d1aadbb4efa0f/pyarrow-22.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:6f9762274496c244d951c819348afbcf212714902742225f649cf02823a6a10f", size = 47504395 }, - { url = "https://files.pythonhosted.org/packages/b9/f0/77aa5198fd3943682b2e4faaf179a674f0edea0d55d326d83cb2277d9363/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a9d9ffdc2ab696f6b15b4d1f7cec6658e1d788124418cb30030afbae31c64746", size = 48066216 }, - { url = "https://files.pythonhosted.org/packages/79/87/a1937b6e78b2aff18b706d738c9e46ade5bfcf11b294e39c87706a0089ac/pyarrow-22.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ec1a15968a9d80da01e1d30349b2b0d7cc91e96588ee324ce1b5228175043e95", size = 50288552 }, - { url = "https://files.pythonhosted.org/packages/60/ae/b5a5811e11f25788ccfdaa8f26b6791c9807119dffcf80514505527c384c/pyarrow-22.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bba208d9c7decf9961998edf5c65e3ea4355d5818dd6cd0f6809bec1afb951cc", size = 28262504 }, +sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/b7/18f611a8cdc43417f9394a3ccd3eace2f32183c08b9eddc3d17681819f37/pyarrow-22.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:3e294c5eadfb93d78b0763e859a0c16d4051fc1c5231ae8956d61cb0b5666f5a", size = 34272022, upload-time = "2025-10-24T10:04:28.973Z" }, + { url = "https://files.pythonhosted.org/packages/26/5c/f259e2526c67eb4b9e511741b19870a02363a47a35edbebc55c3178db22d/pyarrow-22.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:69763ab2445f632d90b504a815a2a033f74332997052b721002298ed6de40f2e", size = 35995834, upload-time = "2025-10-24T10:04:35.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/8d/281f0f9b9376d4b7f146913b26fac0aa2829cd1ee7e997f53a27411bbb92/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b41f37cabfe2463232684de44bad753d6be08a7a072f6a83447eeaf0e4d2a215", size = 45030348, upload-time = "2025-10-24T10:04:43.366Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e5/53c0a1c428f0976bf22f513d79c73000926cb00b9c138d8e02daf2102e18/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:35ad0f0378c9359b3f297299c3309778bb03b8612f987399a0333a560b43862d", size = 47699480, upload-time = "2025-10-24T10:04:51.486Z" }, + { url = "https://files.pythonhosted.org/packages/95/e1/9dbe4c465c3365959d183e6345d0a8d1dc5b02ca3f8db4760b3bc834cf25/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8382ad21458075c2e66a82a29d650f963ce51c7708c7c0ff313a8c206c4fd5e8", size = 48011148, upload-time = "2025-10-24T10:04:59.585Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b4/7caf5d21930061444c3cf4fa7535c82faf5263e22ce43af7c2759ceb5b8b/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a812a5b727bc09c3d7ea072c4eebf657c2f7066155506ba31ebf4792f88f016", size = 50276964, upload-time = "2025-10-24T10:05:08.175Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f3/cec89bd99fa3abf826f14d4e53d3d11340ce6f6af4d14bdcd54cd83b6576/pyarrow-22.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ec5d40dd494882704fb876c16fa7261a69791e784ae34e6b5992e977bd2e238c", size = 28106517, upload-time = "2025-10-24T10:05:14.314Z" }, + { url = "https://files.pythonhosted.org/packages/af/63/ba23862d69652f85b615ca14ad14f3bcfc5bf1b99ef3f0cd04ff93fdad5a/pyarrow-22.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bea79263d55c24a32b0d79c00a1c58bb2ee5f0757ed95656b01c0fb310c5af3d", size = 34211578, upload-time = "2025-10-24T10:05:21.583Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d0/f9ad86fe809efd2bcc8be32032fa72e8b0d112b01ae56a053006376c5930/pyarrow-22.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:12fe549c9b10ac98c91cf791d2945e878875d95508e1a5d14091a7aaa66d9cf8", size = 35989906, upload-time = "2025-10-24T10:05:29.485Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a8/f910afcb14630e64d673f15904ec27dd31f1e009b77033c365c84e8c1e1d/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:334f900ff08ce0423407af97e6c26ad5d4e3b0763645559ece6fbf3747d6a8f5", size = 45021677, upload-time = "2025-10-24T10:05:38.274Z" }, + { url = "https://files.pythonhosted.org/packages/13/95/aec81f781c75cd10554dc17a25849c720d54feafb6f7847690478dcf5ef8/pyarrow-22.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c6c791b09c57ed76a18b03f2631753a4960eefbbca80f846da8baefc6491fcfe", size = 47726315, upload-time = "2025-10-24T10:05:47.314Z" }, + { url = "https://files.pythonhosted.org/packages/bb/d4/74ac9f7a54cfde12ee42734ea25d5a3c9a45db78f9def949307a92720d37/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c3200cb41cdbc65156e5f8c908d739b0dfed57e890329413da2748d1a2cd1a4e", size = 47990906, upload-time = "2025-10-24T10:05:58.254Z" }, + { url = "https://files.pythonhosted.org/packages/2e/71/fedf2499bf7a95062eafc989ace56572f3343432570e1c54e6599d5b88da/pyarrow-22.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ac93252226cf288753d8b46280f4edf3433bf9508b6977f8dd8526b521a1bbb9", size = 50306783, upload-time = "2025-10-24T10:06:08.08Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/ed/b202abd5a5b78f519722f3d29063dda03c114711093c1995a33b8e2e0f4b/pyarrow-22.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:44729980b6c50a5f2bfcc2668d36c569ce17f8b17bccaf470c4313dcbbf13c9d", size = 27972883, upload-time = "2025-10-24T10:06:14.204Z" }, ] [[package]] name = "pycparser" version = "2.23" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734 } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140 }, + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, ] [[package]] @@ -2998,9 +2692,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/da/b8a7ee04378a53f6fefefc0c5e05570a3ebfdfa0523a878bcd3b475683ee/pydantic-2.12.0.tar.gz", hash = "sha256:c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563", size = 814760 } +sdist = { url = "https://files.pythonhosted.org/packages/c3/da/b8a7ee04378a53f6fefefc0c5e05570a3ebfdfa0523a878bcd3b475683ee/pydantic-2.12.0.tar.gz", hash = "sha256:c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563", size = 814760, upload-time = "2025-10-07T15:58:03.467Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/9d/d5c855424e2e5b6b626fbc6ec514d8e655a600377ce283008b115abb7445/pydantic-2.12.0-py3-none-any.whl", hash = "sha256:f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f", size = 459730 }, + { url = "https://files.pythonhosted.org/packages/f4/9d/d5c855424e2e5b6b626fbc6ec514d8e655a600377ce283008b115abb7445/pydantic-2.12.0-py3-none-any.whl", hash = "sha256:f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f", size = 459730, upload-time = "2025-10-07T15:58:01.576Z" }, ] [[package]] @@ -3010,87 +2704,70 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f", size = 454870 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/a9/ec440f02e57beabdfd804725ef1e38ac1ba00c49854d298447562e119513/pydantic_core-2.41.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4f276a6134fe1fc1daa692642a3eaa2b7b858599c49a7610816388f5e37566a1", size = 2111456 }, - { url = "https://files.pythonhosted.org/packages/f0/f9/6bc15bacfd8dcfc073a1820a564516d9c12a435a9a332d4cbbfd48828ddd/pydantic_core-2.41.1-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:07588570a805296ece009c59d9a679dc08fab72fb337365afb4f3a14cfbfc176", size = 1915012 }, - { url = "https://files.pythonhosted.org/packages/38/8a/d9edcdcdfe80bade17bed424284427c08bea892aaec11438fa52eaeaf79c/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28527e4b53400cd60ffbd9812ccb2b5135d042129716d71afd7e45bf42b855c0", size = 1973762 }, - { url = "https://files.pythonhosted.org/packages/d5/b3/ff225c6d49fba4279de04677c1c876fc3dc6562fd0c53e9bfd66f58c51a8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46a1c935c9228bad738c8a41de06478770927baedf581d172494ab36a6b96575", size = 2065386 }, - { url = "https://files.pythonhosted.org/packages/47/ba/183e8c0be4321314af3fd1ae6bfc7eafdd7a49bdea5da81c56044a207316/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:447ddf56e2b7d28d200d3e9eafa936fe40485744b5a824b67039937580b3cb20", size = 2252317 }, - { url = "https://files.pythonhosted.org/packages/57/c5/aab61e94fd02f45c65f1f8c9ec38bb3b33fbf001a1837c74870e97462572/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63892ead40c1160ac860b5debcc95c95c5a0035e543a8b5a4eac70dd22e995f4", size = 2373405 }, - { url = "https://files.pythonhosted.org/packages/e5/4f/3aaa3bd1ea420a15acc42d7d3ccb3b0bbc5444ae2f9dbc1959f8173e16b8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4a9543ca355e6df8fbe9c83e9faab707701e9103ae857ecb40f1c0cf8b0e94d", size = 2073794 }, - { url = "https://files.pythonhosted.org/packages/58/bd/e3975cdebe03ec080ef881648de316c73f2a6be95c14fc4efb2f7bdd0d41/pydantic_core-2.41.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2611bdb694116c31e551ed82e20e39a90bea9b7ad9e54aaf2d045ad621aa7a1", size = 2194430 }, - { url = "https://files.pythonhosted.org/packages/2b/b8/6b7e7217f147d3b3105b57fb1caec3c4f667581affdfaab6d1d277e1f749/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fecc130893a9b5f7bfe230be1bb8c61fe66a19db8ab704f808cb25a82aad0bc9", size = 2154611 }, - { url = "https://files.pythonhosted.org/packages/fe/7b/239c2fe76bd8b7eef9ae2140d737368a3c6fea4fd27f8f6b4cde6baa3ce9/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:1e2df5f8344c99b6ea5219f00fdc8950b8e6f2c422fbc1cc122ec8641fac85a1", size = 2329809 }, - { url = "https://files.pythonhosted.org/packages/bd/2e/77a821a67ff0786f2f14856d6bd1348992f695ee90136a145d7a445c1ff6/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:35291331e9d8ed94c257bab6be1cb3a380b5eee570a2784bffc055e18040a2ea", size = 2327907 }, - { url = "https://files.pythonhosted.org/packages/fd/9a/b54512bb9df7f64c586b369328c30481229b70ca6a5fcbb90b715e15facf/pydantic_core-2.41.1-cp311-cp311-win32.whl", hash = "sha256:2876a095292668d753f1a868c4a57c4ac9f6acbd8edda8debe4218d5848cf42f", size = 1989964 }, - { url = "https://files.pythonhosted.org/packages/9d/72/63c9a4f1a5c950e65dd522d7dd67f167681f9d4f6ece3b80085a0329f08f/pydantic_core-2.41.1-cp311-cp311-win_amd64.whl", hash = "sha256:b92d6c628e9a338846a28dfe3fcdc1a3279388624597898b105e078cdfc59298", size = 2025158 }, - { url = "https://files.pythonhosted.org/packages/d8/16/4e2706184209f61b50c231529257c12eb6bd9eb36e99ea1272e4815d2200/pydantic_core-2.41.1-cp311-cp311-win_arm64.whl", hash = "sha256:7d82ae99409eb69d507a89835488fb657faa03ff9968a9379567b0d2e2e56bc5", size = 1972297 }, - { url = 
"https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4", size = 2099300 }, - { url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601", size = 1910179 }, - { url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00", size = 1957225 }, - { url = "https://files.pythonhosted.org/packages/b9/b6/6db08b2725b2432b9390844852e11d320281e5cea8a859c52c68001975fa/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741", size = 2053315 }, - { url = "https://files.pythonhosted.org/packages/61/d9/4de44600f2d4514b44f3f3aeeda2e14931214b6b5bf52479339e801ce748/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8", size = 2224298 }, - { url = "https://files.pythonhosted.org/packages/7a/ae/dbe51187a7f35fc21b283c5250571a94e36373eb557c1cba9f29a9806dcf/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51", size = 2351797 }, - { url = "https://files.pythonhosted.org/packages/b5/a7/975585147457c2e9fb951c7c8dab56deeb6aa313f3aa72c2fc0df3f74a49/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5", size = 2074921 }, - { url = "https://files.pythonhosted.org/packages/62/37/ea94d1d0c01dec1b7d236c7cec9103baab0021f42500975de3d42522104b/pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115", size = 2187767 }, - { url = "https://files.pythonhosted.org/packages/d3/fe/694cf9fdd3a777a618c3afd210dba7b414cb8a72b1bd29b199c2e5765fee/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d", size = 2136062 }, - { url = "https://files.pythonhosted.org/packages/0f/ae/174aeabd89916fbd2988cc37b81a59e1186e952afd2a7ed92018c22f31ca/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5", size = 2317819 }, - { url = "https://files.pythonhosted.org/packages/65/e8/e9aecafaebf53fc456314f72886068725d6fba66f11b013532dc21259343/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513", size = 2312267 }, - { url = "https://files.pythonhosted.org/packages/35/2f/1c2e71d2a052f9bb2f2df5a6a05464a0eb800f9e8d9dd800202fe31219e1/pydantic_core-2.41.1-cp312-cp312-win32.whl", hash = "sha256:83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479", size = 1990927 }, - { url = 
"https://files.pythonhosted.org/packages/b1/78/562998301ff2588b9c6dcc5cb21f52fa919d6e1decc75a35055feb973594/pydantic_core-2.41.1-cp312-cp312-win_amd64.whl", hash = "sha256:377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50", size = 2034703 }, - { url = "https://files.pythonhosted.org/packages/b2/53/d95699ce5a5cdb44bb470bd818b848b9beadf51459fd4ea06667e8ede862/pydantic_core-2.41.1-cp312-cp312-win_arm64.whl", hash = "sha256:c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde", size = 1972719 }, - { url = "https://files.pythonhosted.org/packages/27/8a/6d54198536a90a37807d31a156642aae7a8e1263ed9fe6fc6245defe9332/pydantic_core-2.41.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70e790fce5f05204ef4403159857bfcd587779da78627b0babb3654f75361ebf", size = 2105825 }, - { url = "https://files.pythonhosted.org/packages/4f/2e/4784fd7b22ac9c8439db25bf98ffed6853d01e7e560a346e8af821776ccc/pydantic_core-2.41.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9cebf1ca35f10930612d60bd0f78adfacee824c30a880e3534ba02c207cceceb", size = 1910126 }, - { url = "https://files.pythonhosted.org/packages/f3/92/31eb0748059ba5bd0aa708fb4bab9fcb211461ddcf9e90702a6542f22d0d/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:170406a37a5bc82c22c3274616bf6f17cc7df9c4a0a0a50449e559cb755db669", size = 1961472 }, - { url = "https://files.pythonhosted.org/packages/ab/91/946527792275b5c4c7dde4cfa3e81241bf6900e9fee74fb1ba43e0c0f1ab/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12d4257fc9187a0ccd41b8b327d6a4e57281ab75e11dda66a9148ef2e1fb712f", size = 2063230 }, - { url = "https://files.pythonhosted.org/packages/31/5d/a35c5d7b414e5c0749f1d9f0d159ee2ef4bab313f499692896b918014ee3/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a75a33b4db105dd1c8d57839e17ee12db8d5ad18209e792fa325dbb4baeb00f4", size = 2229469 }, - { url = "https://files.pythonhosted.org/packages/21/4d/8713737c689afa57ecfefe38db78259d4484c97aa494979e6a9d19662584/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a589f850803a74e0fcb16a72081cafb0d72a3cdda500106942b07e76b7bf62", size = 2347986 }, - { url = "https://files.pythonhosted.org/packages/f6/ec/929f9a3a5ed5cda767081494bacd32f783e707a690ce6eeb5e0730ec4986/pydantic_core-2.41.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97939d6ea44763c456bd8a617ceada2c9b96bb5b8ab3dfa0d0827df7619014", size = 2072216 }, - { url = "https://files.pythonhosted.org/packages/26/55/a33f459d4f9cc8786d9db42795dbecc84fa724b290d7d71ddc3d7155d46a/pydantic_core-2.41.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae423c65c556f09569524b80ffd11babff61f33055ef9773d7c9fabc11ed8d", size = 2193047 }, - { url = "https://files.pythonhosted.org/packages/77/af/d5c6959f8b089f2185760a2779079e3c2c411bfc70ea6111f58367851629/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:4dc703015fbf8764d6a8001c327a87f1823b7328d40b47ce6000c65918ad2b4f", size = 2140613 }, - { url = "https://files.pythonhosted.org/packages/58/e5/2c19bd2a14bffe7fabcf00efbfbd3ac430aaec5271b504a938ff019ac7be/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:968e4ffdfd35698a5fe659e5e44c508b53664870a8e61c8f9d24d3d145d30257", size = 2327641 }, - { url = 
"https://files.pythonhosted.org/packages/93/ef/e0870ccda798c54e6b100aff3c4d49df5458fd64217e860cb9c3b0a403f4/pydantic_core-2.41.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:fff2b76c8e172d34771cd4d4f0ade08072385310f214f823b5a6ad4006890d32", size = 2318229 }, - { url = "https://files.pythonhosted.org/packages/b1/4b/c3b991d95f5deb24d0bd52e47bcf716098fa1afe0ce2d4bd3125b38566ba/pydantic_core-2.41.1-cp313-cp313-win32.whl", hash = "sha256:a38a5263185407ceb599f2f035faf4589d57e73c7146d64f10577f6449e8171d", size = 1997911 }, - { url = "https://files.pythonhosted.org/packages/a7/ce/5c316fd62e01f8d6be1b7ee6b54273214e871772997dc2c95e204997a055/pydantic_core-2.41.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42ae7fd6760782c975897e1fdc810f483b021b32245b0105d40f6e7a3803e4b", size = 2034301 }, - { url = "https://files.pythonhosted.org/packages/29/41/902640cfd6a6523194123e2c3373c60f19006447f2fb06f76de4e8466c5b/pydantic_core-2.41.1-cp313-cp313-win_arm64.whl", hash = "sha256:ad4111acc63b7384e205c27a2f15e23ac0ee21a9d77ad6f2e9cb516ec90965fb", size = 1977238 }, - { url = "https://files.pythonhosted.org/packages/04/04/28b040e88c1b89d851278478842f0bdf39c7a05da9e850333c6c8cbe7dfa/pydantic_core-2.41.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:440d0df7415b50084a4ba9d870480c16c5f67c0d1d4d5119e3f70925533a0edc", size = 1875626 }, - { url = "https://files.pythonhosted.org/packages/d6/58/b41dd3087505220bb58bc81be8c3e8cbc037f5710cd3c838f44f90bdd704/pydantic_core-2.41.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71eaa38d342099405dae6484216dcf1e8e4b0bebd9b44a4e08c9b43db6a2ab67", size = 2045708 }, - { url = "https://files.pythonhosted.org/packages/d7/b8/760f23754e40bf6c65b94a69b22c394c24058a0ef7e2aa471d2e39219c1a/pydantic_core-2.41.1-cp313-cp313t-win_amd64.whl", hash = "sha256:555ecf7e50f1161d3f693bc49f23c82cf6cdeafc71fa37a06120772a09a38795", size = 1997171 }, - { url = "https://files.pythonhosted.org/packages/16/89/d0afad37ba25f5801735af1472e650b86baad9fe807a42076508e4824a2a/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:68f2251559b8efa99041bb63571ec7cdd2d715ba74cc82b3bc9eff824ebc8bf0", size = 2124001 }, - { url = "https://files.pythonhosted.org/packages/8e/c4/08609134b34520568ddebb084d9ed0a2a3f5f52b45739e6e22cb3a7112eb/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:c7bc140c596097cb53b30546ca257dbe3f19282283190b1b5142928e5d5d3a20", size = 1941841 }, - { url = "https://files.pythonhosted.org/packages/2a/43/94a4877094e5fe19a3f37e7e817772263e2c573c94f1e3fa2b1eee56ef3b/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2896510fce8f4725ec518f8b9d7f015a00db249d2fd40788f442af303480063d", size = 1961129 }, - { url = "https://files.pythonhosted.org/packages/a2/30/23a224d7e25260eb5f69783a63667453037e07eb91ff0e62dabaadd47128/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ced20e62cfa0f496ba68fa5d6c7ee71114ea67e2a5da3114d6450d7f4683572a", size = 2148770 }, - { url = "https://files.pythonhosted.org/packages/2b/3e/a51c5f5d37b9288ba30683d6e96f10fa8f1defad1623ff09f1020973b577/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06", size = 2115344 }, - { url = 
"https://files.pythonhosted.org/packages/5a/bd/389504c9e0600ef4502cd5238396b527afe6ef8981a6a15cd1814fc7b434/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb", size = 1927994 }, - { url = "https://files.pythonhosted.org/packages/ff/9c/5111c6b128861cb792a4c082677e90dac4f2e090bb2e2fe06aa5b2d39027/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca", size = 1959394 }, - { url = "https://files.pythonhosted.org/packages/14/3f/cfec8b9a0c48ce5d64409ec5e1903cb0b7363da38f14b41de2fcb3712700/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28", size = 2147365 }, - { url = "https://files.pythonhosted.org/packages/e6/6c/fa3e45c2b054a1e627a89a364917f12cbe3abc3e91b9004edaae16e7b3c5/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:af2385d3f98243fb733862f806c5bb9122e5fba05b373e3af40e3c82d711cef1", size = 2112094 }, - { url = "https://files.pythonhosted.org/packages/e5/17/7eebc38b4658cc8e6902d0befc26388e4c2a5f2e179c561eeb43e1922c7b/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6550617a0c2115be56f90c31a5370261d8ce9dbf051c3ed53b51172dd34da696", size = 1935300 }, - { url = "https://files.pythonhosted.org/packages/2b/00/9fe640194a1717a464ab861d43595c268830f98cb1e2705aa134b3544b70/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc17b6ecf4983d298686014c92ebc955a9f9baf9f57dad4065e7906e7bee6222", size = 1970417 }, - { url = "https://files.pythonhosted.org/packages/b2/ad/f4cdfaf483b78ee65362363e73b6b40c48e067078d7b146e8816d5945ad6/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:42ae9352cf211f08b04ea110563d6b1e415878eea5b4c70f6bdb17dca3b932d2", size = 2190745 }, - { url = "https://files.pythonhosted.org/packages/cb/c1/18f416d40a10f44e9387497ba449f40fdb1478c61ba05c4b6bdb82300362/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e82947de92068b0a21681a13dd2102387197092fbe7defcfb8453e0913866506", size = 2150888 }, - { url = "https://files.pythonhosted.org/packages/42/30/134c8a921630d8a88d6f905a562495a6421e959a23c19b0f49b660801d67/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e244c37d5471c9acdcd282890c6c4c83747b77238bfa19429b8473586c907656", size = 2324489 }, - { url = "https://files.pythonhosted.org/packages/9c/48/a9263aeaebdec81e941198525b43edb3b44f27cfa4cb8005b8d3eb8dec72/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1e798b4b304a995110d41ec93653e57975620ccb2842ba9420037985e7d7284e", size = 2322763 }, - { url = "https://files.pythonhosted.org/packages/1d/62/755d2bd2593f701c5839fc084e9c2c5e2418f460383ad04e3b5d0befc3ca/pydantic_core-2.41.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f1fc716c0eb1663c59699b024428ad5ec2bcc6b928527b8fe28de6cb89f47efb", size = 2144046 }, +sdist = { url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f", size = 454870, upload-time = "2025-10-07T10:50:45.974Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f6/a9/ec440f02e57beabdfd804725ef1e38ac1ba00c49854d298447562e119513/pydantic_core-2.41.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4f276a6134fe1fc1daa692642a3eaa2b7b858599c49a7610816388f5e37566a1", size = 2111456, upload-time = "2025-10-06T21:10:09.824Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f9/6bc15bacfd8dcfc073a1820a564516d9c12a435a9a332d4cbbfd48828ddd/pydantic_core-2.41.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07588570a805296ece009c59d9a679dc08fab72fb337365afb4f3a14cfbfc176", size = 1915012, upload-time = "2025-10-06T21:10:11.599Z" }, + { url = "https://files.pythonhosted.org/packages/38/8a/d9edcdcdfe80bade17bed424284427c08bea892aaec11438fa52eaeaf79c/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28527e4b53400cd60ffbd9812ccb2b5135d042129716d71afd7e45bf42b855c0", size = 1973762, upload-time = "2025-10-06T21:10:13.154Z" }, + { url = "https://files.pythonhosted.org/packages/d5/b3/ff225c6d49fba4279de04677c1c876fc3dc6562fd0c53e9bfd66f58c51a8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46a1c935c9228bad738c8a41de06478770927baedf581d172494ab36a6b96575", size = 2065386, upload-time = "2025-10-06T21:10:14.436Z" }, + { url = "https://files.pythonhosted.org/packages/47/ba/183e8c0be4321314af3fd1ae6bfc7eafdd7a49bdea5da81c56044a207316/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:447ddf56e2b7d28d200d3e9eafa936fe40485744b5a824b67039937580b3cb20", size = 2252317, upload-time = "2025-10-06T21:10:15.719Z" }, + { url = "https://files.pythonhosted.org/packages/57/c5/aab61e94fd02f45c65f1f8c9ec38bb3b33fbf001a1837c74870e97462572/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63892ead40c1160ac860b5debcc95c95c5a0035e543a8b5a4eac70dd22e995f4", size = 2373405, upload-time = "2025-10-06T21:10:17.017Z" }, + { url = "https://files.pythonhosted.org/packages/e5/4f/3aaa3bd1ea420a15acc42d7d3ccb3b0bbc5444ae2f9dbc1959f8173e16b8/pydantic_core-2.41.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4a9543ca355e6df8fbe9c83e9faab707701e9103ae857ecb40f1c0cf8b0e94d", size = 2073794, upload-time = "2025-10-06T21:10:18.383Z" }, + { url = "https://files.pythonhosted.org/packages/58/bd/e3975cdebe03ec080ef881648de316c73f2a6be95c14fc4efb2f7bdd0d41/pydantic_core-2.41.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f2611bdb694116c31e551ed82e20e39a90bea9b7ad9e54aaf2d045ad621aa7a1", size = 2194430, upload-time = "2025-10-06T21:10:19.638Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b8/6b7e7217f147d3b3105b57fb1caec3c4f667581affdfaab6d1d277e1f749/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fecc130893a9b5f7bfe230be1bb8c61fe66a19db8ab704f808cb25a82aad0bc9", size = 2154611, upload-time = "2025-10-06T21:10:21.28Z" }, + { url = "https://files.pythonhosted.org/packages/fe/7b/239c2fe76bd8b7eef9ae2140d737368a3c6fea4fd27f8f6b4cde6baa3ce9/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:1e2df5f8344c99b6ea5219f00fdc8950b8e6f2c422fbc1cc122ec8641fac85a1", size = 2329809, upload-time = "2025-10-06T21:10:22.678Z" }, + { url = "https://files.pythonhosted.org/packages/bd/2e/77a821a67ff0786f2f14856d6bd1348992f695ee90136a145d7a445c1ff6/pydantic_core-2.41.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:35291331e9d8ed94c257bab6be1cb3a380b5eee570a2784bffc055e18040a2ea", size = 2327907, upload-time = "2025-10-06T21:10:24.447Z" }, + { url = "https://files.pythonhosted.org/packages/fd/9a/b54512bb9df7f64c586b369328c30481229b70ca6a5fcbb90b715e15facf/pydantic_core-2.41.1-cp311-cp311-win32.whl", hash = "sha256:2876a095292668d753f1a868c4a57c4ac9f6acbd8edda8debe4218d5848cf42f", size = 1989964, upload-time = "2025-10-06T21:10:25.676Z" }, + { url = "https://files.pythonhosted.org/packages/9d/72/63c9a4f1a5c950e65dd522d7dd67f167681f9d4f6ece3b80085a0329f08f/pydantic_core-2.41.1-cp311-cp311-win_amd64.whl", hash = "sha256:b92d6c628e9a338846a28dfe3fcdc1a3279388624597898b105e078cdfc59298", size = 2025158, upload-time = "2025-10-06T21:10:27.522Z" }, + { url = "https://files.pythonhosted.org/packages/d8/16/4e2706184209f61b50c231529257c12eb6bd9eb36e99ea1272e4815d2200/pydantic_core-2.41.1-cp311-cp311-win_arm64.whl", hash = "sha256:7d82ae99409eb69d507a89835488fb657faa03ff9968a9379567b0d2e2e56bc5", size = 1972297, upload-time = "2025-10-06T21:10:28.814Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4", size = 2099300, upload-time = "2025-10-06T21:10:30.463Z" }, + { url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601", size = 1910179, upload-time = "2025-10-06T21:10:31.782Z" }, + { url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00", size = 1957225, upload-time = "2025-10-06T21:10:33.11Z" }, + { url = "https://files.pythonhosted.org/packages/b9/b6/6db08b2725b2432b9390844852e11d320281e5cea8a859c52c68001975fa/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741", size = 2053315, upload-time = "2025-10-06T21:10:34.87Z" }, + { url = "https://files.pythonhosted.org/packages/61/d9/4de44600f2d4514b44f3f3aeeda2e14931214b6b5bf52479339e801ce748/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8", size = 2224298, upload-time = "2025-10-06T21:10:36.233Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ae/dbe51187a7f35fc21b283c5250571a94e36373eb557c1cba9f29a9806dcf/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51", size = 2351797, upload-time = "2025-10-06T21:10:37.601Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a7/975585147457c2e9fb951c7c8dab56deeb6aa313f3aa72c2fc0df3f74a49/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5", size = 2074921, upload-time = "2025-10-06T21:10:38.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/37/ea94d1d0c01dec1b7d236c7cec9103baab0021f42500975de3d42522104b/pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115", size = 2187767, upload-time = "2025-10-06T21:10:40.651Z" }, + { url = "https://files.pythonhosted.org/packages/d3/fe/694cf9fdd3a777a618c3afd210dba7b414cb8a72b1bd29b199c2e5765fee/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d", size = 2136062, upload-time = "2025-10-06T21:10:42.09Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/174aeabd89916fbd2988cc37b81a59e1186e952afd2a7ed92018c22f31ca/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5", size = 2317819, upload-time = "2025-10-06T21:10:43.974Z" }, + { url = "https://files.pythonhosted.org/packages/65/e8/e9aecafaebf53fc456314f72886068725d6fba66f11b013532dc21259343/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513", size = 2312267, upload-time = "2025-10-06T21:10:45.34Z" }, + { url = "https://files.pythonhosted.org/packages/35/2f/1c2e71d2a052f9bb2f2df5a6a05464a0eb800f9e8d9dd800202fe31219e1/pydantic_core-2.41.1-cp312-cp312-win32.whl", hash = "sha256:83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479", size = 1990927, upload-time = "2025-10-06T21:10:46.738Z" }, + { url = "https://files.pythonhosted.org/packages/b1/78/562998301ff2588b9c6dcc5cb21f52fa919d6e1decc75a35055feb973594/pydantic_core-2.41.1-cp312-cp312-win_amd64.whl", hash = "sha256:377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50", size = 2034703, upload-time = "2025-10-06T21:10:48.524Z" }, + { url = "https://files.pythonhosted.org/packages/b2/53/d95699ce5a5cdb44bb470bd818b848b9beadf51459fd4ea06667e8ede862/pydantic_core-2.41.1-cp312-cp312-win_arm64.whl", hash = "sha256:c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde", size = 1972719, upload-time = "2025-10-06T21:10:50.256Z" }, + { url = "https://files.pythonhosted.org/packages/16/89/d0afad37ba25f5801735af1472e650b86baad9fe807a42076508e4824a2a/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:68f2251559b8efa99041bb63571ec7cdd2d715ba74cc82b3bc9eff824ebc8bf0", size = 2124001, upload-time = "2025-10-07T10:49:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/8e/c4/08609134b34520568ddebb084d9ed0a2a3f5f52b45739e6e22cb3a7112eb/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:c7bc140c596097cb53b30546ca257dbe3f19282283190b1b5142928e5d5d3a20", size = 1941841, upload-time = "2025-10-07T10:49:56.248Z" }, + { url = "https://files.pythonhosted.org/packages/2a/43/94a4877094e5fe19a3f37e7e817772263e2c573c94f1e3fa2b1eee56ef3b/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2896510fce8f4725ec518f8b9d7f015a00db249d2fd40788f442af303480063d", size = 1961129, upload-time = "2025-10-07T10:49:58.298Z" }, + { url = "https://files.pythonhosted.org/packages/a2/30/23a224d7e25260eb5f69783a63667453037e07eb91ff0e62dabaadd47128/pydantic_core-2.41.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ced20e62cfa0f496ba68fa5d6c7ee71114ea67e2a5da3114d6450d7f4683572a", 
size = 2148770, upload-time = "2025-10-07T10:49:59.959Z" }, + { url = "https://files.pythonhosted.org/packages/2b/3e/a51c5f5d37b9288ba30683d6e96f10fa8f1defad1623ff09f1020973b577/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06", size = 2115344, upload-time = "2025-10-07T10:50:02.466Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bd/389504c9e0600ef4502cd5238396b527afe6ef8981a6a15cd1814fc7b434/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb", size = 1927994, upload-time = "2025-10-07T10:50:04.379Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9c/5111c6b128861cb792a4c082677e90dac4f2e090bb2e2fe06aa5b2d39027/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca", size = 1959394, upload-time = "2025-10-07T10:50:06.335Z" }, + { url = "https://files.pythonhosted.org/packages/14/3f/cfec8b9a0c48ce5d64409ec5e1903cb0b7363da38f14b41de2fcb3712700/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28", size = 2147365, upload-time = "2025-10-07T10:50:07.978Z" }, + { url = "https://files.pythonhosted.org/packages/e6/6c/fa3e45c2b054a1e627a89a364917f12cbe3abc3e91b9004edaae16e7b3c5/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:af2385d3f98243fb733862f806c5bb9122e5fba05b373e3af40e3c82d711cef1", size = 2112094, upload-time = "2025-10-07T10:50:25.513Z" }, + { url = "https://files.pythonhosted.org/packages/e5/17/7eebc38b4658cc8e6902d0befc26388e4c2a5f2e179c561eeb43e1922c7b/pydantic_core-2.41.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6550617a0c2115be56f90c31a5370261d8ce9dbf051c3ed53b51172dd34da696", size = 1935300, upload-time = "2025-10-07T10:50:27.715Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/9fe640194a1717a464ab861d43595c268830f98cb1e2705aa134b3544b70/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc17b6ecf4983d298686014c92ebc955a9f9baf9f57dad4065e7906e7bee6222", size = 1970417, upload-time = "2025-10-07T10:50:29.573Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ad/f4cdfaf483b78ee65362363e73b6b40c48e067078d7b146e8816d5945ad6/pydantic_core-2.41.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:42ae9352cf211f08b04ea110563d6b1e415878eea5b4c70f6bdb17dca3b932d2", size = 2190745, upload-time = "2025-10-07T10:50:31.48Z" }, + { url = "https://files.pythonhosted.org/packages/cb/c1/18f416d40a10f44e9387497ba449f40fdb1478c61ba05c4b6bdb82300362/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e82947de92068b0a21681a13dd2102387197092fbe7defcfb8453e0913866506", size = 2150888, upload-time = "2025-10-07T10:50:33.477Z" }, + { url = "https://files.pythonhosted.org/packages/42/30/134c8a921630d8a88d6f905a562495a6421e959a23c19b0f49b660801d67/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e244c37d5471c9acdcd282890c6c4c83747b77238bfa19429b8473586c907656", size = 2324489, upload-time = "2025-10-07T10:50:36.48Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/48/a9263aeaebdec81e941198525b43edb3b44f27cfa4cb8005b8d3eb8dec72/pydantic_core-2.41.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1e798b4b304a995110d41ec93653e57975620ccb2842ba9420037985e7d7284e", size = 2322763, upload-time = "2025-10-07T10:50:38.751Z" }, + { url = "https://files.pythonhosted.org/packages/1d/62/755d2bd2593f701c5839fc084e9c2c5e2418f460383ad04e3b5d0befc3ca/pydantic_core-2.41.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f1fc716c0eb1663c59699b024428ad5ec2bcc6b928527b8fe28de6cb89f47efb", size = 2144046, upload-time = "2025-10-07T10:50:40.686Z" }, ] [[package]] name = "pygments" version = "2.19.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] [[package]] name = "pyjwt" version = "2.10.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 }, + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, ] [package.optional-dependencies] @@ -3106,18 +2783,18 @@ dependencies = [ { name = "markdown" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/b3/6d2b3f149bc5413b0a29761c2c5832d8ce904a1d7f621e86616d96f505cc/pymdown_extensions-10.16.1.tar.gz", hash = "sha256:aace82bcccba3efc03e25d584e6a22d27a8e17caa3f4dd9f207e49b787aa9a91", size = 853277 } +sdist = { url = "https://files.pythonhosted.org/packages/55/b3/6d2b3f149bc5413b0a29761c2c5832d8ce904a1d7f621e86616d96f505cc/pymdown_extensions-10.16.1.tar.gz", 
hash = "sha256:aace82bcccba3efc03e25d584e6a22d27a8e17caa3f4dd9f207e49b787aa9a91", size = 853277, upload-time = "2025-07-28T16:19:34.167Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/06/43084e6cbd4b3bc0e80f6be743b2e79fbc6eed8de9ad8c629939fa55d972/pymdown_extensions-10.16.1-py3-none-any.whl", hash = "sha256:d6ba157a6c03146a7fb122b2b9a121300056384eafeec9c9f9e584adfdb2a32d", size = 266178 }, + { url = "https://files.pythonhosted.org/packages/e4/06/43084e6cbd4b3bc0e80f6be743b2e79fbc6eed8de9ad8c629939fa55d972/pymdown_extensions-10.16.1-py3-none-any.whl", hash = "sha256:d6ba157a6c03146a7fb122b2b9a121300056384eafeec9c9f9e584adfdb2a32d", size = 266178, upload-time = "2025-07-28T16:19:31.401Z" }, ] [[package]] name = "pyreadline3" version = "3.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839 } +sdist = { url = "https://files.pythonhosted.org/packages/0f/49/4cea918a08f02817aabae639e3d0ac046fef9f9180518a3ad394e22da148/pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7", size = 99839, upload-time = "2024-09-19T02:40:10.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 }, + { url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178, upload-time = "2024-09-19T02:40:08.598Z" }, ] [[package]] @@ -3128,9 +2805,9 @@ dependencies = [ { name = "nodeenv" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/16/6b4fbdd1fef59a0292cbb99f790b44983e390321eccbc5921b4d161da5d1/pyright-1.1.406.tar.gz", hash = "sha256:c4872bc58c9643dac09e8a2e74d472c62036910b3bd37a32813989ef7576ea2c", size = 4113151 } +sdist = { url = "https://files.pythonhosted.org/packages/f7/16/6b4fbdd1fef59a0292cbb99f790b44983e390321eccbc5921b4d161da5d1/pyright-1.1.406.tar.gz", hash = "sha256:c4872bc58c9643dac09e8a2e74d472c62036910b3bd37a32813989ef7576ea2c", size = 4113151, upload-time = "2025-10-02T01:04:45.488Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/a2/e309afbb459f50507103793aaef85ca4348b66814c86bc73908bdeb66d12/pyright-1.1.406-py3-none-any.whl", hash = "sha256:1d81fb43c2407bf566e97e57abb01c811973fdb21b2df8df59f870f688bdca71", size = 5980982 }, + { url = "https://files.pythonhosted.org/packages/f6/a2/e309afbb459f50507103793aaef85ca4348b66814c86bc73908bdeb66d12/pyright-1.1.406-py3-none-any.whl", hash = "sha256:1d81fb43c2407bf566e97e57abb01c811973fdb21b2df8df59f870f688bdca71", size = 5980982, upload-time = "2025-10-02T01:04:43.137Z" }, ] [[package]] @@ -3144,9 +2821,9 @@ dependencies = [ { name = "pluggy" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618 } +sdist = { url = 
"https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750 }, + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, ] [[package]] @@ -3156,9 +2833,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8e/c4/453c52c659521066969523e87d85d54139bbd17b78f09532fb8eb8cdb58e/pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f", size = 54156 } +sdist = { url = "https://files.pythonhosted.org/packages/8e/c4/453c52c659521066969523e87d85d54139bbd17b78f09532fb8eb8cdb58e/pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f", size = 54156, upload-time = "2025-03-25T06:22:28.883Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/7f/338843f449ace853647ace35870874f69a764d251872ed1b4de9f234822c/pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", size = 19694 }, + { url = "https://files.pythonhosted.org/packages/20/7f/338843f449ace853647ace35870874f69a764d251872ed1b4de9f234822c/pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", size = 19694, upload-time = "2025-03-25T06:22:27.807Z" }, ] [[package]] @@ -3169,9 +2846,9 @@ dependencies = [ { name = "pytest" }, { name = "python-dotenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/b0/cafee9c627c1bae228eb07c9977f679b3a7cb111b488307ab9594ba9e4da/pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732", size = 3782 } +sdist = { url = "https://files.pythonhosted.org/packages/cd/b0/cafee9c627c1bae228eb07c9977f679b3a7cb111b488307ab9594ba9e4da/pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732", size = 3782, upload-time = "2020-06-16T12:38:03.4Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/da/9da67c67b3d0963160e3d2cbc7c38b6fae342670cc8e6d5936644b2cf944/pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f", size = 3993 }, + { url = "https://files.pythonhosted.org/packages/d0/da/9da67c67b3d0963160e3d2cbc7c38b6fae342670cc8e6d5936644b2cf944/pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f", size = 3993, upload-time = "2020-06-16T12:38:01.139Z" }, ] [[package]] @@ -3181,9 +2858,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash 
= "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973 } +sdist = { url = "https://files.pythonhosted.org/packages/ac/82/4c9ecabab13363e72d880f2fb504c5f750433b2b6f16e99f4ec21ada284c/pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a", size = 17973, upload-time = "2025-05-05T19:44:34.99Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382, upload-time = "2025-05-05T19:44:33.502Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/b6/3127540ecdf1464a00e5a01ee60a1b09175f6913f0644ac748494d9c4b21/pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2", size = 14382 }, + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[package.optional-dependencies] +psutil = [ + { name = "psutil" }, ] [[package]] @@ -3193,36 +2888,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] [[package]] name = "python-dotenv" version = "1.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978 } +sdist = { url = 
"https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556 }, + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, ] [[package]] name = "python-json-logger" version = "4.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683 } +sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548 }, + { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" }, ] [[package]] name = "pytz" version = "2025.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] [[package]] @@ -3230,64 +2925,49 @@ name = "pywin32" version = "311" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = 
"sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031 }, - { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308 }, - { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930 }, - { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543 }, - { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040 }, - { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102 }, - { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700 }, - { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700 }, - { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318 }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = 
"sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, ] [[package]] name = "pywinpty" version = "3.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/bb/a7cc2967c5c4eceb6cc49cfe39447d4bfc56e6c865e7c2249b6eb978935f/pywinpty-3.0.2.tar.gz", hash = "sha256:1505cc4cb248af42cb6285a65c9c2086ee9e7e574078ee60933d5d7fa86fb004", size = 30669 } +sdist = { url = "https://files.pythonhosted.org/packages/f3/bb/a7cc2967c5c4eceb6cc49cfe39447d4bfc56e6c865e7c2249b6eb978935f/pywinpty-3.0.2.tar.gz", hash = "sha256:1505cc4cb248af42cb6285a65c9c2086ee9e7e574078ee60933d5d7fa86fb004", size = 30669, upload-time = "2025-10-03T21:16:29.205Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/a1/409c1651c9f874d598c10f51ff586c416625601df4bca315d08baec4c3e3/pywinpty-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:327790d70e4c841ebd9d0f295a780177149aeb405bca44c7115a3de5c2054b23", size = 2050304 }, - { url = "https://files.pythonhosted.org/packages/02/4e/1098484e042c9485f56f16eb2b69b43b874bd526044ee401512234cf9e04/pywinpty-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:99fdd9b455f0ad6419aba6731a7a0d2f88ced83c3c94a80ff9533d95fa8d8a9e", size = 2050391 }, - { url = "https://files.pythonhosted.org/packages/fc/19/b757fe28008236a4a713e813283721b8a40aa60cd7d3f83549f2e25a3155/pywinpty-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:18f78b81e4cfee6aabe7ea8688441d30247b73e52cd9657138015c5f4ee13a51", size = 2050057 }, - { url = "https://files.pythonhosted.org/packages/cb/44/cbae12ecf6f4fa4129c36871fd09c6bef4f98d5f625ecefb5e2449765508/pywinpty-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:663383ecfab7fc382cc97ea5c4f7f0bb32c2f889259855df6ea34e5df42d305b", size = 2049874 }, + { url = "https://files.pythonhosted.org/packages/a6/a1/409c1651c9f874d598c10f51ff586c416625601df4bca315d08baec4c3e3/pywinpty-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:327790d70e4c841ebd9d0f295a780177149aeb405bca44c7115a3de5c2054b23", size = 2050304, upload-time = "2025-10-03T21:19:29.466Z" }, + { url = "https://files.pythonhosted.org/packages/02/4e/1098484e042c9485f56f16eb2b69b43b874bd526044ee401512234cf9e04/pywinpty-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:99fdd9b455f0ad6419aba6731a7a0d2f88ced83c3c94a80ff9533d95fa8d8a9e", size = 2050391, upload-time = "2025-10-03T21:19:01.642Z" }, ] [[package]] name = "pyyaml" version = "6.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826 }, - { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577 }, - { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556 }, - { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114 }, - { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638 }, - { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463 }, - { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986 }, - { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543 }, - { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763 }, - { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063 }, - { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973 }, - { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116 }, - { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011 }, - { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870 }, 
- { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089 }, - { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181 }, - { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658 }, - { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003 }, - { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344 }, - { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 }, - { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 }, - { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 }, - { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 }, - { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 }, - { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 }, - { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 }, - { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = 
"sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 }, - { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 }, - { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 }, +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, ] [[package]] @@ -3297,9 +2977,9 @@ source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/2e/79c822141bfd05a853236b504869ebc6b70159afc570e1d5a20641782eaa/pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff", size = 5737 } +sdist = { url = "https://files.pythonhosted.org/packages/eb/2e/79c822141bfd05a853236b504869ebc6b70159afc570e1d5a20641782eaa/pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff", size = 5737, upload-time = "2025-05-13T15:24:01.64Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/11/432f32f8097b03e3cd5fe57e88efb685d964e2e5178a48ed61e841f7fdce/pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04", size = 4722 }, + { url = "https://files.pythonhosted.org/packages/04/11/432f32f8097b03e3cd5fe57e88efb685d964e2e5178a48ed61e841f7fdce/pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04", size = 4722, upload-time = "2025-05-13T15:23:59.629Z" }, ] [[package]] @@ -3309,45 +2989,33 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "implementation_name == 'pypy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/06/5d/305323ba86b284e6fcb0d842d6adaa2999035f70f8c38a9b6d21ad28c3d4/pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86", size = 1333328 }, - { url = "https://files.pythonhosted.org/packages/bd/a0/fc7e78a23748ad5443ac3275943457e8452da67fda347e05260261108cbc/pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581", size = 908803 }, - { url = "https://files.pythonhosted.org/packages/7e/22/37d15eb05f3bdfa4abea6f6d96eb3bb58585fbd3e4e0ded4e743bc650c97/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f", size = 668836 }, - { url = "https://files.pythonhosted.org/packages/b1/c4/2a6fe5111a01005fc7af3878259ce17684fabb8852815eda6225620f3c59/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e", size = 857038 }, - { url = "https://files.pythonhosted.org/packages/cb/eb/bfdcb41d0db9cd233d6fb22dc131583774135505ada800ebf14dfb0a7c40/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e", size = 1657531 }, - { url = "https://files.pythonhosted.org/packages/ab/21/e3180ca269ed4a0de5c34417dfe71a8ae80421198be83ee619a8a485b0c7/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2", size = 2034786 }, - { url = "https://files.pythonhosted.org/packages/3b/b1/5e21d0b517434b7f33588ff76c177c5a167858cc38ef740608898cd329f2/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394", size = 1894220 }, - { url 
= "https://files.pythonhosted.org/packages/03/f2/44913a6ff6941905efc24a1acf3d3cb6146b636c546c7406c38c49c403d4/pyzmq-27.1.0-cp311-cp311-win32.whl", hash = "sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f", size = 567155 }, - { url = "https://files.pythonhosted.org/packages/23/6d/d8d92a0eb270a925c9b4dd039c0b4dc10abc2fcbc48331788824ef113935/pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97", size = 633428 }, - { url = "https://files.pythonhosted.org/packages/ae/14/01afebc96c5abbbd713ecfc7469cfb1bc801c819a74ed5c9fad9a48801cb/pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07", size = 559497 }, - { url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279 }, - { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645 }, - { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574 }, - { url = "https://files.pythonhosted.org/packages/f8/9b/c108cdb55560eaf253f0cbdb61b29971e9fb34d9c3499b0e96e4e60ed8a5/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31", size = 840995 }, - { url = "https://files.pythonhosted.org/packages/c2/bb/b79798ca177b9eb0825b4c9998c6af8cd2a7f15a6a1a4272c1d1a21d382f/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28", size = 1642070 }, - { url = "https://files.pythonhosted.org/packages/9c/80/2df2e7977c4ede24c79ae39dcef3899bfc5f34d1ca7a5b24f182c9b7a9ca/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856", size = 2021121 }, - { url = "https://files.pythonhosted.org/packages/46/bd/2d45ad24f5f5ae7e8d01525eb76786fa7557136555cac7d929880519e33a/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496", size = 1878550 }, - { url = "https://files.pythonhosted.org/packages/e6/2f/104c0a3c778d7c2ab8190e9db4f62f0b6957b53c9d87db77c284b69f33ea/pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd", size = 559184 }, - { url = "https://files.pythonhosted.org/packages/fc/7f/a21b20d577e4100c6a41795842028235998a643b1ad406a6d4163ea8f53e/pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf", size = 619480 }, - { url = "https://files.pythonhosted.org/packages/78/c2/c012beae5f76b72f007a9e91ee9401cb88c51d0f83c6257a03e785c81cc2/pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f", size = 552993 }, - { url = 
"https://files.pythonhosted.org/packages/60/cb/84a13459c51da6cec1b7b1dc1a47e6db6da50b77ad7fd9c145842750a011/pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5", size = 1122436 }, - { url = "https://files.pythonhosted.org/packages/dc/b6/94414759a69a26c3dd674570a81813c46a078767d931a6c70ad29fc585cb/pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6", size = 1156301 }, - { url = "https://files.pythonhosted.org/packages/a5/ad/15906493fd40c316377fd8a8f6b1f93104f97a752667763c9b9c1b71d42d/pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7", size = 1341197 }, - { url = "https://files.pythonhosted.org/packages/14/1d/d343f3ce13db53a54cb8946594e567410b2125394dafcc0268d8dda027e0/pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05", size = 897275 }, - { url = "https://files.pythonhosted.org/packages/69/2d/d83dd6d7ca929a2fc67d2c3005415cdf322af7751d773524809f9e585129/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9", size = 660469 }, - { url = "https://files.pythonhosted.org/packages/3e/cd/9822a7af117f4bc0f1952dbe9ef8358eb50a24928efd5edf54210b850259/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128", size = 847961 }, - { url = "https://files.pythonhosted.org/packages/9a/12/f003e824a19ed73be15542f172fd0ec4ad0b60cf37436652c93b9df7c585/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39", size = 1650282 }, - { url = "https://files.pythonhosted.org/packages/d5/4a/e82d788ed58e9a23995cee70dbc20c9aded3d13a92d30d57ec2291f1e8a3/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97", size = 2024468 }, - { url = "https://files.pythonhosted.org/packages/d9/94/2da0a60841f757481e402b34bf4c8bf57fa54a5466b965de791b1e6f747d/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db", size = 1885394 }, - { url = "https://files.pythonhosted.org/packages/4f/6f/55c10e2e49ad52d080dc24e37adb215e5b0d64990b57598abc2e3f01725b/pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c", size = 574964 }, - { url = "https://files.pythonhosted.org/packages/87/4d/2534970ba63dd7c522d8ca80fb92777f362c0f321900667c615e2067cb29/pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2", size = 641029 }, - { url = "https://files.pythonhosted.org/packages/f6/fa/f8aea7a28b0641f31d40dea42d7ef003fded31e184ef47db696bc74cd610/pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e", size = 561541 }, - { url = "https://files.pythonhosted.org/packages/4c/c6/c4dcdecdbaa70969ee1fdced6d7b8f60cfabe64d25361f27ac4665a70620/pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066", size = 836265 }, - { url = 
"https://files.pythonhosted.org/packages/3e/79/f38c92eeaeb03a2ccc2ba9866f0439593bb08c5e3b714ac1d553e5c96e25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604", size = 800208 }, - { url = "https://files.pythonhosted.org/packages/49/0e/3f0d0d335c6b3abb9b7b723776d0b21fa7f3a6c819a0db6097059aada160/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c", size = 567747 }, - { url = "https://files.pythonhosted.org/packages/a1/cf/f2b3784d536250ffd4be70e049f3b60981235d70c6e8ce7e3ef21e1adb25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271", size = 747371 }, - { url = "https://files.pythonhosted.org/packages/01/1b/5dbe84eefc86f48473947e2f41711aded97eecef1231f4558f1f02713c12/pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355", size = 544862 }, +sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/5d/305323ba86b284e6fcb0d842d6adaa2999035f70f8c38a9b6d21ad28c3d4/pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86", size = 1333328, upload-time = "2025-09-08T23:07:45.946Z" }, + { url = "https://files.pythonhosted.org/packages/bd/a0/fc7e78a23748ad5443ac3275943457e8452da67fda347e05260261108cbc/pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581", size = 908803, upload-time = "2025-09-08T23:07:47.551Z" }, + { url = "https://files.pythonhosted.org/packages/7e/22/37d15eb05f3bdfa4abea6f6d96eb3bb58585fbd3e4e0ded4e743bc650c97/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f", size = 668836, upload-time = "2025-09-08T23:07:49.436Z" }, + { url = "https://files.pythonhosted.org/packages/b1/c4/2a6fe5111a01005fc7af3878259ce17684fabb8852815eda6225620f3c59/pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e", size = 857038, upload-time = "2025-09-08T23:07:51.234Z" }, + { url = "https://files.pythonhosted.org/packages/cb/eb/bfdcb41d0db9cd233d6fb22dc131583774135505ada800ebf14dfb0a7c40/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e", size = 1657531, upload-time = "2025-09-08T23:07:52.795Z" }, + { url = "https://files.pythonhosted.org/packages/ab/21/e3180ca269ed4a0de5c34417dfe71a8ae80421198be83ee619a8a485b0c7/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2", size = 2034786, upload-time = "2025-09-08T23:07:55.047Z" }, + { url = "https://files.pythonhosted.org/packages/3b/b1/5e21d0b517434b7f33588ff76c177c5a167858cc38ef740608898cd329f2/pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", 
hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394", size = 1894220, upload-time = "2025-09-08T23:07:57.172Z" }, + { url = "https://files.pythonhosted.org/packages/03/f2/44913a6ff6941905efc24a1acf3d3cb6146b636c546c7406c38c49c403d4/pyzmq-27.1.0-cp311-cp311-win32.whl", hash = "sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f", size = 567155, upload-time = "2025-09-08T23:07:59.05Z" }, + { url = "https://files.pythonhosted.org/packages/23/6d/d8d92a0eb270a925c9b4dd039c0b4dc10abc2fcbc48331788824ef113935/pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97", size = 633428, upload-time = "2025-09-08T23:08:00.663Z" }, + { url = "https://files.pythonhosted.org/packages/ae/14/01afebc96c5abbbd713ecfc7469cfb1bc801c819a74ed5c9fad9a48801cb/pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07", size = 559497, upload-time = "2025-09-08T23:08:02.15Z" }, + { url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9b/c108cdb55560eaf253f0cbdb61b29971e9fb34d9c3499b0e96e4e60ed8a5/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31", size = 840995, upload-time = "2025-09-08T23:08:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/c2/bb/b79798ca177b9eb0825b4c9998c6af8cd2a7f15a6a1a4272c1d1a21d382f/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28", size = 1642070, upload-time = "2025-09-08T23:08:09.989Z" }, + { url = "https://files.pythonhosted.org/packages/9c/80/2df2e7977c4ede24c79ae39dcef3899bfc5f34d1ca7a5b24f182c9b7a9ca/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856", size = 2021121, upload-time = "2025-09-08T23:08:11.907Z" }, + { url = "https://files.pythonhosted.org/packages/46/bd/2d45ad24f5f5ae7e8d01525eb76786fa7557136555cac7d929880519e33a/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496", size = 1878550, upload-time = "2025-09-08T23:08:13.513Z" }, + { url = "https://files.pythonhosted.org/packages/e6/2f/104c0a3c778d7c2ab8190e9db4f62f0b6957b53c9d87db77c284b69f33ea/pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd", size = 559184, upload-time = 
"2025-09-08T23:08:15.163Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7f/a21b20d577e4100c6a41795842028235998a643b1ad406a6d4163ea8f53e/pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf", size = 619480, upload-time = "2025-09-08T23:08:17.192Z" }, + { url = "https://files.pythonhosted.org/packages/78/c2/c012beae5f76b72f007a9e91ee9401cb88c51d0f83c6257a03e785c81cc2/pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f", size = 552993, upload-time = "2025-09-08T23:08:18.926Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c6/c4dcdecdbaa70969ee1fdced6d7b8f60cfabe64d25361f27ac4665a70620/pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066", size = 836265, upload-time = "2025-09-08T23:09:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/3e/79/f38c92eeaeb03a2ccc2ba9866f0439593bb08c5e3b714ac1d553e5c96e25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604", size = 800208, upload-time = "2025-09-08T23:09:51.073Z" }, + { url = "https://files.pythonhosted.org/packages/49/0e/3f0d0d335c6b3abb9b7b723776d0b21fa7f3a6c819a0db6097059aada160/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c", size = 567747, upload-time = "2025-09-08T23:09:52.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cf/f2b3784d536250ffd4be70e049f3b60981235d70c6e8ce7e3ef21e1adb25/pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271", size = 747371, upload-time = "2025-09-08T23:09:54.563Z" }, + { url = "https://files.pythonhosted.org/packages/01/1b/5dbe84eefc86f48473947e2f41711aded97eecef1231f4558f1f02713c12/pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355", size = 544862, upload-time = "2025-09-08T23:09:56.509Z" }, ] [[package]] @@ -3357,75 +3025,47 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, { name = "rpds-py" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744 } +sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775 }, + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = 
"sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, ] [[package]] name = "regex" version = "2025.9.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hash = "sha256:c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4", size = 400917 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a", size = 484832 }, - { url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8", size = 288994 }, - { url = "https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414", size = 286619 }, - { url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a", size = 792454 }, - { url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4", size = 858723 }, - { url = "https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a", size = 905899 }, - { url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f", size = 798981 }, - { url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a", size = 781900 }, - { url = "https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9", size = 852952 }, - { url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2", size = 844355 }, - { url = 
"https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95", size = 787254 }, - { url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl", hash = "sha256:895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07", size = 264129 }, - { url = "https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl", hash = "sha256:7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9", size = 276160 }, - { url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl", hash = "sha256:fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df", size = 268471 }, - { url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e", size = 486335 }, - { url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a", size = 289720 }, - { url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab", size = 287257 }, - { url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5", size = 797463 }, - { url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742", size = 862670 }, - { url = "https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425", size = 910881 }, - { url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352", size = 802011 }, - { url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d", size = 786668 }, - { url = 
"https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56", size = 856578 }, - { url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e", size = 849017 }, - { url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282", size = 788150 }, - { url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl", hash = "sha256:e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459", size = 264536 }, - { url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl", hash = "sha256:3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77", size = 275501 }, - { url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl", hash = "sha256:032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5", size = 268601 }, - { url = "https://files.pythonhosted.org/packages/d2/c7/5c48206a60ce33711cf7dcaeaed10dd737733a3569dc7e1dce324dd48f30/regex-2025.9.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2a40f929cd907c7e8ac7566ac76225a77701a6221bca937bdb70d56cb61f57b2", size = 485955 }, - { url = "https://files.pythonhosted.org/packages/e9/be/74fc6bb19a3c491ec1ace943e622b5a8539068771e8705e469b2da2306a7/regex-2025.9.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c90471671c2cdf914e58b6af62420ea9ecd06d1554d7474d50133ff26ae88feb", size = 289583 }, - { url = "https://files.pythonhosted.org/packages/25/c4/9ceaa433cb5dc515765560f22a19578b95b92ff12526e5a259321c4fc1a0/regex-2025.9.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a351aff9e07a2dabb5022ead6380cff17a4f10e4feb15f9100ee56c4d6d06af", size = 287000 }, - { url = "https://files.pythonhosted.org/packages/7d/e6/68bc9393cb4dc68018456568c048ac035854b042bc7c33cb9b99b0680afa/regex-2025.9.18-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc4b8e9d16e20ddfe16430c23468a8707ccad3365b06d4536142e71823f3ca29", size = 797535 }, - { url = "https://files.pythonhosted.org/packages/6a/1c/ebae9032d34b78ecfe9bd4b5e6575b55351dc8513485bb92326613732b8c/regex-2025.9.18-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b8cdbddf2db1c5e80338ba2daa3cfa3dec73a46fff2a7dda087c8efbf12d62f", size = 862603 }, - { url = "https://files.pythonhosted.org/packages/3b/74/12332c54b3882557a4bcd2b99f8be581f5c6a43cf1660a85b460dd8ff468/regex-2025.9.18-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a276937d9d75085b2c91fb48244349c6954f05ee97bba0963ce24a9d915b8b68", size = 910829 }, - { url = 
"https://files.pythonhosted.org/packages/86/70/ba42d5ed606ee275f2465bfc0e2208755b06cdabd0f4c7c4b614d51b57ab/regex-2025.9.18-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92a8e375ccdc1256401c90e9dc02b8642894443d549ff5e25e36d7cf8a80c783", size = 802059 }, - { url = "https://files.pythonhosted.org/packages/da/c5/fcb017e56396a7f2f8357412638d7e2963440b131a3ca549be25774b3641/regex-2025.9.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0dc6893b1f502d73037cf807a321cdc9be29ef3d6219f7970f842475873712ac", size = 786781 }, - { url = "https://files.pythonhosted.org/packages/c6/ee/21c4278b973f630adfb3bcb23d09d83625f3ab1ca6e40ebdffe69901c7a1/regex-2025.9.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a61e85bfc63d232ac14b015af1261f826260c8deb19401c0597dbb87a864361e", size = 856578 }, - { url = "https://files.pythonhosted.org/packages/87/0b/de51550dc7274324435c8f1539373ac63019b0525ad720132866fff4a16a/regex-2025.9.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1ef86a9ebc53f379d921fb9a7e42b92059ad3ee800fcd9e0fe6181090e9f6c23", size = 849119 }, - { url = "https://files.pythonhosted.org/packages/60/52/383d3044fc5154d9ffe4321696ee5b2ee4833a28c29b137c22c33f41885b/regex-2025.9.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d3bc882119764ba3a119fbf2bd4f1b47bc56c1da5d42df4ed54ae1e8e66fdf8f", size = 788219 }, - { url = "https://files.pythonhosted.org/packages/20/bd/2614fc302671b7359972ea212f0e3a92df4414aaeacab054a8ce80a86073/regex-2025.9.18-cp313-cp313-win32.whl", hash = "sha256:3810a65675845c3bdfa58c3c7d88624356dd6ee2fc186628295e0969005f928d", size = 264517 }, - { url = "https://files.pythonhosted.org/packages/07/0f/ab5c1581e6563a7bffdc1974fb2d25f05689b88e2d416525271f232b1946/regex-2025.9.18-cp313-cp313-win_amd64.whl", hash = "sha256:16eaf74b3c4180ede88f620f299e474913ab6924d5c4b89b3833bc2345d83b3d", size = 275481 }, - { url = "https://files.pythonhosted.org/packages/49/22/ee47672bc7958f8c5667a587c2600a4fba8b6bab6e86bd6d3e2b5f7cac42/regex-2025.9.18-cp313-cp313-win_arm64.whl", hash = "sha256:4dc98ba7dd66bd1261927a9f49bd5ee2bcb3660f7962f1ec02617280fc00f5eb", size = 268598 }, - { url = "https://files.pythonhosted.org/packages/e8/83/6887e16a187c6226cb85d8301e47d3b73ecc4505a3a13d8da2096b44fd76/regex-2025.9.18-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:fe5d50572bc885a0a799410a717c42b1a6b50e2f45872e2b40f4f288f9bce8a2", size = 489765 }, - { url = "https://files.pythonhosted.org/packages/51/c5/e2f7325301ea2916ff301c8d963ba66b1b2c1b06694191df80a9c4fea5d0/regex-2025.9.18-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b9d9a2d6cda6621551ca8cf7a06f103adf72831153f3c0d982386110870c4d3", size = 291228 }, - { url = "https://files.pythonhosted.org/packages/91/60/7d229d2bc6961289e864a3a3cfebf7d0d250e2e65323a8952cbb7e22d824/regex-2025.9.18-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:13202e4c4ac0ef9a317fff817674b293c8f7e8c68d3190377d8d8b749f566e12", size = 289270 }, - { url = "https://files.pythonhosted.org/packages/3c/d7/b4f06868ee2958ff6430df89857fbf3d43014bbf35538b6ec96c2704e15d/regex-2025.9.18-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:874ff523b0fecffb090f80ae53dc93538f8db954c8bb5505f05b7787ab3402a0", size = 806326 }, - { url = "https://files.pythonhosted.org/packages/d6/e4/bca99034a8f1b9b62ccf337402a8e5b959dd5ba0e5e5b2ead70273df3277/regex-2025.9.18-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:d13ab0490128f2bb45d596f754148cd750411afc97e813e4b3a61cf278a23bb6", size = 871556 }, - { url = "https://files.pythonhosted.org/packages/6d/df/e06ffaf078a162f6dd6b101a5ea9b44696dca860a48136b3ae4a9caf25e2/regex-2025.9.18-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:05440bc172bc4b4b37fb9667e796597419404dbba62e171e1f826d7d2a9ebcef", size = 913817 }, - { url = "https://files.pythonhosted.org/packages/9e/05/25b05480b63292fd8e84800b1648e160ca778127b8d2367a0a258fa2e225/regex-2025.9.18-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5514b8e4031fdfaa3d27e92c75719cbe7f379e28cacd939807289bce76d0e35a", size = 811055 }, - { url = "https://files.pythonhosted.org/packages/70/97/7bc7574655eb651ba3a916ed4b1be6798ae97af30104f655d8efd0cab24b/regex-2025.9.18-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:65d3c38c39efce73e0d9dc019697b39903ba25b1ad45ebbd730d2cf32741f40d", size = 794534 }, - { url = "https://files.pythonhosted.org/packages/b4/c2/d5da49166a52dda879855ecdba0117f073583db2b39bb47ce9a3378a8e9e/regex-2025.9.18-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ae77e447ebc144d5a26d50055c6ddba1d6ad4a865a560ec7200b8b06bc529368", size = 866684 }, - { url = "https://files.pythonhosted.org/packages/bd/2d/0a5c4e6ec417de56b89ff4418ecc72f7e3feca806824c75ad0bbdae0516b/regex-2025.9.18-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e3ef8cf53dc8df49d7e28a356cf824e3623764e9833348b655cfed4524ab8a90", size = 853282 }, - { url = "https://files.pythonhosted.org/packages/f4/8e/d656af63e31a86572ec829665d6fa06eae7e144771e0330650a8bb865635/regex-2025.9.18-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9feb29817df349c976da9a0debf775c5c33fc1c8ad7b9f025825da99374770b7", size = 797830 }, - { url = "https://files.pythonhosted.org/packages/db/ce/06edc89df8f7b83ffd321b6071be4c54dc7332c0f77860edc40ce57d757b/regex-2025.9.18-cp313-cp313t-win32.whl", hash = "sha256:168be0d2f9b9d13076940b1ed774f98595b4e3c7fc54584bba81b3cc4181742e", size = 267281 }, - { url = "https://files.pythonhosted.org/packages/83/9a/2b5d9c8b307a451fd17068719d971d3634ca29864b89ed5c18e499446d4a/regex-2025.9.18-cp313-cp313t-win_amd64.whl", hash = "sha256:d59ecf3bb549e491c8104fea7313f3563c7b048e01287db0a90485734a70a730", size = 278724 }, - { url = "https://files.pythonhosted.org/packages/3d/70/177d31e8089a278a764f8ec9a3faac8d14a312d622a47385d4b43905806f/regex-2025.9.18-cp313-cp313t-win_arm64.whl", hash = "sha256:dbef80defe9fb21310948a2595420b36c6d641d9bea4c991175829b2cc4bc06a", size = 269771 }, +sdist = { url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hash = "sha256:c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4", size = 400917, upload-time = "2025-09-19T00:38:35.79Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a", size = 484832, upload-time = "2025-09-19T00:35:30.011Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8", size = 288994, upload-time = "2025-09-19T00:35:31.733Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414", size = 286619, upload-time = "2025-09-19T00:35:33.221Z" }, + { url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a", size = 792454, upload-time = "2025-09-19T00:35:35.361Z" }, + { url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4", size = 858723, upload-time = "2025-09-19T00:35:36.949Z" }, + { url = "https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a", size = 905899, upload-time = "2025-09-19T00:35:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f", size = 798981, upload-time = "2025-09-19T00:35:40.416Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a", size = 781900, upload-time = "2025-09-19T00:35:42.077Z" }, + { url = "https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9", size = 852952, upload-time = "2025-09-19T00:35:43.751Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2", size = 844355, upload-time = "2025-09-19T00:35:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95", size = 787254, upload-time = "2025-09-19T00:35:46.904Z" }, + { url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl", hash = "sha256:895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07", size = 264129, upload-time = "2025-09-19T00:35:48.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl", hash = "sha256:7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9", size = 276160, 
upload-time = "2025-09-19T00:36:00.45Z" }, + { url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl", hash = "sha256:fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df", size = 268471, upload-time = "2025-09-19T00:36:02.149Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e", size = 486335, upload-time = "2025-09-19T00:36:03.661Z" }, + { url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a", size = 289720, upload-time = "2025-09-19T00:36:05.471Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab", size = 287257, upload-time = "2025-09-19T00:36:07.072Z" }, + { url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5", size = 797463, upload-time = "2025-09-19T00:36:08.399Z" }, + { url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742", size = 862670, upload-time = "2025-09-19T00:36:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425", size = 910881, upload-time = "2025-09-19T00:36:12.223Z" }, + { url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352", size = 802011, upload-time = "2025-09-19T00:36:13.901Z" }, + { url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d", size = 786668, upload-time = "2025-09-19T00:36:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56", size = 856578, upload-time = "2025-09-19T00:36:16.845Z" }, + { url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e", size = 849017, upload-time = "2025-09-19T00:36:18.597Z" }, + { url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282", size = 788150, upload-time = "2025-09-19T00:36:20.464Z" }, + { url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl", hash = "sha256:e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459", size = 264536, upload-time = "2025-09-19T00:36:21.922Z" }, + { url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl", hash = "sha256:3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77", size = 275501, upload-time = "2025-09-19T00:36:23.4Z" }, + { url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl", hash = "sha256:032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5", size = 268601, upload-time = "2025-09-19T00:36:25.092Z" }, ] [[package]] @@ -3438,9 +3078,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517 } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] [[package]] @@ -3450,9 +3090,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/96/fb6dbfebb524d5601d359a47c78fe7ba1eef90fc4096404aa60c9a906fbb/requirements_parser-0.13.0.tar.gz", hash = "sha256:0843119ca2cb2331de4eb31b10d70462e39ace698fd660a915c247d2301a4418", size = 22630 } +sdist = { url = "https://files.pythonhosted.org/packages/95/96/fb6dbfebb524d5601d359a47c78fe7ba1eef90fc4096404aa60c9a906fbb/requirements_parser-0.13.0.tar.gz", hash = "sha256:0843119ca2cb2331de4eb31b10d70462e39ace698fd660a915c247d2301a4418", size = 22630, upload-time = "2025-05-21T13:42:05.464Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/60/50fbb6ffb35f733654466f1a90d162bcbea358adc3b0871339254fbc37b2/requirements_parser-0.13.0-py3-none-any.whl", hash = "sha256:2b3173faecf19ec5501971b7222d38f04cb45bb9d87d0ad629ca71e2e62ded14", size = 14782 }, + { url = 
"https://files.pythonhosted.org/packages/bd/60/50fbb6ffb35f733654466f1a90d162bcbea358adc3b0871339254fbc37b2/requirements_parser-0.13.0-py3-none-any.whl", hash = "sha256:2b3173faecf19ec5501971b7222d38f04cb45bb9d87d0ad629ca71e2e62ded14", size = 14782, upload-time = "2025-05-21T13:42:04.007Z" }, ] [[package]] @@ -3462,18 +3102,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513 } +sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490 }, + { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" }, ] [[package]] name = "rfc3986-validator" version = "0.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/da/88/f270de456dd7d11dcc808abfa291ecdd3f45ff44e3b549ffa01b126464d0/rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055", size = 6760 } +sdist = { url = "https://files.pythonhosted.org/packages/da/88/f270de456dd7d11dcc808abfa291ecdd3f45ff44e3b549ffa01b126464d0/rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055", size = 6760, upload-time = "2019-10-28T16:00:19.144Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/51/17023c0f8f1869d8806b979a2bffa3f861f26a3f1a66b094288323fba52f/rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9", size = 4242 }, + { url = "https://files.pythonhosted.org/packages/9e/51/17023c0f8f1869d8806b979a2bffa3f861f26a3f1a66b094288323fba52f/rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9", size = 4242, upload-time = "2019-10-28T16:00:13.976Z" }, ] [[package]] @@ -3483,9 +3123,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "lark" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2c/06/37c1a5557acf449e8e406a830a05bf885ac47d33270aec454ef78675008d/rfc3987_syntax-1.1.0.tar.gz", hash = "sha256:717a62cbf33cffdd16dfa3a497d81ce48a660ea691b1ddd7be710c22f00b4a0d", size = 14239 } +sdist = { url = "https://files.pythonhosted.org/packages/2c/06/37c1a5557acf449e8e406a830a05bf885ac47d33270aec454ef78675008d/rfc3987_syntax-1.1.0.tar.gz", hash = "sha256:717a62cbf33cffdd16dfa3a497d81ce48a660ea691b1ddd7be710c22f00b4a0d", size = 14239, upload-time = "2025-07-18T01:05:05.015Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/7e/71/44ce230e1b7fadd372515a97e32a83011f906ddded8d03e3c6aafbdedbb7/rfc3987_syntax-1.1.0-py3-none-any.whl", hash = "sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f", size = 8046 }, + { url = "https://files.pythonhosted.org/packages/7e/71/44ce230e1b7fadd372515a97e32a83011f906ddded8d03e3c6aafbdedbb7/rfc3987_syntax-1.1.0-py3-none-any.whl", hash = "sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f", size = 8046, upload-time = "2025-07-18T01:05:03.843Z" }, ] [[package]] @@ -3496,114 +3136,85 @@ dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990 } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393 }, + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, ] [[package]] name = "rpds-py" version = "0.27.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063 }, - { url = "https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210 }, - { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636 }, - { url = "https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341 }, - { url = "https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428 }, - { url = 
"https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923 }, - { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 384094 }, - { url = "https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093 }, - { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969 }, - { url = "https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302 }, - { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259 }, - { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983 }, - { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154 }, - { url = "https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627 }, - { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998 }, - { url = "https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795 }, - { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121 }, - 
{ url = "https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976 }, - { url = "https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953 }, - { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915 }, - { url = "https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883 }, - { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699 }, - { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713 }, - { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324 }, - { url = "https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646 }, - { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137 }, - { url = "https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343 }, - { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497 }, - { url = "https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790 }, - { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741 }, - { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574 }, - { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051 }, - { url = "https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395 }, - { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334 }, - { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691 }, - { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868 }, - { url = "https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469 }, - { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125 }, - { url = "https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341 }, - { url = "https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511 }, - { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736 }, - { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462 }, - { url = 
"https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034 }, - { url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392 }, - { url = "https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355 }, - { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138 }, - { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247 }, - { url = "https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699 }, - { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852 }, - { url = "https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582 }, - { url = "https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126 }, - { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486 }, - { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832 }, - { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249 }, - { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356 }, - { url = "https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300 }, - { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714 }, - { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943 }, - { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402 }, - { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084 }, - { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090 }, - { url = "https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519 }, - { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817 }, - { url = "https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240 }, - { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194 }, - { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086 }, - { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272 }, - { url = 
"https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003 }, - { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482 }, - { url = "https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523 }, +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063, upload-time = "2025-08-27T12:12:47.856Z" }, + { url = "https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210, upload-time = "2025-08-27T12:12:49.187Z" }, + { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636, upload-time = "2025-08-27T12:12:50.492Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341, upload-time = "2025-08-27T12:12:52.024Z" }, + { url = "https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428, upload-time = "2025-08-27T12:12:53.779Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923, upload-time = "2025-08-27T12:12:55.15Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 384094, upload-time = "2025-08-27T12:12:57.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093, upload-time = "2025-08-27T12:12:58.985Z" }, + { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969, upload-time = "2025-08-27T12:13:00.367Z" }, + { url = "https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302, upload-time = "2025-08-27T12:13:01.737Z" }, + { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259, upload-time = "2025-08-27T12:13:03.127Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983, upload-time = "2025-08-27T12:13:04.516Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154, upload-time = "2025-08-27T12:13:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627, upload-time = "2025-08-27T12:13:07.625Z" }, + { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998, upload-time = "2025-08-27T12:13:08.972Z" }, + { url = "https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976, upload-time = "2025-08-27T12:13:14.368Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953, upload-time = "2025-08-27T12:13:15.774Z" }, + { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915, upload-time = "2025-08-27T12:13:17.379Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883, upload-time = "2025-08-27T12:13:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699, upload-time = "2025-08-27T12:13:20.089Z" }, + { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713, upload-time = "2025-08-27T12:13:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324, upload-time = "2025-08-27T12:13:22.789Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646, upload-time = "2025-08-27T12:13:24.122Z" }, + { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137, upload-time = "2025-08-27T12:13:25.557Z" }, + { url = "https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343, upload-time = "2025-08-27T12:13:26.967Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497, upload-time = "2025-08-27T12:13:28.326Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790, upload-time = "2025-08-27T12:13:29.71Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402, upload-time = "2025-08-27T12:15:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084, upload-time = "2025-08-27T12:15:53.219Z" }, + { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090, upload-time = "2025-08-27T12:15:55.158Z" }, + { url = "https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519, upload-time = "2025-08-27T12:15:57.238Z" }, + { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817, upload-time = "2025-08-27T12:15:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240, upload-time = "2025-08-27T12:16:00.923Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194, upload-time = "2025-08-27T12:16:02.802Z" }, + { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086, upload-time = "2025-08-27T12:16:04.806Z" }, + { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272, upload-time = "2025-08-27T12:16:06.471Z" }, + { url = "https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003, upload-time = "2025-08-27T12:16:08.06Z" }, + { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482, upload-time = "2025-08-27T12:16:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523, upload-time = "2025-08-27T12:16:12.188Z" }, ] [[package]] name = "ruff" version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532 }, - { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768 }, - { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376 }, - { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055 }, - { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544 }, - { url = "https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 14001280 }, - { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286 }, - { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506 }, - { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384 }, - { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976 }, - { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850 }, - { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825 }, - { url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599 }, - { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828 }, - { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617 }, - { url = "https://files.pythonhosted.org/packages/73/e6/03b882225a1b0627e75339b420883dc3c90707a8917d2284abef7a58d317/ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543", size = 12367872 }, - { url = "https://files.pythonhosted.org/packages/41/77/56cf9cf01ea0bfcc662de72540812e5ba8e9563f33ef3d37ab2174892c47/ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2", size = 13464628 }, - { url = "https://files.pythonhosted.org/packages/c6/2a/65880dfd0e13f7f13a775998f34703674a4554906167dce02daf7865b954/ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730", size = 12565142 }, +sdist = { url = "https://files.pythonhosted.org/packages/41/b9/9bd84453ed6dd04688de9b3f3a4146a1698e8faae2ceeccce4e14c67ae17/ruff-0.14.0.tar.gz", hash = "sha256:62ec8969b7510f77945df916de15da55311fade8d6050995ff7f680afe582c57", size = 5452071, upload-time = "2025-10-07T18:21:55.763Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/4e/79d463a5f80654e93fa653ebfb98e0becc3f0e7cf6219c9ddedf1e197072/ruff-0.14.0-py3-none-linux_armv6l.whl", hash = "sha256:58e15bffa7054299becf4bab8a1187062c6f8cafbe9f6e39e0d5aface455d6b3", size = 12494532, upload-time = "2025-10-07T18:21:00.373Z" }, + { url = "https://files.pythonhosted.org/packages/ee/40/e2392f445ed8e02aa6105d49db4bfff01957379064c30f4811c3bf38aece/ruff-0.14.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:838d1b065f4df676b7c9957992f2304e41ead7a50a568185efd404297d5701e8", size = 13160768, upload-time = "2025-10-07T18:21:04.73Z" }, + { url = "https://files.pythonhosted.org/packages/75/da/2a656ea7c6b9bd14c7209918268dd40e1e6cea65f4bb9880eaaa43b055cd/ruff-0.14.0-py3-none-macosx_11_0_arm64.whl", 
hash = "sha256:703799d059ba50f745605b04638fa7e9682cc3da084b2092feee63500ff3d9b8", size = 12363376, upload-time = "2025-10-07T18:21:07.833Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/1ffef5a1875add82416ff388fcb7ea8b22a53be67a638487937aea81af27/ruff-0.14.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ba9a8925e90f861502f7d974cc60e18ca29c72bb0ee8bfeabb6ade35a3abde7", size = 12608055, upload-time = "2025-10-07T18:21:10.72Z" }, + { url = "https://files.pythonhosted.org/packages/4a/32/986725199d7cee510d9f1dfdf95bf1efc5fa9dd714d0d85c1fb1f6be3bc3/ruff-0.14.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e41f785498bd200ffc276eb9e1570c019c1d907b07cfb081092c8ad51975bbe7", size = 12318544, upload-time = "2025-10-07T18:21:13.741Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ed/4969cefd53315164c94eaf4da7cfba1f267dc275b0abdd593d11c90829a3/ruff-0.14.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30a58c087aef4584c193aebf2700f0fbcfc1e77b89c7385e3139956fa90434e2", size = 14001280, upload-time = "2025-10-07T18:21:16.411Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ad/96c1fc9f8854c37681c9613d825925c7f24ca1acfc62a4eb3896b50bacd2/ruff-0.14.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f8d07350bc7af0a5ce8812b7d5c1a7293cf02476752f23fdfc500d24b79b783c", size = 15027286, upload-time = "2025-10-07T18:21:19.577Z" }, + { url = "https://files.pythonhosted.org/packages/b3/00/1426978f97df4fe331074baf69615f579dc4e7c37bb4c6f57c2aad80c87f/ruff-0.14.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eec3bbbf3a7d5482b5c1f42d5fc972774d71d107d447919fca620b0be3e3b75e", size = 14451506, upload-time = "2025-10-07T18:21:22.779Z" }, + { url = "https://files.pythonhosted.org/packages/58/d5/9c1cea6e493c0cf0647674cca26b579ea9d2a213b74b5c195fbeb9678e15/ruff-0.14.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16b68e183a0e28e5c176d51004aaa40559e8f90065a10a559176713fcf435206", size = 13437384, upload-time = "2025-10-07T18:21:25.758Z" }, + { url = "https://files.pythonhosted.org/packages/29/b4/4cd6a4331e999fc05d9d77729c95503f99eae3ba1160469f2b64866964e3/ruff-0.14.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb732d17db2e945cfcbbc52af0143eda1da36ca8ae25083dd4f66f1542fdf82e", size = 13447976, upload-time = "2025-10-07T18:21:28.83Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c0/ac42f546d07e4f49f62332576cb845d45c67cf5610d1851254e341d563b6/ruff-0.14.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:c958f66ab884b7873e72df38dcabee03d556a8f2ee1b8538ee1c2bbd619883dd", size = 13682850, upload-time = "2025-10-07T18:21:31.842Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c4/4b0c9bcadd45b4c29fe1af9c5d1dc0ca87b4021665dfbe1c4688d407aa20/ruff-0.14.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7eb0499a2e01f6e0c285afc5bac43ab380cbfc17cd43a2e1dd10ec97d6f2c42d", size = 12449825, upload-time = "2025-10-07T18:21:35.074Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a8/e2e76288e6c16540fa820d148d83e55f15e994d852485f221b9524514730/ruff-0.14.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c63b2d99fafa05efca0ab198fd48fa6030d57e4423df3f18e03aa62518c565f", size = 12272599, upload-time = "2025-10-07T18:21:38.08Z" }, + { url = "https://files.pythonhosted.org/packages/18/14/e2815d8eff847391af632b22422b8207704222ff575dec8d044f9ab779b2/ruff-0.14.0-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:668fce701b7a222f3f5327f86909db2bbe99c30877c8001ff934c5413812ac02", size = 13193828, upload-time = "2025-10-07T18:21:41.216Z" }, + { url = "https://files.pythonhosted.org/packages/44/c6/61ccc2987cf0aecc588ff8f3212dea64840770e60d78f5606cd7dc34de32/ruff-0.14.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a86bf575e05cb68dcb34e4c7dfe1064d44d3f0c04bbc0491949092192b515296", size = 13628617, upload-time = "2025-10-07T18:21:44.04Z" }, + { url = "https://files.pythonhosted.org/packages/73/e6/03b882225a1b0627e75339b420883dc3c90707a8917d2284abef7a58d317/ruff-0.14.0-py3-none-win32.whl", hash = "sha256:7450a243d7125d1c032cb4b93d9625dea46c8c42b4f06c6b709baac168e10543", size = 12367872, upload-time = "2025-10-07T18:21:46.67Z" }, + { url = "https://files.pythonhosted.org/packages/41/77/56cf9cf01ea0bfcc662de72540812e5ba8e9563f33ef3d37ab2174892c47/ruff-0.14.0-py3-none-win_amd64.whl", hash = "sha256:ea95da28cd874c4d9c922b39381cbd69cb7e7b49c21b8152b014bd4f52acddc2", size = 13464628, upload-time = "2025-10-07T18:21:50.318Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2a/65880dfd0e13f7f13a775998f34703674a4554906167dce02daf7865b954/ruff-0.14.0-py3-none-win_arm64.whl", hash = "sha256:f42c9495f5c13ff841b1da4cb3c2a42075409592825dada7c5885c2c844ac730", size = 12565142, upload-time = "2025-10-07T18:21:53.577Z" }, ] [[package]] @@ -3615,45 +3226,45 @@ dependencies = [ { name = "jinja2" }, { name = "packaging" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/fd/a1cdcd6e9e02e03b2fa1bf5cc5eccbe5a180ef02d14393beffb0ee7e7318/semversioner-2.0.8.tar.gz", hash = "sha256:2da3946441a0279d71e34be0122e38d339c6df3399e91487dfff4826d151ee05", size = 15850 } +sdist = { url = "https://files.pythonhosted.org/packages/2e/fd/a1cdcd6e9e02e03b2fa1bf5cc5eccbe5a180ef02d14393beffb0ee7e7318/semversioner-2.0.8.tar.gz", hash = "sha256:2da3946441a0279d71e34be0122e38d339c6df3399e91487dfff4826d151ee05", size = 15850, upload-time = "2025-07-23T12:36:08.468Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/1b/29615eec4934f21e6146d117a9459a0c5aa7c5f60b80039f2bddc3bfae1e/semversioner-2.0.8-py2.py3-none-any.whl", hash = "sha256:4d99c8f08e2fabb830470d1e04503b26048adc65bb3921fa408078a331dc85da", size = 13654 }, + { url = "https://files.pythonhosted.org/packages/00/1b/29615eec4934f21e6146d117a9459a0c5aa7c5f60b80039f2bddc3bfae1e/semversioner-2.0.8-py2.py3-none-any.whl", hash = "sha256:4d99c8f08e2fabb830470d1e04503b26048adc65bb3921fa408078a331dc85da", size = 13654, upload-time = "2025-07-23T12:36:07.655Z" }, ] [[package]] name = "send2trash" version = "1.8.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fd/3a/aec9b02217bb79b87bbc1a21bc6abc51e3d5dcf65c30487ac96c0908c722/Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf", size = 17394 } +sdist = { url = "https://files.pythonhosted.org/packages/fd/3a/aec9b02217bb79b87bbc1a21bc6abc51e3d5dcf65c30487ac96c0908c722/Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf", size = 17394, upload-time = "2024-04-07T00:01:09.267Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/40/b0/4562db6223154aa4e22f939003cb92514c79f3d4dccca3444253fd17f902/Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9", size = 18072 }, + { url = 
"https://files.pythonhosted.org/packages/40/b0/4562db6223154aa4e22f939003cb92514c79f3d4dccca3444253fd17f902/Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9", size = 18072, upload-time = "2024-04-07T00:01:07.438Z" }, ] [[package]] name = "setuptools" version = "80.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958 } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486 }, + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, ] [[package]] name = "shellingham" version = "1.5.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = 
"sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] [[package]] @@ -3663,27 +3274,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "wrapt" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/16/be/bf2d60280a9d7fac98ece2150a22538fa4332cda67d04d9618c8406f791e/smart_open-7.3.1.tar.gz", hash = "sha256:b33fee8dffd206f189d5e704106a8723afb4210d2ff47e0e1f7fbe436187a990", size = 51405 } +sdist = { url = "https://files.pythonhosted.org/packages/16/be/bf2d60280a9d7fac98ece2150a22538fa4332cda67d04d9618c8406f791e/smart_open-7.3.1.tar.gz", hash = "sha256:b33fee8dffd206f189d5e704106a8723afb4210d2ff47e0e1f7fbe436187a990", size = 51405, upload-time = "2025-09-08T10:03:53.726Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/d9/460cf1d58945dd771c228c29d5664f431dfc4060d3d092fed40546b11472/smart_open-7.3.1-py3-none-any.whl", hash = "sha256:e243b2e7f69d6c0c96dd763d6fbbedbb4e0e4fc6d74aa007acc5b018d523858c", size = 61722 }, + { url = "https://files.pythonhosted.org/packages/e5/d9/460cf1d58945dd771c228c29d5664f431dfc4060d3d092fed40546b11472/smart_open-7.3.1-py3-none-any.whl", hash = "sha256:e243b2e7f69d6c0c96dd763d6fbbedbb4e0e4fc6d74aa007acc5b018d523858c", size = 61722, upload-time = "2025-09-08T10:03:52.02Z" }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] [[package]] name = "soupsieve" version = "2.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472 } +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679 }, + { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, ] [[package]] @@ -3711,47 +3322,40 @@ dependencies = [ { name = "wasabi" }, { name = "weasel" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1e/9e/fb4e1cefe3fbd51ea6a243e5a3d2bc629baa9a28930bf4be6fe5672fa1ca/spacy-3.8.7.tar.gz", hash = "sha256:700fd174c6c552276be142c48e70bb53cae24c4dd86003c4432af9cb93e4c908", size = 1316143 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/c5/5fbb3a4e694d4855a5bab87af9664377c48b89691f180ad3cde4faeaf35c/spacy-3.8.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bdff8b9b556468a6dd527af17f0ddf9fb0b0bee92ee7703339ddf542361cff98", size = 6746140 }, - { url = "https://files.pythonhosted.org/packages/03/2a/43afac516eb82409ca47d7206f982beaf265d2ba06a72ca07cf06b290c20/spacy-3.8.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9194b7cf015ed9b4450ffb162da49c8a9305e76b468de036b0948abdfc748a37", size = 6392440 }, - { url = "https://files.pythonhosted.org/packages/6f/83/2ea68c18e2b1b9a6f6b30ef63eb9d07e979626b9595acfdb5394f18923c4/spacy-3.8.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7dc38b78d48b9c2a80a3eea95f776304993f63fc307f07cdd104441442f92f1e", size = 32699126 }, - { url = "https://files.pythonhosted.org/packages/0a/0a/bb90e9aa0b3c527876627567d82517aabab08006ccf63796c33b0242254d/spacy-3.8.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e43bd70772751b8fc7a14f338d087a3d297195d43d171832923ef66204b23ab", size = 33008865 }, - { url = "https://files.pythonhosted.org/packages/39/dd/8e906ba378457107ab0394976ea9f7b12fdb2cad682ef1a2ccf473d61e5f/spacy-3.8.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c402bf5dcf345fd96d202378c54bc345219681e3531f911d99567d569328c45f", size = 31933169 }, - { url = "https://files.pythonhosted.org/packages/c9/b5/42df07eb837a923fbb42509864d5c7c2072d010de933dccdfb3c655b3a76/spacy-3.8.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4234189861e486d86f1269e50542d87e8a6391a1ee190652479cf1a793db115f", size = 32776322 }, - { url = "https://files.pythonhosted.org/packages/92/e7/8176484801c67dcd814f141991fe0a3c9b5b4a3583ea30c2062e93d1aa6b/spacy-3.8.7-cp311-cp311-win_amd64.whl", hash = "sha256:e9d12e2eb7f36bc11dd9edae011032fe49ea100d63e83177290d3cbd80eaa650", size = 14938936 }, - { url = "https://files.pythonhosted.org/packages/a5/10/89852f40f926e0902c11c34454493ba0d15530b322711e754b89a6d7dfe6/spacy-3.8.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:88b397e37793cea51df298e6c651a763e49877a25bead5ba349761531a456687", size = 6265335 }, - { url = "https://files.pythonhosted.org/packages/16/fb/b5d54522969a632c06f4af354763467553b66d5bf0671ac39f3cceb3fd54/spacy-3.8.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f70b676955fa6959347ca86ed6edd8ff0d6eb2ba20561fdfec76924bd3e540f9", size = 5906035 }, - { url = "https://files.pythonhosted.org/packages/3a/03/70f06753fd65081404ade30408535eb69f627a36ffce2107116d1aa16239/spacy-3.8.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6c4b5a624797ade30c25b5b69daa35a93ee24bcc56bd79b0884b2565f76f35d6", size = 33420084 }, - { url = "https://files.pythonhosted.org/packages/f9/19/b60e1ebf4985ee2b33d85705b89a5024942b65dad04dbdc3fb46f168b410/spacy-3.8.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9d83e006df66decccefa3872fa958b3756228fb216d83783595444cf42ca10c", size = 33922188 }, - { url = "https://files.pythonhosted.org/packages/8f/a3/1fb1a49dc6d982d96fffc30c3a31bb431526008eea72ac3773f6518720a6/spacy-3.8.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dca25deba54f3eb5dcfbf63bf16e613e6c601da56f91c4a902d38533c098941", size = 31939285 }, - { url = "https://files.pythonhosted.org/packages/2d/55/6cf1aff8e5c01ee683e828f3ccd9282d2aff7ca1143a9349ee3d0c1291ff/spacy-3.8.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5eef3f805a1c118d9b709a23e2d378f5f20da5a0d6258c9cfdc87c4cb234b4fc", size = 32988845 }, - { url = "https://files.pythonhosted.org/packages/8c/47/c17ee61b51aa8497d8af0999224b4b62485111a55ec105a06886685b2c68/spacy-3.8.7-cp312-cp312-win_amd64.whl", hash = "sha256:25d7a68e445200c9e9dc0044f8b7278ec0ef01ccc7cb5a95d1de2bd8e3ed6be2", size = 13918682 }, - { url = "https://files.pythonhosted.org/packages/2a/95/7125bea6d432c601478bf922f7a568762c8be425bbde5b66698260ab0358/spacy-3.8.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dda7d57f42ec57c19fbef348095a9c82504e4777bca7b8db4b0d8318ba280fc7", size = 6235950 }, - { url = "https://files.pythonhosted.org/packages/96/c3/d2362846154d4d341136774831605df02d61f49ac637524a15f4f2794874/spacy-3.8.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:de0e0bddb810ed05bce44bcb91460eabe52bc56323da398d2ca74288a906da35", size = 5878106 }, - { url = "https://files.pythonhosted.org/packages/50/b6/b2943acfbfc4fc12642dac9feb571e712dd1569ab481db8f3daedee045fe/spacy-3.8.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a2e58f92b684465777a7c1a65d5578b1dc36fe55c48d9964fb6d46cc9449768", size = 33085866 }, - { url = "https://files.pythonhosted.org/packages/65/98/c4415cbb217ac0b502dbb3372136015c699dd16a0c47cd6d338cd15f4bed/spacy-3.8.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46330da2eb357d6979f40ea8fc16ee5776ee75cd0c70aac2a4ea10c80364b8f3", size = 33398424 }, - { url = "https://files.pythonhosted.org/packages/12/45/12a198858f1f11c21844876e039ba90df59d550527c72996d418c1faf78d/spacy-3.8.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:86b6a6ad23ca5440ef9d29c2b1e3125e28722c927db612ae99e564d49202861c", size = 31530066 }, - { url = "https://files.pythonhosted.org/packages/9c/df/80524f99822eb96c9649200042ec5912357eec100cf0cd678a2e9ef0ecb3/spacy-3.8.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ccfe468cbb370888153df145ce3693af8e54dae551940df49057258081b2112f", size = 32613343 }, - { url = "https://files.pythonhosted.org/packages/02/99/881f6f24c279a5a70b8d69aaf8266fd411a0a58fd1c8848112aaa348f6f6/spacy-3.8.7-cp313-cp313-win_amd64.whl", hash = "sha256:ca81e416ff35209769e8b5dd5d13acc52e4f57dd9d028364bccbbe157c2ae86b", size = 13911250 }, +sdist = { url = "https://files.pythonhosted.org/packages/1e/9e/fb4e1cefe3fbd51ea6a243e5a3d2bc629baa9a28930bf4be6fe5672fa1ca/spacy-3.8.7.tar.gz", hash = "sha256:700fd174c6c552276be142c48e70bb53cae24c4dd86003c4432af9cb93e4c908", size = 1316143, upload-time = "2025-05-23T08:55:39.538Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/29/c5/5fbb3a4e694d4855a5bab87af9664377c48b89691f180ad3cde4faeaf35c/spacy-3.8.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bdff8b9b556468a6dd527af17f0ddf9fb0b0bee92ee7703339ddf542361cff98", size = 6746140, upload-time = "2025-05-23T08:54:23.483Z" }, + { url = "https://files.pythonhosted.org/packages/03/2a/43afac516eb82409ca47d7206f982beaf265d2ba06a72ca07cf06b290c20/spacy-3.8.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9194b7cf015ed9b4450ffb162da49c8a9305e76b468de036b0948abdfc748a37", size = 6392440, upload-time = "2025-05-23T08:54:25.12Z" }, + { url = "https://files.pythonhosted.org/packages/6f/83/2ea68c18e2b1b9a6f6b30ef63eb9d07e979626b9595acfdb5394f18923c4/spacy-3.8.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7dc38b78d48b9c2a80a3eea95f776304993f63fc307f07cdd104441442f92f1e", size = 32699126, upload-time = "2025-05-23T08:54:27.385Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0a/bb90e9aa0b3c527876627567d82517aabab08006ccf63796c33b0242254d/spacy-3.8.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e43bd70772751b8fc7a14f338d087a3d297195d43d171832923ef66204b23ab", size = 33008865, upload-time = "2025-05-23T08:54:30.248Z" }, + { url = "https://files.pythonhosted.org/packages/39/dd/8e906ba378457107ab0394976ea9f7b12fdb2cad682ef1a2ccf473d61e5f/spacy-3.8.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c402bf5dcf345fd96d202378c54bc345219681e3531f911d99567d569328c45f", size = 31933169, upload-time = "2025-05-23T08:54:33.199Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b5/42df07eb837a923fbb42509864d5c7c2072d010de933dccdfb3c655b3a76/spacy-3.8.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4234189861e486d86f1269e50542d87e8a6391a1ee190652479cf1a793db115f", size = 32776322, upload-time = "2025-05-23T08:54:36.891Z" }, + { url = "https://files.pythonhosted.org/packages/92/e7/8176484801c67dcd814f141991fe0a3c9b5b4a3583ea30c2062e93d1aa6b/spacy-3.8.7-cp311-cp311-win_amd64.whl", hash = "sha256:e9d12e2eb7f36bc11dd9edae011032fe49ea100d63e83177290d3cbd80eaa650", size = 14938936, upload-time = "2025-05-23T08:54:40.322Z" }, + { url = "https://files.pythonhosted.org/packages/a5/10/89852f40f926e0902c11c34454493ba0d15530b322711e754b89a6d7dfe6/spacy-3.8.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:88b397e37793cea51df298e6c651a763e49877a25bead5ba349761531a456687", size = 6265335, upload-time = "2025-05-23T08:54:42.876Z" }, + { url = "https://files.pythonhosted.org/packages/16/fb/b5d54522969a632c06f4af354763467553b66d5bf0671ac39f3cceb3fd54/spacy-3.8.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f70b676955fa6959347ca86ed6edd8ff0d6eb2ba20561fdfec76924bd3e540f9", size = 5906035, upload-time = "2025-05-23T08:54:44.824Z" }, + { url = "https://files.pythonhosted.org/packages/3a/03/70f06753fd65081404ade30408535eb69f627a36ffce2107116d1aa16239/spacy-3.8.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4b5a624797ade30c25b5b69daa35a93ee24bcc56bd79b0884b2565f76f35d6", size = 33420084, upload-time = "2025-05-23T08:54:46.889Z" }, + { url = "https://files.pythonhosted.org/packages/f9/19/b60e1ebf4985ee2b33d85705b89a5024942b65dad04dbdc3fb46f168b410/spacy-3.8.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9d83e006df66decccefa3872fa958b3756228fb216d83783595444cf42ca10c", size = 33922188, upload-time = "2025-05-23T08:54:49.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/a3/1fb1a49dc6d982d96fffc30c3a31bb431526008eea72ac3773f6518720a6/spacy-3.8.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dca25deba54f3eb5dcfbf63bf16e613e6c601da56f91c4a902d38533c098941", size = 31939285, upload-time = "2025-05-23T08:54:53.162Z" }, + { url = "https://files.pythonhosted.org/packages/2d/55/6cf1aff8e5c01ee683e828f3ccd9282d2aff7ca1143a9349ee3d0c1291ff/spacy-3.8.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5eef3f805a1c118d9b709a23e2d378f5f20da5a0d6258c9cfdc87c4cb234b4fc", size = 32988845, upload-time = "2025-05-23T08:54:57.776Z" }, + { url = "https://files.pythonhosted.org/packages/8c/47/c17ee61b51aa8497d8af0999224b4b62485111a55ec105a06886685b2c68/spacy-3.8.7-cp312-cp312-win_amd64.whl", hash = "sha256:25d7a68e445200c9e9dc0044f8b7278ec0ef01ccc7cb5a95d1de2bd8e3ed6be2", size = 13918682, upload-time = "2025-05-23T08:55:00.387Z" }, ] [[package]] name = "spacy-legacy" version = "3.0.12" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d9/79/91f9d7cc8db5642acad830dcc4b49ba65a7790152832c4eceb305e46d681/spacy-legacy-3.0.12.tar.gz", hash = "sha256:b37d6e0c9b6e1d7ca1cf5bc7152ab64a4c4671f59c85adaf7a3fcb870357a774", size = 23806 } +sdist = { url = "https://files.pythonhosted.org/packages/d9/79/91f9d7cc8db5642acad830dcc4b49ba65a7790152832c4eceb305e46d681/spacy-legacy-3.0.12.tar.gz", hash = "sha256:b37d6e0c9b6e1d7ca1cf5bc7152ab64a4c4671f59c85adaf7a3fcb870357a774", size = 23806, upload-time = "2023-01-23T09:04:15.104Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/55/12e842c70ff8828e34e543a2c7176dac4da006ca6901c9e8b43efab8bc6b/spacy_legacy-3.0.12-py2.py3-none-any.whl", hash = "sha256:476e3bd0d05f8c339ed60f40986c07387c0a71479245d6d0f4298dbd52cda55f", size = 29971 }, + { url = "https://files.pythonhosted.org/packages/c3/55/12e842c70ff8828e34e543a2c7176dac4da006ca6901c9e8b43efab8bc6b/spacy_legacy-3.0.12-py2.py3-none-any.whl", hash = "sha256:476e3bd0d05f8c339ed60f40986c07387c0a71479245d6d0f4298dbd52cda55f", size = 29971, upload-time = "2023-01-23T09:04:13.45Z" }, ] [[package]] name = "spacy-loggers" version = "1.0.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/67/3d/926db774c9c98acf66cb4ed7faf6c377746f3e00b84b700d0868b95d0712/spacy-loggers-1.0.5.tar.gz", hash = "sha256:d60b0bdbf915a60e516cc2e653baeff946f0cfc461b452d11a4d5458c6fe5f24", size = 20811 } +sdist = { url = "https://files.pythonhosted.org/packages/67/3d/926db774c9c98acf66cb4ed7faf6c377746f3e00b84b700d0868b95d0712/spacy-loggers-1.0.5.tar.gz", hash = "sha256:d60b0bdbf915a60e516cc2e653baeff946f0cfc461b452d11a4d5458c6fe5f24", size = 20811, upload-time = "2023-09-11T12:26:52.323Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/78/d1a1a026ef3af911159398c939b1509d5c36fe524c7b644f34a5146c4e16/spacy_loggers-1.0.5-py3-none-any.whl", hash = "sha256:196284c9c446cc0cdb944005384270d775fdeaf4f494d8e269466cfa497ef645", size = 22343 }, + { url = "https://files.pythonhosted.org/packages/33/78/d1a1a026ef3af911159398c939b1509d5c36fe524c7b644f34a5146c4e16/spacy_loggers-1.0.5-py3-none-any.whl", hash = "sha256:196284c9c446cc0cdb944005384270d775fdeaf4f494d8e269466cfa497ef645", size = 22343, upload-time = "2023-09-11T12:26:50.586Z" }, ] [[package]] @@ -3761,29 +3365,22 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "catalogue" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/b7/e8/eb51b1349f50bac0222398af0942613fdc9d1453ae67cbe4bf9936a1a54b/srsly-2.5.1.tar.gz", hash = "sha256:ab1b4bf6cf3e29da23dae0493dd1517fb787075206512351421b89b4fc27c77e", size = 466464 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/df/9c/a248bb49de499fe0990e3cb0fb341c2373d8863ef9a8b5799353cade5731/srsly-2.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58f0736794ce00a71d62a39cbba1d62ea8d5be4751df956e802d147da20ecad7", size = 635917 }, - { url = "https://files.pythonhosted.org/packages/41/47/1bdaad84502df973ecb8ca658117234cf7fb20e1dec60da71dce82de993f/srsly-2.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8269c40859806d71920396d185f4f38dc985cdb6a28d3a326a701e29a5f629", size = 634374 }, - { url = "https://files.pythonhosted.org/packages/e5/2a/d73c71989fcf2a6d1fa518d75322aff4db01a8763f167f8c5e00aac11097/srsly-2.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889905900401fefc1032e22b73aecbed8b4251aa363f632b2d1f86fc16f1ad8e", size = 1108390 }, - { url = "https://files.pythonhosted.org/packages/35/a3/9eda9997a8bd011caed18fdaa5ce606714eb06d8dab587ed0522b3e92ab1/srsly-2.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf454755f22589df49c25dc799d8af7b47dce3d861dded35baf0f0b6ceab4422", size = 1110712 }, - { url = "https://files.pythonhosted.org/packages/8a/ef/4b50bc05d06349f905b27f824cc23b652098efd4be19aead3af4981df647/srsly-2.5.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc0607c8a59013a51dde5c1b4e465558728e9e0a35dcfa73c7cbefa91a0aad50", size = 1081244 }, - { url = "https://files.pythonhosted.org/packages/90/af/d4a2512d9a5048d2b18efead39d4c4404bddd4972935bbc68211292a736c/srsly-2.5.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d5421ba3ab3c790e8b41939c51a1d0f44326bfc052d7a0508860fb79a47aee7f", size = 1091692 }, - { url = "https://files.pythonhosted.org/packages/bb/da/657a685f63028dcb00ccdc4ac125ed347c8bff6fa0dab6a9eb3dc45f3223/srsly-2.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:b96ea5a9a0d0379a79c46d255464a372fb14c30f59a8bc113e4316d131a530ab", size = 632627 }, - { url = "https://files.pythonhosted.org/packages/fb/f6/bebc20d75bd02121fc0f65ad8c92a5dd2570e870005e940faa55a263e61a/srsly-2.5.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:683b54ed63d7dfee03bc2abc4b4a5f2152f81ec217bbadbac01ef1aaf2a75790", size = 636717 }, - { url = "https://files.pythonhosted.org/packages/b6/e8/9372317a4742c70b87b413335adfcdfb2bee4f88f3faba89fabb9e6abf21/srsly-2.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:459d987130e57e83ce9e160899afbeb871d975f811e6958158763dd9a8a20f23", size = 634697 }, - { url = "https://files.pythonhosted.org/packages/d5/00/c6a7b99ab27b051a27bd26fe1a8c1885225bb8980282bf9cb99f70610368/srsly-2.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:184e3c98389aab68ff04aab9095bd5f1a8e5a72cc5edcba9d733bac928f5cf9f", size = 1134655 }, - { url = "https://files.pythonhosted.org/packages/c2/e6/861459e8241ec3b78c111081bd5efa414ef85867e17c45b6882954468d6e/srsly-2.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00c2a3e4856e63b7efd47591d049aaee8e5a250e098917f50d93ea68853fab78", size = 1143544 }, - { url = "https://files.pythonhosted.org/packages/2d/85/8448fe874dd2042a4eceea5315cfff3af03ac77ff5073812071852c4e7e2/srsly-2.5.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:366b4708933cd8d6025c13c2cea3331f079c7bb5c25ec76fca392b6fc09818a0", size = 1098330 }, - { url = 
"https://files.pythonhosted.org/packages/ef/7e/04d0e1417da140b2ac4053a3d4fcfc86cd59bf4829f69d370bb899f74d5d/srsly-2.5.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8a0b03c64eb6e150d772c5149befbadd981cc734ab13184b0561c17c8cef9b1", size = 1110670 }, - { url = "https://files.pythonhosted.org/packages/96/1a/a8cd627eaa81a91feb6ceab50155f4ceff3eef6107916cb87ef796958427/srsly-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:7952538f6bba91b9d8bf31a642ac9e8b9ccc0ccbb309feb88518bfb84bb0dc0d", size = 632598 }, - { url = "https://files.pythonhosted.org/packages/42/94/cab36845aad6e2c22ecee1178accaa365657296ff87305b805648fd41118/srsly-2.5.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84b372f7ef1604b4a5b3cee1571993931f845a5b58652ac01bcb32c52586d2a8", size = 634883 }, - { url = "https://files.pythonhosted.org/packages/67/8b/501f51f4eaee7e1fd7327764799cb0a42f5d0de042a97916d30dbff770fc/srsly-2.5.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6ac3944c112acb3347a39bfdc2ebfc9e2d4bace20fe1c0b764374ac5b83519f2", size = 632842 }, - { url = "https://files.pythonhosted.org/packages/07/be/5b8fce4829661e070a7d3e262d2e533f0e297b11b8993d57240da67d7330/srsly-2.5.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6118f9c4b221cde0a990d06a42c8a4845218d55b425d8550746fe790acf267e9", size = 1118516 }, - { url = "https://files.pythonhosted.org/packages/91/60/a34e97564eac352c0e916c98f44b6f566b7eb6a9fb60bcd60ffa98530762/srsly-2.5.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7481460110d9986781d9e4ac0f5f991f1d6839284a80ad268625f9a23f686950", size = 1127974 }, - { url = "https://files.pythonhosted.org/packages/70/a2/f642334db0cabd187fa86b8773257ee6993c6009338a6831d4804e2c5b3c/srsly-2.5.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e57b8138082f09e35db60f99757e16652489e9e3692471d8e0c39aa95180688", size = 1086098 }, - { url = "https://files.pythonhosted.org/packages/0d/9b/be48e185c5a010e71b5135e4cdf317ff56b8ac4bc08f394bbf882ac13b05/srsly-2.5.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bab90b85a63a1fe0bbc74d373c8bb9bb0499ddfa89075e0ebe8d670f12d04691", size = 1100354 }, - { url = "https://files.pythonhosted.org/packages/3a/e2/745aeba88a8513017fbac2fd2f9f07b8a36065e51695f818541eb795ec0c/srsly-2.5.1-cp313-cp313-win_amd64.whl", hash = "sha256:e73712be1634b5e1de6f81c273a7d47fe091ad3c79dc779c03d3416a5c117cee", size = 630634 }, +sdist = { url = "https://files.pythonhosted.org/packages/b7/e8/eb51b1349f50bac0222398af0942613fdc9d1453ae67cbe4bf9936a1a54b/srsly-2.5.1.tar.gz", hash = "sha256:ab1b4bf6cf3e29da23dae0493dd1517fb787075206512351421b89b4fc27c77e", size = 466464, upload-time = "2025-01-17T09:26:26.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/9c/a248bb49de499fe0990e3cb0fb341c2373d8863ef9a8b5799353cade5731/srsly-2.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58f0736794ce00a71d62a39cbba1d62ea8d5be4751df956e802d147da20ecad7", size = 635917, upload-time = "2025-01-17T09:25:25.109Z" }, + { url = "https://files.pythonhosted.org/packages/41/47/1bdaad84502df973ecb8ca658117234cf7fb20e1dec60da71dce82de993f/srsly-2.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8269c40859806d71920396d185f4f38dc985cdb6a28d3a326a701e29a5f629", size = 634374, upload-time = "2025-01-17T09:25:26.609Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2a/d73c71989fcf2a6d1fa518d75322aff4db01a8763f167f8c5e00aac11097/srsly-2.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:889905900401fefc1032e22b73aecbed8b4251aa363f632b2d1f86fc16f1ad8e", size = 1108390, upload-time = "2025-01-17T09:25:29.32Z" }, + { url = "https://files.pythonhosted.org/packages/35/a3/9eda9997a8bd011caed18fdaa5ce606714eb06d8dab587ed0522b3e92ab1/srsly-2.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf454755f22589df49c25dc799d8af7b47dce3d861dded35baf0f0b6ceab4422", size = 1110712, upload-time = "2025-01-17T09:25:31.051Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ef/4b50bc05d06349f905b27f824cc23b652098efd4be19aead3af4981df647/srsly-2.5.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc0607c8a59013a51dde5c1b4e465558728e9e0a35dcfa73c7cbefa91a0aad50", size = 1081244, upload-time = "2025-01-17T09:25:32.611Z" }, + { url = "https://files.pythonhosted.org/packages/90/af/d4a2512d9a5048d2b18efead39d4c4404bddd4972935bbc68211292a736c/srsly-2.5.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d5421ba3ab3c790e8b41939c51a1d0f44326bfc052d7a0508860fb79a47aee7f", size = 1091692, upload-time = "2025-01-17T09:25:34.15Z" }, + { url = "https://files.pythonhosted.org/packages/bb/da/657a685f63028dcb00ccdc4ac125ed347c8bff6fa0dab6a9eb3dc45f3223/srsly-2.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:b96ea5a9a0d0379a79c46d255464a372fb14c30f59a8bc113e4316d131a530ab", size = 632627, upload-time = "2025-01-17T09:25:37.36Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f6/bebc20d75bd02121fc0f65ad8c92a5dd2570e870005e940faa55a263e61a/srsly-2.5.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:683b54ed63d7dfee03bc2abc4b4a5f2152f81ec217bbadbac01ef1aaf2a75790", size = 636717, upload-time = "2025-01-17T09:25:40.236Z" }, + { url = "https://files.pythonhosted.org/packages/b6/e8/9372317a4742c70b87b413335adfcdfb2bee4f88f3faba89fabb9e6abf21/srsly-2.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:459d987130e57e83ce9e160899afbeb871d975f811e6958158763dd9a8a20f23", size = 634697, upload-time = "2025-01-17T09:25:43.605Z" }, + { url = "https://files.pythonhosted.org/packages/d5/00/c6a7b99ab27b051a27bd26fe1a8c1885225bb8980282bf9cb99f70610368/srsly-2.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:184e3c98389aab68ff04aab9095bd5f1a8e5a72cc5edcba9d733bac928f5cf9f", size = 1134655, upload-time = "2025-01-17T09:25:45.238Z" }, + { url = "https://files.pythonhosted.org/packages/c2/e6/861459e8241ec3b78c111081bd5efa414ef85867e17c45b6882954468d6e/srsly-2.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00c2a3e4856e63b7efd47591d049aaee8e5a250e098917f50d93ea68853fab78", size = 1143544, upload-time = "2025-01-17T09:25:47.485Z" }, + { url = "https://files.pythonhosted.org/packages/2d/85/8448fe874dd2042a4eceea5315cfff3af03ac77ff5073812071852c4e7e2/srsly-2.5.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:366b4708933cd8d6025c13c2cea3331f079c7bb5c25ec76fca392b6fc09818a0", size = 1098330, upload-time = "2025-01-17T09:25:52.55Z" }, + { url = "https://files.pythonhosted.org/packages/ef/7e/04d0e1417da140b2ac4053a3d4fcfc86cd59bf4829f69d370bb899f74d5d/srsly-2.5.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c8a0b03c64eb6e150d772c5149befbadd981cc734ab13184b0561c17c8cef9b1", size = 1110670, upload-time = "2025-01-17T09:25:54.02Z" }, + { url = "https://files.pythonhosted.org/packages/96/1a/a8cd627eaa81a91feb6ceab50155f4ceff3eef6107916cb87ef796958427/srsly-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:7952538f6bba91b9d8bf31a642ac9e8b9ccc0ccbb309feb88518bfb84bb0dc0d", size = 632598, 
upload-time = "2025-01-17T09:25:55.499Z" }, ] [[package]] @@ -3795,9 +3392,9 @@ dependencies = [ { name = "executing" }, { name = "pure-eval" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707 } +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521 }, + { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, ] [[package]] @@ -3807,9 +3404,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mpmath" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921 } +sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash = "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353 }, + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, ] [[package]] @@ -3821,9 +3418,9 @@ dependencies = [ { name = "pywinpty", marker = "os_name == 'nt'" }, { name = "tornado" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8a/11/965c6fd8e5cc254f1fe142d547387da17a8ebfd75a3455f637c663fb38a0/terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e", size = 32701 } +sdist = { url = "https://files.pythonhosted.org/packages/8a/11/965c6fd8e5cc254f1fe142d547387da17a8ebfd75a3455f637c663fb38a0/terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e", size = 32701, upload-time = "2024-03-12T14:34:39.026Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154 }, + { url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = 
"sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154, upload-time = "2024-03-12T14:34:36.569Z" }, ] [[package]] @@ -3833,9 +3430,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nltk" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/a1/31fc6a5e9e46f2d84f72f12048588feac5464486e526dbfcc4719569cd3e/textblob-0.19.0.tar.gz", hash = "sha256:0a3d06a47cf7759441da3418c4843aed3797a998beba2108c6245a2020f83b01", size = 637872 } +sdist = { url = "https://files.pythonhosted.org/packages/63/a1/31fc6a5e9e46f2d84f72f12048588feac5464486e526dbfcc4719569cd3e/textblob-0.19.0.tar.gz", hash = "sha256:0a3d06a47cf7759441da3418c4843aed3797a998beba2108c6245a2020f83b01", size = 637872, upload-time = "2025-01-13T23:03:07.352Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/d6/40aa5aead775582ea0cf35870e5a3f16fab4b967f1ad2debe675f673f923/textblob-0.19.0-py3-none-any.whl", hash = "sha256:af6b8827886f1ee839a625f4865e5abb1584eae8db2259627b33a6a0b02ef19d", size = 624280 }, + { url = "https://files.pythonhosted.org/packages/1e/d6/40aa5aead775582ea0cf35870e5a3f16fab4b967f1ad2debe675f673f923/textblob-0.19.0-py3-none-any.whl", hash = "sha256:af6b8827886f1ee839a625f4865e5abb1584eae8db2259627b33a6a0b02ef19d", size = 624280, upload-time = "2025-01-13T23:03:01.034Z" }, ] [[package]] @@ -3856,32 +3453,24 @@ dependencies = [ { name = "srsly" }, { name = "wasabi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/3a/2d0f0be132b9faaa6d56f04565ae122684273e4bf4eab8dee5f48dc00f68/thinc-8.3.10.tar.gz", hash = "sha256:5a75109f4ee1c968fc055ce651a17cb44b23b000d9e95f04a4d047ab3cb3e34e", size = 194196 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/43/01b662540888140b5e9f76c957c7118c203cb91f17867ce78fc4f2d3800f/thinc-8.3.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72793e0bd3f0f391ca36ab0996b3c21db7045409bd3740840e7d6fcd9a044d81", size = 818632 }, - { url = "https://files.pythonhosted.org/packages/f0/ba/e0edcc84014bdde1bc9a082408279616a061566a82b5e3b90b9e64f33c1b/thinc-8.3.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b13311acb061e04e3a0c4bd677b85ec2971e3a3674558252443b5446e378256", size = 770622 }, - { url = "https://files.pythonhosted.org/packages/f3/51/0558f8cb69c13e1114428726a3fb36fe1adc5821a62ccd3fa7b7c1a5bd9a/thinc-8.3.10-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ffddcf311fb7c998eb8988d22c618dc0f33b26303853c0445edb8a69819ac60", size = 4094652 }, - { url = "https://files.pythonhosted.org/packages/a0/c9/bb78601f74f9bcadb2d3d4d5b057c4dc3f2e52d9771bad3d93a4e38a9dc1/thinc-8.3.10-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9b1e0511e8421f20abe4f22d8c8073a0d7ce4a31597cc7a404fdbad72bf38058", size = 4124379 }, - { url = "https://files.pythonhosted.org/packages/f6/3e/961e1b9794111c89f2ceadfef5692aba5097bec4aaaf89f1b8a04c5bc961/thinc-8.3.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e31e49441dfad8fd64b8ca5f5c9b8c33ee87a553bf79c830a15b4cd02efcc444", size = 5094221 }, - { url = "https://files.pythonhosted.org/packages/e5/de/da163a1533faaef5b17dd11dfb9ffd9fd5627dbef56e1160da6edbe1b224/thinc-8.3.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9de5dd73ce7135dcf41d68625d35cd9f5cf8e5f55a3932001a188b45057c3379", size = 5262834 }, - { url = "https://files.pythonhosted.org/packages/4c/4e/449d29e33f7ddda6ba1b9e06de3ea5155c2dc33c21f438f8faafebde4e13/thinc-8.3.10-cp311-cp311-win_amd64.whl", hash = 
"sha256:b6d64e390a1996d489872b9d99a584142542aba59ebdc60f941f473732582f6f", size = 1791864 }, - { url = "https://files.pythonhosted.org/packages/4a/b3/68038d88d45d83a501c3f19bd654d275b7ac730c807f52bbb46f35f591bc/thinc-8.3.10-cp311-cp311-win_arm64.whl", hash = "sha256:3991b6ad72e611dfbfb58235de5b67bcc9f61426127cc023607f97e8c5f43e0e", size = 1717563 }, - { url = "https://files.pythonhosted.org/packages/d3/34/ba3b386d92edf50784b60ee34318d47c7f49c198268746ef7851c5bbe8cf/thinc-8.3.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51bc6ef735bdbcab75ab2916731b8f61f94c66add6f9db213d900d3c6a244f95", size = 794509 }, - { url = "https://files.pythonhosted.org/packages/07/f3/9f52d18115cd9d8d7b2590d226cb2752d2a5ffec61576b19462b48410184/thinc-8.3.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4f48b4d346915f98e9722c0c50ef911cc16c6790a2b7afebc6e1a2c96a6ce6c6", size = 741084 }, - { url = "https://files.pythonhosted.org/packages/ad/9c/129c2b740c4e3d3624b6fb3dec1577ef27cb804bc1647f9bc3e1801ea20c/thinc-8.3.10-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5003f4db2db22cc8d686db8db83509acc3c50f4c55ebdcb2bbfcc1095096f7d2", size = 3846337 }, - { url = "https://files.pythonhosted.org/packages/22/d2/738cf188dea8240c2be081c83ea47270fea585eba446171757d2cdb9b675/thinc-8.3.10-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b12484c3ed0632331fada2c334680dd6bc35972d0717343432dfc701f04a9b4c", size = 3901216 }, - { url = "https://files.pythonhosted.org/packages/22/92/32f66eb9b1a29b797bf378a0874615d810d79eefca1d6c736c5ca3f8b918/thinc-8.3.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8677c446d3f9b97a465472c58683b785b25dfcf26c683e3f4e8f8c7c188e4362", size = 4827286 }, - { url = "https://files.pythonhosted.org/packages/c4/5f/7ceae1e1f2029efd67ed88e23cd6dc13a5ee647cdc2b35113101b2a62c10/thinc-8.3.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:759c385ac08dcf950238b60b96a28f9c04618861141766928dff4a51b1679b25", size = 5024421 }, - { url = "https://files.pythonhosted.org/packages/0b/66/30f9d8d41049b78bc614213d492792fbcfeb1b28642adf661c42110a7ebd/thinc-8.3.10-cp312-cp312-win_amd64.whl", hash = "sha256:bf3f188c3fa1fdcefd547d1f90a1245c29025d6d0e3f71d7fdf21dad210b990c", size = 1718631 }, - { url = "https://files.pythonhosted.org/packages/f8/44/32e2a5018a1165a304d25eb9b1c74e5310da19a533a35331e8d824dc6a88/thinc-8.3.10-cp312-cp312-win_arm64.whl", hash = "sha256:234b7e57a6ef4e0260d99f4e8fdc328ed12d0ba9bbd98fdaa567294a17700d1c", size = 1642224 }, - { url = "https://files.pythonhosted.org/packages/53/fc/17a2818d1f460b8c4f33b8bd3f21b19d263a647bfd23b572768d175e6b64/thinc-8.3.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c7c3a50ddd423d1c49419899acef4ac80d800af3b423593acb9e40578384b543", size = 789771 }, - { url = "https://files.pythonhosted.org/packages/8d/24/649f54774b1fbe791a1c2efd7d7f0a95cfd9244902553ca7dcf19daab1dd/thinc-8.3.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a1cb110398f51fc2b9a07a2a4daec6f91e166533a9c9f1c565225330f46569a", size = 737051 }, - { url = "https://files.pythonhosted.org/packages/b2/8c/5840c6c504c1fa9718e1c74d6e04d77a474f594888867dbba53f9317285f/thinc-8.3.10-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42318746a67403d04be57d862fe0c0015b58b6fb9bbbf7b6db01f3f103b73a99", size = 3839221 }, - { url = "https://files.pythonhosted.org/packages/45/ef/e7fca88074cb0aa1c1a23195470b4549492c2797fe7dc9ff79a85500153a/thinc-8.3.10-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", 
hash = "sha256:6b0e41e79973f8828adead770f885db8d0f199bfbaa9591d1d896c385842e993", size = 3885024 }, - { url = "https://files.pythonhosted.org/packages/9a/eb/805e277aa019896009028d727460f071c6cf83843d70f6a69e58994d2203/thinc-8.3.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9ed982daa1eddbad813bfd079546483b849a68b98c01ad4a7e4efd125ddc5d7b", size = 4815939 }, - { url = "https://files.pythonhosted.org/packages/4f/f5/6425f12a60e3782091c9ec16394b9239f0c18c52c70218f3c8c047ff985c/thinc-8.3.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d22bd381410749dec5f629b3162b7d1f1e2d9b7364fd49a7ea555b61c93772b9", size = 5020260 }, - { url = "https://files.pythonhosted.org/packages/85/a2/ae98feffe0b161400e87b7bfc8859e6fa1e6023fa7bcfa0a8cacd83b39a1/thinc-8.3.10-cp313-cp313-win_amd64.whl", hash = "sha256:9c32830446a57da13b6856cacb0225bc2f2104f279d9928d40500081c13aa9ec", size = 1717562 }, - { url = "https://files.pythonhosted.org/packages/b8/e0/faa1d04a6890ea33b9541727d2a3ca88bad794a89f73b9111af6f9aefe10/thinc-8.3.10-cp313-cp313-win_arm64.whl", hash = "sha256:aa43f9af76781d32f5f9fe29299204c8841d71e64cbb56e0e4f3d1e0387c2783", size = 1641536 }, +sdist = { url = "https://files.pythonhosted.org/packages/2f/3a/2d0f0be132b9faaa6d56f04565ae122684273e4bf4eab8dee5f48dc00f68/thinc-8.3.10.tar.gz", hash = "sha256:5a75109f4ee1c968fc055ce651a17cb44b23b000d9e95f04a4d047ab3cb3e34e", size = 194196, upload-time = "2025-11-17T17:21:46.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/43/01b662540888140b5e9f76c957c7118c203cb91f17867ce78fc4f2d3800f/thinc-8.3.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72793e0bd3f0f391ca36ab0996b3c21db7045409bd3740840e7d6fcd9a044d81", size = 818632, upload-time = "2025-11-17T17:20:49.123Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ba/e0edcc84014bdde1bc9a082408279616a061566a82b5e3b90b9e64f33c1b/thinc-8.3.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4b13311acb061e04e3a0c4bd677b85ec2971e3a3674558252443b5446e378256", size = 770622, upload-time = "2025-11-17T17:20:50.467Z" }, + { url = "https://files.pythonhosted.org/packages/f3/51/0558f8cb69c13e1114428726a3fb36fe1adc5821a62ccd3fa7b7c1a5bd9a/thinc-8.3.10-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9ffddcf311fb7c998eb8988d22c618dc0f33b26303853c0445edb8a69819ac60", size = 4094652, upload-time = "2025-11-17T17:20:52.104Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c9/bb78601f74f9bcadb2d3d4d5b057c4dc3f2e52d9771bad3d93a4e38a9dc1/thinc-8.3.10-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9b1e0511e8421f20abe4f22d8c8073a0d7ce4a31597cc7a404fdbad72bf38058", size = 4124379, upload-time = "2025-11-17T17:20:53.781Z" }, + { url = "https://files.pythonhosted.org/packages/f6/3e/961e1b9794111c89f2ceadfef5692aba5097bec4aaaf89f1b8a04c5bc961/thinc-8.3.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e31e49441dfad8fd64b8ca5f5c9b8c33ee87a553bf79c830a15b4cd02efcc444", size = 5094221, upload-time = "2025-11-17T17:20:55.466Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/da163a1533faaef5b17dd11dfb9ffd9fd5627dbef56e1160da6edbe1b224/thinc-8.3.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9de5dd73ce7135dcf41d68625d35cd9f5cf8e5f55a3932001a188b45057c3379", size = 5262834, upload-time = "2025-11-17T17:20:57.459Z" }, + { url = "https://files.pythonhosted.org/packages/4c/4e/449d29e33f7ddda6ba1b9e06de3ea5155c2dc33c21f438f8faafebde4e13/thinc-8.3.10-cp311-cp311-win_amd64.whl", hash = 
"sha256:b6d64e390a1996d489872b9d99a584142542aba59ebdc60f941f473732582f6f", size = 1791864, upload-time = "2025-11-17T17:20:59.817Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b3/68038d88d45d83a501c3f19bd654d275b7ac730c807f52bbb46f35f591bc/thinc-8.3.10-cp311-cp311-win_arm64.whl", hash = "sha256:3991b6ad72e611dfbfb58235de5b67bcc9f61426127cc023607f97e8c5f43e0e", size = 1717563, upload-time = "2025-11-17T17:21:01.634Z" }, + { url = "https://files.pythonhosted.org/packages/d3/34/ba3b386d92edf50784b60ee34318d47c7f49c198268746ef7851c5bbe8cf/thinc-8.3.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51bc6ef735bdbcab75ab2916731b8f61f94c66add6f9db213d900d3c6a244f95", size = 794509, upload-time = "2025-11-17T17:21:03.21Z" }, + { url = "https://files.pythonhosted.org/packages/07/f3/9f52d18115cd9d8d7b2590d226cb2752d2a5ffec61576b19462b48410184/thinc-8.3.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4f48b4d346915f98e9722c0c50ef911cc16c6790a2b7afebc6e1a2c96a6ce6c6", size = 741084, upload-time = "2025-11-17T17:21:04.568Z" }, + { url = "https://files.pythonhosted.org/packages/ad/9c/129c2b740c4e3d3624b6fb3dec1577ef27cb804bc1647f9bc3e1801ea20c/thinc-8.3.10-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5003f4db2db22cc8d686db8db83509acc3c50f4c55ebdcb2bbfcc1095096f7d2", size = 3846337, upload-time = "2025-11-17T17:21:06.079Z" }, + { url = "https://files.pythonhosted.org/packages/22/d2/738cf188dea8240c2be081c83ea47270fea585eba446171757d2cdb9b675/thinc-8.3.10-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b12484c3ed0632331fada2c334680dd6bc35972d0717343432dfc701f04a9b4c", size = 3901216, upload-time = "2025-11-17T17:21:07.842Z" }, + { url = "https://files.pythonhosted.org/packages/22/92/32f66eb9b1a29b797bf378a0874615d810d79eefca1d6c736c5ca3f8b918/thinc-8.3.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8677c446d3f9b97a465472c58683b785b25dfcf26c683e3f4e8f8c7c188e4362", size = 4827286, upload-time = "2025-11-17T17:21:09.62Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/7ceae1e1f2029efd67ed88e23cd6dc13a5ee647cdc2b35113101b2a62c10/thinc-8.3.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:759c385ac08dcf950238b60b96a28f9c04618861141766928dff4a51b1679b25", size = 5024421, upload-time = "2025-11-17T17:21:11.199Z" }, + { url = "https://files.pythonhosted.org/packages/0b/66/30f9d8d41049b78bc614213d492792fbcfeb1b28642adf661c42110a7ebd/thinc-8.3.10-cp312-cp312-win_amd64.whl", hash = "sha256:bf3f188c3fa1fdcefd547d1f90a1245c29025d6d0e3f71d7fdf21dad210b990c", size = 1718631, upload-time = "2025-11-17T17:21:12.965Z" }, + { url = "https://files.pythonhosted.org/packages/f8/44/32e2a5018a1165a304d25eb9b1c74e5310da19a533a35331e8d824dc6a88/thinc-8.3.10-cp312-cp312-win_arm64.whl", hash = "sha256:234b7e57a6ef4e0260d99f4e8fdc328ed12d0ba9bbd98fdaa567294a17700d1c", size = 1642224, upload-time = "2025-11-17T17:21:14.371Z" }, ] [[package]] @@ -3892,36 +3481,22 @@ dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565 }, - { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284 }, - { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201 }, - { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444 }, - { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080 }, - { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240 }, - { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422 }, - { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728 }, - { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049 }, - { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008 }, - { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665 }, - { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230 }, - { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688 }, - { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694 }, - { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802 }, - { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995 }, - { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948 }, - { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986 }, - { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222 }, - { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097 }, - { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117 }, - { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309 }, - { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712 }, - { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725 }, - { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875 }, - { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451 }, - { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794 }, - { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777 }, +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, ] [[package]] @@ -3931,9 +3506,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "webencodings" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085 } +sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610 }, + { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" }, ] [[package]] @@ -3943,50 +3518,50 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123 } +sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318 }, - { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478 }, - { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994 }, - { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141 }, - { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049 }, - { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730 }, - { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560 }, - { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221 }, - { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569 }, - { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599 }, - { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862 }, - { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250 }, - { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash 
= "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003 }, - { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684 }, + { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, + { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, + { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, + { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, + { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = 
"sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, + { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, ] [[package]] name = "tomlkit" version = "0.12.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2b/ab/18f4c8f2bec75eb1a7aebcc52cdb02ab04fd39ff7025bb1b1c7846cc45b8/tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c", size = 191420 } +sdist = { url = "https://files.pythonhosted.org/packages/2b/ab/18f4c8f2bec75eb1a7aebcc52cdb02ab04fd39ff7025bb1b1c7846cc45b8/tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c", size = 191420, upload-time = "2024-05-08T13:50:19.363Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/6d/b5406752c4e4ba86692b22fab0afed8b48f16bdde8f92e1d852976b61dc6/tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f", size = 37685 }, + { url = "https://files.pythonhosted.org/packages/73/6d/b5406752c4e4ba86692b22fab0afed8b48f16bdde8f92e1d852976b61dc6/tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f", size = 37685, upload-time = "2024-05-08T13:50:17.343Z" }, ] [[package]] name = "tornado" version = "6.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821 } +sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821, upload-time = "2025-08-08T18:27:00.78Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563 }, - { url = 
"https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729 }, - { url = "https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295 }, - { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644 }, - { url = "https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878 }, - { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549 }, - { url = "https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973 }, - { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954 }, - { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023 }, - { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427 }, - { url = "https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456 }, + { url = "https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563, upload-time = "2025-08-08T18:26:42.945Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729, upload-time = "2025-08-08T18:26:44.473Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295, upload-time = "2025-08-08T18:26:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644, upload-time = "2025-08-08T18:26:47.625Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878, upload-time = "2025-08-08T18:26:50.599Z" }, + { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549, upload-time = "2025-08-08T18:26:51.864Z" }, + { url = "https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973, upload-time = "2025-08-08T18:26:53.625Z" }, + { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954, upload-time = "2025-08-08T18:26:55.072Z" }, + { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023, upload-time = "2025-08-08T18:26:56.677Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427, upload-time = "2025-08-08T18:26:57.91Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456, upload-time = "2025-08-08T18:26:59.207Z" }, ] [[package]] @@ -3996,18 +3571,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = 
"2024-11-24T20:12:22.481Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, ] [[package]] name = "traitlets" version = "5.14.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621 } +sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359 }, + { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, ] [[package]] @@ -4020,36 +3595,36 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = "sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755 } +sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = "sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755, upload-time = "2025-09-23T09:47:48.256Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748 }, + { url = "https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748, upload-time = "2025-09-23T09:47:46.777Z" }, ] [[package]] name = "types-python-dateutil" version = "2.9.0.20251008" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/83/24ed25dd0c6277a1a170c180ad9eef5879ecc9a4745b58d7905a4588c80d/types_python_dateutil-2.9.0.20251008.tar.gz", hash = "sha256:c3826289c170c93ebd8360c3485311187df740166dbab9dd3b792e69f2bc1f9c", size = 16128 } +sdist = { url = "https://files.pythonhosted.org/packages/fc/83/24ed25dd0c6277a1a170c180ad9eef5879ecc9a4745b58d7905a4588c80d/types_python_dateutil-2.9.0.20251008.tar.gz", hash = 
"sha256:c3826289c170c93ebd8360c3485311187df740166dbab9dd3b792e69f2bc1f9c", size = 16128, upload-time = "2025-10-08T02:51:34.93Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/da/af/5d24b8d49ef358468ecfdff5c556adf37f4fd28e336b96f923661a808329/types_python_dateutil-2.9.0.20251008-py3-none-any.whl", hash = "sha256:b9a5232c8921cf7661b29c163ccc56055c418ab2c6eabe8f917cbcc73a4c4157", size = 17934 }, + { url = "https://files.pythonhosted.org/packages/da/af/5d24b8d49ef358468ecfdff5c556adf37f4fd28e336b96f923661a808329/types_python_dateutil-2.9.0.20251008-py3-none-any.whl", hash = "sha256:b9a5232c8921cf7661b29c163ccc56055c418ab2c6eabe8f917cbcc73a4c4157", size = 17934, upload-time = "2025-10-08T02:51:33.55Z" }, ] [[package]] name = "types-pytz" version = "2025.2.0.20250809" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/07/e2/c774f754de26848f53f05defff5bb21dd9375a059d1ba5b5ea943cf8206e/types_pytz-2025.2.0.20250809.tar.gz", hash = "sha256:222e32e6a29bb28871f8834e8785e3801f2dc4441c715cd2082b271eecbe21e5", size = 10876 } +sdist = { url = "https://files.pythonhosted.org/packages/07/e2/c774f754de26848f53f05defff5bb21dd9375a059d1ba5b5ea943cf8206e/types_pytz-2025.2.0.20250809.tar.gz", hash = "sha256:222e32e6a29bb28871f8834e8785e3801f2dc4441c715cd2082b271eecbe21e5", size = 10876, upload-time = "2025-08-09T03:14:17.453Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/d0/91c24fe54e565f2344d7a6821e6c6bb099841ef09007ea6321a0bac0f808/types_pytz-2025.2.0.20250809-py3-none-any.whl", hash = "sha256:4f55ed1b43e925cf851a756fe1707e0f5deeb1976e15bf844bcaa025e8fbd0db", size = 10095 }, + { url = "https://files.pythonhosted.org/packages/db/d0/91c24fe54e565f2344d7a6821e6c6bb099841ef09007ea6321a0bac0f808/types_pytz-2025.2.0.20250809-py3-none-any.whl", hash = "sha256:4f55ed1b43e925cf851a756fe1707e0f5deeb1976e15bf844bcaa025e8fbd0db", size = 10095, upload-time = "2025-08-09T03:14:16.674Z" }, ] [[package]] name = "typing-extensions" version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] @@ -4059,18 +3634,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = 
"sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949 } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611 }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] name = "tzdata" version = "2025.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 }, + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, ] [[package]] @@ -4080,27 +3655,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "tomlkit" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/89/fa/4238c64177886003312a61a5deda93876a1cb023c6efe1ee204573209ad1/update_toml-0.2.1.tar.gz", hash = "sha256:92870b2ef8591eeffa32df674d9b4c4fce59a428f65063e138dee253bdb5d372", size = 3095 } +sdist = { url = "https://files.pythonhosted.org/packages/89/fa/4238c64177886003312a61a5deda93876a1cb023c6efe1ee204573209ad1/update_toml-0.2.1.tar.gz", hash = "sha256:92870b2ef8591eeffa32df674d9b4c4fce59a428f65063e138dee253bdb5d372", size = 3095, upload-time = "2024-05-15T23:49:00.323Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/73/94/d4dd5609d95c83d85b18b79121dd834a86a2e1b57d63d9b3d5388143c4cb/update_toml-0.2.1-py3-none-any.whl", hash = "sha256:90d5d9d2efbe2f273328ec78394912c33c0f741dc3b0ae744cfc4ddbe27051f7", size = 4755 }, + { url = "https://files.pythonhosted.org/packages/73/94/d4dd5609d95c83d85b18b79121dd834a86a2e1b57d63d9b3d5388143c4cb/update_toml-0.2.1-py3-none-any.whl", hash = "sha256:90d5d9d2efbe2f273328ec78394912c33c0f741dc3b0ae744cfc4ddbe27051f7", size = 4755, upload-time = "2024-05-15T23:48:58.817Z" }, ] [[package]] name = "uri-template" version = "1.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/31/c7/0336f2bd0bcbada6ccef7aaa25e443c118a704f828a0620c6fa0207c1b64/uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7", size = 21678 } +sdist = { url = "https://files.pythonhosted.org/packages/31/c7/0336f2bd0bcbada6ccef7aaa25e443c118a704f828a0620c6fa0207c1b64/uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7", size = 21678, upload-time = "2023-06-21T01:49:05.374Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140 }, + { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140, upload-time = "2023-06-21T01:49:03.467Z" }, ] [[package]] name = "urllib3" version = "2.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185 } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795 }, + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] [[package]] @@ -4110,45 +3685,42 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/f9/054e6e2f1071e963b5e746b48d1e3727470b2a490834d18ad92364929db3/wasabi-1.1.3.tar.gz", hash = "sha256:4bb3008f003809db0c3e28b4daf20906ea871a2bb43f9914197d540f4f2e0878", size = 30391 } +sdist = { url = "https://files.pythonhosted.org/packages/ac/f9/054e6e2f1071e963b5e746b48d1e3727470b2a490834d18ad92364929db3/wasabi-1.1.3.tar.gz", hash = "sha256:4bb3008f003809db0c3e28b4daf20906ea871a2bb43f9914197d540f4f2e0878", size = 30391, upload-time = "2024-05-31T16:56:18.99Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/7c/34330a89da55610daa5f245ddce5aab81244321101614751e7537f125133/wasabi-1.1.3-py3-none-any.whl", hash = "sha256:f76e16e8f7e79f8c4c8be49b4024ac725713ab10cd7f19350ad18a8e3f71728c", size = 27880 }, + { url = "https://files.pythonhosted.org/packages/06/7c/34330a89da55610daa5f245ddce5aab81244321101614751e7537f125133/wasabi-1.1.3-py3-none-any.whl", hash = "sha256:f76e16e8f7e79f8c4c8be49b4024ac725713ab10cd7f19350ad18a8e3f71728c", size = 27880, upload-time = "2024-05-31T16:56:16.699Z" }, ] [[package]] name = "watchdog" version 
= "6.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393 }, - { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392 }, - { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019 }, - { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471 }, - { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449 }, - { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054 }, - { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480 }, - { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451 }, - { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057 }, - { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079 }, - { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076 }, - { url = 
"https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077 }, - { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077 }, - { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065 }, - { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070 }, - { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067 }, +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, + { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, + { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, + { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, + { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, + { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] [[package]] name = "wcwidth" version = "0.2.14" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293 } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286 }, + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, ] [[package]] @@ -4166,84 +3738,74 @@ dependencies = [ { name = "typer" }, { name = "wasabi" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/1a/9c522dd61b52939c217925d3e55c95f9348b73a66a956f52608e1e59a2c0/weasel-0.4.1.tar.gz", hash = "sha256:aabc210f072e13f6744e5c3a28037f93702433405cd35673f7c6279147085aa9", size = 38417 } +sdist = { url = "https://files.pythonhosted.org/packages/a7/1a/9c522dd61b52939c217925d3e55c95f9348b73a66a956f52608e1e59a2c0/weasel-0.4.1.tar.gz", hash = "sha256:aabc210f072e13f6744e5c3a28037f93702433405cd35673f7c6279147085aa9", size = 38417, upload-time = "2024-05-15T08:52:54.765Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/87/abd57374044e1f627f0a905ac33c1a7daab35a3a815abfea4e1bafd3fdb1/weasel-0.4.1-py3-none-any.whl", hash = "sha256:24140a090ea1ac512a2b2f479cc64192fd1d527a7f3627671268d08ed5ac418c", size = 50270 }, + { url = "https://files.pythonhosted.org/packages/2a/87/abd57374044e1f627f0a905ac33c1a7daab35a3a815abfea4e1bafd3fdb1/weasel-0.4.1-py3-none-any.whl", hash = "sha256:24140a090ea1ac512a2b2f479cc64192fd1d527a7f3627671268d08ed5ac418c", size = 50270, upload-time = "2024-05-15T08:52:52.977Z" }, ] [[package]] name = "webcolors" version = "24.11.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7b/29/061ec845fb58521848f3739e466efd8250b4b7b98c1b6c5bf4d40b419b7e/webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6", size = 45064 } +sdist = { url = "https://files.pythonhosted.org/packages/7b/29/061ec845fb58521848f3739e466efd8250b4b7b98c1b6c5bf4d40b419b7e/webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6", size = 45064, upload-time = "2024-11-11T07:43:24.224Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/e8/c0e05e4684d13459f93d312077a9a2efbe04d59c393bc2b8802248c908d4/webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9", size = 14934 }, + { url = "https://files.pythonhosted.org/packages/60/e8/c0e05e4684d13459f93d312077a9a2efbe04d59c393bc2b8802248c908d4/webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9", size = 14934, upload-time = "2024-11-11T07:43:22.529Z" }, ] [[package]] name = "webencodings" 
version = "0.5.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721 } +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774 }, + { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, ] [[package]] name = "websocket-client" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576 } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616 }, + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, ] [[package]] name = "widgetsnbextension" version = "4.0.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/41/53/2e0253c5efd69c9656b1843892052a31c36d37ad42812b5da45c62191f7e/widgetsnbextension-4.0.14.tar.gz", hash = "sha256:a3629b04e3edb893212df862038c7232f62973373869db5084aed739b437b5af", size = 1097428 } +sdist = { url = "https://files.pythonhosted.org/packages/41/53/2e0253c5efd69c9656b1843892052a31c36d37ad42812b5da45c62191f7e/widgetsnbextension-4.0.14.tar.gz", hash = "sha256:a3629b04e3edb893212df862038c7232f62973373869db5084aed739b437b5af", size = 1097428, upload-time = "2025-04-10T13:01:25.628Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/51/5447876806d1088a0f8f71e16542bf350918128d0a69437df26047c8e46f/widgetsnbextension-4.0.14-py3-none-any.whl", hash = "sha256:4875a9eaf72fbf5079dc372a51a9f268fc38d46f767cbf85c43a36da5cb9b575", size = 2196503 }, + { url = "https://files.pythonhosted.org/packages/ca/51/5447876806d1088a0f8f71e16542bf350918128d0a69437df26047c8e46f/widgetsnbextension-4.0.14-py3-none-any.whl", hash = 
"sha256:4875a9eaf72fbf5079dc372a51a9f268fc38d46f767cbf85c43a36da5cb9b575", size = 2196503, upload-time = "2025-04-10T13:01:23.086Z" }, ] [[package]] name = "wrapt" version = "1.17.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482 }, - { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674 }, - { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959 }, - { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376 }, - { url = "https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604 }, - { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782 }, - { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076 }, - { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457 }, - { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745 }, - { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806 }, - { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998 }, - { url = 
"https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020 }, - { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098 }, - { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036 }, - { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156 }, - { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102 }, - { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732 }, - { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705 }, - { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877 }, - { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885 }, - { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003 }, - { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025 }, - { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108 }, - { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 
88072 }, - { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214 }, - { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105 }, - { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766 }, - { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711 }, - { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885 }, - { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896 }, - { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591 }, +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/db/00e2a219213856074a213503fdac0511203dceefff26e1daa15250cc01a0/wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7", size = 53482, upload-time = "2025-08-12T05:51:45.79Z" }, + { url = "https://files.pythonhosted.org/packages/5e/30/ca3c4a5eba478408572096fe9ce36e6e915994dd26a4e9e98b4f729c06d9/wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85", size = 38674, upload-time = "2025-08-12T05:51:34.629Z" }, + { url = "https://files.pythonhosted.org/packages/31/25/3e8cc2c46b5329c5957cec959cb76a10718e1a513309c31399a4dad07eb3/wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f", size = 38959, upload-time = "2025-08-12T05:51:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8f/a32a99fc03e4b37e31b57cb9cefc65050ea08147a8ce12f288616b05ef54/wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311", size = 82376, upload-time = "2025-08-12T05:52:32.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/57/4930cb8d9d70d59c27ee1332a318c20291749b4fba31f113c2f8ac49a72e/wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1", size = 83604, upload-time = "2025-08-12T05:52:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/a8/f3/1afd48de81d63dd66e01b263a6fbb86e1b5053b419b9b33d13e1f6d0f7d0/wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5", size = 82782, upload-time = "2025-08-12T05:52:12.626Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d7/4ad5327612173b144998232f98a85bb24b60c352afb73bc48e3e0d2bdc4e/wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2", size = 82076, upload-time = "2025-08-12T05:52:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/bb/59/e0adfc831674a65694f18ea6dc821f9fcb9ec82c2ce7e3d73a88ba2e8718/wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89", size = 36457, upload-time = "2025-08-12T05:53:03.936Z" }, + { url = "https://files.pythonhosted.org/packages/83/88/16b7231ba49861b6f75fc309b11012ede4d6b0a9c90969d9e0db8d991aeb/wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77", size = 38745, upload-time = "2025-08-12T05:53:02.885Z" }, + { url = "https://files.pythonhosted.org/packages/9a/1e/c4d4f3398ec073012c51d1c8d87f715f56765444e1a4b11e5180577b7e6e/wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a", size = 36806, upload-time = "2025-08-12T05:52:53.368Z" }, + { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" }, + { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, ] [[package]] @@ -4255,80 +3817,48 @@ dependencies = [ { name = "multidict" }, { name = "propcache" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607 }, - { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027 }, - { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963 }, - { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406 }, - { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581 }, - { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924 }, - { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890 }, - { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819 }, - { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601 }, - { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072 }, - { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311 }, - { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094 }, - { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944 }, - { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804 }, - { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858 }, - { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637 }, - { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000 }, - { url = 
"https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338 }, - { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909 }, - { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940 }, - { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825 }, - { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705 }, - { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518 }, - { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267 }, - { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797 }, - { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535 }, - { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324 }, - { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803 }, - { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220 }, - { url = 
"https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589 }, - { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213 }, - { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330 }, - { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980 }, - { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424 }, - { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821 }, - { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243 }, - { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361 }, - { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036 }, - { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671 }, - { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059 }, - { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356 }, - { url = 
"https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331 }, - { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590 }, - { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316 }, - { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431 }, - { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555 }, - { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965 }, - { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205 }, - { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209 }, - { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966 }, - { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312 }, - { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967 }, - { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949 }, - { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818 }, - { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626 }, - { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129 }, - { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776 }, - { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879 }, - { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996 }, - { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047 }, - { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947 }, - { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943 }, - { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715 }, - { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857 }, - { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814 }, +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, + { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, + { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, + { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, + { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, + { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, + { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, + { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, ] [[package]] name = "zipp" version = "3.23.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547 } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276 }, + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, ]