diff --git a/.github/workflows/lbox-develop.yml b/.github/workflows/lbox-develop.yml index 309ea2969..c0f1773b3 100644 --- a/.github/workflows/lbox-develop.yml +++ b/.github/workflows/lbox-develop.yml @@ -49,7 +49,6 @@ jobs: steps: - uses: actions/checkout@v4 with: - token: ${{ secrets.ACTIONS_ACCESS_TOKEN }} ref: ${{ github.head_ref }} - uses: ./.github/actions/python-package-shared-setup with: @@ -163,4 +162,4 @@ jobs: - name: Build and push (Pull Request) Output if: github.event_name == 'pull_request' run: | - echo "ghcr.io/labelbox/${{ matrix.package }}:${{ github.sha }}" >> "$GITHUB_STEP_SUMMARY" \ No newline at end of file + echo "ghcr.io/labelbox/${{ matrix.package }}:${{ github.sha }}" >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/lbox-publish.yml b/.github/workflows/lbox-publish.yml index 031207155..dcca8e561 100644 --- a/.github/workflows/lbox-publish.yml +++ b/.github/workflows/lbox-publish.yml @@ -104,8 +104,6 @@ jobs: steps: - uses: actions/checkout@v4 with: - token: ${{ secrets.ACTIONS_ACCESS_TOKEN }} - # ref: ${{ inputs.tag }} ref: ${{ inputs.tag }} - uses: ./.github/actions/python-package-shared-setup with: @@ -190,4 +188,4 @@ jobs: id: image run: | echo "ghcr.io/labelbox/${{ matrix.package }}:latest" >> "$GITHUB_STEP_SUMMARY" - echo "ghcr.io/labelbox/${{ matrix.package }}:${{ inputs.tag }}" >> "$GITHUB_STEP_SUMMARY" \ No newline at end of file + echo "ghcr.io/labelbox/${{ matrix.package }}:${{ inputs.tag }}" >> "$GITHUB_STEP_SUMMARY" diff --git a/libs/labelbox/pyproject.toml b/libs/labelbox/pyproject.toml index 3ec33bf17..4e5ae1d0a 100644 --- a/libs/labelbox/pyproject.toml +++ b/libs/labelbox/pyproject.toml @@ -12,6 +12,7 @@ dependencies = [ "tqdm>=4.66.2", "geojson>=3.1.0", "lbox-clients==1.1.2", + "PyYAML>=6.0" ] readme = "README.md" requires-python = ">=3.9,<3.14" @@ -55,6 +56,9 @@ data = [ "typing-extensions>=4.10.0", "opencv-python-headless>=4.9.0.80", ] +alignerr = [ + "lbox-alignerr>=0.1.0", +] [build-system] requires = ["hatchling"] @@ -68,6 +72,7 @@ dev-dependencies = [ "types-python-dateutil>=2.9.0.20240316", "types-requests>=2.31.0.20240311", "types-tqdm>=4.66.0.20240106", + "types-PyYAML>=6.0.12.20240311", ] [tool.ruff] diff --git a/libs/labelbox/src/labelbox/__init__.py b/libs/labelbox/src/labelbox/__init__.py index b543894f1..24920786e 100644 --- a/libs/labelbox/src/labelbox/__init__.py +++ b/libs/labelbox/src/labelbox/__init__.py @@ -98,7 +98,6 @@ ResponseOption, PromptResponseClassification, ) -from lbox.exceptions import * from labelbox.schema.taskstatus import TaskStatus from labelbox.schema.api_key import ApiKey from labelbox.schema.timeunit import TimeUnit diff --git a/libs/labelbox/src/labelbox/schema/role.py b/libs/labelbox/src/labelbox/schema/role.py index d22e2a78e..0367d8f0c 100644 --- a/libs/labelbox/src/labelbox/schema/role.py +++ b/libs/labelbox/src/labelbox/schema/role.py @@ -29,6 +29,11 @@ def format_role(name: str): class Role(DbObject): name = Field.String("name") + @classmethod + def from_name(cls, client: "Client", name: str) -> Optional["Role"]: + roles = get_roles(client) + return roles.get(name.upper()) + class OrgRole(Role): ... diff --git a/libs/lbox-alignerr/README.md b/libs/lbox-alignerr/README.md new file mode 100644 index 000000000..3dc34b809 --- /dev/null +++ b/libs/lbox-alignerr/README.md @@ -0,0 +1,9 @@ +# Alignerr + +Alignerr workspace management for Labelbox. 
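For orientation, a minimal sketch of the builder flow this PR introduces is shown below. The classes and methods are the ones added in `alignerr_project.py`, `alignerr_project_builder.py`, and `schema/project_rate.py`; the API key, owner e-mail, and the rate/date values are placeholders (they mirror the figures used in `tests/assets/test_project_comprehensive.yaml`), so treat this as an assumed usage pattern rather than a canonical example.

```python
import datetime

from labelbox import Client, MediaType

from alignerr import AlignerrWorkspace
from alignerr.alignerr_project import AlignerrRole
from alignerr.schema.project_rate import BillingMode

# Placeholder credentials -- substitute a real Labelbox API key.
client = Client(api_key="<YOUR_API_KEY>")
workspace = AlignerrWorkspace.from_labelbox(client)

# The builder enforces that Labeler and Reviewer rates, a customer rate,
# and a project owner are all set before create() succeeds (see
# AlignerrProjectBuilder._validate in this diff).
builder = (
    workspace.project_builder()
    .set_name("Example Alignerr Project")
    .set_media_type(MediaType.Image)
    .set_alignerr_role_rate(
        role_name=AlignerrRole.Labeler,
        rate=15.0,
        billing_mode=BillingMode.BY_HOUR,
        effective_since=datetime.datetime(2024, 1, 1),
    )
    .set_alignerr_role_rate(
        role_name=AlignerrRole.Reviewer,
        rate=20.0,
        billing_mode=BillingMode.BY_HOUR,
        effective_since=datetime.datetime(2024, 1, 1),
    )
    .set_customer_rate(
        rate=25.0,
        billing_mode=BillingMode.BY_HOUR,
        effective_since=datetime.datetime(2024, 1, 1),
    )
    .set_project_owner("owner@example.com")  # hypothetical owner e-mail
)

project = builder.create()
print(project.get_project_rates())
```

The YAML-driven path exposed as `workspace.project_prototype()` (see `alignerr_project_factory.py` later in this diff) wraps these same builder calls, reading the equivalent configuration from a file.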
+ +This package provides functionality for managing Alignerr projects, including: +- Project creation and configuration +- Rate management for labelers and reviewers +- Domain and tag management +- Workforce management diff --git a/libs/lbox-alignerr/pyproject.toml b/libs/lbox-alignerr/pyproject.toml new file mode 100644 index 000000000..0d25f4ab8 --- /dev/null +++ b/libs/lbox-alignerr/pyproject.toml @@ -0,0 +1,73 @@ +[project] +name = "lbox-alignerr" +version = "0.1.0" +description = "Alignerr workspace management for Labelbox" +authors = [ + { name = "Labelbox", email = "engineering@labelbox.com" } +] +dependencies = [ + "labelbox>=0.1.0", + "pydantic>=2.0.0", + "pyyaml>=6.0", +] +readme = "README.md" +requires-python = ">= 3.9" + +classifiers=[ + # How mature is this project? + "Development Status :: 2 - Pre-Alpha", + # Indicate who your project is intended for + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Software Development :: Libraries", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + # Pick your license as you wish + "License :: OSI Approved :: Apache Software License", + # Specify the Python versions you support here. + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] +keywords = ["ml", "ai", "labelbox", "labeling", "llm", "machinelearning", "alignerr", "lbox-alignerr"] + +[project.urls] +Homepage = "https://labelbox.com/" +Documentation = "https://labelbox-python.readthedocs.io/en/latest/" +Repository = "https://github.com/Labelbox/labelbox-python" +Issues = "https://github.com/Labelbox/labelbox-python/issues" +Changelog = "https://github.com/Labelbox/labelbox-python/blob/develop/libs/labelbox/CHANGELOG.md" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.rye] +managed = true +dev-dependencies = [ + "pytest>=8.1.1", + "pytest-cases>=3.8.4", + "pytest-rerunfailures>=14.0", + "pytest-snapshot>=0.9.0", + "pytest-cov>=4.1.0", + "pytest-xdist>=3.5.0", + "faker>=25.5.0", + "pytest-timestamper>=0.0.10", + "pytest-timeout>=2.3.1", + "pytest-order>=1.2.1", + "pyjwt>=2.9.0", +] + +[tool.rye.scripts] +unit = "pytest tests/unit" +integration = "pytest tests/integration" + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.hatch.build.targets.wheel] +packages = ["src/alignerr"] + +[tool.pytest.ini_options] +addopts = "-rP -vvv --durations=20 --cov=alignerr --import-mode=importlib" diff --git a/libs/lbox-alignerr/src/alignerr/__init__.py b/libs/lbox-alignerr/src/alignerr/__init__.py new file mode 100644 index 000000000..eecedade9 --- /dev/null +++ b/libs/lbox-alignerr/src/alignerr/__init__.py @@ -0,0 +1,3 @@ +from alignerr.alignerr_project import AlignerrWorkspace + +__all__ = ["AlignerrWorkspace"] diff --git a/libs/lbox-alignerr/src/alignerr/alignerr_project.py b/libs/lbox-alignerr/src/alignerr/alignerr_project.py new file mode 100644 index 000000000..cc2460f87 --- /dev/null +++ b/libs/lbox-alignerr/src/alignerr/alignerr_project.py @@ -0,0 +1,177 @@ +from enum import Enum +from typing import TYPE_CHECKING, Optional + +import logging + +from alignerr.schema.project_rate import ProjectRateV2 +from alignerr.schema.project_domain import ProjectDomain +from alignerr.schema.enchanced_resource_tags import ( + EnhancedResourceTag, + ResourceTagType, +) +from alignerr.schema.project_boost_workforce import ( + 
ProjectBoostWorkforce, +) +from labelbox.pagination import PaginatedCollection + +logger = logging.getLogger(__name__) + + +if TYPE_CHECKING: + from labelbox import Client + from labelbox.schema.project import Project + from alignerr.schema.project_domain import ProjectDomain + + +class AlignerrRole(Enum): + Labeler = "LABELER" + Reviewer = "REVIEWER" + Admin = "ADMIN" + ProjectCoordinator = "PROJECT_COORDINATOR" + AlignerrLabeler = "ALIGNERR_LABELER" + EndLabellingRole = "ENDLABELLINGROLE" + + +class AlignerrProject: + def __init__(self, client: "Client", project: "Project", _internal: bool = False): + if not _internal: + raise RuntimeError( + "AlignerrProject cannot be initialized directly. " + "Use AlignerrProjectBuilder or AlignerrProjectFactory to create instances." + ) + self.client = client + self.project = project + + @property + def project(self) -> "Project": + return self._project + + @project.setter + def project(self, project: "Project"): + self._project = project + + def domains(self) -> PaginatedCollection: + return ProjectDomain.get_by_project_id( + client=self.client, project_id=self.project.uid + ) + + def add_domain(self, project_domain: ProjectDomain): + return ProjectDomain.connect_project_to_domains( + client=self.client, + project_id=self.project.uid, + domain_ids=[project_domain.uid], + ) + + def get_project_rates(self) -> list["ProjectRateV2"]: + return ProjectRateV2.get_by_project_id( + client=self.client, project_id=self.project.uid + ) + + def set_project_rate(self, project_rate_input): + return ProjectRateV2.set_project_rate( + client=self.client, + project_id=self.project.uid, + project_rate_input=project_rate_input, + ) + + def set_tags(self, tag_names: list[str], tag_type: ResourceTagType): + # Convert tag names to tag IDs + tag_ids = [] + for tag_name in tag_names: + # Search for the tag by text to get its ID + found_tags = EnhancedResourceTag.search_by_text( + self.client, search_text=tag_name, tag_type=tag_type + ) + if found_tags: + tag_ids.append(found_tags[0].id) + + # Use the existing project resource tag functionality with IDs + self.project.update_project_resource_tags(tag_ids) + return self + + def get_tags(self) -> list[EnhancedResourceTag]: + """Get enhanced resource tags associated with this project. + + Returns: + List of EnhancedResourceTag instances + """ + # Get project resource tags and convert to EnhancedResourceTag instances + project_resource_tags = self.project.get_resource_tags() + enhanced_tags = [] + for tag in project_resource_tags: + # Search for the corresponding EnhancedResourceTag by text (try different types) + found_tags = [] + for tag_type in [ResourceTagType.Default, ResourceTagType.Billing]: + found_tags = EnhancedResourceTag.search_by_text( + self.client, search_text=tag.text, tag_type=tag_type + ) + if found_tags: + break + if found_tags: + enhanced_tags.extend(found_tags) + return enhanced_tags + + def add_tag(self, tag: EnhancedResourceTag): + """Add a single enhanced resource tag to the project. + + Args: + tag: EnhancedResourceTag instance to add + + Returns: + Self for method chaining + """ + current_tags = self.get_tags() + current_tag_names = [t.text for t in current_tags] + + if tag.text not in current_tag_names: + current_tag_names.append(tag.text) + self.set_tags(current_tag_names, tag.type) + + return self + + def remove_tag(self, tag: EnhancedResourceTag): + """Remove a single enhanced resource tag from the project. 
+ + Args: + tag: EnhancedResourceTag instance to remove + + Returns: + Self for method chaining + """ + current_tags = self.get_tags() + current_tag_names = [t.text for t in current_tags if t.uid != tag.uid] + self.set_tags(current_tag_names, tag.type) + return self + + def get_project_owner(self) -> Optional[ProjectBoostWorkforce]: + """Get the ProjectBoostWorkforce for this project. + + Returns: + ProjectBoostWorkforce instance or None if not found + """ + return ProjectBoostWorkforce.get_by_project_id( + client=self.client, project_id=self.project.uid + ) + + +class AlignerrWorkspace: + def __init__(self, client: "Client"): + self.client = client + + def project_builder(self): + from alignerr.alignerr_project_builder import ( + AlignerrProjectBuilder, + ) + + return AlignerrProjectBuilder(self.client) + + def project_prototype(self): + from alignerr.alignerr_project_factory import ( + AlignerrProjectFactory, + ) + + return AlignerrProjectFactory(self.client) + + @classmethod + def from_labelbox(cls, client: "Client") -> "AlignerrWorkspace": + return cls(client) diff --git a/libs/lbox-alignerr/src/alignerr/alignerr_project_builder.py b/libs/lbox-alignerr/src/alignerr/alignerr_project_builder.py new file mode 100644 index 000000000..1edf00d31 --- /dev/null +++ b/libs/lbox-alignerr/src/alignerr/alignerr_project_builder.py @@ -0,0 +1,327 @@ +import datetime +from enum import Enum +from typing import TYPE_CHECKING, Optional, Union, List +import logging + +from alignerr.schema.project_rate import BillingMode +from alignerr.schema.project_rate import ProjectRateInput +from alignerr.schema.project_domain import ProjectDomain +from alignerr.schema.enchanced_resource_tags import ( + EnhancedResourceTag, + ResourceTagType, +) +from alignerr.schema.project_boost_workforce import ( + ProjectBoostWorkforce, +) +from labelbox.schema.media_type import MediaType + +logger = logging.getLogger(__name__) + + +class ValidationType(Enum): + """Enum for validation types that can be selectively skipped.""" + + ALIGNERR_RATE = "AlignerrRate" + CUSTOMER_RATE = "CustomerRate" + PROJECT_OWNER = "ProjectOwner" + + +if TYPE_CHECKING: + from labelbox import Client + from alignerr.alignerr_project import AlignerrProject, AlignerrRole + + +class AlignerrProjectBuilder: + def __init__(self, client: "Client"): + self.client = client + self._alignerr_rates: dict[str, ProjectRateInput] = {} + self._customer_rate: Optional[ProjectRateInput] = None + self._domains: list[ProjectDomain] = [] + self._enhanced_resource_tags: list[EnhancedResourceTag] = [] + self._project_owner_email: Optional[str] = None + self.role_name_to_id = self._get_role_name_to_id() + + def set_name(self, name: str): + self.project_name = name + return self + + def set_media_type(self, media_type: "MediaType"): + self.project_media_type = media_type + return self + + def set_alignerr_role_rate( + self, + *, + role_name: "AlignerrRole", + rate: float, + billing_mode: BillingMode, + effective_since: datetime.datetime, + effective_until: Optional[datetime.datetime] = None, + ): + if role_name.value not in self.role_name_to_id: + raise ValueError(f"Role {role_name.value} not found") + + role_id = self.role_name_to_id[role_name.value] + role_name_str = role_name.value + + # Convert datetime objects to ISO format strings + effective_since_str = ( + effective_since.isoformat() + if isinstance(effective_since, datetime.datetime) + else effective_since + ) + effective_until_str = ( + effective_until.isoformat() + if isinstance(effective_until, datetime.datetime) + 
else effective_until + ) + + self._alignerr_rates[role_name_str] = ProjectRateInput( + rateForId=role_id, + isBillRate=False, + billingMode=billing_mode, + rate=rate, + effectiveSince=effective_since_str, + effectiveUntil=effective_until_str, + ) + return self + + def set_customer_rate( + self, + *, + rate: float, + billing_mode: BillingMode, + effective_since: datetime.datetime, + effective_until: Optional[datetime.datetime] = None, + ): + # Convert datetime objects to ISO format strings + effective_since_str = ( + effective_since.isoformat() + if isinstance(effective_since, datetime.datetime) + else effective_since + ) + effective_until_str = ( + effective_until.isoformat() + if isinstance(effective_until, datetime.datetime) + else effective_until + ) + + self._customer_rate = ProjectRateInput( + rateForId="", # Empty string for customer rate + isBillRate=True, + billingMode=billing_mode, + rate=rate, + effectiveSince=effective_since_str, + effectiveUntil=effective_until_str, + ) + return self + + def set_domains(self, domains: list[str]): + for domain in domains: + project_domain_page = ProjectDomain.search( + self.client, search_by_name=domain + ) + domain_result = project_domain_page.get_one() + if domain_result is None: + raise ValueError(f"Domain {domain} not found") + self._domains.append(domain_result) + return self + + def set_tags(self, tag_texts: list[str], tag_type: ResourceTagType): + """Set enhanced resource tags for the project. + + Args: + tag_texts: List of tag text values to search for and attach + tag_type: Type filter for searching tags + + Returns: + Self for method chaining + """ + for tag_text in tag_texts: + # Search for existing tags by text + existing_tags = EnhancedResourceTag.search_by_text( + self.client, search_text=tag_text, tag_type=tag_type + ) + + if existing_tags: + # Use the first matching tag + self._enhanced_resource_tags.append(existing_tags[0]) + else: + # Create new tag if not found + new_tag = EnhancedResourceTag.create( + self.client, + text=tag_text, + color="#007bff", # Default blue color + tag_type=tag_type, + ) + self._enhanced_resource_tags.append(new_tag) + return self + + def set_project_owner(self, project_owner_email: str): + """Set the project owner for the ProjectBoostWorkforce. 
+ + Args: + project_owner_email: Email of the user to set as project owner + + Returns: + Self for method chaining + """ + self._project_owner_email = project_owner_email + return self + + def create(self, skip_validation: Union[bool, List[ValidationType]] = False): + if not skip_validation: + self._validate() + elif isinstance(skip_validation, list): + self._validate_selective(skip_validation) + logger.info("Creating project") + + project_data = { + "name": self.project_name, + "media_type": self.project_media_type, + } + labelbox_project = self.client.create_project(**project_data) + + # Import here to avoid circular imports + from alignerr.alignerr_project import AlignerrProject + + alignerr_project = AlignerrProject( + self.client, labelbox_project, _internal=True + ) + + self._create_rates(alignerr_project) + self._create_domains(alignerr_project) + self._create_resource_tags(alignerr_project) + self._create_project_owner(alignerr_project) + + return alignerr_project + + def _create_rates(self, alignerr_project: "AlignerrProject"): + for alignerr_role, project_rate in self._alignerr_rates.items(): + logger.info(f"Setting project rate for {alignerr_role}") + alignerr_project.set_project_rate(project_rate) + + def _create_domains(self, alignerr_project: "AlignerrProject"): + if self._domains: + logger.info(f"Setting domains: {[domain.name for domain in self._domains]}") + domain_ids = [domain.uid for domain in self._domains] + ProjectDomain.connect_project_to_domains( + client=self.client, + project_id=alignerr_project.project.uid, + domain_ids=domain_ids, + ) + + def _create_resource_tags(self, alignerr_project: "AlignerrProject"): + if self._enhanced_resource_tags: + logger.info( + f"Setting enhanced resource tags: {[tag.text for tag in self._enhanced_resource_tags]}" + ) + # Group tags by type and set them accordingly + tags_by_type: dict[ResourceTagType, list[str]] = {} + for tag in self._enhanced_resource_tags: + tag_type = tag.type + if tag_type not in tags_by_type: + tags_by_type[tag_type] = [] + tags_by_type[tag_type].append(tag.text) + + # Set tags for each type + for tag_type_str, tag_names in tags_by_type.items(): + # Convert string back to enum + tag_type_enum = ResourceTagType(tag_type_str) + alignerr_project.set_tags(tag_names, tag_type_enum) + + def _create_project_owner(self, alignerr_project: "AlignerrProject"): + if self._project_owner_email: + logger.info(f"Setting project owner: {self._project_owner_email}") + + # Find user by email in the organization + user_id = self._find_user_by_email(self._project_owner_email) + if not user_id: + current_org = self.client.get_organization() + raise ValueError( + f"User with email {self._project_owner_email} not found in organization {current_org.uid}" + ) + + ProjectBoostWorkforce.set_project_owner( + client=self.client, + project_id=alignerr_project.project.uid, + project_owner_user_id=user_id, + ) + + def _validate_alignerr_rates(self): + # Import here to avoid circular imports + from alignerr.alignerr_project import AlignerrRole + + required_role_rates = set( + [AlignerrRole.Labeler.value, AlignerrRole.Reviewer.value] + ) + + for role_name in self._alignerr_rates.keys(): + required_role_rates.remove(role_name) + if len(required_role_rates) > 0: + raise ValueError(f"Required role rates are not set: {required_role_rates}") + + def _validate_customer_rate(self): + if self._customer_rate is None: + raise ValueError("Customer rate is not set") + + def _validate_project_owner(self): + if self._project_owner_email is None: + raise 
ValueError("Project owner is not set") + + def _validate(self): + self._validate_alignerr_rates() + self._validate_customer_rate() + self._validate_project_owner() + + def _validate_selective(self, skip_validations: List[ValidationType]): + """Run validations selectively, skipping those in the provided list. + + Args: + skip_validations: List of ValidationType enums to skip + """ + if ValidationType.ALIGNERR_RATE not in skip_validations: + self._validate_alignerr_rates() + + if ValidationType.CUSTOMER_RATE not in skip_validations: + self._validate_customer_rate() + + if ValidationType.PROJECT_OWNER not in skip_validations: + self._validate_project_owner() + + def _get_role_name_to_id(self) -> dict[str, str]: + roles = self.client.get_roles() + return {role.name: role.uid for role in roles.values()} + + def _find_user_by_email(self, email: str) -> Optional[str]: + """Find user ID by email in the organization. + + Args: + email: Email address to search for + + Returns: + User ID if found, None otherwise + """ + try: + # Import here to avoid circular imports + from labelbox.schema.user import User + + # Get the current organization + current_org = self.client.get_organization() + + # Use client.get_users with where clause to find user by email + users = self.client.get_users(where=User.email == email) + + # Get the first matching user and verify they belong to the same organization + user = next(users, None) + if user and user.organization().uid == current_org.uid: + return user.uid + else: + logger.warning( + f"User with email {email} not found in organization {current_org.uid}" + ) + return None + + except Exception as e: + logger.error(f"Error finding user by email {email}: {e}") + return None diff --git a/libs/lbox-alignerr/src/alignerr/alignerr_project_factory.py b/libs/lbox-alignerr/src/alignerr/alignerr_project_factory.py new file mode 100644 index 000000000..27d744b71 --- /dev/null +++ b/libs/lbox-alignerr/src/alignerr/alignerr_project_factory.py @@ -0,0 +1,277 @@ +import datetime +from typing import TYPE_CHECKING, Union, List +import yaml +from pathlib import Path +import logging + +from alignerr.schema.project_rate import BillingMode +from alignerr.schema.enchanced_resource_tags import ResourceTagType +from labelbox.schema.media_type import MediaType + +logger = logging.getLogger(__name__) + + +if TYPE_CHECKING: + from labelbox import Client + + +class AlignerrProjectFactory: + def __init__(self, client: "Client"): + self.client = client + + def create(self, yaml_file_path: str, skip_validation: Union[bool, List] = False): + """ + Create an AlignerrProject from a YAML configuration file. + + Args: + yaml_file_path: Path to the YAML configuration file + skip_validation: Whether to skip validation of required fields. 
Can be: + - bool: Skip all validations (True) or run all validations (False) + - List: Skip specific validations (e.g., [ValidationType.PROJECT_OWNER]) + + Returns: + AlignerrProject: The created project with all configured attributes + + Raises: + FileNotFoundError: If the YAML file doesn't exist + yaml.YAMLError: If the YAML file is invalid + ValueError: If required fields are missing or invalid + + YAML Configuration Structure: + name: str (required) - Project name + media_type: str (required) - Media type (e.g., "Image", "Video", "Text") + rates: dict (optional) - Alignerr role rates + role_name: + rate: float + billing_mode: str + effective_since: str (ISO datetime) + effective_until: str (optional, ISO datetime) + customer_rate: dict (optional) - Customer billing rate + rate: float + billing_mode: str + effective_since: str (ISO datetime) + effective_until: str (optional, ISO datetime) + domains: list[str] (optional) - Project domain names + tags: list[dict] (optional) - Enhanced resource tags + - text: str + type: str (ResourceTagType enum value) + project_owner: str (optional) - Project owner email address + """ + logger.info(f"Creating project from YAML file: {yaml_file_path}") + + # Load and parse YAML file + yaml_path = Path(yaml_file_path) + if not yaml_path.exists(): + raise FileNotFoundError(f"YAML file not found: {yaml_file_path}") + + try: + with open(yaml_path, "r") as file: + config = yaml.safe_load(file) + except yaml.YAMLError as e: + raise yaml.YAMLError(f"Invalid YAML file: {e}") + + # Validate required fields + if not config: + raise ValueError("YAML file is empty") + + required_fields = ["name", "media_type"] + for field in required_fields: + if field not in config: + raise ValueError( + f"Required field '{field}' is missing from YAML configuration" + ) + + # Import here to avoid circular imports + from alignerr.alignerr_project_builder import ( + AlignerrProjectBuilder, + ) + from alignerr.alignerr_project import AlignerrRole + + # Create project builder + builder = AlignerrProjectBuilder(self.client) + + # Set basic project properties + builder.set_name(config["name"]) + + # Set media type + media_type_str = config["media_type"] + media_type = MediaType(media_type_str) + + # Check if the media type is supported + if not MediaType.is_supported(media_type): + supported_members = MediaType.get_supported_members() + raise ValueError( + f"Invalid media_type '{media_type_str}'. Must be one of: {supported_members}" + ) + + builder.set_media_type(media_type) + + # Set project rates if provided + if "rates" in config: + rates_config = config["rates"] + if not isinstance(rates_config, dict): + raise ValueError("'rates' must be a dictionary") + + for role_name, rate_config in rates_config.items(): + try: + alignerr_role = AlignerrRole(role_name.upper()) + except ValueError: + raise ValueError( + f"Invalid role '{role_name}'. Must be one of: {[r.value for r in AlignerrRole]}" + ) + + # Validate rate configuration + required_rate_fields = [ + "rate", + "billing_mode", + "effective_since", + ] + for field in required_rate_fields: + if field not in rate_config: + raise ValueError( + f"Required field '{field}' is missing for role '{role_name}'" + ) + + # Parse billing mode + try: + billing_mode = BillingMode(rate_config["billing_mode"]) + except ValueError: + raise ValueError( + f"Invalid billing_mode '{rate_config['billing_mode']}' for role '{role_name}'. 
Must be one of: {[e.value for e in BillingMode]}" + ) + + # Parse effective dates + try: + effective_since = datetime.datetime.fromisoformat( + rate_config["effective_since"] + ) + except ValueError: + raise ValueError( + f"Invalid effective_since date format for role '{role_name}'. Use ISO format (YYYY-MM-DDTHH:MM:SS)" + ) + + effective_until = None + if "effective_until" in rate_config and rate_config["effective_until"]: + try: + effective_until = datetime.datetime.fromisoformat( + rate_config["effective_until"] + ) + except ValueError: + raise ValueError( + f"Invalid effective_until date format for role '{role_name}'. Use ISO format (YYYY-MM-DDTHH:MM:SS)" + ) + + # Set the rate + builder.set_alignerr_role_rate( + role_name=alignerr_role, + rate=float(rate_config["rate"]), + billing_mode=billing_mode, + effective_since=effective_since, + effective_until=effective_until, + ) + + # Set customer rate if provided + if "customer_rate" in config: + customer_rate_config = config["customer_rate"] + if not isinstance(customer_rate_config, dict): + raise ValueError("'customer_rate' must be a dictionary") + + # Validate customer rate configuration + required_customer_rate_fields = [ + "rate", + "billing_mode", + "effective_since", + ] + for field in required_customer_rate_fields: + if field not in customer_rate_config: + raise ValueError( + f"Required field '{field}' is missing for customer_rate" + ) + + # Parse billing mode + try: + billing_mode = BillingMode(customer_rate_config["billing_mode"]) + except ValueError: + raise ValueError( + f"Invalid billing_mode '{customer_rate_config['billing_mode']}' for customer_rate. Must be one of: {[e.value for e in BillingMode]}" + ) + + # Parse effective dates + try: + effective_since = datetime.datetime.fromisoformat( + customer_rate_config["effective_since"] + ) + except ValueError: + raise ValueError( + "Invalid effective_since date format for customer_rate. Use ISO format (YYYY-MM-DDTHH:MM:SS)" + ) + + effective_until = None + if ( + "effective_until" in customer_rate_config + and customer_rate_config["effective_until"] + ): + try: + effective_until = datetime.datetime.fromisoformat( + customer_rate_config["effective_until"] + ) + except ValueError: + raise ValueError( + "Invalid effective_until date format for customer_rate. Use ISO format (YYYY-MM-DDTHH:MM:SS)" + ) + + # Set the customer rate + builder.set_customer_rate( + rate=float(customer_rate_config["rate"]), + billing_mode=billing_mode, + effective_since=effective_since, + effective_until=effective_until, + ) + + # Set domains if provided + if "domains" in config: + domains_config = config["domains"] + if not isinstance(domains_config, list): + raise ValueError("'domains' must be a list") + + if not all(isinstance(domain, str) for domain in domains_config): + raise ValueError("All domain names must be strings") + + builder.set_domains(domains_config) + + # Set enhanced resource tags if provided + if "tags" in config: + tags_config = config["tags"] + if not isinstance(tags_config, list): + raise ValueError("'tags' must be a list") + + for tag_config in tags_config: + if not isinstance(tag_config, dict): + raise ValueError("Each tag must be a dictionary") + + required_tag_fields = ["text", "type"] + for field in required_tag_fields: + if field not in tag_config: + raise ValueError(f"Required field '{field}' is missing for tag") + + # Validate tag type + try: + tag_type = ResourceTagType(tag_config["type"]) + except ValueError: + raise ValueError( + f"Invalid tag type '{tag_config['type']}'. 
Must be one of: {[e.value for e in ResourceTagType]}" + ) + + # Set the tag + builder.set_tags([tag_config["text"]], tag_type) + + # Set project owner if provided + if "project_owner" in config: + project_owner_config = config["project_owner"] + if not isinstance(project_owner_config, str): + raise ValueError("'project_owner' must be a string (email address)") + + builder.set_project_owner(project_owner_config) + + # Create the project + return builder.create(skip_validation=skip_validation) diff --git a/libs/lbox-alignerr/src/alignerr/schema/__init__.py b/libs/lbox-alignerr/src/alignerr/schema/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/libs/lbox-alignerr/src/alignerr/schema/enchanced_resource_tags.py b/libs/lbox-alignerr/src/alignerr/schema/enchanced_resource_tags.py new file mode 100644 index 000000000..a500a2571 --- /dev/null +++ b/libs/lbox-alignerr/src/alignerr/schema/enchanced_resource_tags.py @@ -0,0 +1,145 @@ +from enum import Enum +from typing import List, Optional +from labelbox.orm.db_object import DbObject, Updateable +from labelbox.orm.model import Field +from pydantic import BaseModel + + +class ResourceTagType(Enum): + """Enum for resource tag types.""" + + Default = "Default" + Billing = "Billing" + + +class CreateResourceTagInput(BaseModel): + """Input for creating a new resource tag.""" + + text: str + color: str + type: Optional[str] = None + + +class UpdateResourceTagInput(BaseModel): + """Input for updating a resource tag.""" + + id: str + text: str + color: str + type: Optional[str] = None + + +class DeleteResourceTagInput(BaseModel): + """Input for deleting a resource tag.""" + + id: str + type: Optional[str] = None + + +class ResourceTagsInput(BaseModel): + """Input for querying resource tags.""" + + type: str + + +class EnhancedResourceTag(DbObject, Updateable): + """Enhanced resource tag with additional functionality and type support.""" + + # Fields matching the DDL schema + id = Field.String("id") + createdAt = Field.DateTime("createdAt") + updatedAt = Field.DateTime("updatedAt") + organizationId = Field.String("organizationId") + text = Field.String("text") + color = Field.String("color") + createdById = Field.String("createdById") + type = Field.String("type") + + @classmethod + def create( + cls, + client, + text: str, + color: str, + tag_type: Optional[ResourceTagType] = None, + ) -> "EnhancedResourceTag": + """Create a new enhanced resource tag. + + Args: + client: Labelbox client instance + text: Text content of the resource tag + color: Color of the resource tag + tag_type: Optional type of the resource tag + + Returns: + Created EnhancedResourceTag instance + """ + # Use the existing organization create_resource_tag method + # Get the organization + org = client.get_organization() + + # Create the tag using existing API + tag_data = {"text": text, "color": color} + created_tag = org.create_resource_tag(tag_data) + + # Create EnhancedResourceTag with the same data plus defaults for missing fields + enhanced_tag = cls( + client, + { + "id": created_tag.uid, + "text": created_tag.text, + "color": created_tag.color, + "createdAt": None, + "updatedAt": None, + "organizationId": None, + "createdById": None, + "type": tag_type.value if tag_type else None, + }, + ) + + return enhanced_tag + + @classmethod + def search_by_text( + cls, client, search_text: str, tag_type: ResourceTagType + ) -> List["EnhancedResourceTag"]: + """Search resource tags by text content. 
+ + Args: + client: Labelbox client instance + search_text: Text to search for + tag_type: Type filter + + Returns: + List of matching EnhancedResourceTag instances + """ + # Use the existing organization get_resource_tags method + # Get the organization + org = client.get_organization() + + # Get all resource tags + regular_tags = org.get_resource_tags() + + # Convert to EnhancedResourceTag instances and filter by search text and type + matching_tags = [] + for tag in regular_tags: + if search_text.lower() in tag.text.lower(): + enhanced_tag = cls( + client, + { + "id": tag.uid, + "text": tag.text, + "color": tag.color, + "createdAt": None, + "updatedAt": None, + "organizationId": None, + "createdById": None, + "type": tag_type.value, + }, + ) + + # Apply type filter + if enhanced_tag.type == tag_type.value: + matching_tags.append(enhanced_tag) + + return matching_tags diff --git a/libs/lbox-alignerr/src/alignerr/schema/project_boost_workforce.py b/libs/lbox-alignerr/src/alignerr/schema/project_boost_workforce.py new file mode 100644 index 000000000..28ff27566 --- /dev/null +++ b/libs/lbox-alignerr/src/alignerr/schema/project_boost_workforce.py @@ -0,0 +1,258 @@ +from enum import Enum +from typing import Optional +from labelbox.orm.db_object import DbObject +from labelbox.orm.model import Relationship, Field +from pydantic import BaseModel + + +class ProjectBoostWorkforceStatus(Enum): + """Enum for ProjectBoostWorkforce status.""" + + SET_UP = "SET_UP" + REQUESTED = "REQUESTED" + ACCEPTED = "ACCEPTED" + CALIBRATION = "CALIBRATION" + PRODUCTION = "PRODUCTION" + COMPLETE = "COMPLETE" + PAUSED = "PAUSED" + + +class ProjectBoostType(Enum): + """Enum for ProjectBoost type.""" + + SELF_SERVE = "SELF_SERVE" + MANAGED = "MANAGED" + + +class ProjectDifficulty(Enum): + """Enum for project difficulty levels.""" + + EASY = "easy" + MEDIUM = "medium" + HARD = "hard" + + +class BillingMode(Enum): + """Enum for billing modes.""" + + BY_TASK = "BY_TASK" + BY_HOUR = "BY_HOUR" + BY_TASK_PER_TURN = "BY_TASK_PER_TURN" + + +class UpsertProjectBoostWorkforceInput(BaseModel): + """Input for upserting a ProjectBoostWorkforce.""" + + projectId: str + + +class UpdateProjectBoostWorkforceStatusInput(BaseModel): + """Input for updating ProjectBoostWorkforce status.""" + + projectId: str + status: ProjectBoostWorkforceStatus + + +class UpdateProjectBoostWorkforceCountryMultiplierInput(BaseModel): + """Input for updating country rate multipliers.""" + + projectId: str + disabledCountryRateMultipliers: bool + + +class UpdateProjectBoostWorkforceBillingModeInput(BaseModel): + """Input for updating billing mode.""" + + projectId: str + billingMode: BillingMode + customerBillingMode: Optional[BillingMode] = None + + +class ValidateAndRequestProjectBoostWorkforceInput(BaseModel): + """Input for validating and requesting ProjectBoostWorkforce.""" + + projectId: str + + +class UpdateProjectBoostWorkforceInput(BaseModel): + """Input for updating ProjectBoostWorkforce.""" + + projectId: str + status: Optional[ProjectBoostWorkforceStatus] = None + calibrationDatarows: Optional[int] = None + reworkThreshold: Optional[float] = None + jiraTicketUrl: Optional[str] = None + slackChannelUrl: Optional[str] = None + sampleVideoUrl: Optional[str] = None + projectDifficulty: Optional[ProjectDifficulty] = None + estimatedTimePerLabel: Optional[float] = None + projectDescription: Optional[str] = None + pilotStatus: Optional[bool] = None + discordLandingChannelId: Optional[str] = None + discordLabelerRoleId: Optional[str] = None + 
discordGuildId: Optional[str] = None + discordReviewerChannelId: Optional[str] = None + discordReviewerRoleId: Optional[str] = None + projectOwnerUserId: Optional[str] = None + + +class FindProjectBoostWorkforceInput(BaseModel): + """Input for finding ProjectBoostWorkforce.""" + + projectId: str + + +class ProjectBoostWorkforceResult(BaseModel): + """Result model for ProjectBoostWorkforce operations.""" + + success: bool + + +class ProjectBoostWorkforceStatusHistoryFields(BaseModel): + """Model for ProjectBoostWorkforce status history fields.""" + + id: str + projectId: str + updatedAt: str + updatedById: str + status: ProjectBoostWorkforceStatus + + +class ProjectBoostWorkforce(DbObject): + """A ProjectBoostWorkforce represents workforce management for a project.""" + + # Relationships + projectOwner = Relationship.ToOne("User", False) + + # Fields matching the GraphQL schema + id = Field.String("id") + projectId = Field.String("projectId") + createdAt = Field.DateTime("createdAt") + updatedAt = Field.DateTime("updatedAt") + createdById = Field.String("createdById") + createdByEmail = Field.String("createdByEmail") + updatedById = Field.String("updatedById") + status = Field.Enum(ProjectBoostWorkforceStatus, "status") + calibrationDatarows = Field.Int("calibrationDatarows") + reworkThreshold = Field.Float("reworkThreshold") + jiraTicketUrl = Field.String("jiraTicketUrl") + slackChannelUrl = Field.String("slackChannelUrl") + pilotStatus = Field.Boolean("pilotStatus") + discourseCategoryUrl = Field.String("discourseCategoryUrl") + sampleVideoUrl = Field.String("sampleVideoUrl") + projectDifficulty = Field.Enum(ProjectDifficulty, "projectDifficulty") + projectDescription = Field.String("projectDescription") + estimatedTimePerLabel = Field.Float("estimatedTimePerLabel") + disabledCountryRateMultipliers = Field.Boolean("disabledCountryRateMultipliers") + billingMode = Field.Enum(BillingMode, "billingMode") + customerBillingMode = Field.Enum(BillingMode, "customerBillingMode") + type = Field.Enum(ProjectBoostType, "type") + isPaused = Field.Boolean("isPaused") + isAnnotatingPausedForUser = Field.Boolean("isAnnotatingPausedForUser") + isPayPerTaskEnabled = Field.Boolean("isPayPerTaskEnabled") + codeId = Field.String("codeId") + projectOwnerUserId = Field.String("projectOwnerUserId") + + @classmethod + def get_by_project_id( + cls, client, project_id: str + ) -> Optional["ProjectBoostWorkforce"]: + """Get ProjectBoostWorkforce by project ID. + + Args: + client: Labelbox client instance + project_id: ID of the project + + Returns: + ProjectBoostWorkforce instance or None if not found + """ + input_data = FindProjectBoostWorkforceInput(projectId=project_id) + + query_str = """ + query GetProjectBoostWorkforcePyApi($data: FindProjectBoostWorkforceInput!) 
{ + projectBoostWorkforce(data: $data) { + id + projectId + createdAt + updatedAt + createdById + createdByEmail + updatedById + status + calibrationDatarows + reworkThreshold + jiraTicketUrl + slackChannelUrl + pilotStatus + discourseCategoryUrl + sampleVideoUrl + projectDifficulty + projectDescription + estimatedTimePerLabel + disabledCountryRateMultipliers + billingMode + customerBillingMode + type + isPaused + isAnnotatingPausedForUser + isPayPerTaskEnabled + codeId + projectOwnerUserId + projectOwner { + id + email + name + } + } + }""" + + result = client.execute(query_str, {"data": input_data.model_dump()}) + workforce_data = result.get("projectBoostWorkforce") + + if not workforce_data: + return None + + return cls(client, workforce_data) + + @classmethod + def update( + cls, client, update_input: UpdateProjectBoostWorkforceInput + ) -> ProjectBoostWorkforceResult: + """Update ProjectBoostWorkforce with various fields. + + Args: + client: Labelbox client instance + update_input: UpdateProjectBoostWorkforceInput with fields to update + + Returns: + ProjectBoostWorkforceResult indicating success + """ + mutation_str = """ + mutation UpdateProjectBoostWorkforcePyApi($data: UpdateProjectBoostWorkforceInput!) { + updateProjectBoostWorkforce(data: $data) { + success + } + }""" + + result = client.execute(mutation_str, {"data": update_input.model_dump()}) + return ProjectBoostWorkforceResult(**result["updateProjectBoostWorkforce"]) + + @classmethod + def set_project_owner( + cls, client, project_id: str, project_owner_user_id: str + ) -> ProjectBoostWorkforceResult: + """Set the project owner for ProjectBoostWorkforce. + + Args: + client: Labelbox client instance + project_id: ID of the project + project_owner_user_id: ID of the user to set as project owner + + Returns: + ProjectBoostWorkforceResult indicating success + """ + update_input = UpdateProjectBoostWorkforceInput( + projectId=project_id, projectOwnerUserId=project_owner_user_id + ) + + return cls.update(client, update_input) diff --git a/libs/lbox-alignerr/src/alignerr/schema/project_domain.py b/libs/lbox-alignerr/src/alignerr/schema/project_domain.py new file mode 100644 index 000000000..3a9c2b6e3 --- /dev/null +++ b/libs/lbox-alignerr/src/alignerr/schema/project_domain.py @@ -0,0 +1,290 @@ +from typing import List, Optional, Dict, Any +from labelbox.orm.db_object import Deletable, DbObject +from labelbox.orm.model import Field +from labelbox.pagination import PaginatedCollection +from pydantic import BaseModel + + +class CreateProjectDomainInput(BaseModel): + """Input for creating a new project domain.""" + + name: str + projectId: Optional[str] = None + + +class ProjectDomainsPaginationInput(BaseModel): + """Input for paginating project domains query.""" + + limit: int = 30 + offset: int = 0 + searchByName: Optional[str] = None + projectIds: Optional[List[str]] = None + includeArchived: bool = False + + +class ConnectProjectToDomainsInput(BaseModel): + """Input for connecting a project to domains.""" + + projectId: str + domainIds: List[str] + + +class ProjectDomainsPage(BaseModel): + """Response model for paginated project domains.""" + + nodes: List["ProjectDomain"] + totalCount: int + + +class ProjectDomain(DbObject, Deletable): + """A project domain represents a categorization for projects.""" + + # Fields matching the GraphQL schema + id = Field.String("id") + name = Field.String("name") + createdAt = Field.DateTime("createdAt") + updatedAt = Field.DateTime("updatedAt") + deactivatedAt = Field.DateTime("deactivatedAt") + 
ratingsCount = Field.Int("ratingsCount") + + @classmethod + def create( + cls, client, name: str, project_id: Optional[str] = None + ) -> "ProjectDomain": + """Create a new project domain. + + Args: + client: Labelbox client instance + name: Name of the project domain + project_id: Optional project ID to associate with + + Returns: + Created ProjectDomain instance + """ + input_data = CreateProjectDomainInput(name=name, projectId=project_id) + + query_str = """ + mutation CreateProjectDomainPyApi($input: CreateProjectDomainInput!) { + createProjectDomain(input: $input) { + id + name + createdAt + updatedAt + deactivatedAt + ratingsCount + } + }""" + + result = client.execute(query_str, {"input": input_data.model_dump()}) + return cls(client, result["createProjectDomain"]) + + def activate(self) -> "ProjectDomain": + """Activate this project domain. + + Returns: + Updated ProjectDomain instance + """ + query_str = """ + mutation ActivateProjectDomainPyApi($id: ID!) { + activateProjectDomain(id: $id) { + id + name + createdAt + updatedAt + deactivatedAt + ratingsCount + } + }""" + + result = self.client.execute(query_str, {"id": self.uid}) + return self.__class__(self.client, result["activateProjectDomain"]) + + def deactivate(self) -> "ProjectDomain": + """Deactivate this project domain. + + Returns: + Updated ProjectDomain instance + """ + query_str = """ + mutation DeactivateProjectDomainPyApi($id: ID!) { + deactivateProjectDomain(id: $id) { + id + name + createdAt + updatedAt + deactivatedAt + ratingsCount + } + }""" + + result = self.client.execute(query_str, {"id": self.uid}) + return self.__class__(self.client, result["deactivateProjectDomain"]) + + @classmethod + def connect_project_to_domains( + cls, client, project_id: str, domain_ids: List[str] + ) -> bool: + """Connect a project to multiple domains. + + Args: + client: Labelbox client instance + project_id: ID of the project to connect + domain_ids: List of domain IDs to connect to the project + + Returns: + True if successful + """ + input_data = ConnectProjectToDomainsInput( + projectId=project_id, domainIds=domain_ids + ) + + query_str = """ + mutation ConnectProjectToDomainsPyApi($input: ConnectProjectToDomainsInput!) { + connectProjectToDomains(input: $input) + }""" + + result = client.execute(query_str, {"input": input_data.model_dump()}) + return result["connectProjectToDomains"] + + @classmethod + def query_by_project_id( + cls, + project_id: str, + limit: int = 30, + offset: int = 0, + include_archived: bool = False, + ) -> str: + """Get GraphQL query string for fetching project domains by project ID. + + Args: + project_id: ID of the project to fetch domains for + limit: Maximum number of results to return + offset: Number of results to skip + include_archived: Whether to include archived domains + + Returns: + GraphQL query string + """ + return """ + query ProjectDomainsPyApi($projectId: ID!, $includeArchived: Boolean!) { + projectDomains( + pagination: { + offset: %d + limit: %d + projectIds: [$projectId] + includeArchived: $includeArchived + } + ) { + nodes { + id + name + createdAt + updatedAt + deactivatedAt + ratingsCount + } + totalCount + } + }""" + + @classmethod + def get_by_project_id( + cls, + client, + project_id: str, + limit: int = 30, + offset: int = 0, + include_archived: bool = False, + ) -> PaginatedCollection: + """Get project domains for a specific project with pagination. 
+ + Args: + client: Labelbox client instance + project_id: ID of the project to fetch domains for + limit: Maximum number of results to return + offset: Number of results to skip + include_archived: Whether to include archived domains + + Returns: + PaginatedCollection of ProjectDomain instances + """ + query_str = cls.query_by_project_id(project_id, limit, offset, include_archived) + + params: Dict[str, Any] = { + "projectId": project_id, + "includeArchived": include_archived, + } + + return PaginatedCollection( + client=client, + query=query_str, + params=params, + dereferencing=["projectDomains", "nodes"], + obj_class=cls, + ) + + @classmethod + def search( + cls, + client, + search_by_name: Optional[str] = None, + project_ids: Optional[List[str]] = None, + limit: int = 30, + offset: int = 0, + include_archived: bool = False, + ) -> PaginatedCollection: + """Search project domains with various filters. + + Args: + client: Labelbox client instance + search_by_name: Optional name to search for + project_ids: Optional list of project IDs to filter by + limit: Maximum number of results to return + offset: Number of results to skip + include_archived: Whether to include archived domains + + Returns: + PaginatedCollection of ProjectDomain instances + """ + query_str = """ + query SearchProjectDomainsPyApi($includeArchived: Boolean!, $searchByName: String, $projectIds: [ID!]) { + projectDomains( + pagination: { + offset: %d + limit: %d + searchByName: $searchByName + projectIds: $projectIds + includeArchived: $includeArchived + } + ) { + nodes { + id + name + createdAt + updatedAt + deactivatedAt + ratingsCount + } + totalCount + } + }""" + + # Build params dictionary with proper types for GraphQL + params: Dict[str, Any] = { + "includeArchived": include_archived, + } + + # Only add non-None values to avoid type issues + if search_by_name is not None: + params["searchByName"] = search_by_name + if project_ids is not None: + # Keep as list for GraphQL - it will be properly serialized + params["projectIds"] = project_ids + + return PaginatedCollection( + client=client, + query=query_str, + params=params, + dereferencing=["projectDomains", "nodes"], + obj_class=cls, + ) diff --git a/libs/lbox-alignerr/src/alignerr/schema/project_rate.py b/libs/lbox-alignerr/src/alignerr/schema/project_rate.py new file mode 100644 index 000000000..1d0d25110 --- /dev/null +++ b/libs/lbox-alignerr/src/alignerr/schema/project_rate.py @@ -0,0 +1,117 @@ +from enum import Enum +from typing import Optional +from labelbox.orm.db_object import DbObject, Deletable +from labelbox.orm.model import Relationship, Field +from pydantic import BaseModel, model_validator + + +class BillingMode(Enum): + BY_TASK = "BY_TASK" + BY_HOUR = "BY_HOUR" + BY_TASK_PER_TURN = "BY_TASK_PER_TURN" + BY_ACCEPTED_TASK = "BY_ACCEPTED_TASK" + + +class ProjectRateInput(BaseModel): + rateForId: str + isBillRate: bool + billingMode: BillingMode + rate: float + effectiveSince: str # DateTime as string + effectiveUntil: Optional[str] = None # Optional DateTime as string + + @model_validator(mode="after") + def validate_fields(self): + if self.rate < 0: + raise ValueError("Rate must be greater than or equal to 0") + + if self.isBillRate and self.rateForId != "": + raise ValueError( + "isBillRate indicates that this is a customer bill rate. 
rateForId must be empty if isBillRate is true" + ) + + if not self.isBillRate and self.rateForId == "": + raise ValueError("rateForId must be set to the id of the Alignerr Role") + + return self + + +class ProjectRateV2(DbObject, Deletable): + # Relationships + userRole = Relationship.ToOne("UserRole", False) + updatedBy = Relationship.ToOne("User", False) + + # Fields matching the GraphQL schema + isBillRate = Field.Boolean("isBillRate") + billingMode = Field.Enum(BillingMode, "billingMode") + rate = Field.Float("rate") + createdAt = Field.DateTime("createdAt") + updatedAt = Field.DateTime("updatedAt") + effectiveSince = Field.DateTime("effectiveSince") + effectiveUntil = Field.DateTime("effectiveUntil") + + @classmethod + def get_by_project_id(cls, client, project_id: str) -> list["ProjectRateV2"]: + query_str = """ + query GetAllProjectRatesPyApi($projectId: ID!) { + project(where: { id: $projectId }) { + id + ratesV2 { + id + userRole { + id + name + } + isBillRate + billingMode + rate + effectiveSince + effectiveUntil + createdAt + updatedAt + updatedBy { + id + email + name + } + } + } + } + """ + result = client.execute(query_str, {"projectId": project_id}) + rates_data = result["project"]["ratesV2"] + + if not rates_data: + return [] + + # Return all rates as ProjectRateV2 objects + return [cls(client, rate_data) for rate_data in rates_data] + + @classmethod + def set_project_rate( + cls, client, project_id: str, project_rate_input: ProjectRateInput + ): + mutation_str = """mutation SetProjectRateV2PyApi($input: SetProjectRateV2Input!) { + setProjectRateV2(input: $input) { + success + } + }""" + + params = { + "projectId": project_id, + "input": { + "projectId": project_id, + "userRoleId": project_rate_input.rateForId, + "isBillRate": project_rate_input.isBillRate, + "billingMode": project_rate_input.billingMode.value + if hasattr(project_rate_input.billingMode, "value") + else project_rate_input.billingMode, + "rate": project_rate_input.rate, + "effectiveSince": project_rate_input.effectiveSince, + "effectiveUntil": project_rate_input.effectiveUntil, + }, + } + + result = client.execute(mutation_str, params) + + return result["setProjectRateV2"]["success"] diff --git a/libs/lbox-alignerr/tests/assets/test_project_comprehensive.yaml b/libs/lbox-alignerr/tests/assets/test_project_comprehensive.yaml new file mode 100644 index 000000000..f341602df --- /dev/null +++ b/libs/lbox-alignerr/tests/assets/test_project_comprehensive.yaml @@ -0,0 +1,38 @@ +name: "TestComprehensiveProject" +media_type: "Image" + +# Alignerr role rates +rates: + LABELER: + rate: 15.0 + billing_mode: "BY_HOUR" + effective_since: "2024-01-01T00:00:00" + effective_until: "2024-12-31T23:59:59" + REVIEWER: + rate: 20.0 + billing_mode: "BY_HOUR" + effective_since: "2024-01-01T00:00:00" + +# Customer billing rate +customer_rate: + rate: 25.0 + billing_mode: "BY_HOUR" + effective_since: "2024-01-01T00:00:00" + effective_until: "2024-12-31T23:59:59" + +# Project domains (will be created if they don't exist) +domains: + - "TestDomain1" + - "TestDomain2" + +# Enhanced resource tags +tags: + - text: "TestTag1" + type: "Default" + - text: "TestTag2" + type: "Billing" + - text: "TestTag3" + type: "System" + +# Project owner (will use current user's email in tests) +project_owner: "test@example.com" diff --git a/libs/lbox-alignerr/tests/conftest.py b/libs/lbox-alignerr/tests/conftest.py new file mode 100644 index 000000000..27e1140e1 --- /dev/null +++ b/libs/lbox-alignerr/tests/conftest.py @@ -0,0 +1,1261 @@ +import json 
+import os +import re +import time +import uuid +from datetime import datetime +from enum import Enum +from random import randint +from string import ascii_letters +from types import SimpleNamespace +from typing import Tuple, Type + +import pytest +import requests +from lbox.exceptions import LabelboxError + +from labelbox import ( + Classification, + Client, + DataRow, + Dataset, + MediaType, + OntologyBuilder, + Option, + Tool, +) +from labelbox.orm import query +from labelbox.pagination import PaginatedCollection +from labelbox.schema.annotation_import import LabelImport +from labelbox.schema.enums import AnnotationImportState +from labelbox.schema.invite import Invite +from labelbox.schema.ontology import Ontology +from labelbox.schema.project import Project +from labelbox.schema.quality_mode import QualityMode + +IMG_URL = "https://picsum.photos/200/300.jpg" +MASKABLE_IMG_URL = "https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg" +SMALL_DATASET_URL = "https://storage.googleapis.com/lb-artifacts-testing-public/sdk_integration_test/potato.jpeg" +DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS = 30 +DATA_ROW_PROCESSING_WAIT_SLEEP_INTERNAL_SECONDS = 3 +EPHEMERAL_BASE_URL = "http://lb-api-public" +IMAGE_URL = "https://storage.googleapis.com/diagnostics-demo-data/coco/COCO_train2014_000000000034.jpg" +EXTERNAL_ID = "my-image" + +pytest_plugins = [] + + +@pytest.fixture(scope="session") +def rand_gen(): + def gen(field_type): + if field_type is str: + return "".join( + ascii_letters[randint(0, len(ascii_letters) - 1)] for _ in range(16) + ) + + if field_type is datetime: + return datetime.now() + + raise Exception("Can't random generate for field type '%r'" % field_type) + + return gen + + +class Environ(Enum): + LOCAL = "local" + PROD = "prod" + STAGING = "staging" + CUSTOM = "custom" + STAGING_EU = "staging-eu" + EPHEMERAL = "ephemeral" # Used for testing PRs with ephemeral environments + + +@pytest.fixture +def image_url() -> str: + return MASKABLE_IMG_URL + + +@pytest.fixture +def external_id() -> str: + return EXTERNAL_ID + + +def ephemeral_endpoint() -> str: + return os.getenv("LABELBOX_TEST_BASE_URL", EPHEMERAL_BASE_URL) + + +def graphql_url(environ: str) -> str: + if environ == Environ.LOCAL: + return "http://localhost:3000/api/graphql" + elif environ == Environ.PROD: + return "https://api.labelbox.com/graphql" + elif environ == Environ.STAGING: + return "https://api.lb-stage.xyz/graphql" + elif environ == Environ.CUSTOM: + graphql_api_endpoint = os.environ.get("LABELBOX_TEST_GRAPHQL_API_ENDPOINT") + if graphql_api_endpoint is None: + raise Exception("Missing LABELBOX_TEST_GRAPHQL_API_ENDPOINT") + return graphql_api_endpoint + elif environ == Environ.EPHEMERAL: + return f"{ephemeral_endpoint()}/graphql" + return "http://host.docker.internal:8080/graphql" + + +def rest_url(environ: str) -> str: + if environ == Environ.LOCAL: + return "http://localhost:3000/api/v1" + elif environ == Environ.PROD: + return "https://api.labelbox.com/api/v1" + elif environ == Environ.STAGING: + return "https://api.lb-stage.xyz/api/v1" + elif environ == Environ.CUSTOM: + rest_api_endpoint = os.environ.get("LABELBOX_TEST_REST_API_ENDPOINT") + if rest_api_endpoint is None: + raise Exception("Missing LABELBOX_TEST_REST_API_ENDPOINT") + return rest_api_endpoint + elif environ == Environ.EPHEMERAL: + return f"{ephemeral_endpoint()}/api/v1" + return "http://host.docker.internal:8080/api/v1" + + +def testing_api_key(environ: Environ) -> str: + keys = [ + 
f"LABELBOX_TEST_API_KEY_{environ.value.upper()}", + "LABELBOX_TEST_API_KEY", + "LABELBOX_API_KEY", + ] + for key in keys: + value = os.environ.get(key) + if value is not None: + return value + raise Exception("Cannot find API to use for tests") + + +def service_api_key() -> str: + service_api_key = os.environ["SERVICE_API_KEY"] + if service_api_key is None: + raise Exception("SERVICE_API_KEY is missing and needed for admin client") + return service_api_key + + +class IntegrationClient(Client): + def __init__(self, environ: str) -> None: + api_url = graphql_url(environ) + api_key = testing_api_key(environ) + rest_endpoint = rest_url(environ) + super().__init__( + api_key, + api_url, + enable_experimental=True, + rest_endpoint=rest_endpoint, + ) + self.queries = [] + + def execute(self, query=None, params=None, check_naming=True, **kwargs): + if check_naming and query is not None: + assert re.match(r"\s*(?:query|mutation) \w+PyApi", query) is not None + self.queries.append((query, params)) + if not kwargs.get("timeout"): + kwargs["timeout"] = 30.0 + return super().execute(query, params, **kwargs) + + +class AdminClient(Client): + def __init__(self, env): + """ + The admin client creates organizations and users using admin api described here https://labelbox.atlassian.net/wiki/spaces/AP/pages/2206564433/Internal+Admin+APIs. + """ + self._api_key = service_api_key() + self._admin_endpoint = f"{ephemeral_endpoint()}/admin/v1" + self._api_url = graphql_url(env) + self._rest_endpoint = rest_url(env) + + super().__init__( + self._api_key, + self._api_url, + enable_experimental=True, + rest_endpoint=self._rest_endpoint, + ) + + def _create_organization(self) -> str: + endpoint = f"{self._admin_endpoint}/organizations/" + response = requests.post( + endpoint, + headers=self.headers, + json={"name": f"Test Org {uuid.uuid4()}"}, + ) + + data = response.json() + if response.status_code not in [ + requests.codes.created, + requests.codes.ok, + ]: + raise Exception("Failed to create org, message: " + str(data["message"])) + + return data["id"] + + def _create_user(self, organization_id=None) -> Tuple[str, str]: + if organization_id is None: + organization_id = self.organization_id + + endpoint = f"{self._admin_endpoint}/user-identities/" + identity_id = f"e2e+{uuid.uuid4()}" + + response = requests.post( + endpoint, + headers=self.headers, + json={ + "identityId": identity_id, + "email": "email@email.com", + "name": f"tester{uuid.uuid4()}", + "verificationStatus": "VERIFIED", + }, + ) + data = response.json() + if response.status_code not in [ + requests.codes.created, + requests.codes.ok, + ]: + raise Exception("Failed to create user, message: " + str(data["message"])) + + user_identity_id = data["identityId"] + + endpoint = f"{self._admin_endpoint}/organizations/{organization_id}/users/" + response = requests.post( + endpoint, + headers=self.headers, + json={"identityId": user_identity_id, "organizationRole": "Admin"}, + ) + + data = response.json() + if response.status_code not in [ + requests.codes.created, + requests.codes.ok, + ]: + raise Exception( + "Failed to create link user to org, message: " + str(data["message"]) + ) + + user_id = data["id"] + + endpoint = f"{self._admin_endpoint}/users/{user_id}/token" + response = requests.get( + endpoint, + headers=self.headers, + ) + data = response.json() + if response.status_code not in [ + requests.codes.created, + requests.codes.ok, + ]: + raise Exception( + "Failed to create ephemeral user, message: " + str(data["message"]) + ) + + token = 
data["token"] + + return user_id, token + + def create_api_key_for_user(self) -> str: + organization_id = self._create_organization() + _, user_token = self._create_user(organization_id) + key_name = f"test-key+{uuid.uuid4()}" + query = """mutation CreateApiKeyPyApi($name: String!) { + createApiKey(data: {name: $name}) { + id + jwt + } + } + """ + params = {"name": key_name} + self.headers["Authorization"] = f"Bearer {user_token}" + res = self.execute(query, params, error_log_key="errors") + + return res["createApiKey"]["jwt"] + + +class EphemeralClient(Client): + def __init__(self, environ=Environ.EPHEMERAL): + self.admin_client = AdminClient(environ) + self.api_key = self.admin_client.create_api_key_for_user() + api_url = graphql_url(environ) + rest_endpoint = rest_url(environ) + + super().__init__( + self.api_key, + api_url, + enable_experimental=True, + rest_endpoint=rest_endpoint, + ) + + +@pytest.fixture +def ephmeral_client() -> EphemeralClient: + return EphemeralClient + + +@pytest.fixture +def integration_client() -> IntegrationClient: + return IntegrationClient + + +@pytest.fixture(scope="session") +def environ() -> Environ: + """ + Checks environment variables for LABELBOX_ENVIRON to be + 'prod' or 'staging' + Make sure to set LABELBOX_TEST_ENVIRON in .github/workflows/python-package.yaml + """ + keys = ["LABELBOX_TEST_ENV", "LABELBOX_TEST_ENVIRON", "LABELBOX_ENV"] + for key in keys: + value = os.environ.get(key) + if value is not None: + return Environ(value) + raise Exception(f"Missing env key in: {os.environ}") + + +def cancel_invite(client, invite_id): + """ + Do not use. Only for testing. + """ + query_str = """mutation CancelInvitePyApi($where: WhereUniqueIdInput!) { + cancelInvite(where: $where) {id}}""" + client.execute(query_str, {"where": {"id": invite_id}}, experimental=True) + + +def get_project_invites(client, project_id): + """ + Do not use. Only for testing. + """ + id_param = "projectId" + query_str = """query GetProjectInvitationsPyApi($from: ID, $first: PageSize, $%s: ID!) { + project(where: {id: $%s}) {id + invites(from: $from, first: $first) { nodes { %s + projectInvites { projectId projectRoleName } } nextCursor}}} + """ % (id_param, id_param, query.results_query_part(Invite)) + return PaginatedCollection( + client, + query_str, + {id_param: project_id}, + ["project", "invites", "nodes"], + Invite, + cursor_path=["project", "invites", "nextCursor"], + ) + + +def get_invites(client): + """ + Do not use. Only for testing. 
+ """ + query_str = """query GetOrgInvitationsPyApi($from: ID, $first: PageSize) { + organization { id invites(from: $from, first: $first) { + nodes { id createdAt organizationRoleName inviteeEmail } nextCursor }}}""" + invites = PaginatedCollection( + client, + query_str, + {}, + ["organization", "invites", "nodes"], + Invite, + cursor_path=["organization", "invites", "nextCursor"], + experimental=True, + ) + return invites + + +@pytest.fixture +def queries(): + return SimpleNamespace( + cancel_invite=cancel_invite, + get_project_invites=get_project_invites, + get_invites=get_invites, + ) + + +@pytest.fixture(scope="session") +def admin_client(environ: str): + return AdminClient(environ) + + +@pytest.fixture(scope="session") +def client(environ: str): + if environ == Environ.EPHEMERAL: + return EphemeralClient() + return IntegrationClient(environ) + + +@pytest.fixture(scope="session") +def pdf_url(client): + pdf_url = client.upload_file("tests/assets/loremipsum.pdf") + return { + "row_data": { + "pdf_url": pdf_url, + }, + "global_key": str(uuid.uuid4()), + } + + +@pytest.fixture(scope="session") +def pdf_entity_data_row(client): + pdf_url = client.upload_file( + "tests/assets/arxiv-pdf_data_99-word-token-pdfs_0801.3483.pdf" + ) + text_layer_url = client.upload_file( + "tests/assets/arxiv-pdf_data_99-word-token-pdfs_0801.3483-lb-textlayer.json" + ) + + return { + "row_data": {"pdf_url": pdf_url, "text_layer_url": text_layer_url}, + "global_key": str(uuid.uuid4()), + } + + +@pytest.fixture() +def conversation_entity_data_row(client, rand_gen): + return { + "row_data": "https://storage.googleapis.com/labelbox-developer-testing-assets/conversational_text/1000-conversations/conversation-1.json", + "global_key": f"https://storage.googleapis.com/labelbox-developer-testing-assets/conversational_text/1000-conversations/conversation-1.json-{rand_gen(str)}", + } + + +@pytest.fixture +def project(client, rand_gen): + project = client.create_project( + name=rand_gen(str), + media_type=MediaType.Image, + ) + yield project + project.delete() + + +@pytest.fixture +def consensus_project(client, rand_gen): + project = client.create_project( + name=rand_gen(str), + quality_modes={QualityMode.Consensus}, + media_type=MediaType.Image, + ) + yield project + project.delete() + + +@pytest.fixture +def model_config(client, rand_gen, valid_model_id): + model_config = client.create_model_config( + name=rand_gen(str), + model_id=valid_model_id, + inference_params={"param": "value"}, + ) + yield model_config + client.delete_model_config(model_config.uid) + + +@pytest.fixture +def consensus_project_with_batch( + consensus_project, initial_dataset, rand_gen, image_url +): + project = consensus_project + dataset = initial_dataset + + data_rows = [] + for _ in range(3): + data_rows.append( + {DataRow.row_data: image_url, DataRow.global_key: str(uuid.uuid4())} + ) + task = dataset.create_data_rows(data_rows) + task.wait_till_done() + assert task.status == "COMPLETE" + + data_rows = list(dataset.data_rows()) + assert len(data_rows) == 3 + batch = project.create_batch( + rand_gen(str), + data_rows, # sample of data row objects + 5, # priority between 1(Highest) - 5(lowest) + ) + + yield [project, batch, data_rows] + batch.delete() + + +@pytest.fixture +def dataset(client, rand_gen): + # Handle invalid default IAM integrations in test environments gracefully + dataset = create_dataset_robust(client, name=rand_gen(str)) + yield dataset + dataset.delete() + + +@pytest.fixture(scope="function") +def unique_dataset(client, 
rand_gen): + # Handle invalid default IAM integrations in test environments gracefully + dataset = create_dataset_robust(client, name=rand_gen(str)) + yield dataset + dataset.delete() + + +@pytest.fixture +def small_dataset(dataset: Dataset): + task = dataset.create_data_rows( + [ + {"row_data": SMALL_DATASET_URL, "external_id": "my-image"}, + ] + * 2 + ) + task.wait_till_done() + + yield dataset + + +@pytest.fixture +def data_row(dataset, image_url, rand_gen): + global_key = f"global-key-{rand_gen(str)}" + task = dataset.create_data_rows( + [ + { + "row_data": image_url, + "external_id": "my-image", + "global_key": global_key, + }, + ] + ) + task.wait_till_done() + dr = dataset.data_rows().get_one() + yield dr + dr.delete() + + +@pytest.fixture +def data_row_and_global_key(dataset, image_url, rand_gen): + global_key = f"global-key-{rand_gen(str)}" + task = dataset.create_data_rows( + [ + { + "row_data": image_url, + "external_id": "my-image", + "global_key": global_key, + }, + ] + ) + task.wait_till_done() + dr = dataset.data_rows().get_one() + yield dr, global_key + dr.delete() + + +# can be used with +# @pytest.mark.parametrize('data_rows', [], indirect=True) +# if omitted, count defaults to 1 +@pytest.fixture +def data_rows(dataset, image_url, request, wait_for_data_row_processing, client): + count = 1 + if hasattr(request, "param"): + count = request.param + + datarows = [ + dict(row_data=image_url, global_key=f"global-key-{uuid.uuid4()}") + for _ in range(count) + ] + + task = dataset.create_data_rows(datarows) + task.wait_till_done() + datarows = dataset.data_rows().get_many(count) + for dr in dataset.data_rows(): + wait_for_data_row_processing(client, dr) + + yield datarows + + for datarow in datarows: + datarow.delete() + + +@pytest.fixture +def iframe_url(environ) -> str: + if environ in [Environ.PROD, Environ.LOCAL]: + return "https://editor.labelbox.com" + elif environ == Environ.STAGING: + return "https://editor.lb-stage.xyz" + + +@pytest.fixture +def sample_image() -> str: + path_to_video = "tests/integration/media/sample_image.jpg" + return path_to_video + + +@pytest.fixture +def sample_video() -> str: + path_to_video = "tests/integration/media/cat.mp4" + return path_to_video + + +@pytest.fixture +def sample_bulk_conversation() -> list: + path_to_conversation = "tests/integration/media/bulk_conversation.json" + with open(path_to_conversation) as json_file: + conversations = json.load(json_file) + return conversations + + +@pytest.fixture +def organization(client): + # Must have at least one seat open in your org to run these tests + org = client.get_organization() + + yield org + + +@pytest.fixture +def configured_project_with_label( + client, + rand_gen, + dataset, + data_row, + wait_for_label_processing, + teardown_helpers, +): + """Project with a connected dataset, having one datarow + + Project contains an ontology with 1 bbox tool + Additionally includes a create_label method for any needed extra labels + One label is already created and yielded when using fixture + """ + project = client.create_project( + name=rand_gen(str), + media_type=MediaType.Image, + ) + project._wait_until_data_rows_are_processed( + data_row_ids=[data_row.uid], + wait_processing_max_seconds=DATA_ROW_PROCESSING_WAIT_TIMEOUT_SECONDS, + sleep_interval=DATA_ROW_PROCESSING_WAIT_SLEEP_INTERNAL_SECONDS, + ) + + project.create_batch( + rand_gen(str), + [data_row.uid], # sample of data row objects + 5, # priority between 1(Highest) - 5(lowest) + ) + ontology = _setup_ontology(project, client) + label = 
_create_label(project, data_row, ontology, wait_for_label_processing) + yield [project, dataset, data_row, label] + + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) + + +def _create_label(project, data_row, ontology, wait_for_label_processing): + predictions = [ + { + "uuid": str(uuid.uuid4()), + "schemaId": ontology.tools[0].feature_schema_id, + "dataRow": {"id": data_row.uid}, + "bbox": {"top": 20, "left": 20, "height": 50, "width": 50}, + } + ] + + def create_label(): + """Ad-hoc function to create a LabelImport + Creates a LabelImport task which will create a label + """ + upload_task = LabelImport.create_from_objects( + project.client, + project.uid, + f"label-import-{uuid.uuid4()}", + predictions, + ) + upload_task.wait_until_done(sleep_time_seconds=5) + assert ( + upload_task.state == AnnotationImportState.FINISHED + ), "Label Import did not finish" + assert ( + len(upload_task.errors) == 0 + ), f"Label Import {upload_task.name} failed with errors {upload_task.errors}" + + project.create_label = create_label + project.create_label() + label = wait_for_label_processing(project)[0] + return label + + +def _setup_ontology(project: Project, client: Client): + ontology_builder = OntologyBuilder( + tools=[ + Tool(tool=Tool.Type.BBOX, name="test-bbox-class"), + ] + ) + ontology = client.create_ontology( + name="ontology with features", + media_type=MediaType.Image, + normalized=ontology_builder.asdict(), + ) + project.connect_ontology(ontology) + + return OntologyBuilder.from_project(project) + + +@pytest.fixture +def big_dataset(dataset: Dataset): + task = dataset.create_data_rows( + [ + {"row_data": IMAGE_URL, "external_id": EXTERNAL_ID}, + ] + * 3 + ) + task.wait_till_done() + + yield dataset + + +@pytest.fixture +def configured_batch_project_with_label( + client, + dataset, + data_row, + wait_for_label_processing, + rand_gen, + teardown_helpers, +): + """Project with a batch having one datarow + Project contains an ontology with 1 bbox tool + Additionally includes a create_label method for any needed extra labels + One label is already created and yielded when using fixture + """ + project = client.create_project( + name=rand_gen(str), + media_type=MediaType.Image, + ) + data_rows = [dr.uid for dr in list(dataset.data_rows())] + project._wait_until_data_rows_are_processed( + data_row_ids=data_rows, sleep_interval=3 + ) + project.create_batch("test-batch", data_rows) + project.data_row_ids = data_rows + + ontology = _setup_ontology(project, client) + label = _create_label(project, data_row, ontology, wait_for_label_processing) + + yield [project, dataset, data_row, label] + + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) + + +@pytest.fixture +def configured_batch_project_with_multiple_datarows( + client, + dataset, + data_rows, + wait_for_label_processing, + rand_gen, + teardown_helpers, +): + """Project with a batch having multiple datarows + Project contains an ontology with 1 bbox tool + Additionally includes a create_label method for any needed extra labels + """ + project = client.create_project( + name=rand_gen(str), + media_type=MediaType.Image, + ) + global_keys = [dr.global_key for dr in data_rows] + + batch_name = f"batch {uuid.uuid4()}" + project.create_batch(batch_name, global_keys=global_keys) + + ontology = _setup_ontology(project, client) + for datarow in data_rows: + _create_label(project, datarow, ontology, wait_for_label_processing) + + yield [project, dataset, data_rows] + + 
teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) + + +# NOTE this is nice heuristics, also there is this logic _wait_until_data_rows_are_processed in Project +# in case we still have flakiness in the future, we can use it +@pytest.fixture +def wait_for_data_row_processing(): + """ + Do not use. Only for testing. + + Returns DataRow after waiting for it to finish processing media_attributes. + Some tests, specifically ones that rely on label export, rely on + DataRow be fully processed with media_attributes + """ + + def func(client, data_row, custom_check=None): + """ + added check_updated_at because when a data_row is updated from say + an image to pdf, it already has media_attributes and the loop does + not wait for processing to a pdf + """ + data_row_id = data_row.uid + timeout_seconds = 60 + while True: + data_row = client.get_data_row(data_row_id) + passed_custom_check = not custom_check or custom_check(data_row) + if data_row.media_attributes and passed_custom_check: + return data_row + timeout_seconds -= 2 + if timeout_seconds <= 0: + raise TimeoutError( + f"Timed out waiting for DataRow '{data_row_id}' to finish processing media_attributes" + ) + time.sleep(2) + + return func + + +@pytest.fixture +def wait_for_label_processing(): + """ + Do not use. Only for testing. + + Returns project's labels as a list after waiting for them to finish processing. + If `project.labels()` is called before label is fully processed, + it may return an empty set + """ + + def func(project): + timeout_seconds = 10 + while True: + labels = list(project.labels()) + if len(labels) > 0: + return labels + timeout_seconds -= 2 + if timeout_seconds <= 0: + raise TimeoutError( + f"Timed out waiting for label for project '{project.uid}' to finish processing" + ) + time.sleep(2) + + return func + + +@pytest.fixture +def initial_dataset(client, rand_gen): + # Handle invalid default IAM integrations in test environments gracefully + dataset = create_dataset_robust(client, name=rand_gen(str)) + yield dataset + + dataset.delete() + + +@pytest.fixture +def video_data(client, rand_gen, video_data_row, wait_for_data_row_processing): + # Handle invalid default IAM integrations in test environments gracefully + dataset = create_dataset_robust(client, name=rand_gen(str)) + data_row_ids = [] + data_row = dataset.create_data_row(video_data_row) + data_row = wait_for_data_row_processing(client, data_row) + data_row_ids.append(data_row.uid) + yield dataset, data_row_ids + dataset.delete() + + +def create_video_data_row(rand_gen): + return { + "row_data": "https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-1.mp4", + "global_key": f"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-1.mp4-{rand_gen(str)}", + "media_type": "VIDEO", + } + + +@pytest.fixture +def video_data_100_rows(client, rand_gen, wait_for_data_row_processing): + # Handle invalid default IAM integrations in test environments gracefully + dataset = create_dataset_robust(client, name=rand_gen(str)) + data_row_ids = [] + for _ in range(100): + data_row = dataset.create_data_row(create_video_data_row(rand_gen)) + data_row = wait_for_data_row_processing(client, data_row) + data_row_ids.append(data_row.uid) + yield dataset, data_row_ids + dataset.delete() + + +@pytest.fixture() +def video_data_row(rand_gen): + return create_video_data_row(rand_gen) + + +class ExportV2Helpers: + @classmethod + def run_project_export_v2_task( + cls, project, num_retries=5, task_name=None, filters={}, 
params={} + ): + task = None + params = ( + params + if params + else { + "project_details": True, + "performance_details": False, + "data_row_details": True, + "label_details": True, + } + ) + while num_retries > 0: + task = project.export_v2( + task_name=task_name, filters=filters, params=params + ) + task.wait_till_done() + assert task.status == "COMPLETE" + assert task.errors is None + if len(task.result) == 0: + num_retries -= 1 + time.sleep(5) + else: + break + return task.result + + @classmethod + def run_dataset_export_v2_task( + cls, dataset, num_retries=5, task_name=None, filters={}, params={} + ): + task = None + params = ( + params if params else {"performance_details": False, "label_details": True} + ) + while num_retries > 0: + task = dataset.export_v2( + task_name=task_name, filters=filters, params=params + ) + task.wait_till_done() + assert task.status == "COMPLETE" + assert task.errors is None + if len(task.result) == 0: + num_retries -= 1 + time.sleep(5) + else: + break + + return task.result + + @classmethod + def run_catalog_export_v2_task( + cls, client, num_retries=5, task_name=None, filters={}, params={} + ): + task = None + params = ( + params if params else {"performance_details": False, "label_details": True} + ) + catalog = client.get_catalog() + while num_retries > 0: + task = catalog.export_v2( + task_name=task_name, filters=filters, params=params + ) + task.wait_till_done() + assert task.status == "COMPLETE" + assert task.errors is None + if len(task.result) == 0: + num_retries -= 1 + time.sleep(5) + else: + break + + return task.result + + +@pytest.fixture +def export_v2_test_helpers() -> Type[ExportV2Helpers]: + return ExportV2Helpers() + + +@pytest.fixture +def big_dataset_data_row_ids(big_dataset: Dataset): + export_task = big_dataset.export() + export_task.wait_till_done() + stream = export_task.get_buffered_stream() + yield [dr.json["data_row"]["id"] for dr in stream] + + +@pytest.fixture(scope="function") +def dataset_with_invalid_data_rows( + unique_dataset: Dataset, upload_invalid_data_rows_for_dataset +): + upload_invalid_data_rows_for_dataset(unique_dataset) + + yield unique_dataset + + +@pytest.fixture +def upload_invalid_data_rows_for_dataset(): + def _upload_invalid_data_rows_for_dataset(dataset: Dataset): + task = dataset.create_data_rows( + [ + { + "row_data": "gs://invalid-bucket/example.png", # forbidden + "external_id": "image-without-access.jpg", + }, + ] + * 2 + ) + task.wait_till_done() + + return _upload_invalid_data_rows_for_dataset + + +@pytest.fixture +def configured_project( + project_with_one_feature_ontology, initial_dataset, rand_gen, image_url +): + dataset = initial_dataset + data_row_id = dataset.create_data_row(row_data=image_url).uid + project = project_with_one_feature_ontology + + batch = project.create_batch( + rand_gen(str), + [data_row_id], # sample of data row objects + 5, # priority between 1(Highest) - 5(lowest) + ) + project.data_row_ids = [data_row_id] + + yield project + + batch.delete() + + +@pytest.fixture +def project_with_one_feature_ontology(project, client: Client): + tools = [ + Tool(tool=Tool.Type.BBOX, name="test-bbox-class").asdict(), + ] + empty_ontology = {"tools": tools, "classifications": []} + ontology = client.create_ontology("empty ontology", empty_ontology, MediaType.Image) + project.connect_ontology(ontology) + yield project + + +@pytest.fixture +def configured_project_with_complex_ontology( + client: Client, initial_dataset, rand_gen, image_url, teardown_helpers +): + project = 
client.create_project( + name=rand_gen(str), + media_type=MediaType.Image, + ) + dataset = initial_dataset + data_row = dataset.create_data_row(row_data=image_url) + data_row_ids = [data_row.uid] + + project.create_batch( + rand_gen(str), + data_row_ids, # sample of data row objects + 5, # priority between 1(Highest) - 5(lowest) + ) + project.data_row_ids = data_row_ids + + ontology = OntologyBuilder() + tools = [ + Tool(tool=Tool.Type.BBOX, name="test-bbox-class"), + Tool(tool=Tool.Type.LINE, name="test-line-class"), + Tool(tool=Tool.Type.POINT, name="test-point-class"), + Tool(tool=Tool.Type.POLYGON, name="test-polygon-class"), + ] + + options = [ + Option(value="first option answer"), + Option(value="second option answer"), + Option(value="third option answer"), + ] + + classifications = [ + Classification(class_type=Classification.Type.TEXT, name="test-text-class"), + Classification( + class_type=Classification.Type.RADIO, + name="test-radio-class", + options=options, + ), + Classification( + class_type=Classification.Type.CHECKLIST, + name="test-checklist-class", + options=options, + ), + ] + + for t in tools: + for c in classifications: + t.add_classification(c) + ontology.add_tool(t) + for c in classifications: + ontology.add_classification(c) + + ontology = client.create_ontology( + "complex image ontology", ontology.asdict(), MediaType.Image + ) + + project.connect_ontology(ontology) + + yield [project, data_row] + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) + + +@pytest.fixture +def embedding(client: Client, environ): + uuid_str = uuid.uuid4().hex + time.sleep(randint(1, 5)) + embedding = client.create_embedding(f"sdk-int-{uuid_str}", 8) + yield embedding + + embedding.delete() + + +@pytest.fixture +def valid_model_id(): + return "2c903542-d1da-48fd-9db1-8c62571bd3d2" + + +@pytest.fixture +def requested_labeling_service( + rand_gen, client, chat_evaluation_ontology, model_config, teardown_helpers +): + project_name = f"test-model-evaluation-project-{rand_gen(str)}" + dataset_name = f"test-model-evaluation-dataset-{rand_gen(str)}" + project = client.create_model_evaluation_project( + name=project_name, dataset_name=dataset_name, data_row_count=1 + ) + project.connect_ontology(chat_evaluation_ontology) + + project.upsert_instructions("tests/integration/media/sample_pdf.pdf") + + labeling_service = project.get_labeling_service() + project.add_model_config(model_config.uid) + project.set_project_model_setup_complete() + + labeling_service.request() + + yield project, project.get_labeling_service() + + teardown_helpers.teardown_project_labels_ontology_feature_schemas(project) + + +class TearDownHelpers: + @staticmethod + def teardown_project_labels_ontology_feature_schemas(project: Project): + """ + Call this function to release project, labels, ontology and feature schemas in fixture teardown + + NOTE: exception handling is not required as this is a fixture teardown + """ + ontology = project.ontology() + ontology_id = ontology.uid + client = project.client + classification_feature_schema_ids = [ + feature["featureSchemaId"] + for feature in ontology.normalized["classifications"] + ] + tool_feature_schema_ids = [ + feature["featureSchemaId"] for feature in ontology.normalized["tools"] + ] + + feature_schema_ids = classification_feature_schema_ids + tool_feature_schema_ids + labels = list(project.labels()) + for label in labels: + label.delete() + + project.delete() + client.delete_unused_ontology(ontology_id) + for feature_schema_id in 
feature_schema_ids:
+            try:
+                project.client.delete_unused_feature_schema(feature_schema_id)
+            except LabelboxError as e:
+                print(f"Failed to delete feature schema {feature_schema_id}: {e}")
+
+    @staticmethod
+    def teardown_ontology_feature_schemas(ontology: Ontology):
+        """
+        Call this function to release the ontology and its feature schemas in fixture teardown
+
+        NOTE: exception handling is not required as this is a fixture teardown
+        """
+        ontology_id = ontology.uid
+        client = ontology.client
+        classification_feature_schema_ids = [
+            feature["featureSchemaId"]
+            for feature in ontology.normalized["classifications"]
+        ] + [
+            option["featureSchemaId"]
+            for feature in ontology.normalized["classifications"]
+            for option in feature.get("options", [])
+        ]
+
+        tool_feature_schema_ids = (
+            [feature["featureSchemaId"] for feature in ontology.normalized["tools"]]
+            + [
+                classification["featureSchemaId"]
+                for tool in ontology.normalized["tools"]
+                for classification in tool.get("classifications", [])
+            ]
+            + [
+                option["featureSchemaId"]
+                for tool in ontology.normalized["tools"]
+                for classification in tool.get("classifications", [])
+                for option in classification.get("options", [])
+            ]
+        )
+
+        feature_schema_ids = classification_feature_schema_ids + tool_feature_schema_ids
+
+        client.delete_unused_ontology(ontology_id)
+        for feature_schema_id in feature_schema_ids:
+            try:
+                # Use the ontology's client; `project` is not defined in this method
+                client.delete_unused_feature_schema(feature_schema_id)
+            except LabelboxError as e:
+                print(f"Failed to delete feature schema {feature_schema_id}: {e}")
+
+
+class ModuleTearDownHelpers(TearDownHelpers): ...
+
+
+class LabelHelpers:
+    def wait_for_labels(self, project, number_of_labels=1):
+        timeout_seconds = 10
+        while True:
+            labels = list(project.labels())
+            if len(labels) >= number_of_labels:
+                return labels
+            timeout_seconds -= 2
+            if timeout_seconds <= 0:
+                raise TimeoutError(
+                    f"Timed out waiting for label for project '{project.uid}' to finish processing"
+                )
+            time.sleep(2)
+
+
+@pytest.fixture
+def teardown_helpers():
+    return TearDownHelpers()
+
+
+@pytest.fixture(scope="module")
+def module_teardown_helpers():
+    return TearDownHelpers()
+
+
+@pytest.fixture
+def label_helpers():
+    return LabelHelpers()
+
+
+def create_dataset_robust(client, **kwargs):
+    """
+    Robust dataset creation that handles invalid default IAM integrations gracefully.
+
+    This is a helper function for tests that need to create datasets directly
+    instead of using fixtures. It falls back to creating datasets without
+    IAM integration when the default integration is invalid.
+
+    Args:
+        client: Labelbox client instance
+        **kwargs: Arguments to pass to create_dataset
+
+    Returns:
+        Dataset: Created dataset
+    """
+    try:
+        return client.create_dataset(**kwargs)
+    except ValueError as e:
+        if "Integration is not valid" in str(e):
+            # Fallback to creating dataset without IAM integration for tests
+            kwargs["iam_integration"] = None
+            return client.create_dataset(**kwargs)
+        else:
+            raise e
diff --git a/libs/lbox-alignerr/tests/integration/test_alignerr_project.py b/libs/lbox-alignerr/tests/integration/test_alignerr_project.py
new file mode 100644
index 000000000..f3c38964d
--- /dev/null
+++ b/libs/lbox-alignerr/tests/integration/test_alignerr_project.py
@@ -0,0 +1,104 @@
+"""Integration tests for AlignerrProject functionality.
+
+These tests interact with the actual Labelbox API to verify AlignerrProject operations.
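+
+A minimal usage sketch of the builder flow these tests exercise (method names are
+taken from the fixtures and tests in this file; the API key and project name are
+illustrative placeholders, so treat this as a sketch rather than a reference):
+
+    from alignerr.alignerr_project import AlignerrWorkspace
+    from labelbox import Client
+    from labelbox.schema.media_type import MediaType
+
+    client = Client(api_key="<YOUR_API_KEY>")
+    alignerr_project = (
+        AlignerrWorkspace.from_labelbox(client)
+        .project_builder()
+        .set_name("Example Alignerr Project")
+        .set_media_type(MediaType.Image)
+        .create(skip_validation=True)
+    )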
+""" + +import datetime +import uuid + +import pytest +from alignerr.alignerr_project import AlignerrProject, AlignerrWorkspace +from alignerr.schema.project_rate import BillingMode, ProjectRateInput +from labelbox.schema.media_type import MediaType + + +@pytest.fixture +def test_project(client): + """Create a test project for AlignerrProject testing.""" + project_name = f"Test AlignerrProject {uuid.uuid4()}" + project = client.create_project(name=project_name, media_type=MediaType.Image) + + yield project + + # Cleanup + try: + project.delete() + except Exception: + pass # Project may already be deleted + + +@pytest.fixture +def test_alignerr_project(client, test_project): + """Create a test AlignerrProject instance using the builder pattern.""" + return ( + AlignerrWorkspace.from_labelbox(client) + .project_builder() + .set_name(test_project.name) + .set_media_type(test_project.media_type) + .create(skip_validation=True) + ) + + +def test_alignerr_project_initialization_error(client, test_project): + """Test that direct AlignerrProject initialization raises an error.""" + with pytest.raises( + RuntimeError, match="AlignerrProject cannot be initialized directly" + ): + AlignerrProject(client, test_project) + + +def test_alignerr_project_property_setter(client, test_alignerr_project): + """Test AlignerrProject property setter.""" + # Create a new project to test property setting + new_project_name = f"New Test Project {uuid.uuid4()}" + new_project = client.create_project( + name=new_project_name, media_type=MediaType.Image + ) + + try: + # Test property setter + test_alignerr_project.project = new_project + assert test_alignerr_project.project == new_project + assert test_alignerr_project.project.name == new_project_name + finally: + new_project.delete() + + +def test_alignerr_project_domains(client, test_alignerr_project): + """Test AlignerrProject domains() method.""" + # Test that domains() returns a PaginatedCollection + domains = test_alignerr_project.domains() + assert domains is not None + # The collection might be empty for a new project, which is expected + + +def test_alignerr_project_get_project_rates_no_rates(client, test_alignerr_project): + """Test get_project_rates() when no rates are set.""" + # For a new project without rates, this should return an empty list + project_rates = test_alignerr_project.get_project_rates() + assert project_rates == [] + + +def test_alignerr_project_set_and_get_project_rates(client, test_alignerr_project): + """Test setting and getting project rates.""" + # Create a project rate input for a customer rate (isBillRate=True requires empty rateForId) + project_rate_input = ProjectRateInput( + rateForId="", # Empty string for customer rate + isBillRate=True, + billingMode=BillingMode.BY_HOUR, + rate=25.0, + effectiveSince=datetime.datetime.now().isoformat(), + effectiveUntil=None, + ) + + # Set the project rate + result = test_alignerr_project.set_project_rate(project_rate_input) + assert result is True # Should return success status + + # Get the project rates back + project_rates = test_alignerr_project.get_project_rates() + # Should return a list with at least one rate + assert isinstance(project_rates, list) + assert len(project_rates) >= 1 + # Note: The actual rate retrieval might depend on the API implementation + # This test verifies the method calls work without errors diff --git a/libs/lbox-alignerr/tests/integration/test_alignerr_project_builder.py b/libs/lbox-alignerr/tests/integration/test_alignerr_project_builder.py new file mode 100644 
index 000000000..2bcdd9f08 --- /dev/null +++ b/libs/lbox-alignerr/tests/integration/test_alignerr_project_builder.py @@ -0,0 +1,316 @@ +"""Integration tests for ProjectRateV2 functionality.""" + +import datetime +from labelbox import Client +from alignerr.alignerr_project import AlignerrRole, AlignerrWorkspace +from alignerr.schema.project_rate import BillingMode +from labelbox.schema.media_type import MediaType +import pytest + + +def test_skip_validation(client: Client): + alignerr_project = ( + AlignerrWorkspace.from_labelbox(client) + .project_builder() + .set_name("TestAlignerrProject") + .set_media_type(MediaType.Image) + .set_alignerr_role_rate( + role_name=AlignerrRole.Labeler, + rate=10.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .create(skip_validation=True) + ) + assert alignerr_project is not None + assert alignerr_project.project.name == "TestAlignerrProject" + + alignerr_project.project.delete() + + +def test_create_alignerr_project_using_builder_validate_input(client: Client): + with pytest.raises(ValueError): + AlignerrWorkspace.from_labelbox(client).project_builder().set_name( + "TestAlignerrProject" + ).set_media_type(MediaType.Image).set_alignerr_role_rate( + role_name=AlignerrRole.Labeler, + rate=10.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ).create() + + # Get current user for project owner + current_user = client.get_user() + + alignerr_project = ( + AlignerrWorkspace.from_labelbox(client) + .project_builder() + .set_name("TestAlignerrProject2") + .set_media_type(MediaType.Image) + .set_alignerr_role_rate( + role_name=AlignerrRole.Labeler, + rate=10.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .set_alignerr_role_rate( + role_name=AlignerrRole.Reviewer, + rate=10.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .set_customer_rate( + rate=15.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .set_project_owner(current_user.email) + .create() + ) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestAlignerrProject2" + + alignerr_project.project.delete() + + +def test_create_alignerr_project_using_builder_add_domains(client: Client): + from alignerr.schema.project_domain import ProjectDomain + import uuid + + # Create test domains first + domain1_name = f"TestDomain1_{uuid.uuid4()}" + domain2_name = f"TestDomain2_{uuid.uuid4()}" + + domain1 = ProjectDomain.create(client, name=domain1_name) + domain2 = ProjectDomain.create(client, name=domain2_name) + + # Add a small delay to allow domains to be searchable + import time + + time.sleep(0.5) + + try: + # Add domains using set_domains method + alignerr_project = ( + AlignerrWorkspace.from_labelbox(client) + .project_builder() + .set_name("TestAlignerrProject3") + .set_media_type(MediaType.Image) + .set_domains([domain1_name, domain2_name]) + .create(skip_validation=True) + ) + assert alignerr_project is not None + assert alignerr_project.project.name == "TestAlignerrProject3" + + # Count domains by iterating through the collection + domain_count = sum(1 for _ in alignerr_project.domains()) + assert domain_count == 2 + alignerr_project.project.delete() + finally: + # Cleanup domains + try: + domain1.deactivate() + domain2.deactivate() + except Exception: + pass + + +def 
test_create_alignerr_project_with_rates_domains_and_resource_tags( + client: Client, +): + """Test creating an Alignerr project with rates, domains, and enhanced resource tags.""" + from alignerr.schema.project_domain import ProjectDomain + from alignerr.schema.enchanced_resource_tags import ( + EnhancedResourceTag, + ResourceTagType, + ) + import uuid + import time + + # Create test domains first + domain1_name = f"TestDomain1_{uuid.uuid4()}" + domain2_name = f"TestDomain2_{uuid.uuid4()}" + + domain1 = ProjectDomain.create(client, name=domain1_name) + domain2 = ProjectDomain.create(client, name=domain2_name) + + # Create test resource tags + tag1_text = f"TestTag1_{uuid.uuid4().hex[:8]}" + tag2_text = f"TestTag2_{uuid.uuid4().hex[:8]}" + + tag1 = EnhancedResourceTag.create( + client, + text=tag1_text, + color="#FF5733", + tag_type=ResourceTagType.Default, + ) + tag2 = EnhancedResourceTag.create( + client, + text=tag2_text, + color="#33FF57", + tag_type=ResourceTagType.Billing, + ) + + # Add a small delay to allow domains to be searchable + time.sleep(0.5) + + try: + # Get current user for project owner + current_user = client.get_user() + + # Create project with rates, domains, and resource tags + alignerr_project = ( + AlignerrWorkspace.from_labelbox(client) + .project_builder() + .set_name("TestAlignerrProjectWithAll") + .set_media_type(MediaType.Image) + .set_alignerr_role_rate( + role_name=AlignerrRole.Labeler, + rate=12.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .set_alignerr_role_rate( + role_name=AlignerrRole.Reviewer, + rate=15.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .set_customer_rate( + rate=20.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .set_domains([domain1_name, domain2_name]) + .set_tags([tag1_text, tag2_text], ResourceTagType.Default) + .set_project_owner(current_user.email) + .create() + ) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestAlignerrProjectWithAll" + + # Verify domains were added + domain_count = sum(1 for _ in alignerr_project.domains()) + assert domain_count == 2 + + # Verify resource tags were added + enhanced_tags = alignerr_project.get_tags() + assert len(enhanced_tags) >= 2 + + # Check that our specific tags are present + tag_texts = [tag.text for tag in enhanced_tags] + assert tag1_text in tag_texts + assert tag2_text in tag_texts + + alignerr_project.project.delete() + finally: + # Cleanup domains + try: + domain1.deactivate() + domain2.deactivate() + except Exception: + pass + + # Cleanup resource tags + try: + tag1.delete() + tag2.delete() + except Exception: + pass + + +def test_create_alignerr_project_with_project_owner(client: Client): + """Test creating an Alignerr project with project owner set.""" + # Get the current user as the project owner + current_user = client.get_user() + + try: + # Create project with project owner using email + alignerr_project = ( + AlignerrWorkspace.from_labelbox(client) + .project_builder() + .set_name("TestAlignerrProjectWithOwner") + .set_media_type(MediaType.Image) + .set_alignerr_role_rate( + role_name=AlignerrRole.Labeler, + rate=10.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .set_alignerr_role_rate( + role_name=AlignerrRole.Reviewer, + rate=12.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + 
.set_customer_rate( + rate=15.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + .set_project_owner(current_user.email) + .create() + ) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestAlignerrProjectWithOwner" + + # Verify project owner was set using the AlignerrProject method + project_boost_workforce = alignerr_project.get_project_owner() + + if project_boost_workforce: + assert project_boost_workforce.projectOwnerUserId == current_user.uid + assert project_boost_workforce.projectOwner.uid == current_user.uid + + alignerr_project.project.delete() + except Exception as e: + # Clean up if test fails + try: + alignerr_project.project.delete() + except Exception: + pass + raise e + + +def test_create_alignerr_project_selective_validation_skip_multiple( + client: Client, +): + """Test creating an Alignerr project with selective validation - skipping multiple validations.""" + from alignerr.alignerr_project_builder import ValidationType + + try: + # Create project skipping multiple validations + alignerr_project = ( + AlignerrWorkspace.from_labelbox(client) + .project_builder() + .set_name("TestAlignerrProjectSkipMultiple") + .set_media_type(MediaType.Image) + .set_alignerr_role_rate( + role_name=AlignerrRole.Labeler, + rate=10.0, + billing_mode=BillingMode.BY_HOUR, + effective_since=datetime.datetime.now().isoformat(), + ) + # Note: Missing reviewer rate, customer rate, and project owner, but we skip those validations + .create( + skip_validation=[ + ValidationType.ALIGNERR_RATE, + ValidationType.CUSTOMER_RATE, + ValidationType.PROJECT_OWNER, + ] + ) + ) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestAlignerrProjectSkipMultiple" + + alignerr_project.project.delete() + except Exception as e: + # Clean up if test fails + try: + alignerr_project.project.delete() + except Exception: + pass + raise e diff --git a/libs/lbox-alignerr/tests/integration/test_alignerr_project_factory.py b/libs/lbox-alignerr/tests/integration/test_alignerr_project_factory.py new file mode 100644 index 000000000..488bebc2d --- /dev/null +++ b/libs/lbox-alignerr/tests/integration/test_alignerr_project_factory.py @@ -0,0 +1,513 @@ +"""Integration tests for AlignerrProjectFactory functionality.""" + +import tempfile +import os +import yaml +from pathlib import Path + +import pytest + +from labelbox import Client +from alignerr.alignerr_project_factory import AlignerrProjectFactory +from alignerr.alignerr_project_builder import ValidationType +from labelbox.schema.media_type import MediaType + + +def test_create_alignerr_project_from_yaml_basic(client: Client): + """Test creating an AlignerrProject from a basic YAML configuration.""" + config = {"name": "TestFactoryProject", "media_type": "IMAGE"} + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + alignerr_project = factory.create(yaml_file_path, skip_validation=True) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestFactoryProject" + assert alignerr_project.project.media_type == MediaType.Image + + alignerr_project.project.delete() + finally: + os.unlink(yaml_file_path) + + +def test_create_alignerr_project_from_yaml_with_rates(client: Client): + """Test creating an AlignerrProject from YAML with rate configurations.""" + config = { + "name": "TestFactoryProjectWithRates", + 
"media_type": "IMAGE", + "rates": { + "labeler": { + "rate": 0.50, + "billing_mode": "BY_TASK", + "effective_since": "2024-01-01T00:00:00", + "effective_until": "2024-12-31T23:59:59", + }, + "reviewer": { + "rate": 0.75, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + }, + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + alignerr_project = factory.create(yaml_file_path, skip_validation=True) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestFactoryProjectWithRates" + assert alignerr_project.project.media_type == MediaType.Image + + # Verify rates were set by checking project rates + project_rates = alignerr_project.get_project_rates() + assert isinstance(project_rates, list) + assert len(project_rates) >= 1 + + alignerr_project.project.delete() + finally: + os.unlink(yaml_file_path) + + +def test_create_alignerr_project_from_yaml_validation_error(client: Client): + """Test that validation errors are raised for incomplete configurations.""" + config = { + "name": "TestProject" + # Missing media_type + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + + with pytest.raises(ValueError, match="Required field 'media_type' is missing"): + factory.create(yaml_file_path) + finally: + os.unlink(yaml_file_path) + + +def test_create_alignerr_project_from_yaml_invalid_media_type(client: Client): + """Test that invalid media types raise appropriate errors.""" + config = {"name": "TestProject", "media_type": "INVALID_MEDIA_TYPE"} + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + + with pytest.raises(ValueError, match="Invalid media_type"): + factory.create(yaml_file_path, skip_validation=True) + finally: + os.unlink(yaml_file_path) + + +def test_create_alignerr_project_from_yaml_file_not_found(client: Client): + """Test that missing YAML files raise appropriate errors.""" + factory = AlignerrProjectFactory(client) + + with pytest.raises(FileNotFoundError, match="YAML file not found"): + factory.create("nonexistent_file.yaml") + + +def test_create_alignerr_project_from_yaml_with_customer_rate(client: Client): + """Test creating an AlignerrProject from YAML with customer rate configuration.""" + config = { + "name": "TestFactoryProjectWithCustomerRate", + "media_type": "IMAGE", + "rates": { + "LABELER": { + "rate": 15.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + "REVIEWER": { + "rate": 20.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + }, + "customer_rate": { + "rate": 25.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + "effective_until": "2024-12-31T23:59:59", + }, + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + alignerr_project = factory.create( + yaml_file_path, skip_validation=[ValidationType.PROJECT_OWNER] + ) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestFactoryProjectWithCustomerRate" + assert alignerr_project.project.media_type == MediaType.Image 
+ + # Verify rates were set + project_rates = alignerr_project.get_project_rates() + assert isinstance(project_rates, list) + assert len(project_rates) >= 2 # Should have both labeler and reviewer rates + + alignerr_project.project.delete() + finally: + os.unlink(yaml_file_path) + + +def test_create_alignerr_project_from_yaml_with_domains(client: Client): + """Test creating an AlignerrProject from YAML with domains configuration.""" + from alignerr.schema.project_domain import ProjectDomain + import uuid + import time + + # Create test domains first + domain1_name = f"TestDomain1_{uuid.uuid4()}" + domain2_name = f"TestDomain2_{uuid.uuid4()}" + + domain1 = ProjectDomain.create(client, name=domain1_name) + domain2 = ProjectDomain.create(client, name=domain2_name) + + # Add a small delay to allow domains to be searchable + time.sleep(0.5) + + config = { + "name": "TestFactoryProjectWithDomains", + "media_type": "IMAGE", + "rates": { + "LABELER": { + "rate": 15.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + "REVIEWER": { + "rate": 20.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + }, + "customer_rate": { + "rate": 25.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + "domains": [domain1_name, domain2_name], + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + alignerr_project = factory.create( + yaml_file_path, skip_validation=[ValidationType.PROJECT_OWNER] + ) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestFactoryProjectWithDomains" + + # Verify domains were added + domain_count = sum(1 for _ in alignerr_project.domains()) + assert domain_count == 2 + + alignerr_project.project.delete() + finally: + os.unlink(yaml_file_path) + # Cleanup domains + try: + domain1.deactivate() + domain2.deactivate() + except Exception: + pass + + +def test_create_alignerr_project_from_yaml_with_tags(client: Client): + """Test creating an AlignerrProject from YAML with enhanced resource tags configuration.""" + from alignerr.schema.enchanced_resource_tags import ( + EnhancedResourceTag, + ResourceTagType, + ) + import uuid + + # Create test resource tags + tag1_text = f"TestTag1_{uuid.uuid4().hex[:8]}" + tag2_text = f"TestTag2_{uuid.uuid4().hex[:8]}" + + tag1 = EnhancedResourceTag.create( + client, + text=tag1_text, + color="#FF5733", + tag_type=ResourceTagType.Default, + ) + tag2 = EnhancedResourceTag.create( + client, + text=tag2_text, + color="#33FF57", + tag_type=ResourceTagType.Billing, + ) + + config = { + "name": "TestFactoryProjectWithTags", + "media_type": "IMAGE", + "rates": { + "LABELER": { + "rate": 15.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + "REVIEWER": { + "rate": 20.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + }, + "customer_rate": { + "rate": 25.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + "tags": [ + {"text": tag1_text, "type": "Default"}, + {"text": tag2_text, "type": "Billing"}, + ], + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + alignerr_project = factory.create( + yaml_file_path, skip_validation=[ValidationType.PROJECT_OWNER] + ) + + assert alignerr_project is 
not None + assert alignerr_project.project.name == "TestFactoryProjectWithTags" + + # Verify resource tags were added + enhanced_tags = alignerr_project.get_tags() + assert len(enhanced_tags) >= 1 # At least one tag should be present + + alignerr_project.project.delete() + finally: + os.unlink(yaml_file_path) + # Cleanup resource tags + try: + tag1.delete() + tag2.delete() + except Exception: + pass + + +def test_create_alignerr_project_from_yaml_with_project_owner(client: Client): + """Test creating an AlignerrProject from YAML with project owner configuration.""" + # Get the current user as the project owner + current_user = client.get_user() + + config = { + "name": "TestFactoryProjectWithOwner", + "media_type": "IMAGE", + "rates": { + "LABELER": { + "rate": 15.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + "REVIEWER": { + "rate": 20.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + }, + "customer_rate": { + "rate": 25.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + "project_owner": current_user.email, + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + alignerr_project = factory.create(yaml_file_path) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestFactoryProjectWithOwner" + + # Verify project owner was set + project_boost_workforce = alignerr_project.get_project_owner() + if project_boost_workforce: + assert project_boost_workforce.projectOwnerUserId == current_user.uid + assert project_boost_workforce.projectOwner.uid == current_user.uid + + alignerr_project.project.delete() + finally: + os.unlink(yaml_file_path) + + +def test_create_alignerr_project_from_yaml_comprehensive(client: Client): + """Test creating an AlignerrProject from the comprehensive YAML asset file.""" + # Get the current user for project owner + current_user = client.get_user() + + # Path to the comprehensive test YAML file + yaml_file_path = ( + Path(__file__).parent.parent / "assets" / "test_project_comprehensive.yaml" + ) + + # Read and modify the YAML to use current user's email and remove domains/tags that require existing resources + with open(yaml_file_path, "r") as f: + config = yaml.safe_load(f) + + # Update project owner to current user's email + config["project_owner"] = current_user.email + + # Remove domains and tags that require existing resources for this test + if "domains" in config: + del config["domains"] + if "tags" in config: + del config["tags"] + + # Create temporary YAML file with updated config + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + temp_yaml_path = f.name + + try: + factory = AlignerrProjectFactory(client) + alignerr_project = factory.create(temp_yaml_path) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestComprehensiveProject" + assert alignerr_project.project.media_type == MediaType.Image + + # Verify rates were set + project_rates = alignerr_project.get_project_rates() + assert isinstance(project_rates, list) + assert len(project_rates) >= 2 + + # Verify project owner was set + project_boost_workforce = alignerr_project.get_project_owner() + if project_boost_workforce: + assert project_boost_workforce.projectOwnerUserId == current_user.uid + + alignerr_project.project.delete() + finally: + 
os.unlink(temp_yaml_path) + + +def test_create_alignerr_project_from_yaml_selective_validation(client: Client): + """Test creating an AlignerrProject from YAML with selective validation.""" + config = { + "name": "TestFactoryProjectSelectiveValidation", + "media_type": "IMAGE", + "rates": { + "LABELER": { + "rate": 15.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + "REVIEWER": { + "rate": 20.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + }, + "customer_rate": { + "rate": 25.0, + "billing_mode": "BY_HOUR", + "effective_since": "2024-01-01T00:00:00", + }, + # Note: No project owner set, but we skip that validation + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + # Skip project owner validation + alignerr_project = factory.create( + yaml_file_path, skip_validation=[ValidationType.PROJECT_OWNER] + ) + + assert alignerr_project is not None + assert alignerr_project.project.name == "TestFactoryProjectSelectiveValidation" + + alignerr_project.project.delete() + finally: + os.unlink(yaml_file_path) + + +def test_create_alignerr_project_from_yaml_invalid_customer_rate( + client: Client, +): + """Test that invalid customer rate configurations raise appropriate errors.""" + config = { + "name": "TestProject", + "media_type": "IMAGE", + "customer_rate": { + "rate": 25.0, + # Missing billing_mode and effective_since + }, + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + + with pytest.raises( + ValueError, + match="Required field 'billing_mode' is missing for customer_rate", + ): + factory.create(yaml_file_path, skip_validation=True) + finally: + os.unlink(yaml_file_path) + + +def test_create_alignerr_project_from_yaml_invalid_tags(client: Client): + """Test that invalid tag configurations raise appropriate errors.""" + config = { + "name": "TestProject", + "media_type": "IMAGE", + "tags": [ + {"text": "TestTag1", "type": "InvalidType"}, # Invalid tag type + ], + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f: + yaml.dump(config, f) + yaml_file_path = f.name + + try: + factory = AlignerrProjectFactory(client) + + with pytest.raises(ValueError, match="Invalid tag type 'InvalidType'"): + factory.create(yaml_file_path, skip_validation=True) + finally: + os.unlink(yaml_file_path) diff --git a/libs/lbox-alignerr/tests/integration/test_enhanced_resource_tags.py b/libs/lbox-alignerr/tests/integration/test_enhanced_resource_tags.py new file mode 100644 index 000000000..5e15d5eeb --- /dev/null +++ b/libs/lbox-alignerr/tests/integration/test_enhanced_resource_tags.py @@ -0,0 +1,173 @@ +"""Integration tests for EnhancedResourceTag functionality. + +These tests interact with the actual Labelbox API to verify EnhancedResourceTag operations. 
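+
+A minimal usage sketch of the calls exercised below, assuming `client` is a
+Labelbox client as provided by the conftest fixtures (tag text and color are
+illustrative placeholders, not an authoritative API reference):
+
+    from alignerr.schema.enchanced_resource_tags import (
+        EnhancedResourceTag,
+        ResourceTagType,
+    )
+
+    tag = EnhancedResourceTag.create(
+        client, text="priority", color="#FF5733", tag_type=ResourceTagType.Default
+    )
+    matches = EnhancedResourceTag.search_by_text(
+        client, search_text="priority", tag_type=ResourceTagType.Default
+    )
+    tag.delete()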
+""" + +import pytest +import uuid + +from alignerr.schema.enchanced_resource_tags import ( + EnhancedResourceTag, + ResourceTagType, +) + + +@pytest.fixture +def test_resource_tags(client): + """Create test resource tags for testing.""" + tags = [] + + # Create multiple test tags with different types + for i, tag_type in enumerate([ResourceTagType.Default, ResourceTagType.Billing]): + tag_text = f"Test_Tag_{i + 1}_{uuid.uuid4().hex[:8]}" + tag_color = f"#{i:06x}" # Generate different colors + tag = EnhancedResourceTag.create( + client, text=tag_text, color=tag_color, tag_type=tag_type + ) + tags.append(tag) + + yield tags + + # Cleanup - delete tags + for tag in tags: + try: + tag.delete() + except Exception: + pass # Tag may already be deleted + + +def test_create_enhanced_resource_tag(client): + """Test creating a new enhanced resource tag.""" + tag_text = f"Test_Create_Tag_{uuid.uuid4().hex[:8]}" + tag_color = "#FF5733" + + # Create tag + tag = EnhancedResourceTag.create( + client, text=tag_text, color=tag_color, tag_type=ResourceTagType.Default + ) + + assert tag is not None + assert tag.text == tag_text + assert tag.color == tag_color.lstrip("#") # API returns color without # + assert tag.type == ResourceTagType.Default.value + assert tag.id is not None + # Note: createdAt and organizationId are not available in current API + # assert tag.createdAt is not None + # assert tag.organizationId is not None + + # Cleanup + try: + tag.delete() + except Exception: + pass + + +def test_create_enhanced_resource_tag_without_type(client): + """Test creating a resource tag without specifying type.""" + tag_text = f"Test_Create_Tag_No_Type_{uuid.uuid4().hex[:8]}" + tag_color = "#33FF57" + + # Create tag without type + tag = EnhancedResourceTag.create(client, text=tag_text, color=tag_color) + + assert tag is not None + assert tag.text == tag_text + assert tag.color == tag_color.lstrip("#") # API returns color without # + assert tag.id is not None + # Note: createdAt is not available in current API + # assert tag.createdAt is not None + + # Cleanup + try: + tag.delete() + except Exception: + pass + + +def test_search_by_text(client, test_resource_tags): + """Test searching resource tags by text content.""" + # Test 1: Search for exact text match + target_tag = test_resource_tags[0] + search_results = EnhancedResourceTag.search_by_text( + client, search_text=target_tag.text, tag_type=ResourceTagType.Default + ) + assert isinstance(search_results, list) + assert len(search_results) >= 1 + assert any(tag.text == target_tag.text for tag in search_results) + + # Test 2: Search for partial text match + partial_text = "Test_Tag" + partial_results = EnhancedResourceTag.search_by_text( + client, search_text=partial_text, tag_type=ResourceTagType.Default + ) + assert isinstance(partial_results, list) + assert len(partial_results) >= 1 # At least one Default type tag + assert all(partial_text in tag.text for tag in partial_results) + + # Test 3: Search with type filter + type_filtered_results = EnhancedResourceTag.search_by_text( + client, search_text="Test_Tag", tag_type=ResourceTagType.Default + ) + assert isinstance(type_filtered_results, list) + # All results should contain the search text and match the type + for tag in type_filtered_results: + assert "Test_Tag" in tag.text + assert tag.type == ResourceTagType.Default.value + + # Test 4: Search for non-existent text + non_existent_results = EnhancedResourceTag.search_by_text( + client, + search_text="NonExistentTag12345", + 
tag_type=ResourceTagType.Default, + ) + assert isinstance(non_existent_results, list) + assert len(non_existent_results) == 0 + + +def test_resource_tag_types_enum(client): + """Test that all resource tag types are properly defined.""" + # Test creating tags with each supported type + supported_types = [ResourceTagType.Default, ResourceTagType.Billing] + for tag_type in supported_types: + tag_text = f"Test_{tag_type.value}_Tag_{uuid.uuid4().hex[:8]}" + tag_color = "#123456" + + tag = EnhancedResourceTag.create( + client, text=tag_text, color=tag_color, tag_type=tag_type + ) + + assert tag.type == tag_type.value + + # Cleanup + try: + tag.delete() + except Exception: + pass + + +def test_enhanced_resource_tag_properties(client, test_resource_tags): + """Test that enhanced resource tags have all expected properties.""" + tag = test_resource_tags[0] + + # Test all expected properties exist + expected_properties = [ + "id", + "createdAt", + "updatedAt", + "organizationId", + "text", + "color", + "createdById", + "type", + ] + + for prop in expected_properties: + assert hasattr(tag, prop), f"Tag missing property: {prop}" + + # Test that required properties are not None + assert tag.id is not None + assert tag.text is not None + assert tag.color is not None + # Note: Some properties are not available in current API + # assert tag.createdAt is not None + # assert tag.organizationId is not None diff --git a/libs/lbox-alignerr/tests/integration/test_project_domain.py b/libs/lbox-alignerr/tests/integration/test_project_domain.py new file mode 100644 index 000000000..7cd92348e --- /dev/null +++ b/libs/lbox-alignerr/tests/integration/test_project_domain.py @@ -0,0 +1,162 @@ +"""Integration tests for ProjectDomain functionality. + +These tests interact with the actual Labelbox API to verify ProjectDomain operations. 
+""" + +import pytest +import uuid + +from alignerr.schema.project_domain import ProjectDomain +from labelbox.schema.media_type import MediaType + + +@pytest.fixture +def test_project(client): + """Create a test project for domain testing.""" + project_name = f"Test Project Domain {uuid.uuid4()}" + project = client.create_project(name=project_name, media_type=MediaType.Image) + + yield project + + # Cleanup + try: + project.delete() + except Exception: + pass # Project may already be deleted + + +@pytest.fixture +def test_domains(client): + """Create test domains for testing.""" + domains = [] + + # Create multiple test domains + for i in range(3): + domain_name = f"Test Domain {i + 1} {uuid.uuid4()}" + domain = ProjectDomain.create(client, name=domain_name) + domains.append(domain) + + yield domains + + # Cleanup - deactivate domains + for domain in domains: + try: + domain.deactivate() + except Exception: + pass # Domain may already be deactivated + + +def test_create_project_domain(client): + """Test creating a new project domain.""" + domain_name = f"Test Create Domain {uuid.uuid4()}" + + # Create domain + domain = ProjectDomain.create(client, name=domain_name) + + assert domain is not None + assert domain.name == domain_name + assert domain.id is not None + assert domain.createdAt is not None + # Cleanup + try: + domain.deactivate() + except Exception: + pass + + +def test_activate_project_domain(client, test_domains): + """Test activating a project domain.""" + domain = test_domains[0] + + # Initially, domain should be active (created domains are active by default) + assert domain.deactivatedAt is None + + # Deactivate first + deactivated_domain = domain.deactivate() + assert deactivated_domain.deactivatedAt is not None + + # Then activate + activated_domain = deactivated_domain.activate() + assert activated_domain.deactivatedAt is None + assert activated_domain.id == domain.id + + +def test_deactivate_project_domain(client, test_domains): + """Test deactivating a project domain.""" + domain = test_domains[0] + + # Initially, domain should be active + assert domain.deactivatedAt is None + + # Deactivate + deactivated_domain = domain.deactivate() + assert deactivated_domain.deactivatedAt is not None + assert deactivated_domain.id == domain.id + + +def test_connect_project_to_domains(client, test_project, test_domains): + """Test connecting a project to multiple domains.""" + domain_ids = [domain.id for domain in test_domains] + + # Connect project to domains + result = ProjectDomain.connect_project_to_domains( + client, project_id=test_project.uid, domain_ids=domain_ids + ) + + assert result is True + + +def test_search_project_domains(client, test_domains): + """Test searching project domains with various filters.""" + # Test 1: Search without filters - should return all domains + results = ProjectDomain.search(client) + assert results is not None + domain_list = list(results) + assert isinstance(domain_list, list) + # Should find at least our test domains + assert len(domain_list) >= len(test_domains) + + # Test 2: Search by specific name - should find exact match + target_domain = test_domains[0] + search_results = ProjectDomain.search(client, search_by_name=target_domain.name) + found_domains = list(search_results) + assert len(found_domains) >= 1 + assert any(domain.name == target_domain.name for domain in found_domains) + + # Test 3: Search by partial name - should find matches + partial_name = "Test Domain" + partial_results = ProjectDomain.search(client, 
search_by_name=partial_name) + partial_domains = list(partial_results) + assert len(partial_domains) >= len(test_domains) + assert all("Test Domain" in domain.name for domain in partial_domains) + + # Test 4: Search for non-existent domain - should return empty + non_existent_results = ProjectDomain.search( + client, search_by_name="NonExistentDomain12345" + ) + non_existent_domains = list(non_existent_results) + assert len(non_existent_domains) == 0 + + # Test 5: Search with pagination parameters + # Note: PaginatedCollection automatically fetches all pages, so limit only affects individual page size + paginated_results = ProjectDomain.search(client, limit=2, offset=0) + paginated_domains = list(paginated_results) + # Should still find all domains since PaginatedCollection fetches all pages + assert len(paginated_domains) >= len(test_domains) + + # Test 6: Search with include_archived parameter + archived_results = ProjectDomain.search(client, include_archived=True) + archived_domains = list(archived_results) + assert isinstance(archived_domains, list) + + # Test 7: Verify domain properties in search results + if found_domains: + domain = found_domains[0] + assert hasattr(domain, "id") + assert hasattr(domain, "name") + assert hasattr(domain, "createdAt") + assert hasattr(domain, "updatedAt") + assert hasattr(domain, "deactivatedAt") + assert hasattr(domain, "ratingsCount") + assert domain.id is not None + assert domain.name is not None diff --git a/libs/lbox-alignerr/tests/integration/test_project_rate.py b/libs/lbox-alignerr/tests/integration/test_project_rate.py new file mode 100644 index 000000000..8612c9f60 --- /dev/null +++ b/libs/lbox-alignerr/tests/integration/test_project_rate.py @@ -0,0 +1,144 @@ +"""Integration tests for ProjectRateV2 functionality.""" + +import datetime +import uuid + +import pytest +from alignerr.schema.project_rate import ( + BillingMode, + ProjectRateInput, + ProjectRateV2, +) +from labelbox.schema.media_type import MediaType + + +@pytest.fixture +def test_project(client): + """Create a test project for ProjectRateV2 testing.""" + project_name = f"Test ProjectRateV2 {uuid.uuid4()}" + project = client.create_project(name=project_name, media_type=MediaType.Image) + + yield project + + # Cleanup + try: + project.delete() + except Exception: + pass # Project may already be deleted + + +def test_project_rate_input_validation(): + """Test ProjectRateInput validation logic.""" + # Test negative rate validation + with pytest.raises(ValueError, match="Rate must be greater than or equal to 0"): + ProjectRateInput( + rateForId="", + isBillRate=True, + billingMode=BillingMode.BY_HOUR, + rate=-10.0, + effectiveSince=datetime.datetime.now().isoformat(), + ) + + # Test isBillRate=True with non-empty rateForId + with pytest.raises( + ValueError, + match="isBillRate indicates that this is a customer bill rate. 
rateForId must be empty if isBillRate is true", + ): + ProjectRateInput( + rateForId="some-id", + isBillRate=True, + billingMode=BillingMode.BY_HOUR, + rate=25.0, + effectiveSince=datetime.datetime.now().isoformat(), + ) + + +def test_get_by_project_id_no_rates(client, test_project): + """Test get_by_project_id when no rates are set.""" + rates = ProjectRateV2.get_by_project_id(client, test_project.uid) + assert rates == [] + + +def test_set_and_get_project_rate_customer(client, test_project): + """Test setting and getting a customer project rate.""" + # Create customer rate input + rate_input = ProjectRateInput( + rateForId="", # Empty string for customer rate + isBillRate=True, + billingMode=BillingMode.BY_HOUR, + rate=25.0, + effectiveSince=datetime.datetime.now().isoformat(), + ) + + # Set the project rate + result = ProjectRateV2.set_project_rate(client, test_project.uid, rate_input) + assert result is True + + # Get the project rates back + rates = ProjectRateV2.get_by_project_id(client, test_project.uid) + assert isinstance(rates, list) + assert len(rates) >= 1 + + # Find the customer rate + customer_rate = None + for rate in rates: + if rate.isBillRate: + customer_rate = rate + break + + assert customer_rate is not None + assert customer_rate.isBillRate is True + assert customer_rate.billingMode == BillingMode.BY_HOUR + assert customer_rate.rate == 25.0 + + +def test_multiple_project_rates(client, test_project): + """Test setting multiple project rates for the same project.""" + # Set customer rate + customer_rate_input = ProjectRateInput( + rateForId="", + isBillRate=True, + billingMode=BillingMode.BY_HOUR, + rate=30.0, + effectiveSince=datetime.datetime.now().isoformat(), + ) + + result1 = ProjectRateV2.set_project_rate( + client, test_project.uid, customer_rate_input + ) + assert result1 is True + + # Get available roles for role rate + roles = client.get_roles() + role_id = None + for role in roles.values(): + if role.name == "REVIEWER": + role_id = role.uid + break + + if role_id: + # Set role rate + role_rate_input = ProjectRateInput( + rateForId=role_id, + isBillRate=False, + billingMode=BillingMode.BY_TASK, + rate=1.25, + effectiveSince=datetime.datetime.now().isoformat(), + ) + + result2 = ProjectRateV2.set_project_rate( + client, test_project.uid, role_rate_input + ) + assert result2 is True + + # Get all project rates + rates = ProjectRateV2.get_by_project_id(client, test_project.uid) + assert isinstance(rates, list) + assert len(rates) >= 2 + + # Verify we have both customer and role rates + customer_rates = [r for r in rates if r.isBillRate] + role_rates = [r for r in rates if not r.isBillRate] + + assert len(customer_rates) >= 1 + assert len(role_rates) >= 1 diff --git a/libs/lbox-alignerr/tests/unit/test_placeholder.py b/libs/lbox-alignerr/tests/unit/test_placeholder.py new file mode 100644 index 000000000..201975fcc --- /dev/null +++ b/libs/lbox-alignerr/tests/unit/test_placeholder.py @@ -0,0 +1,2 @@ +def test_placeholder(): + pass diff --git a/requirements-dev.lock b/requirements-dev.lock index 4dceb50ea..97cca34cd 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -8,6 +8,9 @@ # with-sources: false -e file:libs/labelbox + # via lbox-alignerr +-e file:libs/lbox-alignerr + # via labelbox -e file:libs/lbox-clients # via labelbox -e file:libs/lbox-example @@ -185,6 +188,7 @@ pyasn1-modules==0.4.0 pydantic==2.8.2 # via databooks # via labelbox + # via lbox-alignerr pydantic-core==2.20.1 # via pydantic pygeotile==1.0.6 @@ -219,6 +223,9 @@ 
python-dateutil==2.9.0.post0 # via pandas pytz==2024.1 # via pandas +pyyaml==6.0.3 + # via labelbox + # via lbox-alignerr pyzmq==26.0.3 # via jupyter-client referencing==0.35.1 @@ -309,6 +316,7 @@ typer==0.12.3 # via toml-cli types-pillow==10.2.0.20240520 types-python-dateutil==2.9.0.20240316 +types-pyyaml==6.0.12.20250915 types-requests==2.32.0.20240622 types-tqdm==4.66.0.20240417 typing-extensions==4.12.2 diff --git a/requirements.lock b/requirements.lock index bc7d7303e..3bf1f2bd4 100644 --- a/requirements.lock +++ b/requirements.lock @@ -8,6 +8,9 @@ # with-sources: false -e file:libs/labelbox + # via lbox-alignerr +-e file:libs/lbox-alignerr + # via labelbox -e file:libs/lbox-clients # via labelbox -e file:libs/lbox-example @@ -73,6 +76,7 @@ pyasn1-modules==0.4.0 # via google-auth pydantic==2.8.2 # via labelbox + # via lbox-alignerr pydantic-core==2.20.1 # via pydantic pygeotile==1.0.6 @@ -83,6 +87,9 @@ pyproj==3.6.1 # via labelbox python-dateutil==2.9.0.post0 # via labelbox +pyyaml==6.0.3 + # via labelbox + # via lbox-alignerr requests==2.32.3 # via google-api-core # via labelbox diff --git a/uv.lock b/uv.lock new file mode 100644 index 000000000..3ba602fb5 --- /dev/null +++ b/uv.lock @@ -0,0 +1,568 @@ +version = 1 +revision = 3 +requires-python = ">=3.9" +resolution-markers = [ + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", + "python_full_version < '3.10'", +] + +[[package]] +name = "alabaster" +version = "0.7.16" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = 
"sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, + { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, + { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, + { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, + { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ca/9a0983dd5c8e9733565cf3db4df2b0a2e9a82659fd8aa2a868ac6e4a991f/charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05", size = 207520, upload-time = "2025-08-09T07:57:11.026Z" }, + { url = "https://files.pythonhosted.org/packages/39/c6/99271dc37243a4f925b09090493fb96c9333d7992c6187f5cfe5312008d2/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", 
hash = "sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e", size = 147307, upload-time = "2025-08-09T07:57:12.4Z" }, + { url = "https://files.pythonhosted.org/packages/e4/69/132eab043356bba06eb333cc2cc60c6340857d0a2e4ca6dc2b51312886b3/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99", size = 160448, upload-time = "2025-08-09T07:57:13.712Z" }, + { url = "https://files.pythonhosted.org/packages/04/9a/914d294daa4809c57667b77470533e65def9c0be1ef8b4c1183a99170e9d/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7", size = 157758, upload-time = "2025-08-09T07:57:14.979Z" }, + { url = "https://files.pythonhosted.org/packages/b0/a8/6f5bcf1bcf63cb45625f7c5cadca026121ff8a6c8a3256d8d8cd59302663/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7", size = 152487, upload-time = "2025-08-09T07:57:16.332Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/d3d0e9592f4e504f9dea08b8db270821c909558c353dc3b457ed2509f2fb/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19", size = 150054, upload-time = "2025-08-09T07:57:17.576Z" }, + { url = "https://files.pythonhosted.org/packages/20/30/5f64fe3981677fe63fa987b80e6c01042eb5ff653ff7cec1b7bd9268e54e/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312", size = 161703, upload-time = "2025-08-09T07:57:20.012Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ef/dd08b2cac9284fd59e70f7d97382c33a3d0a926e45b15fc21b3308324ffd/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc", size = 159096, upload-time = "2025-08-09T07:57:21.329Z" }, + { url = "https://files.pythonhosted.org/packages/45/8c/dcef87cfc2b3f002a6478f38906f9040302c68aebe21468090e39cde1445/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34", size = 153852, upload-time = "2025-08-09T07:57:22.608Z" }, + { url = "https://files.pythonhosted.org/packages/63/86/9cbd533bd37883d467fcd1bd491b3547a3532d0fbb46de2b99feeebf185e/charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432", size = 99840, upload-time = "2025-08-09T07:57:23.883Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d6/7e805c8e5c46ff9729c49950acc4ee0aeb55efb8b3a56687658ad10c3216/charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca", size = 107438, upload-time = "2025-08-09T07:57:25.287Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source 
= { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "labelbox-python" +version = "0.1.0" +source = { virtual = "." } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-multiproject" }, + { name = "sphinx-rtd-theme" }, +] + +[package.metadata] +requires-dist = [ + { name = "sphinx", specifier = ">=7.1.2" }, + { name = "sphinx-multiproject", specifier = ">=1.0.0rc1" }, + { name = "sphinx-rtd-theme", specifier = ">=2.0.0" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { 
url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" }, + { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", 
size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "babel", marker = "python_full_version < '3.10'" }, + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "imagesize", marker = "python_full_version < '3.10'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "requests", marker = "python_full_version < '3.10'" }, + { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911, upload-time = "2024-07-20T14:46:56.059Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624, upload-time = "2024-07-20T14:46:52.142Z" }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "babel", marker = "python_full_version == '3.10.*'" }, + { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version == '3.10.*'" }, + { name = "imagesize", marker = "python_full_version == '3.10.*'" }, + { name = "jinja2", marker = "python_full_version == '3.10.*'" }, + { name = "packaging", marker = "python_full_version == '3.10.*'" }, + { name = "pygments", marker = "python_full_version == '3.10.*'" }, + { name = "requests", marker = "python_full_version == '3.10.*'" }, + { name = "snowballstemmer", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version == '3.10.*'" }, + { name = 
"sphinxcontrib-qthelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version == '3.10.*'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611, upload-time = "2024-10-13T20:27:13.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125, upload-time = "2024-10-13T20:27:10.448Z" }, +] + +[[package]] +name = "sphinx" +version = "8.2.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11'", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "babel", marker = "python_full_version >= '3.11'" }, + { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version >= '3.11'" }, + { name = "imagesize", marker = "python_full_version >= '3.11'" }, + { name = "jinja2", marker = "python_full_version >= '3.11'" }, + { name = "packaging", marker = "python_full_version >= '3.11'" }, + { name = "pygments", marker = "python_full_version >= '3.11'" }, + { name = "requests", marker = "python_full_version >= '3.11'" }, + { name = "roman-numerals-py", marker = "python_full_version >= '3.11'" }, + { name = "snowballstemmer", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" }, +] + +[[package]] +name = "sphinx-multiproject" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2b/aa8178b50b68fa4563df2a45147375feb7927c018abfb45c226e52a31a97/sphinx_multiproject-1.0.0.tar.gz", hash = "sha256:93aac0cc046b488ecf951a052edbc462243a2cdc1bbb1a0b89de6a014df99d88", size = 5527, upload-time = "2024-10-23T18:41:47.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/42/424d2f2ed91e6cf14037f172ab78ee74123771feec3537e01f10c7219af8/sphinx_multiproject-1.0.0-py3-none-any.whl", hash = 
"sha256:928d02982f5b8f83d7aff9f87b413781b1b6774fa458a6d8c826a6309eb50695", size = 4724, upload-time = "2024-10-23T18:41:45.627Z" }, +] + +[[package]] +name = "sphinx-rtd-theme" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-jquery" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463, upload-time = "2024-11-13T11:06:04.545Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561, upload-time = "2024-11-13T11:06:02.094Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = 
"2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = "2023-03-14T15:01:00.356Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]