Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

airbyte-ci: Revert mypy changes #33954

Merged
merged 4 commits into from
Jan 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/airbyte-ci-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ jobs:
gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
github_token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }}
subcommand: "test airbyte-ci/connectors/pipelines --poetry-run-command='pytest tests' --poetry-run-command='mypy pipelines --disallow-untyped-defs' --poetry-run-command='ruff check pipelines'"
subcommand: "test airbyte-ci/connectors/pipelines --poetry-run-command='pytest tests'"
airbyte_ci_binary_url: ${{ inputs.airbyte_ci_binary_url || 'https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci' }}
tailscale_auth_key: ${{ secrets.TAILSCALE_AUTH_KEY }}

Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/cat-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,5 +31,5 @@ jobs:
gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
github_token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }}
subcommand: "test airbyte-integrations/bases/connector-acceptance-test --poetry-run-command='pytest unit_tests'"
subcommand: "test airbyte-integrations/bases/connector-acceptance-test --test-directory=unit_tests"
tailscale_auth_key: ${{ secrets.TAILSCALE_AUTH_KEY }}
1 change: 1 addition & 0 deletions airbyte-ci/connectors/pipelines/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -521,6 +521,7 @@ E.G.: running `pytest` on a specific test folder:

| Version | PR | Description |
| ------- | ---------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- |
| 2.14.2 | [#33954](https://github.com/airbytehq/airbyte/pull/33954) | Revert mypy changes |
| 2.14.1 | [#33956](https://github.com/airbytehq/airbyte/pull/33956) | Exclude pnpm lock files from auto-formatting |
| 2.14.0 | [#33941](https://github.com/airbytehq/airbyte/pull/33941) | Enable in-connector normalization in destination-postgres |
| 2.13.1 | [#33920](https://github.com/airbytehq/airbyte/pull/33920) | Report different sentry environments |
Expand Down
12 changes: 6 additions & 6 deletions airbyte-ci/connectors/pipelines/pipelines/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"""The pipelines package."""
import logging
import os
from typing import Union

from rich.logging import RichHandler

from .helpers import sentry_utils
Expand All @@ -15,16 +15,16 @@
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("httpx").setLevel(logging.WARNING)

# RichHandler does not work great in the CI environment, so we use a StreamHandler instead
logging_handler: Union[RichHandler, logging.StreamHandler] = RichHandler(rich_tracebacks=True) if "CI" not in os.environ else logging.StreamHandler()

logging_handlers = [RichHandler(rich_tracebacks=True)]
if "CI" in os.environ:
# RichHandler does not work great in the CI
logging_handlers = [logging.StreamHandler()]

logging.basicConfig(
level=logging.INFO,
format="%(name)s: %(message)s",
datefmt="[%X]",
handlers=[logging_handler],
handlers=logging_handlers,
)

main_logger = logging.getLogger(__name__)
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,14 @@
from __future__ import annotations

import anyio
from connector_ops.utils import ConnectorLanguage # type: ignore
from connector_ops.utils import ConnectorLanguage
from pipelines.airbyte_ci.connectors.build_image.steps import java_connectors, python_connectors
from pipelines.airbyte_ci.connectors.build_image.steps.common import LoadContainerToLocalDockerHost, StepStatus
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.airbyte_ci.connectors.reports import ConnectorReport, Report
from pipelines.airbyte_ci.connectors.reports import ConnectorReport
from pipelines.models.steps import StepResult



class NoBuildStepForLanguageError(Exception):
pass

Expand All @@ -33,7 +32,7 @@ async def run_connector_build(context: ConnectorContext) -> StepResult:
return await LANGUAGE_BUILD_CONNECTOR_MAPPING[context.connector.language](context)


async def run_connector_build_pipeline(context: ConnectorContext, semaphore: anyio.Semaphore, image_tag: str) -> Report:
async def run_connector_build_pipeline(context: ConnectorContext, semaphore: anyio.Semaphore, image_tag: str) -> ConnectorReport:
"""Run a build pipeline for a single connector.

Args:
Expand All @@ -52,6 +51,5 @@ async def run_connector_build_pipeline(context: ConnectorContext, semaphore: any
if context.is_local and build_result.status is StepStatus.SUCCESS:
load_image_result = await LoadContainerToLocalDockerHost(context, per_platform_built_containers, image_tag).run()
step_results.append(load_image_result)
report = ConnectorReport(context, step_results, name="BUILD RESULTS")
context.report = report
return report
context.report = ConnectorReport(context, step_results, name="BUILD RESULTS")
return context.report
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from types import ModuleType
from typing import List, Optional

from connector_ops.utils import Connector # type: ignore
from connector_ops.utils import Connector
from dagger import Container

BUILD_CUSTOMIZATION_MODULE_NAME = "build_customization"
Expand All @@ -21,15 +21,11 @@ def get_build_customization_module(connector: Connector) -> Optional[ModuleType]
Optional[ModuleType]: The build_customization.py module if it exists, None otherwise.
"""
build_customization_spec_path = connector.code_directory / BUILD_CUSTOMIZATION_SPEC_NAME

if not build_customization_spec_path.exists() or not (build_customization_spec := importlib.util.spec_from_file_location(
f"{connector.code_directory.name}_{BUILD_CUSTOMIZATION_MODULE_NAME}", build_customization_spec_path
)):
if not build_customization_spec_path.exists():
return None

if build_customization_spec.loader is None:
return None

build_customization_spec = importlib.util.spec_from_file_location(
f"{connector.code_directory.name}_{BUILD_CUSTOMIZATION_MODULE_NAME}", build_customization_spec_path
)
build_customization_module = importlib.util.module_from_spec(build_customization_spec)
build_customization_spec.loader.exec_module(build_customization_module)
return build_customization_module
Expand All @@ -45,12 +41,9 @@ def get_main_file_name(connector: Connector) -> str:
str: The main file name.
"""
build_customization_module = get_build_customization_module(connector)

return (
build_customization_module.MAIN_FILE_NAME
if build_customization_module and hasattr(build_customization_module, "MAIN_FILE_NAME")
else DEFAULT_MAIN_FILE_NAME
)
if hasattr(build_customization_module, "MAIN_FILE_NAME"):
return build_customization_module.MAIN_FILE_NAME
return DEFAULT_MAIN_FILE_NAME


def get_entrypoint(connector: Connector) -> List[str]:
Expand All @@ -71,7 +64,7 @@ async def pre_install_hooks(connector: Connector, base_container: Container, log
Container: The mutated base_container.
"""
build_customization_module = get_build_customization_module(connector)
if build_customization_module and hasattr(build_customization_module, "pre_connector_install"):
if hasattr(build_customization_module, "pre_connector_install"):
base_container = await build_customization_module.pre_connector_install(base_container)
logger.info(f"Connector {connector.technical_name} pre install hook executed.")
return base_container
Expand All @@ -90,7 +83,7 @@ async def post_install_hooks(connector: Connector, connector_container: Containe
Container: The mutated connector_container.
"""
build_customization_module = get_build_customization_module(connector)
if build_customization_module and hasattr(build_customization_module, "post_connector_install"):
if hasattr(build_customization_module, "post_connector_install"):
connector_container = await build_customization_module.post_connector_install(connector_container)
logger.info(f"Connector {connector.technical_name} post install hook executed.")
return connector_container
Original file line number Diff line number Diff line change
Expand Up @@ -4,33 +4,29 @@
from __future__ import annotations

from abc import ABC
from typing import TYPE_CHECKING
from typing import List, Optional, Tuple

import docker # type: ignore
import docker
from dagger import Container, ExecError, Platform, QueryError
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.helpers.utils import export_container_to_tarball
from pipelines.models.steps import Step, StepResult, StepStatus

if TYPE_CHECKING:
from typing import Any

class BuildConnectorImagesBase(Step, ABC):
"""
A step to build connector images for a set of platforms.
"""

context: ConnectorContext

@property
def title(self) -> str:
def title(self):
return f"Build {self.context.connector.technical_name} docker image for platform(s) {', '.join(self.build_platforms)}"

def __init__(self, context: ConnectorContext) -> None:
self.build_platforms = context.targeted_platforms
self.build_platforms: List[Platform] = context.targeted_platforms
super().__init__(context)

async def _run(self, *args: Any) -> StepResult:
async def _run(self, *args) -> StepResult:
build_results_per_platform = {}
for platform in self.build_platforms:
try:
Expand All @@ -50,7 +46,7 @@ async def _run(self, *args: Any) -> StepResult:
)
return StepResult(self, StepStatus.SUCCESS, stdout=success_message, output_artifact=build_results_per_platform)

async def _build_connector(self, platform: Platform, *args: Any, **kwargs: Any) -> Container:
async def _build_connector(self, platform: Platform, *args) -> Container:
"""Implement the generation of the image for the platform and return the corresponding container.

Returns:
Expand All @@ -60,26 +56,24 @@ async def _build_connector(self, platform: Platform, *args: Any, **kwargs: Any)


class LoadContainerToLocalDockerHost(Step):
context: ConnectorContext

def __init__(self, context: ConnectorContext, containers: dict[Platform, Container], image_tag: str = "dev") -> None:
def __init__(self, context: ConnectorContext, containers: dict[Platform, Container], image_tag: Optional[str] = "dev") -> None:
super().__init__(context)
self.image_tag = image_tag
self.containers = containers

def _generate_dev_tag(self, platform: Platform, multi_platforms: bool) -> str:
def _generate_dev_tag(self, platform: Platform, multi_platforms: bool):
"""
When building for multiple platforms, we need to tag the image with the platform name.
There's no way to locally build a multi-arch image, so we need to tag the image with the platform name when the user passed multiple architecture options.
"""
return f"{self.image_tag}-{platform.replace('/', '-')}" if multi_platforms else self.image_tag

@property
def title(self) -> str:
def title(self):
return f"Load {self.image_name}:{self.image_tag} to the local docker host."

@property
def image_name(self) -> str:
def image_name(self) -> Tuple:
return f"airbyte/{self.context.connector.technical_name}"

async def _run(self) -> StepResult:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@
class BuildOrPullNormalization(Step):
"""A step to build or pull the normalization image for a connector according to the image name."""

context: ConnectorContext

def __init__(self, context: ConnectorContext, normalization_image: str, build_platform: Platform) -> None:
"""Initialize the step to build or pull the normalization image.

Expand All @@ -26,10 +24,7 @@ def __init__(self, context: ConnectorContext, normalization_image: str, build_pl
self.build_platform = build_platform
self.use_dev_normalization = normalization_image.endswith(":dev")
self.normalization_image = normalization_image

@property
def title(self) -> str:
return f"Build {self.normalization_image}" if self.use_dev_normalization else f"Pull {self.normalization_image}"
self.title = f"Build {self.normalization_image}" if self.use_dev_normalization else f"Pull {self.normalization_image}"

async def _run(self) -> StepResult:
if self.use_dev_normalization:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@
#


from typing import Any

from dagger import Container, Platform
from pipelines.airbyte_ci.connectors.build_image.steps import build_customization
from pipelines.airbyte_ci.connectors.build_image.steps.common import BuildConnectorImagesBase
Expand All @@ -19,10 +17,9 @@ class BuildConnectorImages(BuildConnectorImagesBase):
A spec command is run on the container to validate it was built successfully.
"""

context: ConnectorContext
PATH_TO_INTEGRATION_CODE = "/airbyte/integration_code"

async def _build_connector(self, platform: Platform, *args: Any) -> Container:
async def _build_connector(self, platform: Platform):
if (
"connectorBuildOptions" in self.context.connector.metadata
and "baseImage" in self.context.connector.metadata["connectorBuildOptions"]
Expand Down Expand Up @@ -77,10 +74,10 @@ async def _build_from_base_image(self, platform: Platform) -> Container:
# copy python dependencies from builder to connector container
customized_base.with_directory("/usr/local", builder.directory("/usr/local"))
.with_workdir(self.PATH_TO_INTEGRATION_CODE)
.with_file(main_file_name, (await self.context.get_connector_dir(include=[main_file_name])).file(main_file_name))
.with_file(main_file_name, (await self.context.get_connector_dir(include=main_file_name)).file(main_file_name))
.with_directory(
connector_snake_case_name,
(await self.context.get_connector_dir(include=[connector_snake_case_name])).directory(connector_snake_case_name),
(await self.context.get_connector_dir(include=connector_snake_case_name)).directory(connector_snake_case_name),
)
.with_env_variable("AIRBYTE_ENTRYPOINT", " ".join(entrypoint))
.with_entrypoint(entrypoint)
Expand Down
Loading
Loading