Commit 81f1b5c

fix types, lint and format

alafanechere committed Oct 10, 2024
1 parent b682e9b

Showing 14 changed files with 79 additions and 50 deletions.

@@ -1,7 +1,7 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from typing import List, Optional
from typing import List

import asyncclick as click
from pipelines.airbyte_ci.connectors.consts import CONNECTOR_TEST_STEP_ID
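
As an aside, a minimal sketch of the unused-import lint issue this typing change addresses; F401 is the standard pyflakes/Ruff code for "imported but unused", and the function below is purely hypothetical:

from typing import List, Optional  # F401: `typing.Optional` imported but unused


def connector_names() -> List[str]:
    # Only List is exercised, so the Optional import is dead code and gets
    # dropped by the lint pass, leaving `from typing import List`.
    return ["source-faker", "destination-dev-null"]
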

@@ -193,6 +193,7 @@ async def run_connector_base_image_upgrade_pipeline(context: ConnectorContext, s
og_repo_dir = await context.get_repo_dir()
update_base_image_in_metadata = UpdateBaseImageMetadata(
context,
await context.get_connector_dir(),
set_if_not_exists=set_if_not_exists,
)
update_base_image_in_metadata_result = await update_base_image_in_metadata.run()
@@ -232,6 +233,7 @@ async def run_connector_migration_to_base_image_pipeline(context: ConnectorConte
# UPDATE BASE IMAGE IN METADATA
update_base_image_in_metadata = UpdateBaseImageMetadata(
context,
await context.get_connector_dir(),
set_if_not_exists=True,
)
update_base_image_in_metadata_result = await update_base_image_in_metadata.run()
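
A hedged sketch of the updated call pattern for UpdateBaseImageMetadata, which now receives the connector directory as an explicit positional argument; the wrapper function name and the import path of the step class are assumptions, everything else mirrors the hunks above:

from pipelines.airbyte_ci.connectors.context import ConnectorContext
# Import path assumed; the class is defined elsewhere in this repository.
from pipelines.airbyte_ci.steps.base_image import UpdateBaseImageMetadata


async def upgrade_base_image(context: ConnectorContext, set_if_not_exists: bool = True) -> None:
    # The step no longer resolves the connector directory itself: the caller
    # passes the dagger Directory explicitly, as both pipelines above now do.
    update_base_image_in_metadata = UpdateBaseImageMetadata(
        context,
        await context.get_connector_dir(),
        set_if_not_exists=set_if_not_exists,
    )
    result = await update_base_image_in_metadata.run()
    context.logger.info(f"Base image update finished with status {result.status}")
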

@@ -18,7 +18,6 @@
from pipelines.airbyte_ci.connectors.reports import Report
from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.helpers.connectors.command import run_connector_steps
from pipelines.helpers.connectors.format import format_prettier
from pipelines.helpers.connectors.yaml import read_yaml, write_yaml
from pipelines.helpers.execution.run_steps import STEP_TREE, StepToRun
from pipelines.models.steps import Step, StepResult, StepStatus

@@ -5,11 +5,57 @@
import copy
import logging
import typing
from typing import Any, Mapping, Optional, Set, Type
from typing import Any, Dict, Mapping, Optional, Set, Type

from pydantic import BaseModel

from .declarative_component_schema import *
from .declarative_component_schema import (
ApiKeyAuthenticator,
BasicHttpAuthenticator,
BearerAuthenticator,
CompositeErrorHandler,
ConstantBackoffStrategy,
CursorPagination,
CustomAuthenticator,
CustomBackoffStrategy,
CustomErrorHandler,
CustomIncrementalSync,
CustomPaginationStrategy,
CustomPartitionRouter,
CustomRecordExtractor,
CustomRecordFilter,
CustomRequester,
CustomRetriever,
CustomSchemaLoader,
CustomStateMigration,
CustomTransformation,
DatetimeBasedCursor,
DeclarativeSource,
DeclarativeStream,
DefaultErrorHandler,
DefaultPaginator,
DpathExtractor,
ExponentialBackoffStrategy,
HttpRequester,
HttpResponseFilter,
JsonFileSchemaLoader,
JwtAuthenticator,
LegacySessionTokenAuthenticator,
ListPartitionRouter,
MinMaxDatetime,
OAuthAuthenticator,
OffsetIncrement,
PageIncrement,
ParentStreamConfig,
RecordFilter,
RecordSelector,
SelectiveAuthenticator,
SessionTokenAuthenticator,
SimpleRetriever,
SubstreamPartitionRouter,
WaitTimeFromHeader,
WaitUntilTimeFromHeader,
)

PARAMETERS_STR = "$parameters"
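
For background on the wildcard-import replacement above, a small self-contained illustration (not from the repository) of why explicit imports are friendlier to type checkers and linters:

# With `from .declarative_component_schema import *`, Ruff reports F403 on the
# import and F405 on every name used from it, and mypy cannot verify that a
# referenced class actually exists. Importing each model explicitly, as the
# hunk above now does, makes misspelled or unused names fail at check time.
from pydantic import BaseModel


class BearerAuthenticator(BaseModel):
    # Stand-in for the generated schema model of the same name.
    api_token: str


auth = BearerAuthenticator(api_token="dummy-token")
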


@@ -101,7 +101,7 @@ def preprocess_manifest(self, manifest: Mapping[str, Any]) -> Mapping[str, Any]:
"""
return self._evaluate_node(manifest, manifest, set()) # type: ignore[no-any-return]

def _evaluate_node(self, node: Any, manifest: Mapping[str, Any], visited: Set[Any]) -> Any:
def _evaluate_node(self, node: Any, manifest: Mapping[str, Any], visited: Set[Any]) -> Any: # noqa: ANN401
if isinstance(node, dict):
evaluated_dict = {k: self._evaluate_node(v, manifest, visited) for k, v in node.items() if not self._is_ref_key(k)}
if REF_TAG in node:
@@ -126,7 +126,7 @@ def _evaluate_node(self, node: Any, manifest: Mapping[str, Any], visited: Set[An
else:
return node

def _lookup_ref_value(self, ref: str, manifest: Mapping[str, Any]) -> Any:
def _lookup_ref_value(self, ref: str, manifest: Mapping[str, Any]) -> Any: # noqa: ANN401
ref_match = re.match(r"#/(.*)", ref)
if not ref_match:
raise ValueError(f"Invalid reference format {ref}")
@@ -137,15 +137,15 @@ def _lookup_ref_value(self, ref: str, manifest: Mapping[str, Any]) -> Any:
raise ValueError(f"{path}, {ref}")

@staticmethod
def _is_ref(node: Any) -> bool:
def _is_ref(node: Any) -> bool: # noqa: ANN401
return isinstance(node, str) and node.startswith("#/")

@staticmethod
def _is_ref_key(key: str) -> bool:
return bool(key == REF_TAG)

@staticmethod
def _read_ref_value(ref: str, manifest_node: Mapping[str, Any]) -> Any:
def _read_ref_value(ref: str, manifest_node: Mapping[str, Any]) -> Any: # noqa: ANN401
"""
Read the value at the referenced location of the manifest.
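
For reference, a minimal hypothetical example of the rule being suppressed in this file: Ruff's ANN401 ("Dynamically typed expressions (typing.Any) are disallowed") fires on Any-typed parameters and return values, and the inline noqa keeps an intentionally dynamic signature:

from typing import Any, Mapping


def resolve_node(node: Any, manifest: Mapping[str, Any]) -> Any:  # noqa: ANN401
    # The manifest is arbitrary parsed YAML, so Any is the honest annotation;
    # the targeted noqa silences ANN401 for this deliberate exception only.
    if isinstance(node, dict):
        return {key: resolve_node(value, manifest) for key, value in node.items()}
    return node
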

@@ -54,30 +54,30 @@ def filter_out_third_party_connectors(
"--spec-cache-gcs-credentials",
help="The service account key to upload files to the GCS bucket hosting spec cache.",
type=click.STRING,
required=False,
required=True,
envvar="SPEC_CACHE_GCS_CREDENTIALS",
callback=wrap_gcp_credentials_in_secret,
)
@click.option(
"--spec-cache-bucket-name",
help="The name of the GCS bucket where specs will be cached.",
type=click.STRING,
required=False,
required=True,
envvar="SPEC_CACHE_BUCKET_NAME",
)
@click.option(
"--metadata-service-gcs-credentials",
help="The service account key to upload files to the GCS bucket hosting the metadata files.",
type=click.STRING,
required=False,
required=True,
envvar="METADATA_SERVICE_GCS_CREDENTIALS",
callback=wrap_gcp_credentials_in_secret,
)
@click.option(
"--metadata-service-bucket-name",
help="The name of the GCS bucket where metadata files will be uploaded.",
type=click.STRING,
required=False,
required=True,
envvar="METADATA_SERVICE_BUCKET_NAME",
)
@click.option(

@@ -19,12 +19,7 @@
from pipelines.airbyte_ci.connectors.build_image import steps
from pipelines.airbyte_ci.connectors.publish.context import PublishConnectorContext, RolloutMode
from pipelines.airbyte_ci.connectors.reports import ConnectorReport
from pipelines.airbyte_ci.metadata.pipeline import (
MetadataPromoteReleaseCandidate,
MetadataRollbackReleaseCandidate,
MetadataUpload,
MetadataValidation,
)
from pipelines.airbyte_ci.metadata.pipeline import MetadataRollbackReleaseCandidate, MetadataUpload, MetadataValidation
from pipelines.airbyte_ci.steps.bump_version import SetConnectorVersion
from pipelines.airbyte_ci.steps.changelog import AddChangelogEntry
from pipelines.airbyte_ci.steps.pull_request import CreateOrUpdatePullRequest
@@ -400,7 +395,7 @@ def current_semver_version(self) -> semver.Version:
return semver.Version.parse(self.context.connector.version)

@property
def promoted_semver_version(self) -> semver:
def promoted_semver_version(self) -> semver.Version:
return self.current_semver_version.replace(prerelease=None)

@property
@@ -411,7 +406,7 @@ def promoted_version(self) -> str:
def current_version_is_rc(self) -> bool:
return bool(self.current_semver_version.prerelease and "rc" in self.current_semver_version.prerelease)

def __init__(self, context: PublishConnectorContext, connector_directory: Directory):
def __init__(self, context: PublishConnectorContext, connector_directory: Directory) -> None:
self.context = context
super().__init__(context, connector_directory, self.promoted_version)
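
A small worked example of the promotion logic in this class, using the semver package the module already imports; the version strings are illustrative:

import semver

current = semver.Version.parse("2.1.0-rc.3")

# current_version_is_rc: a prerelease component containing "rc" marks a release candidate.
is_release_candidate = bool(current.prerelease and "rc" in current.prerelease)  # True

# promoted_semver_version: dropping the prerelease component yields the version to promote to.
promoted = current.replace(prerelease=None)
print(is_release_candidate, str(promoted))  # True 2.1.0
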

@@ -667,7 +662,6 @@ async def run_connector_promote_pipeline(context: PublishConnectorContext, semap

results = []
current_version = context.connector.version
promoted_version = None
all_modified_files = set()
async with semaphore:
async with context:
@@ -678,7 +672,6 @@ async def run_connector_promote_pipeline(context: PublishConnectorContext, semap
set_promoted_version_results = await set_promoted_version.run()
results.append(set_promoted_version_results)
if set_promoted_version_results.success:
promoted_version = set_promoted_version.promoted_version
all_modified_files.update(await set_promoted_version.export_modified_files(context.connector.code_directory))

# Set isReleaseCandidate to False
@@ -690,6 +683,7 @@ async def run_connector_promote_pipeline(context: PublishConnectorContext, semap

# Open PR when all previous steps are successful
if all([result.success for result in results]):
promoted_version = set_promoted_version.promoted_version
initial_pr_creation = CreateOrUpdatePullRequest(context, skip_ci=True)
pr_creation_args, pr_creation_kwargs = get_promotion_pr_creation_arguments(
all_modified_files, context, results, current_version, promoted_version

@@ -7,7 +7,6 @@
from pathlib import Path
from typing import TYPE_CHECKING

import dagger
from jinja2 import Environment, PackageLoader, select_autoescape
from pipelines.airbyte_ci.connectors.build_image.steps.python_connectors import BuildConnectorImages
from pipelines.airbyte_ci.connectors.context import ConnectorContext
@@ -85,37 +84,33 @@ async def run_connector_up_to_date_pipeline(
new_version: str | None = None

connector_directory = await context.get_connector_dir()
upgrade_base_image_in_metadata = UpdateBaseImageMetadata(context, connector_directory=connector_directory)
upgrade_base_image_in_metadata = UpdateBaseImageMetadata(context, connector_directory)
upgrade_base_image_in_metadata_result = await upgrade_base_image_in_metadata.run()
step_results.append(upgrade_base_image_in_metadata_result)
if upgrade_base_image_in_metadata_result.success:
connector_directory = upgrade_base_image_in_metadata_result.output
exported_modified_files = await upgrade_base_image_in_metadata.export_modified_files(
connector_directory, context.connector.code_directory
)
exported_modified_files = await upgrade_base_image_in_metadata.export_modified_files(context.connector.code_directory)
context.logger.info(f"Exported files following the base image upgrade: {exported_modified_files}")
all_modified_files.update(exported_modified_files)
connector_directory = upgrade_base_image_in_metadata_result.output

if context.connector.is_using_poetry:
# We run the poetry update step after the base image upgrade because the base image upgrade may change the python environment
connector_directory = await context.get_connector_dir()
poetry_update = PoetryUpdate(
context, connector_directory, specific_dependencies=specific_dependencies, connector_directory=connector_directory
)
poetry_update = PoetryUpdate(context, connector_directory, specific_dependencies=specific_dependencies)
poetry_update_result = await poetry_update.run()
step_results.append(poetry_update_result)
if poetry_update_result.success:
exported_modified_files = await poetry_update.export_modified_files(context.connector.code_directory)
context.logger.info(f"Exported files following the Poetry update: {exported_modified_files}")
all_modified_files.update(exported_modified_files)
connector_directory = poetry_update_result.output

one_previous_step_is_successful = any(step_result.success for step_result in step_results)

# NOTE:
# BumpConnectorVersion will already work for manifest-only and Java connectors too
if bump_connector_version and one_previous_step_is_successful:
connector_directory = await context.get_connector_dir()
bump_version = BumpConnectorVersion(context, connector_directory, BUMP_TYPE, connector_directory=connector_directory)
bump_version = BumpConnectorVersion(context, connector_directory, BUMP_TYPE)
bump_version_result = await bump_version.run()
step_results.append(bump_version_result)
if bump_version_result.success:

@@ -9,10 +9,10 @@
from connector_ops.utils import METADATA_FILE_NAME # type: ignore
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.helpers.connectors.dagger_fs import dagger_read_file, dagger_write_file
from pipelines.models.steps import Step, StepResult, StepStatus
from pipelines.models.steps import StepModifyingFiles, StepResult, StepStatus

if TYPE_CHECKING:
from typing import List, Optional
from typing import Optional

import dagger

@@ -21,20 +21,18 @@ class NoBaseImageAddressInMetadataError(Exception):
pass


class UpdateBaseImageMetadata(Step):
class UpdateBaseImageMetadata(StepModifyingFiles):
context: ConnectorContext

title = "Upgrade the base image to the latest version in metadata.yaml"

modified_files: List[str]

def __init__(
self,
context: ConnectorContext,
connector_directory: dagger.Directory,
set_if_not_exists: bool = False,
connector_directory: Optional[dagger.Directory] = None,
) -> None:
super().__init__(context)
super().__init__(context, connector_directory)
self.set_if_not_exists = set_if_not_exists
self.modified_files = []
self.connector_directory = connector_directory

@@ -15,7 +15,7 @@
from pipelines.models.steps import StepModifyingFiles, StepResult, StepStatus

if TYPE_CHECKING:
from typing import List
pass


class ConnectorVersionNotFoundError(Exception):
@@ -117,11 +117,11 @@ def __init__(
bump_type: str,
) -> None:
self.bump_type = bump_type
new_version = self.get_bumped_version(context.connector.version, bump_type)
super().__init__(
context,
connector_directory,
self.get_bumped_version(context.connector.version, bump_type),
connector_directory=connector_directory,
new_version,
)

@property

@@ -5,7 +5,6 @@
from __future__ import annotations

import datetime
from typing import TYPE_CHECKING

import semver
from dagger import Directory
@@ -14,9 +13,6 @@
from pipelines.helpers.connectors.dagger_fs import dagger_read_file, dagger_write_file
from pipelines.models.steps import StepModifyingFiles, StepResult, StepStatus

if TYPE_CHECKING:
from typing import List


class AddChangelogEntry(StepModifyingFiles):
context: ConnectorContext
@@ -48,7 +44,7 @@ async def _run(self, pull_request_number: int | str | None = None) -> StepResult
return StepResult(
step=self,
status=StepStatus.SKIPPED,
stderr=f"Connector does not have a documentation file.",
stderr="Connector does not have a documentation file.",
)

try:

@@ -10,7 +10,6 @@
import shutil
import ssl
import sys
import tempfile
import urllib.request
from typing import TYPE_CHECKING


1 change: 0 additions & 1 deletion airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
@@ -26,7 +26,6 @@
from typing import Any, Callable, Generator, List, Optional, Set, Tuple

from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.models import StepModifyingFiles

DAGGER_CONFIG = Config(log_output=sys.stderr)
METADATA_FILE_NAME = "metadata.yaml"

1 change: 1 addition & 0 deletions airbyte-ci/connectors/pipelines/pipelines/models/steps.py
@@ -420,6 +420,7 @@ def _get_timed_out_step_result(self) -> StepResult:
class StepModifyingFiles(Step):

modified_files: List[str]
modified_directory: dagger.Directory

def __init__(self, context: PipelineContext, modified_directory: dagger.Directory, secrets: List[Secret] | None = None) -> None:
super().__init__(context, secrets)
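
Finally, a hedged sketch of how a step can build on the extended StepModifyingFiles base class declared above, assuming (as the new attribute annotation suggests) that the base constructor stores the directory passed to it; the step itself and the file it writes are hypothetical:

import dagger
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.models.steps import StepModifyingFiles, StepResult, StepStatus


class TouchReadme(StepModifyingFiles):
    """Hypothetical step that records a README edit in the connector directory."""

    context: ConnectorContext
    title = "Touch the connector README"

    def __init__(self, context: ConnectorContext, connector_directory: dagger.Directory) -> None:
        super().__init__(context, connector_directory)
        self.modified_files = []

    async def _run(self) -> StepResult:
        # Mutate the tracked directory and record which files changed, so callers
        # can export them with export_modified_files as the pipelines above do.
        self.modified_directory = self.modified_directory.with_new_file("README.md", contents="# placeholder\n")
        self.modified_files.append("README.md")
        return StepResult(step=self, status=StepStatus.SUCCESS, output=self.modified_directory)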