airbyte-ci: augment the report for java connectors #35317

Merged
@@ -7,8 +7,7 @@
<Property name="container-log-pattern">%d{yyyy-MM-dd'T'HH:mm:ss,SSS}{GMT+0}`%replace{%X{log_source}}{^ -}{} > %replace{%m}{$${env:LOG_SCRUB_PATTERN:-\*\*\*\*\*}}{*****}%n</Property>
<!-- Always log INFO by default. -->
<Property name="log-level">${sys:LOG_LEVEL:-${env:LOG_LEVEL:-INFO}}</Property>
<Property name="logSubDir">${env:AIRBYTE_LOG_SUBDIR:-${date:yyyy-MM-dd'T'HH:mm:ss}}</Property>
<Property name="logDir">build/test-logs/${logSubDir}</Property>
<Property name="logDir">build/test-logs/${date:yyyy-MM-dd'T'HH:mm:ss}</Property>
</Properties>

<Appenders>
3 changes: 2 additions & 1 deletion airbyte-ci/connectors/pipelines/README.md
@@ -644,7 +644,8 @@ E.G.: running Poe tasks on the modified internal packages of the current branch:

| Version | PR | Description |
| ------- | ---------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------- |
| 4.4.0 | [#35317](https://github.com/airbytehq/airbyte/pull/35317) | Augment java connector reports to include full logs and junit test results |
| 4.3.2 | [#35536](https://github.com/airbytehq/airbyte/pull/35536) | Make QA checks run correctly on `*-strict-encrypt` connectors. |
| 4.3.1 | [#35437](https://github.com/airbytehq/airbyte/pull/35437) | Do not run QA checks on publish, just MetadataValidation. |
| 4.3.0 | [#35438](https://github.com/airbytehq/airbyte/pull/35438) | Optionally disable telemetry with environment variable. |
| 4.2.4 | [#35325](https://github.com/airbytehq/airbyte/pull/35325) | Use `connectors_qa` for QA checks and remove redundant checks. |
@@ -47,7 +47,7 @@ async def run_connector_build_pipeline(context: ConnectorContext, semaphore: any
async with semaphore:
async with context:
build_result = await run_connector_build(context)
per_platform_built_containers = build_result.output_artifact
per_platform_built_containers = build_result.output
step_results.append(build_result)
if context.is_local and build_result.status is StepStatus.SUCCESS:
load_image_result = await LoadContainerToLocalDockerHost(context, per_platform_built_containers, image_tag).run()
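Throughout this PR, the `StepResult.output_artifact` field is read as `StepResult.output` instead. A minimal sketch of that pattern, assuming a dataclass-like `StepResult` (the field names mirror this diff, but the class body shown here is a simplification, not the real `pipelines.models.steps` implementation):

```python
from dataclasses import dataclass
from enum import Enum
from typing import Any, Optional


class StepStatus(Enum):
    SUCCESS = "success"
    FAILURE = "failure"
    SKIPPED = "skipped"


@dataclass
class StepResult:
    # Simplified stand-in for pipelines.models.steps.StepResult; the real class has more fields.
    step: Any
    status: StepStatus
    stdout: Optional[str] = None
    output: Any = None  # formerly `output_artifact`


# Producers attach whatever the step built...
result = StepResult(step="BuildConnectorImages", status=StepStatus.SUCCESS, output={"linux/amd64": "<container>"})

# ...and consumers now read `.output` instead of `.output_artifact`.
per_platform_built_containers = result.output
```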
@@ -48,7 +48,7 @@ async def _run(self, *args: Any) -> StepResult:
f"The {self.context.connector.technical_name} docker image "
f"was successfully built for platform(s) {', '.join(self.build_platforms)}"
)
return StepResult(step=self, status=StepStatus.SUCCESS, stdout=success_message, output_artifact=build_results_per_platform)
return StepResult(step=self, status=StepStatus.SUCCESS, stdout=success_message, output=build_results_per_platform)

async def _build_connector(self, platform: Platform, *args: Any, **kwargs: Any) -> Container:
"""Implement the generation of the image for the platform and return the corresponding container.
@@ -59,7 +59,7 @@ async def run_connector_build(context: ConnectorContext) -> StepResult:
build_connector_tar_result = await BuildConnectorDistributionTar(context).run()
if build_connector_tar_result.status is not StepStatus.SUCCESS:
return build_connector_tar_result
dist_dir = await build_connector_tar_result.output_artifact.directory(dist_tar_directory_path(context))
dist_dir = await build_connector_tar_result.output.directory(dist_tar_directory_path(context))
return await BuildConnectorImages(context).run(dist_dir)


@@ -36,4 +36,4 @@ async def _run(self) -> StepResult:
build_normalization_container = normalization.with_normalization(self.context, self.build_platform)
else:
build_normalization_container = self.context.dagger_client.container().from_(self.normalization_image)
return StepResult(step=self, status=StepStatus.SUCCESS, output_artifact=build_normalization_container)
return StepResult(step=self, status=StepStatus.SUCCESS, output=build_normalization_container)
@@ -55,7 +55,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Connector does not have a documentation file.",
output_artifact=self.repo_dir,
output=self.repo_dir,
)
try:
updated_doc = self.add_changelog_entry(doc_path.read_text())
@@ -64,14 +64,14 @@
step=self,
status=StepStatus.FAILURE,
stdout=f"Could not add changelog entry: {e}",
output_artifact=self.repo_dir,
output=self.repo_dir,
)
updated_repo_dir = self.repo_dir.with_new_file(str(doc_path), contents=updated_doc)
return StepResult(
step=self,
status=StepStatus.SUCCESS,
stdout=f"Added changelog entry to {doc_path}",
output_artifact=updated_repo_dir,
output=updated_repo_dir,
)

def find_line_index_for_new_entry(self, markdown_text: str) -> int:
@@ -118,7 +118,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Can't retrieve the connector current version.",
output_artifact=self.repo_dir,
output=self.repo_dir,
)
updated_metadata_str = self.get_metadata_with_bumped_version(current_version, self.new_version, current_metadata_str)
repo_dir_with_updated_metadata = metadata_change_helpers.get_repo_dir_with_updated_metadata_str(
@@ -134,7 +134,7 @@
step=self,
status=StepStatus.SUCCESS,
stdout=f"Updated dockerImageTag from {current_version} to {self.new_version} in {metadata_path}",
output_artifact=repo_dir_with_updated_metadata,
output=repo_dir_with_updated_metadata,
)


@@ -164,7 +164,7 @@ async def run_connector_version_bump_pipeline(
new_version,
)
update_docker_image_tag_in_metadata_result = await update_docker_image_tag_in_metadata.run()
repo_dir_with_updated_metadata = update_docker_image_tag_in_metadata_result.output_artifact
repo_dir_with_updated_metadata = update_docker_image_tag_in_metadata_result.output
steps_results.append(update_docker_image_tag_in_metadata_result)

add_changelog_entry = AddChangelogEntry(
@@ -176,7 +176,7 @@
)
add_changelog_entry_result = await add_changelog_entry.run()
steps_results.append(add_changelog_entry_result)
final_repo_dir = add_changelog_entry_result.output_artifact
final_repo_dir = add_changelog_entry_result.output
await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
report = ConnectorReport(context, steps_results, name="CONNECTOR VERSION BUMP RESULTS")
context.report = report
@@ -64,7 +64,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Could not find a base image for this connector language.",
output_artifact=self.repo_dir,
output=self.repo_dir,
)

metadata_path = self.context.connector.metadata_file_path
@@ -76,15 +76,15 @@
step=self,
status=StepStatus.SKIPPED,
stdout="Connector does not have a base image metadata field.",
output_artifact=self.repo_dir,
output=self.repo_dir,
)

if current_base_image_address == latest_base_image_address:
return StepResult(
step=self,
status=StepStatus.SKIPPED,
stdout="Connector already uses latest base image",
output_artifact=self.repo_dir,
output=self.repo_dir,
)
updated_metadata = self.update_base_image_in_metadata(current_metadata, latest_base_image_address)
updated_repo_dir = metadata_change_helpers.get_repo_dir_with_updated_metadata(self.repo_dir, metadata_path, updated_metadata)
@@ -93,7 +93,7 @@
step=self,
status=StepStatus.SUCCESS,
stdout=f"Updated base image to {latest_base_image_address} in {metadata_path}",
output_artifact=updated_repo_dir,
output=updated_repo_dir,
)


@@ -146,7 +146,7 @@ async def _run(self) -> StepResult:
step=self,
status=StepStatus.SKIPPED,
stdout="Connector does not have a documentation file.",
output_artifact=self.repo_dir,
output=self.repo_dir,
)
current_readme = await (await self.context.get_connector_dir(include=["README.md"])).file("README.md").contents()
try:
@@ -156,14 +156,14 @@
step=self,
status=StepStatus.FAILURE,
stdout=str(e),
output_artifact=self.repo_dir,
output=self.repo_dir,
)
updated_repo_dir = await self.repo_dir.with_new_file(str(readme_path), contents=updated_readme)
return StepResult(
step=self,
status=StepStatus.SUCCESS,
stdout=f"Added build instructions to {readme_path}",
output_artifact=updated_repo_dir,
output=updated_repo_dir,
)

def add_build_instructions(self, og_doc_content: str) -> str:
@@ -276,7 +276,7 @@ async def run_connector_base_image_upgrade_pipeline(context: ConnectorContext, s
)
update_base_image_in_metadata_result = await update_base_image_in_metadata.run()
steps_results.append(update_base_image_in_metadata_result)
final_repo_dir = update_base_image_in_metadata_result.output_artifact
final_repo_dir = update_base_image_in_metadata_result.output
await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
report = ConnectorReport(context, steps_results, name="BASE IMAGE UPGRADE RESULTS")
context.report = report
@@ -324,7 +324,7 @@ async def run_connector_migration_to_base_image_pipeline(
new_version = get_bumped_version(context.connector.version, "patch")
bump_version_in_metadata = BumpDockerImageTagInMetadata(
context,
update_base_image_in_metadata_result.output_artifact,
update_base_image_in_metadata_result.output,
new_version,
)
bump_version_in_metadata_result = await bump_version_in_metadata.run()
@@ -333,7 +333,7 @@
# ADD CHANGELOG ENTRY
add_changelog_entry = AddChangelogEntry(
context,
bump_version_in_metadata_result.output_artifact,
bump_version_in_metadata_result.output,
new_version,
"Base image migration: remove Dockerfile and use the python-connector-base image",
pull_request_number,
@@ -344,13 +344,13 @@
# UPDATE DOC
add_build_instructions_to_doc = AddBuildInstructionsToReadme(
context,
add_changelog_entry_result.output_artifact,
add_changelog_entry_result.output,
)
add_build_instructions_to_doc_results = await add_build_instructions_to_doc.run()
steps_results.append(add_build_instructions_to_doc_results)

# EXPORT MODIFIED FILES BACK TO HOST
final_repo_dir = add_build_instructions_to_doc_results.output_artifact
final_repo_dir = add_build_instructions_to_doc_results.output
await og_repo_dir.diff(final_repo_dir).export(str(git.get_git_repo_path()))
report = ConnectorReport(context, steps_results, name="MIGRATE TO BASE IMAGE RESULTS")
context.report = report
@@ -313,7 +313,7 @@ def create_connector_report(results: List[StepResult]) -> ConnectorReport:
if build_connector_results.status is not StepStatus.SUCCESS:
return create_connector_report(results)

built_connector_platform_variants = list(build_connector_results.output_artifact.values())
built_connector_platform_variants = list(build_connector_results.output.values())
push_connector_image_results = await PushConnectorImageToRegistry(context).run(built_connector_platform_variants)
results.append(push_connector_image_results)

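For build steps, the renamed `output` field is consumed as a mapping keyed by platform, as in `list(build_connector_results.output.values())` above. A small hedged illustration of that shape (the literal keys and placeholder values below are made up; in the pipeline they are Dagger `Platform` and `Container` objects):

```python
# Illustration only: `output` maps each build platform to the container built for it,
# so .values() yields the per-platform image variants to push or load locally.
per_platform_output = {
    "linux/amd64": "<Container built for linux/amd64>",
    "linux/arm64": "<Container built for linux/arm64>",
}
built_connector_platform_variants = list(per_platform_output.values())
assert len(built_connector_platform_variants) == 2
```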
@@ -6,13 +6,15 @@
import json
import webbrowser
from dataclasses import dataclass
from typing import TYPE_CHECKING
from pathlib import Path
from types import MappingProxyType
from typing import TYPE_CHECKING, Dict

from anyio import Path
from connector_ops.utils import console # type: ignore
from jinja2 import Environment, PackageLoader, select_autoescape
from pipelines.consts import GCS_PUBLIC_DOMAIN
from pipelines.helpers.utils import format_duration
from pipelines.models.artifacts import Artifact
from pipelines.models.reports import Report
from pipelines.models.steps import StepStatus
from rich.console import Group
@@ -42,13 +44,19 @@ def report_output_prefix(self) -> str:
def html_report_file_name(self) -> str:
return self.filename + ".html"

def file_remote_storage_key(self, file_name: str) -> str:
return f"{self.report_output_prefix}/{file_name}"

@property
def html_report_remote_storage_key(self) -> str:
return f"{self.report_output_prefix}/{self.html_report_file_name}"
return self.file_remote_storage_key(self.html_report_file_name)

def file_url(self, file_name: str) -> str:
return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.file_remote_storage_key(file_name)}"

@property
def html_report_url(self) -> str:
return f"{GCS_PUBLIC_DOMAIN}/{self.pipeline_context.ci_report_bucket}/{self.html_report_remote_storage_key}"
return self.file_url(self.html_report_file_name)

def to_json(self) -> str:
"""Create a JSON representation of the connector test report.
@@ -81,7 +89,7 @@ def to_json(self) -> str:
}
)

async def to_html(self) -> str:
def to_html(self) -> str:
env = Environment(
loader=PackageLoader("pipelines.airbyte_ci.connectors.test.steps"),
autoescape=select_autoescape(),
@@ -91,7 +99,18 @@ async def to_html(self) -> str:
template = env.get_template("test_report.html.j2")
template.globals["StepStatus"] = StepStatus
template.globals["format_duration"] = format_duration
local_icon_path = await Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve()
local_icon_path = Path(f"{self.pipeline_context.connector.code_directory}/icon.svg").resolve()
step_result_to_artifact_links: Dict[str, List[Dict]] = {}
for step_result in self.steps_results:
for artifact in step_result.artifacts:
if artifact.gcs_url:
url = artifact.gcs_url
elif artifact.local_path:
url = artifact.local_path.resolve().as_uri()
else:
continue
step_result_to_artifact_links.setdefault(step_result.step.title, []).append({"name": artifact.name, "url": url})

template_context = {
"connector_name": self.pipeline_context.connector.technical_name,
"step_results": self.steps_results,
@@ -104,6 +123,8 @@ async def to_html(self) -> str:
"git_revision": self.pipeline_context.git_revision,
"commit_url": None,
"icon_url": local_icon_path.as_uri(),
"report": self,
"step_result_to_artifact_links": MappingProxyType(step_result_to_artifact_links),
}

if self.pipeline_context.is_ci:
@@ -116,18 +137,32 @@
] = f"https://raw.githubusercontent.com/airbytehq/airbyte/{self.pipeline_context.git_revision}/{self.pipeline_context.connector.code_directory}/icon.svg"
return template.render(template_context)

async def save_html_report(self) -> None:
"""Save the report as HTML, upload it to GCS if the pipeline is running in CI"""

html_report_path = self.report_dir_path / self.html_report_file_name
report_dir = self.pipeline_context.dagger_client.host().directory(str(self.report_dir_path))
local_html_report_file = report_dir.with_new_file(self.html_report_file_name, self.to_html()).file(self.html_report_file_name)
html_report_artifact = Artifact(name="HTML Report", content_type="text/html", content=local_html_report_file)
await html_report_artifact.save_to_local_path(html_report_path)
absolute_path = html_report_path.absolute()
self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
if self.remote_storage_enabled and self.pipeline_context.ci_gcs_credentials_secret and self.pipeline_context.ci_report_bucket:
gcs_url = await html_report_artifact.upload_to_gcs(
dagger_client=self.pipeline_context.dagger_client,
bucket=self.pipeline_context.ci_report_bucket,
key=self.html_report_remote_storage_key,
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
)
self.pipeline_context.logger.info(f"HTML report uploaded to {gcs_url}")

elif self.pipeline_context.enable_report_auto_open:
self.pipeline_context.logger.info("Opening HTML report in browser.")
webbrowser.open(absolute_path.as_uri())

async def save(self) -> None:
local_html_path = await self.save_local(self.html_report_file_name, await self.to_html())
absolute_path = await local_html_path.resolve()
if self.pipeline_context.enable_report_auto_open:
self.pipeline_context.logger.info(f"HTML report saved locally: {absolute_path}")
if self.pipeline_context.enable_report_auto_open:
self.pipeline_context.logger.info("Opening HTML report in browser.")
webbrowser.open(absolute_path.as_uri())
if self.remote_storage_enabled:
await self.save_remote(local_html_path, self.html_report_remote_storage_key, "text/html")
self.pipeline_context.logger.info(f"HTML report uploaded to {self.html_report_url}")
await super().save()
await self.save_html_report()

def print(self) -> None:
"""Print the test report to the console in a nice way."""
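The new `step_result_to_artifact_links` mapping is what lets the HTML report link each step to its uploaded or local artifacts (e.g. full logs and junit results for java connectors). A rough sketch of the structure it builds, assuming each step result carries `Artifact` objects with optional `gcs_url` and `local_path` attributes as used in the loop above; the classes here are simplified stand-ins, not the real `pipelines.models` types:

```python
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, List, Optional


@dataclass
class Artifact:
    # Simplified stand-in for pipelines.models.artifacts.Artifact.
    name: str
    content_type: str
    gcs_url: Optional[str] = None
    local_path: Optional[Path] = None


@dataclass
class FakeStep:
    title: str


@dataclass
class FakeStepResult:
    step: FakeStep
    artifacts: List[Artifact] = field(default_factory=list)


def build_artifact_links(steps_results: List[FakeStepResult]) -> Dict[str, List[Dict]]:
    # Prefer the public GCS URL; fall back to a file:// URI for local runs,
    # and skip artifacts that were never persisted anywhere.
    links: Dict[str, List[Dict]] = {}
    for step_result in steps_results:
        for artifact in step_result.artifacts:
            if artifact.gcs_url:
                url = artifact.gcs_url
            elif artifact.local_path:
                url = artifact.local_path.resolve().as_uri()
            else:
                continue
            links.setdefault(step_result.step.title, []).append({"name": artifact.name, "url": url})
    return links


links = build_artifact_links(
    [
        FakeStepResult(
            step=FakeStep(title="Java Connector Unit Tests"),
            artifacts=[Artifact(name="junit-xml", content_type="application/xml", local_path=Path("build/test-results/test.xml"))],
        )
    ]
)
# -> {"Java Connector Unit Tests": [{"name": "junit-xml", "url": "file:///.../build/test-results/test.xml"}]}
```

The mapping is then passed to the Jinja template (wrapped in `MappingProxyType` so the template cannot mutate it), which renders one link list per step.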