Skip to content

Commit

Permalink
Source Github: add workflow_jobs stream (airbytehq#16534)
Browse files Browse the repository at this point in the history
* feat(workflow-jobs)!: new class 'WorkflowJobs' to get job information from GitHub;

* feat(workflow-jobs)!: unit test to new class 'WorkflowJobs';

* feat(test_stream/test_stream_workflow_jobs_read_incremental)!: support for incremental reads in the unit test for class 'WorkflowJobs';

* feat(test_workflow): unit test incremental and full refresh to class 'WorkflowJobs';

* docs: updated github docs

* docs: updated github docs

* docs: updated github.md

* docs: updated github.md

* fix: add workflow_jobs in abnormal tests, use 'start date' in source and unit test;

* fix: add workflow_jobs in 'abnormal_state.json' and 'sample_state.json' for integration tests;

Co-authored-by: Paulo Roberto <paulo.rb.beserra@gmail.com>
Co-authored-by: Paulo Roberto <71179739+py-paulo@users.noreply.github.com>
  • Loading branch information
3 people authored and robbinhan committed Sep 29, 2022
1 parent 9be1cc5 commit 6bbef4d
Show file tree
Hide file tree
Showing 12 changed files with 383 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@
- name: GitHub
sourceDefinitionId: ef69ef6e-aa7f-4af1-a01d-ef775033524e
dockerRepository: airbyte/source-github
dockerImageTag: 0.2.46
dockerImageTag: 0.3.0
documentationUrl: https://docs.airbyte.io/integrations/sources/github
icon: github.svg
sourceType: api
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3116,7 +3116,7 @@
supportsNormalization: false
supportsDBT: false
supported_destination_sync_modes: []
- dockerImage: "airbyte/source-github:0.2.46"
- dockerImage: "airbyte/source-github:0.3.0"
spec:
documentationUrl: "https://docs.airbyte.com/integrations/sources/github"
connectionSpecification:
Expand Down
2 changes: 1 addition & 1 deletion airbyte-integrations/connectors/source-github/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,5 @@ RUN pip install .
ENV AIRBYTE_ENTRYPOINT "python /airbyte/integration_code/main.py"
ENTRYPOINT ["python", "/airbyte/integration_code/main.py"]

LABEL io.airbyte.version=0.2.46
LABEL io.airbyte.version=0.3.0
LABEL io.airbyte.name=airbyte/source-github
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ tests:
stargazers: ["airbytehq/integration-test", "starred_at"]
workflow_runs: ["airbytehq/integration-test", "updated_at"]
workflows: ["airbytehq/integration-test", "updated_at"]
workflow_jobs: ["airbytehq/integration-test", "completed_at"]
full_refresh:
- config_path: "secrets/config.json"
configured_catalog_path: "integration_tests/configured_catalog.json"
Original file line number Diff line number Diff line change
Expand Up @@ -136,5 +136,10 @@
"airbytehq/integration-test": {
"updated_at": "2121-12-31T23:59:59Z"
}
},
"workflow_jobs": {
"airbytehq/integration-test": {
"completed_at": "2121-12-31T23:59:59Z"
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -381,6 +381,16 @@
"sync_mode": "incremental",
"destination_sync_mode": "overwrite"
},
{
"stream": {
"name": "workflow_jobs",
"json_schema": {},
"supported_sync_modes": ["full_refresh", "incremental"],
"source_defined_primary_key": [["id"]]
},
"sync_mode": "incremental",
"destination_sync_mode": "overwrite"
},
{
"stream": {
"name": "team_members",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,5 +63,15 @@
"airbytehq/integration-test": {
"updated_at": "2021-08-30T12:01:15Z"
}
},
"workflow_runs": {
"airbytehq/integration-test": {
"completed_at": "2021-08-30T12:01:15Z"
}
},
"workflow_jobs": {
"airbytehq/integration-test": {
"completed_at": "2021-08-30T12:01:15Z"
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
{
"$schema": "http://json-schema.org/draft-04/schema#",
"type": "object",
"properties": {
"id": {
"type": "integer"
},
"run_id": {
"type": "integer"
},
"run_url": {
"type": "string"
},
"run_attempt": {
"type": "integer"
},
"node_id": {
"type": "string"
},
"head_sha": {
"type": "string"
},
"url": {
"type": "string"
},
"html_url": {
"type": [
"string",
"null"
]
},
"status": {
"type": "string"
},
"conclusion": {
"type": [
"string",
"null"
]
},
"started_at": {
"format": "date-time",
"type": "string"
},
"completed_at": {
"format": "date-time",
"type": [
"string",
"null"
]
},
"name": {
"type": "string"
},
"steps": {
"type": "array",
"items": {
"type": "object",
"properties": {
"status": {
"type": "string"
},
"conclusion": {
"type": [
"string",
"null"
]
},
"name": {
"type": "string"
},
"number": {
"type": "integer"
},
"started_at": {
"format": "date-time",
"type": [
"string",
"null"
]
},
"completed_at": {
"format": "date-time",
"type": [
"string",
"null"
]
}
}
}
},
"check_run_url": {
"type": "string"
},
"labels": {
"type": "array",
"items": {
"type": "string"
}
},
"runner_id": {
"type": [
"integer",
"null"
]
},
"runner_name": {
"type": [
"string",
"null"
]
},
"runner_group_id": {
"type": [
"integer",
"null"
]
},
"runner_group_name": {
"type": [
"string",
"null"
]
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
TeamMemberships,
Teams,
Users,
WorkflowJobs,
WorkflowRuns,
Workflows,
)
Expand Down Expand Up @@ -188,6 +189,7 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
project_columns_stream = ProjectColumns(projects_stream, **repository_args_with_start_date)
teams_stream = Teams(**organization_args)
team_members_stream = TeamMembers(parent=teams_stream, **repository_args)
workflow_runs_stream = WorkflowRuns(**repository_args_with_start_date)

return [
Assignees(**repository_args),
Expand Down Expand Up @@ -224,5 +226,6 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]:
Users(**organization_args),
Workflows(**repository_args_with_start_date),
WorkflowRuns(**repository_args_with_start_date),
WorkflowJobs(parent=workflow_runs_stream, **repository_args_with_start_date),
TeamMemberships(parent=team_members_stream, **repository_args),
]
Original file line number Diff line number Diff line change
Expand Up @@ -1337,6 +1337,51 @@ def read_records(
break


class WorkflowJobs(SemiIncrementalMixin, GithubStream):
    """
    Incremental stream of jobs for each workflow run of a repository.

    Child of ``WorkflowRuns``: one API request is issued per parent run.
    API docs: https://docs.github.com/en/rest/actions/workflow-jobs#list-jobs-for-a-workflow-run
    """

    # Jobs are filtered incrementally by their completion timestamp
    # (SemiIncrementalMixin does client-side state filtering on this field).
    cursor_field = "completed_at"

    def __init__(self, parent: WorkflowRuns, **kwargs):
        """
        :param parent: WorkflowRuns stream whose records provide the run ids to fetch jobs for.
        """
        super().__init__(**kwargs)
        self.parent = parent

    def path(self, stream_slice: Mapping[str, Any] = None, **kwargs) -> str:
        """Endpoint listing the jobs of a single workflow run."""
        return f"repos/{stream_slice['repository']}/actions/runs/{stream_slice['run_id']}/jobs"

    def stream_slices(
        self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
    ) -> Iterable[Optional[Mapping[str, Any]]]:
        """
        Yield one slice per parent workflow run: ``{"repository": ..., "run_id": ...}``.

        The parent is always read in full_refresh mode so no runs are skipped;
        incremental filtering happens on the job records themselves.
        """
        parent_stream_slices = self.parent.stream_slices(
            sync_mode=SyncMode.full_refresh, cursor_field=cursor_field, stream_state=stream_state
        )
        for stream_slice in parent_stream_slices:
            parent_records = self.parent.read_records(
                sync_mode=SyncMode.full_refresh, cursor_field=cursor_field, stream_slice=stream_slice, stream_state=stream_state
            )
            for record in parent_records:
                yield {"repository": record["repository"]["full_name"], "run_id": record["id"]}

    def parse_response(
        self,
        response: requests.Response,
        stream_state: Mapping[str, Any],
        stream_slice: Mapping[str, Any] = None,
        next_page_token: Mapping[str, Any] = None,
    ) -> Iterable[Mapping]:
        """Unwrap the ``jobs`` array GitHub wraps the records in."""
        # Default to [] so a payload without "jobs" yields nothing instead of
        # raising TypeError when iterating None.
        for record in response.json().get("jobs", []):
            yield self.transform(record=record, stream_slice=stream_slice)

    def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any]) -> MutableMapping[str, Any]:
        """Enrich each job with its parent run id and repository full name."""
        record = super().transform(record=record, stream_slice=stream_slice)
        record["run_id"] = stream_slice["run_id"]
        record["repository"] = stream_slice["repository"]
        return record


class TeamMembers(GithubStream):
"""
API docs: https://docs.github.com/en/rest/reference/teams#list-team-members
Expand Down
Loading

0 comments on commit 6bbef4d

Please sign in to comment.