diff --git a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml
index b5d88cbe57ce..75e1eaf4c222 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml
+++ b/airbyte-integrations/connectors/source-zendesk-support/metadata.yaml
@@ -11,7 +11,7 @@ data:
   connectorSubtype: api
   connectorType: source
   definitionId: 79c1aa37-dae3-42ae-b333-d1c105477715
-  dockerImageTag: 2.6.3
+  dockerImageTag: 2.6.4
   dockerRepository: airbyte/source-zendesk-support
   documentationUrl: https://docs.airbyte.com/integrations/sources/zendesk-support
   githubIssueLabel: source-zendesk-support
diff --git a/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml b/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml
index f7a9e03863b8..25298ac51c59 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml
+++ b/airbyte-integrations/connectors/source-zendesk-support/pyproject.toml
@@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",]
 build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
-version = "2.6.3"
+version = "2.6.4"
 name = "source-zendesk-support"
 description = "Source implementation for Zendesk Support."
 authors = [ "Airbyte ",]
diff --git a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
index fe095fea1dea..ec66f2ebaa65 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
+++ b/airbyte-integrations/connectors/source-zendesk-support/source_zendesk_support/streams.py
@@ -380,9 +380,9 @@ class SourceZendeskSupportTicketEventsExportStream(SourceZendeskIncrementalExpor
     https://developer.zendesk.com/api-reference/ticketing/ticket-management/incremental_exports/#incremental-ticket-event-export
 
     @ param response_list_name: the main nested entity to look at inside of response, default = "ticket_events"
-    @ param response_target_entity: nested property inside of `response_list_name`, default = "child_events"
+    @ param response_target_entity: nested property inside `response_list_name`, default = "child_events"
     @ param list_entities_from_event : the list of nested child_events entities to include from parent record
-    @ param event_type : specific event_type to check ["Audit", "Change", "Comment", etc]
+    @ param event_type : specific event_type to check ["Audit", "Change", "Comment", etc.]
     @ param sideload_param : parameter variable to include various information to response
     """
 
@@ -507,15 +507,17 @@ def stream_slices(
     ) -> Iterable[Optional[Mapping[str, Any]]]:
         parent_stream_state = None
         if stream_state:
-            cursor_value = pendulum.parse(stream_state.get(self.cursor_field)).int_timestamp
-            parent_stream_state = {self.parent.cursor_field: cursor_value}
+            cursor_value = stream_state.get(self.cursor_field)
+            parent_stream_state = {self.parent.cursor_field: pendulum.parse(cursor_value).int_timestamp}
+        else:
+            cursor_value = self._start_date
 
         parent_records = self.parent.read_records(
             sync_mode=SyncMode.incremental, cursor_field=cursor_field, stream_slice=None, stream_state=parent_stream_state
         )
 
         for record in parent_records:
-            yield {"ticket_id": record["id"]}
+            yield {"ticket_id": record["id"], self.cursor_field: cursor_value}
 
     def should_retry(self, response: requests.Response) -> bool:
         if response.status_code == 404:
@@ -527,7 +529,7 @@ def should_retry(self, response: requests.Response) -> bool:
 
 class TicketComments(SourceZendeskSupportTicketEventsExportStream):
     """
-    Fetch the TicketComments incrementaly from TicketEvents Export stream
+    Fetch the TicketComments incrementally from TicketEvents Export stream
     """
 
     list_entities_from_event = ["via_reference_id", "ticket_id", "timestamp"]
@@ -603,7 +605,13 @@ def path(
     ) -> str:
         return f"tickets/{stream_slice['ticket_id']}/metrics"
 
-    def parse_response(self, response: requests.Response, stream_state: Mapping[str, Any], **kwargs) -> Iterable[Mapping]:
+    def parse_response(
+        self,
+        response: requests.Response,
+        stream_state: Mapping[str, Any],
+        stream_slice: Optional[Mapping[str, Any]] = None,
+        **kwargs,
+    ) -> Iterable[Mapping]:
         """try to select relevant data only"""
 
         try:
@@ -613,13 +621,10 @@ def parse_response(self, response: requests.Response, stream_state: Mapping[str,
 
         # no data in case of http errors
         if data:
-            if not self.cursor_field:
+            cursor_date = (stream_slice or {}).get(self.cursor_field)
+            updated = data[self.cursor_field]
+            if not cursor_date or updated >= cursor_date:
                 yield data
-            else:
-                cursor_date = (stream_state or {}).get(self.cursor_field)
-                updated = data[self.cursor_field]
-                if not cursor_date or updated >= cursor_date:
-                    yield data
 
 
 class TicketSkips(CursorPaginationZendeskSupportStream):
diff --git a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
index 3ae800dc70c3..7e954e498230 100644
--- a/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
+++ b/airbyte-integrations/connectors/source-zendesk-support/unit_tests/unit_test.py
@@ -1047,8 +1047,19 @@ class TestTicketSubstream:
     @pytest.mark.parametrize(
         "stream_state, response, expected_slices",
         [
-            ({}, {"tickets": [{"id": "13"}, {"id": "80"}]}, [{"ticket_id": "13"}, {"ticket_id": "80"}]),
-            ({"updated_at": "2024-04-17T19:34:06Z"}, {"tickets": [{"id": "80"}]}, [{"ticket_id": "80"}]),
+            (
+                {},
+                {"tickets": [{"id": "13"}, {"id": "80"}]},
+                [
+                    {"ticket_id": "13", "updated_at": STREAM_ARGS["start_date"]},
+                    {"ticket_id": "80", "updated_at": STREAM_ARGS["start_date"]},
+                ],
+            ),
+            (
+                {"updated_at": "2024-04-17T19:34:06Z"},
+                {"tickets": [{"id": "80"}]},
+                [{"ticket_id": "80", "updated_at": "2024-04-17T19:34:06Z"}],
+            ),
             ({"updated_at": "2224-04-17T19:34:06Z"}, {"tickets": []}, []),
         ],
         ids=[
@@ -1063,30 +1074,32 @@ def test_stream_slices(self, requests_mock, stream_state, response, expected_sli
         assert list(stream.stream_slices(sync_mode=SyncMode.full_refresh, stream_state=stream_state)) == expected_slices
 
     @pytest.mark.parametrize(
-        "stream_state, response, expected_records",
+        "stream_slice, response, expected_records",
         [
-            ({}, {"updated_at": "2024-04-17T19:34:06Z", "id": "test id"}, [{"id": "test id", "updated_at": "2024-04-17T19:34:06Z"}]),
-            ({}, {"updated_at": "1979-04-17T19:34:06Z", "id": "test id"}, []),
+            ({"updated_at": "2024-05-17T19:34:06Z"}, {"updated_at": "2024-04-17T19:34:06Z", "id": "test id"}, []),
             (
-                {"updated_at": "2024-04-17T19:34:06Z"},
-                {"updated_at": "2024-04-18T19:34:06Z", "id": "test id"},
-                [{"updated_at": "2024-04-18T19:34:06Z", "id": "test id"}],
+                {"updated_at": "2024-03-17T19:34:06Z"},
+                {"updated_at": "2024-04-17T19:34:06Z", "id": "test id"},
+                [{"updated_at": "2024-04-17T19:34:06Z", "id": "test id"}],
+            ),
+            (
+                {},
+                {"updated_at": "1979-04-17T19:34:06Z", "id": "test id"},
+                [{"updated_at": "1979-04-17T19:34:06Z", "id": "test id"}],
             ),
-            ({"updated_at": "2024-04-17T19:34:06Z"}, {"updated_at": "1979-04-18T19:34:06Z", "id": "test id"}, []),
         ],
         ids=[
-            "read_without_state",
-            "read_without_state_cursor_older_then_start_date",
-            "read_with_state",
-            "read_with_state_cursor_older_then_state_value",
+            "read_with_slice_cursor_greater_than_record_cursor",
+            "read_with_slice_cursor_less_than_record_cursor",
+            "read_without_slice_cursor",
        ],
     )
-    def test_ticket_metrics_parse_response(self, stream_state, response, expected_records):
+    def test_ticket_metrics_parse_response(self, stream_slice, response, expected_records):
         stream = get_stream_instance(TicketMetrics, STREAM_ARGS)
         mocked_response = Mock()
-        mocked_response.json.return_value = {"ticket_metric": {"updated_at": "2024-04-17T19:34:06Z", "id": "test id"}}
-        records = list(stream.parse_response(mocked_response, stream_state=stream_state))
-        assert records == [{"id": "test id", "updated_at": "2024-04-17T19:34:06Z"}]
+        mocked_response.json.return_value = {"ticket_metric": response}
+        records = list(stream.parse_response(mocked_response, stream_state={}, stream_slice=stream_slice))
+        assert records == expected_records
 
     def test_read_ticket_metrics_with_error(self, requests_mock):
         stream = get_stream_instance(TicketMetrics, STREAM_ARGS)
diff --git a/docs/integrations/sources/zendesk-support.md b/docs/integrations/sources/zendesk-support.md
index 4c4d7598f91d..abb121d10616 100644
--- a/docs/integrations/sources/zendesk-support.md
+++ b/docs/integrations/sources/zendesk-support.md
@@ -163,7 +163,8 @@ The Zendesk connector ideally should not run into Zendesk API limitations under
 ## Changelog
 
 | Version | Date | Pull Request | Subject |
-| :------ | :--------- | :------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+|:--------|:-----------|:---------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| 2.6.4 | 2024-05-20 | [38310](https://github.com/airbytehq/airbyte/pull/38310) | Fix record filter for `Ticket Metrics` stream |
 | 2.6.3 | 2024-05-02 | [36669](https://github.com/airbytehq/airbyte/pull/36669) | Schema descriptions |
 | 2.6.2 | 2024-02-05 | [37761](https://github.com/airbytehq/airbyte/pull/37761) | Add stop condition for `Ticket Audits` when received old records; Ignore 403 and 404 status codes. |
 | 2.6.1 | 2024-04-30 | [37723](https://github.com/airbytehq/airbyte/pull/37723) | Add %Y-%m-%dT%H:%M:%S%z to cursor_datetime_formats |
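A minimal standalone sketch of the slice-level filtering the diff above introduces for the `Ticket Metrics` substream (hypothetical helper names, not the connector's actual classes): each ticket slice now carries the cursor value it was generated from (saved state, or the configured start date when there is no state), and each metric record is kept only if it is at least as new as that per-slice cursor.

```python
# Simplified sketch, assuming ISO-8601 timestamps that compare correctly as strings
# (mirroring the connector's own `updated >= cursor_date` check).
from typing import Any, Iterable, Mapping, Optional

CURSOR_FIELD = "updated_at"


def make_slices(ticket_ids: Iterable[str], stream_state: Mapping[str, Any], start_date: str) -> Iterable[Mapping[str, Any]]:
    # Fall back to the configured start date when no state has been saved yet.
    cursor_value = stream_state.get(CURSOR_FIELD) or start_date
    for ticket_id in ticket_ids:
        yield {"ticket_id": ticket_id, CURSOR_FIELD: cursor_value}


def filter_metric(record: Mapping[str, Any], stream_slice: Optional[Mapping[str, Any]]) -> Iterable[Mapping[str, Any]]:
    # Emit the record only when it is at least as new as the slice cursor.
    cursor_date = (stream_slice or {}).get(CURSOR_FIELD)
    if not cursor_date or record[CURSOR_FIELD] >= cursor_date:
        yield record


# A record updated before the slice cursor is dropped; a newer one is kept.
slice_ = next(iter(make_slices(["13"], {"updated_at": "2024-05-17T19:34:06Z"}, "2020-01-01T00:00:00Z")))
print(list(filter_metric({"id": "13", "updated_at": "2024-04-17T19:34:06Z"}, slice_)))  # []
print(list(filter_metric({"id": "13", "updated_at": "2024-06-01T00:00:00Z"}, slice_)))  # [record]
```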