diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index e8be14c66e137..cbb2a1ee0f04c 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -94,12 +94,14 @@ airflow-core/src/airflow/ui/public/i18n/locales/zh-TW/ @Lee-W @jason810496 @guan
/providers/fab/ @vincbeck
/providers/google/ @shahar1
/providers/hashicorp/ @hussein-awala
+/providers/informatica/ @RNHTTR # + @cetingokhan @sertaykabuk @umutozel
/providers/keycloak/ @vincbeck @bugraoz93
/providers/microsoft/azure/ @dabla
/providers/openlineage/ @mobuchowski
/providers/smtp/ @hussein-awala
/providers/snowflake/ @potiuk
+
# Dev tools
/.github/workflows/ @potiuk @ashb @gopidesupavan @amoghrajesh @jscheffl @bugraoz93 @kaxil @jason810496
/dev/ @potiuk @ashb @gopidesupavan @amoghrajesh @jscheffl @bugraoz93 @kaxil @jason810496 @jedcunningham @ephraimbuddy
diff --git a/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml b/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml
index 1e4e1d4c31b76..f7c6642519d3a 100644
--- a/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/3-airflow_providers_bug_report.yml
@@ -79,6 +79,7 @@ body:
- http
- imap
- influxdb
+ - informatica
- jdbc
- jenkins
- keycloak
diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml
index e668a401d480d..f8f6123ff3011 100644
--- a/.github/boring-cyborg.yml
+++ b/.github/boring-cyborg.yml
@@ -174,6 +174,9 @@ labelPRBasedOnFilePath:
provider:influxdb:
- providers/influxdb/**
+ provider:informatica:
+ - providers/informatica/**
+
provider:jdbc:
- providers/jdbc/**
diff --git a/airflow-core/docs/extra-packages-ref.rst b/airflow-core/docs/extra-packages-ref.rst
index 6b059c0ca27de..89424bf8aa2c6 100644
--- a/airflow-core/docs/extra-packages-ref.rst
+++ b/airflow-core/docs/extra-packages-ref.rst
@@ -423,6 +423,8 @@ pre-installed when Airflow is installed.
+---------------------+-----------------------------------------------------+--------------------------------------+--------------+
| ssh | ``pip install 'apache-airflow[ssh]'`` | SSH hooks and operators | |
+---------------------+-----------------------------------------------------+--------------------------------------+--------------+
+| informatica | ``pip install 'apache-airflow[informatica]'`` | Informatica hooks and operators | |
++---------------------+-----------------------------------------------------+--------------------------------------+--------------+
Group extras
------------
diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py
index 4220837ebb1ba..d3d2388ce5328 100644
--- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py
+++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_plugins.py
@@ -34,8 +34,9 @@ class TestGetPlugins:
# Filters
(
{},
- 13,
+ 14,
[
+ "InformaticaProviderPlugin",
"MetadataCollectionPlugin",
"OpenLineageProviderPlugin",
"databricks_workflow",
@@ -52,11 +53,11 @@ class TestGetPlugins:
],
),
(
- {"limit": 3, "offset": 2},
- 13,
+ {"limit": 3, "offset": 3},
+ 14,
["databricks_workflow", "decreasing_priority_weight_strategy_plugin", "edge_executor"],
),
- ({"limit": 1}, 13, ["MetadataCollectionPlugin"]),
+ ({"limit": 1}, 14, ["InformaticaProviderPlugin"]),
],
)
def test_should_respond_200(
@@ -146,17 +147,17 @@ def test_invalid_external_view_destination_should_log_warning_and_continue(self,
# Verify warning was logged
assert any("Skipping invalid plugin due to error" in rec.message for rec in caplog.records)
- response = test_client.get("/plugins", params={"limit": 5, "offset": 9})
+ response = test_client.get("/plugins", params={"limit": 6, "offset": 9})
assert response.status_code == 200
body = response.json()
plugins_page = body["plugins"]
- # Even though limit=5, only 4 valid plugins should come back
- assert len(plugins_page) == 4
+ # Even though limit=6, only 5 valid plugins should come back
+ assert len(plugins_page) == 5
assert "test_plugin_invalid" not in [p["name"] for p in plugins_page]
- assert body["total_entries"] == 13
+ assert body["total_entries"] == 14
@skip_if_force_lowest_dependencies_marker
diff --git a/airflow-core/tests/unit/plugins/test_plugins_manager.py b/airflow-core/tests/unit/plugins/test_plugins_manager.py
index 78d4f4f671cf8..10377fbb4ed18 100644
--- a/airflow-core/tests/unit/plugins/test_plugins_manager.py
+++ b/airflow-core/tests/unit/plugins/test_plugins_manager.py
@@ -396,4 +396,4 @@ def test_does_not_double_import_entrypoint_provider_plugins(self):
# Mock/skip loading from plugin dir
with mock.patch("airflow.plugins_manager._load_plugins_from_plugin_directory", return_value=([], [])):
plugins = plugins_manager._get_plugins()[0]
- assert len(plugins) == 4
+ assert len(plugins) == 5
diff --git a/dev/breeze/doc/images/output_build-docs.svg b/dev/breeze/doc/images/output_build-docs.svg
index 0dc1c1f365ace..7b11b9b353964 100644
--- a/dev/breeze/doc/images/output_build-docs.svg
+++ b/dev/breeze/doc/images/output_build-docs.svg
@@ -222,11 +222,11 @@
| apache.tinkerpop | apprise | arangodb | asana | atlassian.jira | celery | cloudant | cncf.kubernetes | cohere |
common.ai | common.compat | common.io | common.messaging | common.sql | databricks | datadog | dbt.cloud | dingding |
discord | docker | docker-stack | edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github | google | grpc
-| hashicorp | helm-chart | http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau |
-task-sdk | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
+| hashicorp | helm-chart | http | imap | influxdb | informatica | jdbc | jenkins | keycloak | microsoft.azure |
+microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage |
+opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard |
+tableau | task-sdk | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
Build documents.
diff --git a/dev/breeze/doc/images/output_build-docs.txt b/dev/breeze/doc/images/output_build-docs.txt
index 2e99629ebc6bb..68aba2e8b6023 100644
--- a/dev/breeze/doc/images/output_build-docs.txt
+++ b/dev/breeze/doc/images/output_build-docs.txt
@@ -1 +1 @@
-dc1dd631bb1cb07a6209f54e59d99c1a
+9cab0b55ae6a31b4cd43f8edb5d69521
diff --git a/dev/breeze/doc/images/output_release-management_add-back-references.svg b/dev/breeze/doc/images/output_release-management_add-back-references.svg
index 53a8e35319422..c5296b761dc9e 100644
--- a/dev/breeze/doc/images/output_release-management_add-back-references.svg
+++ b/dev/breeze/doc/images/output_release-management_add-back-references.svg
@@ -152,11 +152,11 @@
| apache.tinkerpop | apprise | arangodb | asana | atlassian.jira | celery | cloudant | cncf.kubernetes | cohere |
common.ai | common.compat | common.io | common.messaging | common.sql | databricks | datadog | dbt.cloud | dingding |
discord | docker | docker-stack | edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github | google | grpc
-| hashicorp | helm-chart | http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau |
-task-sdk | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
+| hashicorp | helm-chart | http | imap | influxdb | informatica | jdbc | jenkins | keycloak | microsoft.azure |
+microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage |
+opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard |
+tableau | task-sdk | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
Command to add back references for documentation to make it backward compatible.
diff --git a/dev/breeze/doc/images/output_release-management_add-back-references.txt b/dev/breeze/doc/images/output_release-management_add-back-references.txt
index b3f758e4167c5..389b8c232211a 100644
--- a/dev/breeze/doc/images/output_release-management_add-back-references.txt
+++ b/dev/breeze/doc/images/output_release-management_add-back-references.txt
@@ -1 +1 @@
-6270bec472e9c8bf7986b64ca4580a8e
+244b18c58ed980a094c555a81698da9b
diff --git a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg
index 3196ac0a8b949..2fb4ef4b6ff06 100644
--- a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg
+++ b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.svg
@@ -143,11 +143,11 @@
apache.pinot | apache.spark | apache.tinkerpop | apprise | arangodb | asana | atlassian.jira | celery | cloudant |
cncf.kubernetes | cohere | common.compat | common.io | common.messaging | common.sql | databricks | datadog |
dbt.cloud | dingding | discord | docker | edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github |
-google | grpc | hashicorp | http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau |
-telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
+google | grpc | hashicorp | http | imap | influxdb | informatica | jdbc | jenkins | keycloak | microsoft.azure |
+microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage |
+opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard |
+tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
Generates content for issue to test the release.
diff --git a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.txt b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.txt
index 942c79cf35e31..18de3e60197dd 100644
--- a/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.txt
+++ b/dev/breeze/doc/images/output_release-management_generate-issue-content-providers.txt
@@ -1 +1 @@
-ae6d32aa6ff3adc2ad6192b55e9bf67a
+1cae29e202f794ccc8c3b04c2000e18d
diff --git a/dev/breeze/doc/images/output_release-management_generate-providers-metadata.svg b/dev/breeze/doc/images/output_release-management_generate-providers-metadata.svg
index 5f32fbb0216e0..6e1f93fb8dc8a 100644
--- a/dev/breeze/doc/images/output_release-management_generate-providers-metadata.svg
+++ b/dev/breeze/doc/images/output_release-management_generate-providers-metadata.svg
@@ -172,12 +172,12 @@
│asana | atlassian.jira | celery | cloudant | cncf.kubernetes | cohere | common.compat | │
│common.io | common.messaging | common.sql | databricks | datadog | dbt.cloud | dingding | │
│discord | docker | edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github | google│
-│| grpc | hashicorp | http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | │
-│microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | │
-│openfaas | openlineage | opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | │
-│pinecone | postgres | presto | qdrant | redis | salesforce | samba | segment | sendgrid | sftp |│
-│singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau | telegram | teradata│
-│| trino | vertica | weaviate | yandex | ydb | zendesk)│
+│| grpc | hashicorp | http | imap | influxdb | informatica | jdbc | jenkins | keycloak | │
+│microsoft.azure | microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | │
+│odbc | openai | openfaas | openlineage | opensearch | opsgenie | oracle | pagerduty | papermill │
+│| pgvector | pinecone | postgres | presto | qdrant | redis | salesforce | samba | segment | │
+│sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau | │
+│telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk)│
│--provider-versionProvider version to generate metadata for. Only used when --provider-id is specified. Limits │
│running metadata generation to only this version of the provider. (TEXT)│
╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
diff --git a/dev/breeze/doc/images/output_release-management_generate-providers-metadata.txt b/dev/breeze/doc/images/output_release-management_generate-providers-metadata.txt
index 73f33d11e6d2a..8424582464120 100644
--- a/dev/breeze/doc/images/output_release-management_generate-providers-metadata.txt
+++ b/dev/breeze/doc/images/output_release-management_generate-providers-metadata.txt
@@ -1 +1 @@
-b633b7092a6c61b4cb33c76cd3e10a40
+a9a87df8382a9faa57e6b79356f4295f
diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-distributions.svg b/dev/breeze/doc/images/output_release-management_prepare-provider-distributions.svg
index f409b74df1ef1..cdc37e129f8fc 100644
--- a/dev/breeze/doc/images/output_release-management_prepare-provider-distributions.svg
+++ b/dev/breeze/doc/images/output_release-management_prepare-provider-distributions.svg
@@ -185,11 +185,11 @@
apache.pinot | apache.spark | apache.tinkerpop | apprise | arangodb | asana | atlassian.jira | celery | cloudant |
cncf.kubernetes | cohere | common.compat | common.io | common.messaging | common.sql | databricks | datadog |
dbt.cloud | dingding | discord | docker | edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github |
-google | grpc | hashicorp | http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau |
-telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
+google | grpc | hashicorp | http | imap | influxdb | informatica | jdbc | jenkins | keycloak | microsoft.azure |
+microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage |
+opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard |
+tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
Prepare sdist/whl distributions of Airflow Providers.
diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-distributions.txt b/dev/breeze/doc/images/output_release-management_prepare-provider-distributions.txt
index 25fda1301e47a..589321a0b3015 100644
--- a/dev/breeze/doc/images/output_release-management_prepare-provider-distributions.txt
+++ b/dev/breeze/doc/images/output_release-management_prepare-provider-distributions.txt
@@ -1 +1 @@
-cbd2fc91d30c113d0dd513d81fef9385
+f3fe0a23a40c7d160a85ffdce6d10ad5
diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg
index 5026752dd60b7..61390a14985cf 100644
--- a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg
+++ b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.svg
@@ -212,11 +212,11 @@
apache.pinot | apache.spark | apache.tinkerpop | apprise | arangodb | asana | atlassian.jira | celery | cloudant |
cncf.kubernetes | cohere | common.compat | common.io | common.messaging | common.sql | databricks | datadog |
dbt.cloud | dingding | discord | docker | edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github |
-google | grpc | hashicorp | http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau |
-telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
+google | grpc | hashicorp | http | imap | influxdb | informatica | jdbc | jenkins | keycloak | microsoft.azure |
+microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage |
+opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard |
+tableau | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
Prepare CHANGELOG, README and COMMITS information for providers.
diff --git a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.txt b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.txt
index 6f18da81adec2..00e9f8cbd1cf6 100644
--- a/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.txt
+++ b/dev/breeze/doc/images/output_release-management_prepare-provider-documentation.txt
@@ -1 +1 @@
-152c1d0a0af752a66ae663c0711d81b1
+85faf6f44c9358c04ed1bf5ca2ab9e23
diff --git a/dev/breeze/doc/images/output_release-management_publish-docs.svg b/dev/breeze/doc/images/output_release-management_publish-docs.svg
index 2d80f8d3973b9..a9855fa19899f 100644
--- a/dev/breeze/doc/images/output_release-management_publish-docs.svg
+++ b/dev/breeze/doc/images/output_release-management_publish-docs.svg
@@ -191,11 +191,11 @@
| apache.tinkerpop | apprise | arangodb | asana | atlassian.jira | celery | cloudant | cncf.kubernetes | cohere |
common.ai | common.compat | common.io | common.messaging | common.sql | databricks | datadog | dbt.cloud | dingding |
discord | docker | docker-stack | edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github | google | grpc
-| hashicorp | helm-chart | http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau |
-task-sdk | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
+| hashicorp | helm-chart | http | imap | influxdb | informatica | jdbc | jenkins | keycloak | microsoft.azure |
+microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage |
+opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard |
+tableau | task-sdk | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
Command to publish generated documentation to airflow-site
diff --git a/dev/breeze/doc/images/output_release-management_publish-docs.txt b/dev/breeze/doc/images/output_release-management_publish-docs.txt
index 5082b376775ce..6f1d035aa6940 100644
--- a/dev/breeze/doc/images/output_release-management_publish-docs.txt
+++ b/dev/breeze/doc/images/output_release-management_publish-docs.txt
@@ -1 +1 @@
-33f8461fa4b8ad6e8ba667825855ffae
+5a1222737e7be788adc08aaab7e6221a
diff --git a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
index c053575e00de8..b14347578e122 100644
--- a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
+++ b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.svg
@@ -186,10 +186,10 @@
│celery | cloudant | cncf.kubernetes | cohere | common.ai | common.compat | common.io | │
│common.messaging | common.sql | databricks | datadog | dbt.cloud | dingding | discord | docker |│
│edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github | google | grpc | hashicorp│
-│| http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | microsoft.mssql | │
-│microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | │
-│openlineage | opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | │
-│postgres | presto | qdrant | redis | salesforce | samba | segment | sendgrid | sftp | │
+│| http | imap | influxdb | informatica | jdbc | jenkins | keycloak | microsoft.azure | │
+│microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | │
+│openfaas | openlineage | opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | │
+│pinecone | postgres | presto | qdrant | redis | salesforce | samba | segment | sendgrid | sftp |│
│singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau | telegram | teradata│
│| trino | vertica | weaviate | yandex | ydb | zendesk)│
│--provider-versionProvider version to generate the requirements for i.e `2.1.0`. `latest` is also a supported │
diff --git a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.txt b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.txt
index 18c2ab38d1086..90c625821d070 100644
--- a/dev/breeze/doc/images/output_sbom_generate-providers-requirements.txt
+++ b/dev/breeze/doc/images/output_sbom_generate-providers-requirements.txt
@@ -1 +1 @@
-c76b342d83640a68f62252dedaac5ac1
+04bceba91651d8dc4f2281f4f168d86c
diff --git a/dev/breeze/doc/images/output_workflow-run_publish-docs.svg b/dev/breeze/doc/images/output_workflow-run_publish-docs.svg
index d76128c49b9ea..c6550a857b0d6 100644
--- a/dev/breeze/doc/images/output_workflow-run_publish-docs.svg
+++ b/dev/breeze/doc/images/output_workflow-run_publish-docs.svg
@@ -188,11 +188,11 @@
| apache.tinkerpop | apprise | arangodb | asana | atlassian.jira | celery | cloudant | cncf.kubernetes | cohere |
common.ai | common.compat | common.io | common.messaging | common.sql | databricks | datadog | dbt.cloud | dingding |
discord | docker | docker-stack | edge3 | elasticsearch | exasol | fab | facebook | ftp | git | github | google | grpc
-| hashicorp | helm-chart | http | imap | influxdb | jdbc | jenkins | keycloak | microsoft.azure | microsoft.mssql |
-microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage | opensearch |
-opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis | salesforce |
-samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard | tableau |
-task-sdk | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
+| hashicorp | helm-chart | http | imap | influxdb | informatica | jdbc | jenkins | keycloak | microsoft.azure |
+microsoft.mssql | microsoft.psrp | microsoft.winrm | mongo | mysql | neo4j | odbc | openai | openfaas | openlineage |
+opensearch | opsgenie | oracle | pagerduty | papermill | pgvector | pinecone | postgres | presto | qdrant | redis |
+salesforce | samba | segment | sendgrid | sftp | singularity | slack | smtp | snowflake | sqlite | ssh | standard |
+tableau | task-sdk | telegram | teradata | trino | vertica | weaviate | yandex | ydb | zendesk]...
Trigger publish docs to S3 workflow
diff --git a/dev/breeze/doc/images/output_workflow-run_publish-docs.txt b/dev/breeze/doc/images/output_workflow-run_publish-docs.txt
index 2fb09c95943aa..5bebcab9ed42b 100644
--- a/dev/breeze/doc/images/output_workflow-run_publish-docs.txt
+++ b/dev/breeze/doc/images/output_workflow-run_publish-docs.txt
@@ -1 +1 @@
-dbdb5067b109467509cd622639ba40aa
+1a5d3d1a76f284aae5195840496a5b2b
diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py
index ca1433c878b10..d7c49348e5c76 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -743,7 +743,7 @@ def get_airflow_extras():
{
"python-version": "3.10",
"airflow-version": "2.11.0",
- "remove-providers": "common.messaging edge3 fab git keycloak common.ai",
+ "remove-providers": "common.messaging edge3 fab git keycloak informatica common.ai",
"run-unit-tests": "true",
},
{
diff --git a/dev/breeze/tests/test_selective_checks.py b/dev/breeze/tests/test_selective_checks.py
index dd3a831aba28b..fa75cd43937cb 100644
--- a/dev/breeze/tests/test_selective_checks.py
+++ b/dev/breeze/tests/test_selective_checks.py
@@ -754,7 +754,7 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str):
"providers/http/tests/file.py",
),
{
- "selected-providers-list-as-string": "amazon apache.livy atlassian.jira common.compat dbt.cloud dingding discord google http pagerduty",
+ "selected-providers-list-as-string": "amazon apache.livy atlassian.jira common.compat dbt.cloud dingding discord google http informatica pagerduty",
"all-python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']",
"all-python-versions-list-as-string": DEFAULT_PYTHON_MAJOR_MINOR_VERSION,
"python-versions": f"['{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}']",
@@ -775,31 +775,31 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str):
[
{
"description": "amazon...google",
- "test_types": "Providers[amazon] Providers[apache.livy,atlassian.jira,common.compat,dbt.cloud,dingding,discord,http,pagerduty] Providers[google]",
+ "test_types": "Providers[amazon] Providers[apache.livy,atlassian.jira,common.compat,dbt.cloud,dingding,discord,http,informatica,pagerduty] Providers[google]",
}
]
),
"individual-providers-test-types-list-as-strings-in-json": json.dumps(
[
{
- "description": "amazon...apache.livy",
- "test_types": "Providers[amazon] Providers[apache.livy]",
+ "description": "amazon...atlassian.jir",
+ "test_types": "Providers[amazon] Providers[apache.livy] Providers[atlassian.jira]",
},
{
- "description": "atlassian.jir...common.compat",
- "test_types": "Providers[atlassian.jira] Providers[common.compat]",
+ "description": "common.compat...dbt.cloud",
+ "test_types": "Providers[common.compat] Providers[dbt.cloud]",
},
{
- "description": "dbt.cloud...dingding",
- "test_types": "Providers[dbt.cloud] Providers[dingding]",
+ "description": "dingding...discord",
+ "test_types": "Providers[dingding] Providers[discord]",
},
{
- "description": "discord...google",
- "test_types": "Providers[discord] Providers[google]",
+ "description": "google...http",
+ "test_types": "Providers[google] Providers[http]",
},
{
- "description": "http...pagerduty",
- "test_types": "Providers[http] Providers[pagerduty]",
+ "description": "informatica...pagerduty",
+ "test_types": "Providers[informatica] Providers[pagerduty]",
},
]
),
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index cc9767243a79a..c749e02473c42 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -599,6 +599,7 @@ ecb
ECR
ecr
ecs
+EDC
EdDSA
edgeexecutor
EdgeModifier
@@ -909,6 +910,8 @@ inferral
infile
Influxdb
influxdb
+Informatica
+informatica
infoType
infoTypes
ing
@@ -1683,6 +1686,8 @@ sns
somecollection
somedatabase
sortable
+Source
+source
sourceArchiveUrl
sourceRepository
sourceUploadUrl
diff --git a/providers/informatica/LICENSE b/providers/informatica/LICENSE
new file mode 100644
index 0000000000000..11069edd79019
--- /dev/null
+++ b/providers/informatica/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/providers/informatica/NOTICE b/providers/informatica/NOTICE
new file mode 100644
index 0000000000000..a51bd9390d030
--- /dev/null
+++ b/providers/informatica/NOTICE
@@ -0,0 +1,5 @@
+Apache Airflow
+Copyright 2016-2026 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
diff --git a/providers/informatica/README.rst b/providers/informatica/README.rst
new file mode 100644
index 0000000000000..be8bba47eeadb
--- /dev/null
+++ b/providers/informatica/README.rst
@@ -0,0 +1,183 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+Apache Airflow Informatica Provider
+===================================
+
+This provider package contains integrations for Informatica Enterprise Data Catalog (EDC) to work with Apache Airflow.
+
+
+Features
+--------
+
+- **Airflow Integration**: Seamless integration with Airflow's lineage system using inlets and outlets.
+
+
+Installation
+------------
+
+.. code-block:: bash
+
+ pip install apache-airflow-providers-informatica
+
+
+
+Connection Setup
+~~~~~~~~~~~~~~~~
+
+Create an Informatica EDC connection in Airflow:
+
+ #. **Connection Type**: ``informatica_edc``
+ #. **Host**: Your EDC server hostname
+ #. **Port**: EDC server port (typically 9087)
+ #. **Schema**: ``https`` or ``http``
+ #. **Login**: EDC username
+ #. **Password**: EDC password
+ #. **Extras**: Add the following JSON:
+
+ .. code-block:: json
+
+ {"security_domain": "your_security_domain"}
+
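+You can also create this connection without the UI, for example through an environment
+variable (a minimal sketch using Airflow's JSON connection format; the host, credentials
+and security domain below are placeholders):
+
+.. code-block:: bash
+
+    export AIRFLOW_CONN_INFORMATICA_EDC_DEFAULT='{
+        "conn_type": "informatica_edc",
+        "host": "edc.example.com",
+        "port": 9087,
+        "schema": "https",
+        "login": "edc_user",
+        "password": "edc_password",
+        "extra": {"security_domain": "your_security_domain"}
+    }'
+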
+Configuration Options
+~~~~~~~~~~~~~~~~~~~~~
+
+Add to your ``airflow.cfg``:
+
+.. code-block:: ini
+
+ [informatica]
+ # Disable sending events without uninstalling the Informatica Provider
+ disabled = False
+ # The connection ID to use when no connection ID is provided
+ default_conn_id = informatica_edc_default
+
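+The same options can be set through environment variables using Airflow's standard
+``AIRFLOW__SECTION__KEY`` convention (values shown are only examples):
+
+.. code-block:: bash
+
+    export AIRFLOW__INFORMATICA__DISABLED=False
+    export AIRFLOW__INFORMATICA__DEFAULT_CONN_ID=informatica_edc_default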
+
+
+Complete DAG Example
+~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: python
+
+ from airflow import DAG
+ from airflow.providers.standard.operators.python import PythonOperator
+ from datetime import datetime
+
+
+ def my_python_task(**kwargs):
+ print("Hello Informatica Lineage!")
+
+
+ with DAG(
+ dag_id="example_informatica_lineage_dag",
+ start_date=datetime(2024, 1, 1),
+ schedule=None,
+ catchup=False,
+ ) as dag:
+ python_task = PythonOperator(
+ task_id="my_python_task",
+ python_callable=my_python_task,
+ inlets=[{"dataset_uri": "edc://object/source_table_abc123"}],
+ outlets=[{"dataset_uri": "edc://object/target_table_xyz789"}],
+ )
+ python_task
+
+
+
+EDC API Endpoints Used
+~~~~~~~~~~~~~~~~~~~~~~
+
+
+- ``/access/2/catalog/data/objects/{object_id}?includeRefObjects={true|false}`` - Retrieve catalog object details
+- ``/access/1/catalog/data/objects`` (PATCH) - Create lineage relationship between objects
+
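+For reference, the object-details endpoint can also be exercised directly, for example
+with ``curl`` (an illustrative sketch only; the host, port, object ID and credentials are
+placeholders, and the exact authentication scheme depends on your EDC deployment):
+
+.. code-block:: bash
+
+    curl -u 'edc_user:edc_password' \
+        'https://edc.example.com:9087/access/2/catalog/data/objects/<object_id>?includeRefObjects=false'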
+
+Testing
+~~~~~~~
+
+.. code-block:: bash
+
+ # Run unit tests
+ python -m pytest providers/informatica/tests/
+
+ # Run specific test
+ python -m pytest providers/informatica/tests/hooks/test_edc.py
+
+ # Run with coverage
+ python -m pytest providers/informatica/tests/ --cov=airflow.providers.informatica
+
+Code Quality
+~~~~~~~~~~~~
+
+.. code-block:: bash
+
+ # Type checking
+ mypy providers/informatica/src/airflow/providers/informatica/
+
+ # Code formatting
+ black providers/informatica/src/airflow/providers/informatica/
+
+ # Linting
+ flake8 providers/informatica/src/airflow/providers/informatica/
+
+
+
+Common Issues
+~~~~~~~~~~~~~
+
+1. **Authentication Failures**
+ - Verify EDC credentials and server connectivity
+ - Check firewall and network access to EDC server
+ - Ensure EDC service is running and accessible
+
+2. **No Lineage Found**
+ - Verify table IDs exist in EDC catalog
+
+
+Logging
+~~~~~~~
+
+Enable debug logging to troubleshoot issues:
+
+.. code-block:: python
+
+ import logging
+
+ logging.getLogger("airflow.providers.informatica").setLevel(logging.DEBUG)
+
+
+Compatibility
+~~~~~~~~~~~~~
+
+- **Informatica EDC Version**: This provider is compatible with Informatica EDC version 10.5 and above.
+- **Airflow Compatibility**: This provider is compatible with Apache Airflow 3.0 and above.
+
+
+
+License
+-------
+
+Licensed under the Apache License, Version 2.0. See the ``LICENSE`` file for details.
+
+Support
+-------
+
+**Note:** This provider is not officially maintained or endorsed by Informatica. It is a community-developed integration for Apache Airflow.
+
+- `Apache Airflow Documentation <https://airflow.apache.org/docs/>`_
+- `GitHub Issues <https://github.com/apache/airflow/issues>`_
diff --git a/providers/informatica/docs/changelog.rst b/providers/informatica/docs/changelog.rst
new file mode 100644
index 0000000000000..c3e86849dbc53
--- /dev/null
+++ b/providers/informatica/docs/changelog.rst
@@ -0,0 +1,34 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+.. NOTE TO CONTRIBUTORS:
+ Please, only add notes to the Changelog just below the "Changelog" header when there are some breaking changes
+ and you want to add an explanation to the users on how they are supposed to deal with them.
+ The changelog is updated and maintained semi-automatically by release manager.
+
+``apache-airflow-providers-informatica``
+
+
+
+Changelog
+=========
+
+0.1.0
+-----
+
+Initial version of the provider.
diff --git a/providers/informatica/docs/commits.rst b/providers/informatica/docs/commits.rst
new file mode 100644
index 0000000000000..8bf87cca6b1f2
--- /dev/null
+++ b/providers/informatica/docs/commits.rst
@@ -0,0 +1,24 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+.. THIS FILE IS UPDATED AUTOMATICALLY AT RELEASE TIME
+
+Commits
+=======
+
+This file contains the list of commits for the Informatica provider.
diff --git a/providers/informatica/docs/conf.py b/providers/informatica/docs/conf.py
new file mode 100644
index 0000000000000..5b6cd3237317a
--- /dev/null
+++ b/providers/informatica/docs/conf.py
@@ -0,0 +1,27 @@
+# Disable Flake8 because of all the sphinx imports
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Configuration of Providers docs building."""
+
+from __future__ import annotations
+
+import os
+
+os.environ["AIRFLOW_PACKAGE_NAME"] = "apache-airflow-providers-informatica"
+
+from docs.provider_conf import * # noqa: F403
diff --git a/providers/informatica/docs/configurations-ref.rst b/providers/informatica/docs/configurations-ref.rst
new file mode 100644
index 0000000000000..cdde0e1f2c1cd
--- /dev/null
+++ b/providers/informatica/docs/configurations-ref.rst
@@ -0,0 +1,22 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+
+.. _configuration:informatica:
+
+.. include:: /../../../devel-common/src/sphinx_exts/includes/providers-configurations-ref.rst
+.. include:: /../../../devel-common/src/sphinx_exts/includes/sections-and-options.rst
diff --git a/providers/informatica/docs/guides/api.rst b/providers/informatica/docs/guides/api.rst
new file mode 100644
index 0000000000000..917b7c5395783
--- /dev/null
+++ b/providers/informatica/docs/guides/api.rst
@@ -0,0 +1,135 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+
+Informatica Provider API Reference
+==================================
+
+This section describes the main public classes and methods provided by the Informatica provider for Apache Airflow.
+
+
+Hooks
+-----
+
+**InformaticaEDCHook**
+~~~~~~~~~~~~~~~~~~~~~~
+
+The ``InformaticaEDCHook`` provides low-level access to the Informatica Enterprise Data Catalog (EDC) REST API. It handles authentication, connection configuration, and common EDC operations such as retrieving catalog objects and creating lineage links.
+
+**Initialization Example:**
+
+.. code-block:: python
+
+ from airflow.providers.informatica.hooks.edc import InformaticaEDCHook
+
+ hook = InformaticaEDCHook(informatica_edc_conn_id="my_informatica_conn")
+
+**Key Methods:**
+
+- ``get_object(object_id: str, include_ref_objects: bool = False) -> dict``
+
+ Retrieves a catalog object by its identifier from EDC.
+
+ :param object_id: EDC object identifier (e.g., "``edc://object/table_123``")
+ :param include_ref_objects: Whether to include referenced objects (default: False)
+ :returns: Dictionary containing object data
+
+ .. code-block:: python
+
+ object_data = hook.get_object("edc://object/table_123")
+
+- ``create_lineage_link(source_object_id: str, target_object_id: str) -> dict``
+
+ Creates a lineage relationship between source and target objects in EDC.
+
+ :param source_object_id: Source object identifier
+ :param target_object_id: Target object identifier
+ :returns: Dictionary containing operation result
+
+ .. code-block:: python
+
+ result = hook.create_lineage_link("source_id", "target_id")
+
+
+Extractors
+----------
+
+**InformaticaLineageExtractor**
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``InformaticaLineageExtractor`` uses an ``InformaticaEDCHook`` to extract lineage information from Informatica EDC and convert it to Airflow-compatible asset definitions. It is typically used internally by the provider's plugin and listeners.
+
+**Initialization Example:**
+
+.. code-block:: python
+
+ from airflow.providers.informatica.extractors.informatica import InformaticaLineageExtractor
+ from airflow.providers.informatica.hooks.edc import InformaticaEDCHook
+
+ hook = InformaticaEDCHook()
+ extractor = InformaticaLineageExtractor(edc_hook=hook)
+
+**Key Methods:**
+
+- ``get_object(object_id: str) -> dict``
+
+ Returns Informatica catalog object by ID via the EDC hook.
+
+- ``create_lineage_link(source_object_id: str, target_object_id: str) -> dict``
+
+ Creates a lineage link between source and target objects via the EDC hook.
+
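+A short usage sketch that continues the initialization example above (the object
+identifiers are placeholders):
+
+.. code-block:: python
+
+    source = extractor.get_object("edc://object/source_table_abc123")
+    target = extractor.get_object("edc://object/target_table_xyz789")
+    result = extractor.create_lineage_link(
+        "edc://object/source_table_abc123",
+        "edc://object/target_table_xyz789",
+    )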
+
+Plugins
+-------
+
+**InformaticaProviderPlugin**
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``InformaticaProviderPlugin`` registers event listeners that monitor Airflow task lifecycle events (start, success, failure) and trigger lineage extraction and EDC API calls. This plugin is loaded automatically when the provider is installed and enabled.
+
+No manual instantiation is required. The plugin works transparently with any task that defines inlets and outlets.
+
+
+Configuration Classes
+---------------------
+
+**InformaticaConnectionConfig**
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This dataclass holds Informatica EDC connection settings, including base URL, credentials, security domain, SSL verification, and provider metadata. It is constructed internally by the hook and not typically used directly by end users.
+
+
+Error Handling
+--------------
+
+**InformaticaEDCError**
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Custom exception raised when the Informatica EDC API returns an error or a request fails.
+
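+A defensive call might look like the following sketch (note: the import location of
+``InformaticaEDCError`` is an assumption here and may differ in the released provider):
+
+.. code-block:: python
+
+    from airflow.providers.informatica.hooks.edc import InformaticaEDCHook
+
+    # NOTE: the import path of InformaticaEDCError is assumed for illustration only.
+    from airflow.providers.informatica.hooks.edc import InformaticaEDCError
+
+    hook = InformaticaEDCHook()
+    try:
+        obj = hook.get_object("edc://object/table_123")
+    except InformaticaEDCError:
+        # Log and decide whether to fail the task or skip lineage emission.
+        raise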
+
+EDC API Endpoints Used
+----------------------
+
+The Informatica provider uses the following EDC REST API endpoints:
+
+- ``GET /access/2/catalog/data/objects/{object_id}?includeRefObjects={true|false}`` — Retrieve catalog object details
+- ``PATCH /access/1/catalog/data/objects`` — Create or update lineage relationships
+
+See the configuration and usage guides for more details and complete examples.
diff --git a/providers/informatica/docs/guides/configuration.rst b/providers/informatica/docs/guides/configuration.rst
new file mode 100644
index 0000000000000..a1a521d354765
--- /dev/null
+++ b/providers/informatica/docs/guides/configuration.rst
@@ -0,0 +1,74 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+Configuration
+=============
+
+This section describes how to configure the Informatica provider for Apache Airflow.
+
+Connection Setup
+----------------
+
+Create an Informatica EDC connection in Airflow:
+
+1. **Connection Type**: ``informatica_edc``
+2. **Host**: Your EDC server hostname
+3. **Port**: EDC server port (typically 9087)
+4. **Schema**: ``https`` or ``http``
+5. **Login**: EDC username
+6. **Password**: EDC password
+7. **Extras**: Add the following JSON:
+
+ .. code-block:: json
+
+ {"security_domain": "your_security_domain"}
+
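+Alternatively, the same connection can be supplied through an environment variable using
+Airflow's JSON connection format (a sketch assuming the default connection ID
+``informatica_edc_default``; host and credentials are placeholders):
+
+.. code-block:: bash
+
+    export AIRFLOW_CONN_INFORMATICA_EDC_DEFAULT='{
+        "conn_type": "informatica_edc",
+        "host": "edc.example.com",
+        "port": 9087,
+        "schema": "https",
+        "login": "edc_user",
+        "password": "edc_password",
+        "extra": {"security_domain": "your_security_domain"}
+    }'
+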
+Configuration Options
+---------------------
+
+Add to your ``airflow.cfg``:
+
+.. code-block:: ini
+
+ [informatica]
+ # Disable sending events without uninstalling the Informatica Provider
+ disabled = False
+ # The connection ID to use when no connection ID is provided
+ default_conn_id = informatica_edc_default
+
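+The same options can also be set with environment variables, following Airflow's
+``AIRFLOW__SECTION__KEY`` convention:
+
+.. code-block:: bash
+
+    export AIRFLOW__INFORMATICA__DISABLED=False
+    export AIRFLOW__INFORMATICA__DEFAULT_CONN_ID=informatica_edc_default
+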
+Provider Configuration
+----------------------
+
+The provider configuration is defined in ``get_provider_info.py`` and includes:
+
+- ``disabled``: Boolean flag to disable the provider without uninstalling
+- ``default_conn_id``: Default connection ID for Informatica EDC
+
+SSL and Security
+----------------
+
+The connection supports SSL verification control through extras:
+
+.. code-block:: json
+
+ {
+ "security_domain": "your_domain",
+ "verify_ssl": true
+ }
+
+Set ``verify_ssl`` to ``false`` to disable SSL certificate verification.
diff --git a/providers/informatica/docs/guides/usage.rst b/providers/informatica/docs/guides/usage.rst
new file mode 100644
index 0000000000000..956bfaf5916ac
--- /dev/null
+++ b/providers/informatica/docs/guides/usage.rst
@@ -0,0 +1,164 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+Usage Guide
+===========
+
+The Informatica provider enables automatic lineage tracking for Airflow tasks that define inlets and outlets.
+
+How It Works
+------------
+
+The Informatica plugin automatically detects tasks with lineage support and sends inlet/outlet information to Informatica EDC when tasks succeed. No additional configuration is required beyond defining inlets and outlets in your tasks.
+
+Key Features
+------------
+
+- **Automatic Lineage Detection**: Plugin automatically detects tasks with lineage support
+- **EDC Integration**: Native REST API integration with Informatica Enterprise Data Catalog
+- **Transparent Operation**: No code changes required beyond inlet/outlet definitions
+- **Error Handling**: Robust error handling for API failures and invalid objects
+- **Configurable**: Extensive configuration options for different environments
+
+Architecture
+------------
+
+The provider consists of several key components:
+
+**Hooks**
+ ``InformaticaEDCHook`` provides low-level EDC API access for authentication, object retrieval, and lineage creation.
+
+**Extractors**
+ ``InformaticaLineageExtractor`` handles lineage data extraction and conversion to Airflow-compatible formats.
+
+**Plugins**
+ ``InformaticaProviderPlugin`` registers listeners that monitor task lifecycle events and trigger lineage operations.
+
+**Listeners**
+ Event-driven listeners that respond to task success/failure events and process lineage information.
+
+
+Requirements
+------------
+
+- Apache Airflow 3.0+
+- Access to Informatica Enterprise Data Catalog instance
+- Valid EDC credentials with API access permissions
+
+
+Quick Start
+-----------
+
+1. **Install the provider:**
+
+ .. code-block:: bash
+
+ pip install apache-airflow-providers-informatica
+
+2. **Configure connection:**
+
+ Create an HTTP connection in Airflow UI with EDC server details and security domain in extras.
+
+3. **Add lineage to tasks:**
+
+ Define inlets and outlets in your tasks using EDC object URIs.
+
+4. **Run your DAG:**
+
+ The provider automatically handles lineage extraction when tasks succeed.
+
+
+Example DAG
+-----------
+
+.. code-block:: python
+
+ from airflow import DAG
+ from airflow.providers.standard.operators.python import PythonOperator
+ from datetime import datetime
+
+
+ def my_python_task(**kwargs):
+ print("Hello Informatica Lineage!")
+
+
+ with DAG(
+ dag_id="example_informatica_lineage_dag",
+ start_date=datetime(2024, 1, 1),
+ schedule=None,
+ catchup=False,
+ ) as dag:
+ python_task = PythonOperator(
+ task_id="my_python_task",
+ python_callable=my_python_task,
+ inlets=[{"dataset_uri": "edc://object/source_table_abc123"}],
+ outlets=[{"dataset_uri": "edc://object/target_table_xyz789"}],
+ )
+
+When this task succeeds, the provider automatically creates a lineage link between the source and target objects in EDC.
+
+Hooks
+-----
+
+InformaticaEDCHook
+^^^^^^^^^^^^^^^^^^
+
+The hook provides low-level access to the Informatica EDC API.
+
+.. code-block:: python
+
+ from airflow.providers.informatica.hooks.edc import InformaticaEDCHook
+
+ hook = InformaticaEDCHook(informatica_edc_conn_id="my_connection")
+ object_data = hook.get_object("edc://object/table_123")
+ result = hook.create_lineage_link("source_id", "target_id")
+
+Plugins and Listeners
+---------------------
+
+The ``InformaticaProviderPlugin`` automatically registers listeners that:
+
+- Monitor task success events
+- Extract inlet/outlet information from tasks
+- Resolve object IDs using EDC API
+- Create lineage links between resolved objects
+
+No manual intervention is required. The plugin works transparently with any task that defines inlets and outlets.
+
+Supported Inlet/Outlet Formats
+------------------------------
+
+Inlets and outlets can be defined as:
+
+- String URIs: ``"edc://object/table_name"``
+- Dictionary with dataset_uri: ``{"dataset_uri": "edc://object/table_name"}``
+
+The plugin automatically handles both formats and resolves them to EDC object IDs.
+
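+A short sketch showing both forms on a single task (object names are placeholders and
+``my_python_task`` is the callable from the example DAG above):
+
+.. code-block:: python
+
+    task_with_both_forms = PythonOperator(
+        task_id="lineage_format_example",
+        python_callable=my_python_task,
+        # Plain string URI form
+        inlets=["edc://object/source_table"],
+        # Dictionary form with an explicit dataset_uri key
+        outlets=[{"dataset_uri": "edc://object/target_table"}],
+    )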
+
+Support
+-------
+
+- **Documentation**: See the guides section for detailed usage and configuration
+- **Issues**: Report bugs on the Apache Airflow GitHub repository
+- **Community**: Join the Airflow community for discussions and support
+
+License
+-------
+
+Licensed under the Apache License, Version 2.0. See LICENSE file for details.
diff --git a/providers/informatica/docs/index.rst b/providers/informatica/docs/index.rst
new file mode 100644
index 0000000000000..5c5401e00f87c
--- /dev/null
+++ b/providers/informatica/docs/index.rst
@@ -0,0 +1,172 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+========================================
+``apache-airflow-providers-informatica``
+========================================
+
+.. toctree::
+ :hidden:
+ :maxdepth: 1
+ :caption: Basics
+
+ Home
+ Security
+ Changelog
+
+.. toctree::
+ :hidden:
+ :maxdepth: 1
+ :caption: Guides
+
+ Usage
+ API Reference
+ Configuration
+
+.. toctree::
+ :hidden:
+ :maxdepth: 1
+ :caption: References
+
+ Configuration
+ Python API <_api/airflow/providers/informatica/index>
+
+.. toctree::
+ :hidden:
+ :maxdepth: 1
+ :caption: Resources
+
+ PyPI Repository
+ Installing from sources
+
+.. toctree::
+ :hidden:
+ :maxdepth: 1
+ :caption: Commits
+
+ Detailed list of commits
+
+Apache Airflow Informatica Provider
+===================================
+
+
+The Informatica provider integrates Apache Airflow with Informatica Enterprise Data Catalog (EDC) for advanced data lineage tracking and asset discovery.
+
+Overview
+--------
+
+This provider enables automatic lineage extraction and tracking between Airflow tasks and Informatica EDC catalog objects. When tasks define inlets and outlets with EDC object URIs, the provider automatically:
+
+- Resolves object identifiers using EDC API
+- Creates lineage relationships between source and target objects
+- Integrates with Airflow's native lineage system
+
+Installation
+------------
+
+You can install this package on top of an existing Airflow installation via
+``pip install apache-airflow-providers-informatica``.
+For the minimum Airflow version supported, see ``Requirements`` below.
+
+
+Requirements
+------------
+
+The minimum Apache Airflow version supported by this provider distribution is ``3.0.0``.
+
+========================================== ==================
+PIP package Version required
+========================================== ==================
+``apache-airflow`` ``>=3.0.0``
+``apache-airflow-providers-common-compat`` ``>=1.12.0``
+``apache-airflow-providers-http`` ``>=4.13.2``
+========================================== ==================
+
+.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!
+
+
+.. toctree::
+ :hidden:
+ :maxdepth: 1
+ :caption: Commits
+
+ Detailed list of commits
+
+
+apache-airflow-providers-informatica package
+------------------------------------------------------
+
+`Informatica <https://www.informatica.com/>`__
+
+
+Release: 0.1.0
+
+Provider package
+----------------
+
+This package is for the ``informatica`` provider.
+All classes for this package are included in the ``airflow.providers.informatica`` python package.
+
+Installation
+------------
+
+You can install this package on top of an existing Airflow installation via
+``pip install apache-airflow-providers-informatica``.
+For the minimum Airflow version supported, see ``Requirements`` below.
+
+Requirements
+------------
+
+The minimum Apache Airflow version supported by this provider distribution is ``3.0.0``.
+
+========================================== ==================
+PIP package Version required
+========================================== ==================
+``apache-airflow`` ``>=3.0.0``
+``apache-airflow-providers-common-compat`` ``>=1.12.0``
+``apache-airflow-providers-http`` ``>=4.13.2``
+========================================== ==================
+
+Cross provider package dependencies
+-----------------------------------
+
+Those are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified provider distributions in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+.. code-block:: bash
+
+ pip install apache-airflow-providers-informatica[common.compat]
+
+
+================================================================================================================== =================
+Dependent package Extra
+================================================================================================================== =================
+`apache-airflow-providers-common-compat `_ ``common.compat``
+`apache-airflow-providers-http `_ ``http``
+================================================================================================================== =================
+
+Downloading official packages
+-----------------------------
+
+You can download officially released packages and verify their checksums and signatures from the
+`Official Apache Download site `_
+
+* `The apache-airflow-providers-informatica 0.1.0 sdist package `_ (`asc `__, `sha512 `__)
+* `The apache-airflow-providers-informatica 0.1.0 wheel package `_ (`asc `__, `sha512 `__)
diff --git a/providers/informatica/docs/installing-providers-from-sources.rst b/providers/informatica/docs/installing-providers-from-sources.rst
new file mode 100644
index 0000000000000..8b51458828ed7
--- /dev/null
+++ b/providers/informatica/docs/installing-providers-from-sources.rst
@@ -0,0 +1,19 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+.. include:: /../../../devel-common/src/sphinx_exts/includes/installing-providers-from-sources.rst
diff --git a/providers/informatica/docs/integration-logos/informatica.png b/providers/informatica/docs/integration-logos/informatica.png
new file mode 100644
index 0000000000000..0cf2a34bf72f4
Binary files /dev/null and b/providers/informatica/docs/integration-logos/informatica.png differ
diff --git a/providers/informatica/docs/security.rst b/providers/informatica/docs/security.rst
new file mode 100644
index 0000000000000..05a96c9782be4
--- /dev/null
+++ b/providers/informatica/docs/security.rst
@@ -0,0 +1,19 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+ or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+ software distributed under the License is distributed on an
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ KIND, either express or implied. See the License for the
+ specific language governing permissions and limitations
+ under the License.
+
+.. include:: /../../../devel-common/src/sphinx_exts/includes/security.rst
diff --git a/providers/informatica/provider.yaml b/providers/informatica/provider.yaml
new file mode 100644
index 0000000000000..f660abd507568
--- /dev/null
+++ b/providers/informatica/provider.yaml
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+---
+package-name: apache-airflow-providers-informatica
+name: Informatica Airflow
+description: |
+ `Informatica <https://www.informatica.com/>`__
+
+state: ready
+source-date-epoch: 1758787152
+# Note that those versions are maintained by release manager - do not update them manually
+# with the exception of case where other provider in sources has >= new provider version.
+# In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
+# to be done in the same PR
+versions:
+ - 0.1.0
+
+integrations:
+ - integration-name: Informatica
+ external-doc-url: https://www.informatica.com/
+ logo: /docs/integration-logos/informatica.png
+ tags: [protocol]
+
+hooks:
+ - integration-name: Informatica
+ python-modules:
+ - airflow.providers.informatica.hooks.edc
+
+connection-types:
+ - hook-class-name: airflow.providers.informatica.hooks.edc.InformaticaEDCHook
+ connection-type: informatica_edc
+
+plugins:
+ - name: informatica
+ plugin-class: airflow.providers.informatica.plugins.InformaticaProviderPlugin
+
+config:
+ informatica:
+ description: |
+ This section applies settings for Informatica integration.
+ More about configuration and its precedence can be found in the `usage guide
+ `_.
+
+ options:
+ disabled:
+ description: |
+ Disable sending events without uninstalling the Informatica Provider by setting this to true.
+ type: boolean
+ example: ~
+ default: "False"
+ version_added: ~
+ default_conn_id:
+ description: |
+ The default connection ID to use for Informatica operations.
+ type: string
+ example: "informatica_edc_default"
+ default: "informatica_edc_default"
+ version_added: ~
diff --git a/providers/informatica/pyproject.toml b/providers/informatica/pyproject.toml
new file mode 100644
index 0000000000000..6adaefae5d05d
--- /dev/null
+++ b/providers/informatica/pyproject.toml
@@ -0,0 +1,125 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+
+# IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
+# `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+[build-system]
+requires = ["flit_core==3.12.0"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "apache-airflow-providers-informatica"
+version = "0.1.0"
+description = "Provider package apache-airflow-providers-informatica for Apache Airflow"
+readme = "README.rst"
+license = "Apache-2.0"
+license-files = ['LICENSE', 'NOTICE']
+authors = [
+ {name="Apache Software Foundation", email="dev@airflow.apache.org"},
+]
+maintainers = [
+ {name="Apache Software Foundation", email="dev@airflow.apache.org"},
+]
+keywords = [ "airflow-provider", "informatica", "airflow", "integration" ]
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Environment :: Console",
+ "Environment :: Web Environment",
+ "Intended Audience :: Developers",
+ "Intended Audience :: System Administrators",
+ "Framework :: Apache Airflow",
+ "Framework :: Apache Airflow :: Provider",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Topic :: System :: Monitoring",
+]
+requires-python = ">=3.10"
+
+# The dependencies should be modified in place in the generated file.
+# Any change in the dependencies is preserved when the file is regenerated
+# Make sure to run ``prek update-providers-dependencies --all-files``
+# After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
+dependencies = [
+ "apache-airflow>=3.0.0",
+ "apache-airflow-providers-common-compat>=1.12.0",
+ "apache-airflow-providers-http>=4.13.2"
+]
+
+# The optional dependencies should be modified in place in the generated file
+# Any change in the dependencies is preserved when the file is regenerated
+[project.optional-dependencies]
+"common.compat" = [
+ "apache-airflow-providers-common-compat"
+]
+
+[dependency-groups]
+dev = [
+ "apache-airflow",
+ "apache-airflow-task-sdk",
+ "apache-airflow-devel-common",
+ "apache-airflow-providers-common-compat",
+ "apache-airflow-providers-http",
+ # Additional devel dependencies (do not remove this line and add extra development dependencies)
+ "uuid6>=2024.7.10"
+]
+
+# To build docs:
+#
+# uv run --group docs build-docs
+#
+# To enable auto-refreshing build with server:
+#
+# uv run --group docs build-docs --autobuild
+#
+# To see more options:
+#
+# uv run --group docs build-docs --help
+#
+docs = [
+ "apache-airflow-devel-common[docs]"
+]
+
+[tool.uv.sources]
+# These names must match the names as defined in the pyproject.toml of the workspace items,
+# *not* the workspace folder paths
+apache-airflow = {workspace = true}
+apache-airflow-devel-common = {workspace = true}
+apache-airflow-task-sdk = {workspace = true}
+apache-airflow-providers-common-sql = {workspace = true}
+apache-airflow-providers-standard = {workspace = true}
+
+[project.urls]
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-informatica/0.1.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-informatica/0.1.0/changelog.html"
+"Bug Tracker" = "https://github.com/apache/airflow/issues"
+"Source Code" = "https://github.com/apache/airflow"
+"Slack Chat" = "https://s.apache.org/airflow-slack"
+"Mastodon" = "https://fosstodon.org/@airflow"
+"YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"
+
+[project.entry-points."apache_airflow_provider"]
+provider_info = "airflow.providers.informatica.get_provider_info:get_provider_info"
+
+[project.entry-points."airflow.plugins"]
+informatica = "airflow.providers.informatica.plugins:InformaticaProviderPlugin"
+
+[tool.flit.module]
+name = "airflow.providers.informatica"
diff --git a/providers/informatica/src/airflow/__init__.py b/providers/informatica/src/airflow/__init__.py
new file mode 100644
index 0000000000000..5966d6b1d5261
--- /dev/null
+++ b/providers/informatica/src/airflow/__init__.py
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/providers/informatica/src/airflow/providers/__init__.py b/providers/informatica/src/airflow/providers/__init__.py
new file mode 100644
index 0000000000000..5966d6b1d5261
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/__init__.py
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/providers/informatica/src/airflow/providers/informatica/LICENSE b/providers/informatica/src/airflow/providers/informatica/LICENSE
new file mode 100644
index 0000000000000..11069edd79019
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/providers/informatica/src/airflow/providers/informatica/__init__.py b/providers/informatica/src/airflow/providers/informatica/__init__.py
new file mode 100644
index 0000000000000..c8a603fe93a5e
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/__init__.py
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+# OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
+#
+# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+# `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+#
+from __future__ import annotations
+
+import packaging.version
+
+from airflow import __version__ as airflow_version
+
+__all__ = ["__version__"]
+
+__version__ = "0.1.0"
+
+if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
+ "3.0.0"
+):
+ raise RuntimeError(
+ f"The package `apache-airflow-providers-informatica:{__version__}` needs Apache Airflow 3.0.0+"
+ )
diff --git a/providers/informatica/src/airflow/providers/informatica/extractors/__init__.py b/providers/informatica/src/airflow/providers/informatica/extractors/__init__.py
new file mode 100644
index 0000000000000..2b591e460e715
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/extractors/__init__.py
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+:meta private:
+"""
+
+from __future__ import annotations
+
+# from airflow.providers.informatica.extractors.base import BaseExtractor, OperatorLineage
+# from airflow.providers.informatica.extractors.manager import ExtractorManager
+from airflow.providers.informatica.extractors.informatica import InformaticaLineageExtractor
+
+__all__ = ["InformaticaLineageExtractor"]
diff --git a/providers/informatica/src/airflow/providers/informatica/extractors/informatica.py b/providers/informatica/src/airflow/providers/informatica/extractors/informatica.py
new file mode 100644
index 0000000000000..f2debbba70d19
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/extractors/informatica.py
@@ -0,0 +1,64 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from airflow.utils.log.logging_mixin import LoggingMixin
+
+if TYPE_CHECKING:
+ from airflow.providers.informatica.hooks.edc import InformaticaEDCHook
+
+
+class InformaticaLineageExtractor(LoggingMixin):
+ """Extracts lineage information from Informatica EDC and converts to Airflow Assets."""
+
+ def __init__(self, edc_hook: InformaticaEDCHook) -> None:
+ """
+ Initialize InformaticaLineageExtractor.
+
+ Args:
+ edc_hook (InformaticaEDCHook): Hook for Informatica EDC API connection.
+ """
+ super().__init__()
+ self.edc_hook = edc_hook
+
+ def get_object(self, object_id: str) -> dict[str, Any]:
+ """
+ Return Informatica catalog object by id via EDC hook.
+
+ Args:
+ object_id (str): Informatica object id.
+
+ Returns:
+ dict[str, Any]: Informatica catalog object.
+ """
+ return self.edc_hook.get_object(object_id)
+
+ def create_lineage_link(self, source_object_id: str, target_object_id: str) -> dict[str, Any]:
+ """
+ Create a lineage link between source and target objects via EDC hook.
+
+ Args:
+ source_object_id (str): Source Informatica object id.
+ target_object_id (str): Target Informatica object id.
+
+ Returns:
+ dict[str, Any]: Result of lineage link creation.
+ """
+ return self.edc_hook.create_lineage_link(source_object_id, target_object_id)
diff --git a/providers/informatica/src/airflow/providers/informatica/get_provider_info.py b/providers/informatica/src/airflow/providers/informatica/get_provider_info.py
new file mode 100644
index 0000000000000..22ac916627976
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/get_provider_info.py
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+#
+# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+# `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+
+
+def get_provider_info():
+ return {
+ "package-name": "apache-airflow-providers-informatica",
+ "name": "Informatica Airflow",
+ "description": "`Informatica `__\n",
+ "integrations": [
+ {
+ "integration-name": "Informatica",
+ "external-doc-url": "https://www.informatica.com/",
+ "logo": "/docs/integration-logos/informatica.png",
+ "tags": ["protocol"],
+ }
+ ],
+ "hooks": [
+ {"integration-name": "Informatica", "python-modules": ["airflow.providers.informatica.hooks.edc"]}
+ ],
+ "connection-types": [
+ {
+ "hook-class-name": "airflow.providers.informatica.hooks.edc.InformaticaEDCHook",
+ "connection-type": "informatica_edc",
+ }
+ ],
+ "plugins": [
+ {
+ "name": "informatica",
+ "plugin-class": "airflow.providers.informatica.plugins.InformaticaProviderPlugin",
+ }
+ ],
+ "config": {
+ "informatica": {
+ "description": "This section applies settings for Informatica integration.\nMore about configuration and its precedence can be found in the `usage's guide\n`_.\n",
+ "options": {
+ "disabled": {
+ "description": "Disable sending events without uninstalling the Informatica Provider by setting this to true.\n",
+ "type": "boolean",
+ "example": None,
+ "default": "False",
+ "version_added": None,
+ },
+ "default_conn_id": {
+ "description": "The default connection ID to use for Informatica operations.\n",
+ "type": "string",
+ "example": "informatica_edc_default",
+ "default": "informatica_edc_default",
+ "version_added": None,
+ },
+ },
+ }
+ },
+ }
diff --git a/providers/informatica/src/airflow/providers/informatica/hooks/__init__.py b/providers/informatica/src/airflow/providers/informatica/hooks/__init__.py
new file mode 100644
index 0000000000000..42191ffa37b14
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/hooks/__init__.py
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow.providers.informatica.hooks.edc import InformaticaConnectionConfig, InformaticaEDCError
+
+__all__ = ["InformaticaEDCError", "InformaticaConnectionConfig"]
diff --git a/providers/informatica/src/airflow/providers/informatica/hooks/edc.py b/providers/informatica/src/airflow/providers/informatica/hooks/edc.py
new file mode 100644
index 0000000000000..808707e5e72e5
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/hooks/edc.py
@@ -0,0 +1,245 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import base64
+import re
+from collections.abc import Mapping, MutableMapping
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any
+
+from requests.exceptions import RequestException
+
+from airflow.configuration import conf
+from airflow.providers.http.hooks.http import HttpHook
+
+if TYPE_CHECKING:
+ from requests import Response
+
+ from airflow.providers.common.compat.sdk import Connection
+
+
+class InformaticaEDCError(RuntimeError):
+ """Raised when the Informatica Enterprise Data Catalog API returns an error."""
+
+
+@dataclass(frozen=True)
+class InformaticaConnectionConfig:
+ """Container for Informatica EDC connection settings."""
+
+ base_url: str
+ username: str | None
+ password: str | None
+ security_domain: str | None
+ verify_ssl: bool
+ request_timeout: int
+ provider_id: str
+ modified_by: str | None
+
+ @property
+ def auth_header(self) -> str | None:
+ """Return the authorization header for the configured credentials."""
+ if not self.username:
+ return None
+
+ domain_prefix = f"{self.security_domain}\\" if self.security_domain else ""
+ credential = f"{domain_prefix}{self.username}:{self.password or ''}"
+ token = base64.b64encode(bytes(credential, "utf-8")).decode("utf-8")
+ return f"Basic {token}"
+
+
+class InformaticaEDCHook(HttpHook):
+ """Hook providing a minimal client for the Informatica EDC REST API."""
+
+ conn_name_attr = "informatica_edc_conn_id"
+ default_conn_name = conf.get("informatica", "default_conn_id", fallback="informatica_edc_default")
+ conn_type = "informatica_edc"
+ hook_name = "Informatica EDC"
+ _lineage_association = "core.DataSetDataFlow"
+
+ def __init__(
+ self,
+ informatica_edc_conn_id: str = default_conn_name,
+ *,
+ request_timeout: int | None = None,
+ **kwargs,
+ ) -> None:
+ super().__init__(http_conn_id=informatica_edc_conn_id, method="GET", **kwargs)
+ self._config: InformaticaConnectionConfig | None = None
+ self._request_timeout = request_timeout or conf.getint("informatica", "request_timeout", fallback=30)
+
+ @property
+ def config(self) -> InformaticaConnectionConfig:
+ """Return cached connection configuration."""
+ if self._config is None:
+ connection = self.get_connection(self.http_conn_id)
+ self._config = self._build_connection_config(connection)
+ return self._config
+
+ def _build_connection_config(self, connection: Connection) -> InformaticaConnectionConfig:
+ """Build a configuration object from an Airflow connection."""
+ host = connection.host or ""
+ schema = connection.schema or "https"
+ if host.startswith("http://") or host.startswith("https://"):
+ base_url = host
+ else:
+ base_url = f"{schema}://{host}" if host else f"{schema}://"
+ if connection.port:
+ base_url = f"{base_url}:{connection.port}"
+
+ extras: MutableMapping[str, Any] = connection.extra_dejson or {}
+ verify_ssl_raw = extras.get("verify_ssl", extras.get("verify", True))
+ verify_ssl = str(verify_ssl_raw).lower() not in {"0", "false", "no"}
+
+ provider_id = str(extras.get("provider_id", "enrichment"))
+ modified_by = str(extras.get("modified_by", connection.login or "airflow"))
+ security_domain = extras.get("security_domain") or extras.get("domain")
+
+ return InformaticaConnectionConfig(
+ base_url=base_url.rstrip("/"),
+ username=connection.login,
+ password=connection.password,
+ security_domain=str(security_domain) if security_domain else None,
+ verify_ssl=verify_ssl,
+ request_timeout=self._request_timeout,
+ provider_id=provider_id,
+ modified_by=modified_by,
+ )
+
+    def close_session(self) -> None:
+        """No-op: the hook does not keep a persistent session that needs explicit closing."""
+
+ def get_conn(
+ self,
+ headers: dict[str, Any] | None = None,
+ extra_options: dict[str, Any] | None = None,
+ ) -> Any:
+ """Return a configured session augmented with Informatica specific headers."""
+ session = super().get_conn(headers=headers, extra_options=extra_options)
+ session.verify = self.config.verify_ssl
+ session.headers.update({"Accept": "application/json", "Content-Type": "application/json"})
+ if self.config.auth_header:
+ session.headers["Authorization"] = self.config.auth_header
+ return session
+
+ def _build_url(self, endpoint: str) -> str:
+ endpoint = endpoint if endpoint.startswith("/") else f"/{endpoint}"
+ return f"{self.config.base_url}{endpoint}"
+
+ def _request(
+ self,
+ method: str,
+ endpoint: str,
+ *,
+ params: Mapping[str, Any] | None = None,
+ json: Mapping[str, Any] | None = None,
+ ) -> Response:
+ """Execute an HTTP request and raise :class:`InformaticaEDCError` on failure."""
+ url = self._build_url(endpoint)
+ session = self.get_conn()
+ try:
+ response = session.request(
+ method=method.upper(),
+ url=url,
+ params=params,
+ json=json,
+ timeout=self.config.request_timeout,
+ )
+ except RequestException as exc:
+ raise InformaticaEDCError(f"Failed to call Informatica EDC endpoint {endpoint}: {exc}") from exc
+
+ if response.ok:
+ return response
+
+ message = response.text or response.reason
+ raise InformaticaEDCError(
+ f"Informatica EDC request to {endpoint} returned {response.status_code}: {message}"
+ )
+
+ def _encode_id(self, object_id, tilde=False):
+ """
+ Encode an ID to be safe. Return String.
+
+ Parameters
+ ----------
+ object_id : String
+ ID of object
+ tilde : Boolean, optional (default=False)
+ Whether to encode with a tilde or percent sign.
+ """
+ if ":___" in object_id:
+ object_id = object_id.replace(":___", "://")
+
+ regex = re.compile(r"([^a-zA-Z0-9_-])")
+
+ id_lst = list(object_id)
+ idx = 0
+
+ while regex.search(object_id, idx) is not None:
+ idx = regex.search(object_id, idx).span()[1]
+ if tilde:
+ id_lst[idx - 1] = "~" + str(bytes(id_lst[idx - 1], "utf-8").hex()) + "~"
+ else:
+ id_lst[idx - 1] = "%" + str(bytes(id_lst[idx - 1], "utf-8").hex())
+
+ return "".join(id_lst)
+
+ def get_object(self, object_id: str, include_ref_objects: bool = False) -> dict[str, Any]:
+ """Retrieve a catalog object by its identifier."""
+ encoded_object_id = self._encode_id(object_id, tilde=True)
+ include_refs = "true" if include_ref_objects else "false"
+
+ url = f"/access/2/catalog/data/objects/{encoded_object_id}?includeRefObjects={include_refs}"
+
+ response = self._request("GET", url)
+ return response.json()
+
+ def create_lineage_link(self, source_object_id: str, target_object_id: str) -> dict[str, Any]:
+ """Create a lineage relationship between source and target objects."""
+ if source_object_id == target_object_id:
+ raise InformaticaEDCError(
+ "Source and target object identifiers must differ when creating lineage."
+ )
+
+ payload = {
+ "providerId": self.config.provider_id,
+ "modifiedBy": self.config.modified_by,
+ "updates": [
+ {
+ "id": target_object_id,
+ "newSourceLinks": [
+ {
+ "objectId": source_object_id,
+ "associationId": self._lineage_association,
+ "properties": [
+ {
+ "attrUuid": "core.targetAttribute",
+ "value": self._lineage_association,
+ }
+ ],
+ }
+ ],
+ "deleteSourceLinks": [],
+ "newFacts": [],
+ "deleteFacts": [],
+ }
+ ],
+ }
+
+ response = self._request("PATCH", "/access/1/catalog/data/objects", json=payload)
+ return response.json() if response.content else {}
diff --git a/providers/informatica/src/airflow/providers/informatica/plugins/__init__.py b/providers/informatica/src/airflow/providers/informatica/plugins/__init__.py
new file mode 100644
index 0000000000000..80cc07090a9b1
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/plugins/__init__.py
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from airflow.providers.informatica.plugins.informatica import InformaticaProviderPlugin
+
+__all__ = ["InformaticaProviderPlugin"]
diff --git a/providers/informatica/src/airflow/providers/informatica/plugins/informatica.py b/providers/informatica/src/airflow/providers/informatica/plugins/informatica.py
new file mode 100644
index 0000000000000..0e517a8f1cd7a
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/plugins/informatica.py
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from airflow.configuration import conf
+from airflow.plugins_manager import AirflowPlugin
+
+is_disabled = conf.getboolean("informatica", "disabled", fallback=False)
+# Conditional imports - only load expensive dependencies when plugin is enabled
+if not is_disabled:
+ from airflow.providers.common.compat.sdk import HookLineageReader
+ from airflow.providers.informatica.plugins.listener import get_informatica_listener
+
+
+class InformaticaProviderPlugin(AirflowPlugin):
+ """
+    Plugin that registers the Informatica lineage listener.
+
+    The listener reacts to task instance state changes and sends inlet/outlet
+    lineage information to Informatica EDC.
+ """
+
+ name: str = "InformaticaProviderPlugin"
+ listeners: list = [get_informatica_listener()] if not is_disabled else []
+ hook_lineage_readers: list = [HookLineageReader] if not is_disabled else []
diff --git a/providers/informatica/src/airflow/providers/informatica/plugins/listener.py b/providers/informatica/src/airflow/providers/informatica/plugins/listener.py
new file mode 100644
index 0000000000000..6e3dd1a5c2fc3
--- /dev/null
+++ b/providers/informatica/src/airflow/providers/informatica/plugins/listener.py
@@ -0,0 +1,142 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING
+
+from airflow.listeners import hookimpl
+from airflow.providers.informatica.extractors import InformaticaLineageExtractor
+from airflow.providers.informatica.hooks.edc import InformaticaEDCHook
+
+if TYPE_CHECKING:
+ from airflow.models import TaskInstance
+ from airflow.utils.state import TaskInstanceState
+
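+# Module-level singleton, created lazily by get_informatica_listener().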
+_informatica_listener: InformaticaListener | None = None
+
+
+class InformaticaListener:
+ """Informatica listener sends events on task instance state changes to Informatica EDC for lineage tracking."""
+
+ def __init__(self):
+ self._executor = None
+        self.log = logging.getLogger(__name__)
+        # The lineage extractor wraps the EDC hook and exposes get_object and create_lineage_link.
+        self.hook = InformaticaLineageExtractor(edc_hook=InformaticaEDCHook())
+
+ @hookimpl
+ def on_task_instance_success(
+ self, previous_state: TaskInstanceState, task_instance: TaskInstance, *args, **kwargs
+ ):
+ self._handle_lineage(task_instance, state="success")
+
+ @hookimpl
+ def on_task_instance_failed(
+ self, previous_state: TaskInstanceState, task_instance: TaskInstance, *args, **kwargs
+ ):
+ self._handle_lineage(task_instance, state="failed")
+
+ @hookimpl
+ def on_task_instance_running(
+ self, previous_state: TaskInstanceState, task_instance: TaskInstance, *args, **kwargs
+ ):
+ self._handle_lineage(task_instance, state="running")
+
+ def _handle_lineage(self, task_instance: TaskInstance, state: str):
+ """
+        Handle lineage resolution for task inlets and outlets.
+
+        Each inlet and outlet URI is resolved to an Informatica EDC object id via get_object.
+        Lineage links are then created between every resolved inlet/outlet pair.
+ """
+ task = getattr(task_instance, "task", None)
+ if not task:
+ self.log.debug("No task found for TaskInstance %s", task_instance)
+ return
+ inlets = getattr(task, "inlets", getattr(task_instance, "inlets", []))
+ outlets = getattr(task, "outlets", getattr(task_instance, "outlets", []))
+
+ valid_inlets = [] # List of tuples: (uri, object_id)
+ valid_outlets = []
+
+ self.log.info("[InformaticaLineageListener] Task: %s State: %s", task_instance.task_id, state)
+
+ if state != "success":
+ self.log.info("[InformaticaLineageListener] Skipping lineage handling for state: %s", state)
+ return
+
+ for inlet in inlets:
+ inlet_uri = None
+ if isinstance(inlet, dict) and "dataset_uri" in inlet:
+ inlet_uri = inlet["dataset_uri"]
+ elif isinstance(inlet, str):
+ inlet_uri = inlet
+ else:
+ self.log.error("Inlet is not a string or dict with 'dataset_uri': %s", inlet)
+ continue
+ self.log.info("[InformaticaLineageListener] Inlet: %s and type: %s", inlet_uri, type(inlet))
+ try:
+ obj = self.hook.get_object(inlet_uri)
+ if obj and "id" in obj and obj["id"]:
+ valid_inlets.append((inlet_uri, obj["id"]))
+ except Exception as e:
+ self.log.exception("Failed to resolve inlet %s: %s", inlet_uri, e)
+
+ for outlet in outlets:
+ outlet_uri = None
+ if isinstance(outlet, dict) and "dataset_uri" in outlet:
+ outlet_uri = outlet["dataset_uri"]
+ elif isinstance(outlet, str):
+ outlet_uri = outlet
+ else:
+ self.log.error("Outlet is not a string or dict with 'dataset_uri': %s", outlet)
+ continue
+ self.log.info("[InformaticaLineageListener] Outlet: %s", outlet_uri)
+ try:
+ obj = self.hook.get_object(outlet_uri)
+ if obj and "id" in obj and obj["id"]:
+ valid_outlets.append((outlet_uri, obj["id"]))
+ except Exception as e:
+ self.log.warning("Failed to resolve outlet %s: %s", outlet_uri, e)
+
+ # Create lineage links between all valid inlet and outlet object IDs
+ for inlet_uri, inlet_id in valid_inlets:
+ for outlet_uri, outlet_id in valid_outlets:
+ try:
+ self.log.info(
+ "[InformaticaLineageListener] Creating lineage link: %s (%s) -> %s (%s)",
+ inlet_uri,
+ inlet_id,
+ outlet_uri,
+ outlet_id,
+ )
+ result = self.hook.create_lineage_link(inlet_id, outlet_id)
+ self.log.info("Lineage link created: %s -> %s | Result: %s", inlet_id, outlet_id, result)
+ except Exception as e:
+ self.log.exception(
+ "Failed to create lineage link from %s to %s: %s", inlet_id, outlet_id, e
+ )
+
+
+def get_informatica_listener() -> InformaticaListener:
+ """Get singleton listener manager."""
+ global _informatica_listener
+ if not _informatica_listener:
+ _informatica_listener = InformaticaListener()
+ return _informatica_listener
diff --git a/providers/informatica/tests/conftest.py b/providers/informatica/tests/conftest.py
new file mode 100644
index 0000000000000..f56ccce0a3f69
--- /dev/null
+++ b/providers/informatica/tests/conftest.py
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
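+# Loads Airflow's shared test fixtures (e.g. create_connection_without_db) for this provider's tests.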
+pytest_plugins = "tests_common.pytest_plugin"
diff --git a/providers/informatica/tests/unit/__init__.py b/providers/informatica/tests/unit/__init__.py
new file mode 100644
index 0000000000000..5966d6b1d5261
--- /dev/null
+++ b/providers/informatica/tests/unit/__init__.py
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/providers/informatica/tests/unit/informatica/__init__.py b/providers/informatica/tests/unit/informatica/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/providers/informatica/tests/unit/informatica/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/providers/informatica/tests/unit/informatica/extractors/__init__.py b/providers/informatica/tests/unit/informatica/extractors/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/providers/informatica/tests/unit/informatica/extractors/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/providers/informatica/tests/unit/informatica/extractors/test_informatica.py b/providers/informatica/tests/unit/informatica/extractors/test_informatica.py
new file mode 100644
index 0000000000000..1843571c35f48
--- /dev/null
+++ b/providers/informatica/tests/unit/informatica/extractors/test_informatica.py
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Unit tests for InformaticaLineageExtractor covering all methods.
+"""
+
+from __future__ import annotations
+
+from unittest.mock import patch
+
+import pytest
+
+from airflow.models import Connection
+from airflow.providers.informatica.extractors.informatica import InformaticaLineageExtractor
+from airflow.providers.informatica.hooks.edc import InformaticaEDCHook
+
+
+@pytest.fixture
+def extractor():
+ informatica_hook = InformaticaEDCHook(informatica_edc_conn_id="test_conn")
+ return InformaticaLineageExtractor(edc_hook=informatica_hook)
+
+
+@pytest.fixture(autouse=True)
+def setup_connections(create_connection_without_db):
+ create_connection_without_db(
+ Connection(
+ conn_id="test_conn",
+ conn_type="informatica_edc",
+ host="testhost",
+ schema="https",
+ port=443,
+ login="user",
+ password="pass",
+ extra='{"security_domain": "domain"}',
+ )
+ )
+
+
+@patch("airflow.providers.informatica.hooks.edc.InformaticaEDCHook._request")
+def test_get_object(mock_request, extractor):
+ """Test get_object delegates to hook and returns result."""
+ mock_request.return_value.json.return_value = {"id": "obj1", "name": "test_obj"}
+ result = extractor.get_object("obj1")
+ mock_request.assert_called_once()
+ assert result["id"] == "obj1"
+ assert result["name"] == "test_obj"
+
+
+@patch("airflow.providers.informatica.hooks.edc.InformaticaEDCHook._request")
+def test_create_lineage_link(mock_request, extractor):
+ """Test create_lineage_link delegates to hook and returns result."""
+ mock_request.return_value.json.return_value = {"success": True}
+ result = extractor.create_lineage_link("src_id", "tgt_id")
+ mock_request.assert_called_once()
+ assert result["success"] is True
diff --git a/providers/informatica/tests/unit/informatica/hooks/__init__.py b/providers/informatica/tests/unit/informatica/hooks/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/providers/informatica/tests/unit/informatica/hooks/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/providers/informatica/tests/unit/informatica/hooks/test_edc.py b/providers/informatica/tests/unit/informatica/hooks/test_edc.py
new file mode 100644
index 0000000000000..9fd0da906c4ab
--- /dev/null
+++ b/providers/informatica/tests/unit/informatica/hooks/test_edc.py
@@ -0,0 +1,157 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from airflow.providers.informatica.hooks.edc import InformaticaEDCError, InformaticaEDCHook
+
+
+@pytest.fixture
+def hook():
+ return InformaticaEDCHook(informatica_edc_conn_id="test_conn")
+
+
+@patch("airflow.providers.informatica.hooks.edc.HttpHook.get_connection")
+def test_config_property_and_build_connection_config(mock_get_connection, hook):
+ """Test config property and _build_connection_config method."""
+ mock_conn = MagicMock()
+ mock_conn.host = "testhost"
+ mock_conn.schema = "https"
+ mock_conn.port = 443
+ mock_conn.login = "user"
+ mock_conn.password = "pass"
+ mock_conn.extra_dejson = {
+ "verify_ssl": True,
+ "provider_id": "test_provider",
+ "modified_by": "tester",
+ "security_domain": "domain",
+ }
+ mock_get_connection.return_value = mock_conn
+ config = hook.config
+ assert config.base_url == "https://testhost:443"
+ assert config.username == "user"
+ assert config.password == "pass"
+ assert config.security_domain == "domain"
+ assert config.provider_id == "test_provider"
+ assert config.modified_by == "tester"
+ assert config.verify_ssl is True
+ assert isinstance(config.request_timeout, int)
+ assert config.auth_header.startswith("Basic ")
+
+
+@patch("airflow.providers.informatica.hooks.edc.HttpHook.get_connection")
+@patch("airflow.providers.informatica.hooks.edc.HttpHook.get_conn")
+def test_get_conn_headers_and_verify(mock_get_conn, mock_get_connection, hook):
+ """Test get_conn sets headers and verify."""
+ mock_conn = MagicMock()
+ mock_conn.host = "testhost"
+ mock_conn.schema = "https"
+ mock_conn.port = 443
+ mock_conn.login = "user"
+ mock_conn.password = "pass"
+ mock_conn.extra_dejson = {"verify_ssl": True}
+ mock_get_connection.return_value = mock_conn
+ mock_session = MagicMock()
+ mock_session.headers = {}
+ mock_get_conn.return_value = mock_session
+ session = hook.get_conn()
+ assert "Accept" in session.headers
+ assert "Content-Type" in session.headers
+ assert "Authorization" in session.headers
+ assert session.verify is True
+
+
+def test_build_url(hook):
+ """Test _build_url method."""
+ hook._config = MagicMock(base_url="http://test")
+ url = hook._build_url("endpoint")
+ assert url == "http://test/endpoint"
+ url2 = hook._build_url("/endpoint")
+ assert url2 == "http://test/endpoint"
+
+
+@patch("airflow.providers.informatica.hooks.edc.InformaticaEDCHook.get_conn")
+def test_request_success_and_error(mock_get_conn, hook):
+ """Test _request method for success and error cases."""
+ mock_session = MagicMock()
+ mock_response = MagicMock()
+ mock_response.ok = True
+ mock_response.status_code = 200
+ mock_response.text = ""
+ mock_response.json.return_value = {"result": "ok"}
+ mock_session.request.return_value = mock_response
+ mock_get_conn.return_value = mock_session
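+    # Inject a pre-built config so the request does not need a real Airflow connection.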
+ hook._config = MagicMock(base_url="http://test", request_timeout=10)
+ resp = hook._request("GET", "endpoint")
+ assert resp.json() == {"result": "ok"}
+
+ # Error case
+ mock_response.ok = False
+ mock_response.status_code = 400
+ mock_response.text = "Bad Request"
+ mock_session.request.return_value = mock_response
+ with pytest.raises(
+ InformaticaEDCError, match="Informatica EDC request to endpoint returned 400: Bad Request"
+ ):
+ hook._request("GET", "endpoint")
+
+
+def test_encode_id(hook):
+ """Test _encode_id method for tilde and percent encoding."""
+ # ID with unsafe chars
+ unsafe_id = "table:___name/unsafe"
+ encoded = hook._encode_id(unsafe_id, tilde=True)
+ assert "~" in encoded
+ encoded_percent = hook._encode_id(unsafe_id, tilde=False)
+ assert "%" in encoded_percent
+
+
+@patch("airflow.providers.informatica.hooks.edc.InformaticaEDCHook._request")
+def test_get_object(mock_request, hook):
+ """Test get_object method."""
+ mock_request.return_value.json.return_value = {"id": "table://database/schema/safe", "name": "test"}
+ hook._config = MagicMock(base_url="http://test", request_timeout=10)
+ obj = hook.get_object("table://database/schema/safe")
+ assert obj["id"] == "table://database/schema/safe"
+ assert obj["name"] == "test"
+
+
+@patch("airflow.providers.informatica.hooks.edc.InformaticaEDCHook._request")
+def test_create_lineage_link(mock_request, hook):
+ """Test create_lineage_link method and error for same source/target."""
+ hook._config = MagicMock(
+ base_url="http://test", provider_id="prov", modified_by="mod", request_timeout=10
+ )
+ mock_request.return_value.content = b'{"success": true}'
+ mock_request.return_value.json.return_value = {"success": True}
+ result = hook.create_lineage_link("src_id", "tgt_id")
+ assert result["success"] is True
+ # Error for same source/target
+ with pytest.raises(
+ InformaticaEDCError, match="Source and target object identifiers must differ when creating lineage."
+ ):
+ hook.create_lineage_link("same_id", "same_id")
+
+
+def test_close_session(hook):
+ """Test close_session does nothing (no-op)."""
+ assert hook.close_session() is None
diff --git a/providers/informatica/tests/unit/informatica/plugins/__init__.py b/providers/informatica/tests/unit/informatica/plugins/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/providers/informatica/tests/unit/informatica/plugins/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/providers/informatica/tests/unit/informatica/plugins/test_informatica.py b/providers/informatica/tests/unit/informatica/plugins/test_informatica.py
new file mode 100644
index 0000000000000..857a2a0a64e99
--- /dev/null
+++ b/providers/informatica/tests/unit/informatica/plugins/test_informatica.py
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Unit tests for Informatica provider plugin."""
+from __future__ import annotations
+
+import contextlib
+import sys
+
+import pytest
+
+from tests_common import RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES
+from tests_common.test_utils.config import conf_vars
+
+
+@pytest.mark.skipif(
+ RUNNING_TESTS_AGAINST_AIRFLOW_PACKAGES, reason="Plugin initialization is done early in case of packages"
+)
+class TestInformaticaProviderPlugin:
+ def setup_method(self):
+        # Remove the module under test if it was already imported, so each
+        # test case re-imports it and re-evaluates the module-level config check.
+ if "airflow.providers.informatica.plugins.informatica" in sys.modules:
+ del sys.modules["airflow.providers.informatica.plugins.informatica"]
+
+ @pytest.mark.parametrize(
+ ("mocks", "expected"),
+ [
+ # 1: not disabled by default
+ ([], 1),
+ # 0: conf disabled = true
+ ([conf_vars({("informatica", "disabled"): "True"})], 0),
+ # 0: conf disabled = 1
+ ([conf_vars({("informatica", "disabled"): "1"})], 0),
+ # 1: conf disabled = false
+ ([conf_vars({("informatica", "disabled"): "False"})], 1),
+ # 1: conf disabled = 0
+ ([conf_vars({("informatica", "disabled"): "0"})], 1),
+ ],
+ )
+ def test_plugin_disablements(self, mocks, expected):
+ with contextlib.ExitStack() as stack:
+ for mock in mocks:
+ stack.enter_context(mock)
+ from airflow.providers.informatica.plugins.informatica import InformaticaProviderPlugin
+
+ plugin = InformaticaProviderPlugin()
+ assert len(plugin.listeners) == expected
diff --git a/providers/informatica/tests/unit/informatica/plugins/test_listener.py b/providers/informatica/tests/unit/informatica/plugins/test_listener.py
new file mode 100644
index 0000000000000..46bc02ae20627
--- /dev/null
+++ b/providers/informatica/tests/unit/informatica/plugins/test_listener.py
@@ -0,0 +1,110 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from unittest.mock import MagicMock
+
+import pytest
+
+from airflow.providers.informatica.hooks.edc import InformaticaEDCError
+from airflow.providers.informatica.plugins.listener import InformaticaListener
+
+
+class DummyTask:
+ def __init__(self, inlets=None, outlets=None):
+ self.inlets = inlets or []
+ self.outlets = outlets or []
+
+
+class DummyTaskInstance:
+ def __init__(self, task, task_id="dummy"):
+ self.task = task
+ self.task_id = task_id
+
+
+@pytest.fixture
+def listener():
+ informatica_listener = InformaticaListener()
+ informatica_listener.hook = MagicMock()
+ informatica_listener.log = MagicMock()
+ return informatica_listener
+
+
+def test_handle_lineage_success_str(listener):
+ listener.hook.get_object.side_effect = lambda x: {"id": x}
+ listener.hook.create_lineage_link.return_value = {"metadata": {}}
+ task = DummyTask(inlets=["in1"], outlets=["out1"])
+ ti = DummyTaskInstance(task)
+ listener._handle_lineage(ti, state="success")
+ listener.hook.get_object.assert_any_call("in1")
+ listener.hook.get_object.assert_any_call("out1")
+ listener.hook.create_lineage_link.assert_called_once_with("in1", "out1")
+
+
+def test_handle_lineage_success_dict(listener):
+ listener.hook.get_object.side_effect = lambda x: {"id": x}
+ listener.hook.create_lineage_link.return_value = {"metadata": {}}
+ task = DummyTask(inlets=[{"dataset_uri": "in1"}], outlets=[{"dataset_uri": "out1"}])
+ ti = DummyTaskInstance(task)
+ listener._handle_lineage(ti, state="success")
+ listener.hook.get_object.assert_any_call("in1")
+ listener.hook.get_object.assert_any_call("out1")
+ listener.hook.create_lineage_link.assert_called_once_with("in1", "out1")
+
+
+def test_handle_lineage_skips_missing_objectid(listener):
+ listener.hook.get_object.return_value = {}
+ task = DummyTask(inlets=["in1"], outlets=["out1"])
+ ti = DummyTaskInstance(task)
+ listener._handle_lineage(ti, state="success")
+ listener.hook.create_lineage_link.assert_not_called()
+
+
+def test_handle_lineage_edc_error_on_inlet(listener):
+ listener.hook.get_object.side_effect = [InformaticaEDCError("fail"), {"id": "out1"}]
+ task = DummyTask(inlets=["in1"], outlets=["out1"])
+ ti = DummyTaskInstance(task)
+ listener._handle_lineage(ti, state="success")
+ listener.hook.create_lineage_link.assert_not_called()
+
+
+def test_handle_lineage_non_success_state(listener):
+ task = DummyTask(inlets=["in1"], outlets=["out1"])
+ ti = DummyTaskInstance(task)
+ listener._handle_lineage(ti, state="failed")
+ listener.hook.get_object.assert_not_called()
+ listener.hook.create_lineage_link.assert_not_called()
+
+
+def test_handle_lineage_link_creation_error_logs(listener):
+ listener.hook.get_object.side_effect = lambda x: {"id": x}
+ listener.hook.create_lineage_link.side_effect = Exception("fail link")
+ listener.log = MagicMock()
+ task = DummyTask(inlets=["in1"], outlets=["out1"])
+ ti = DummyTaskInstance(task)
+ listener._handle_lineage(ti, state="success")
+
+ calls = listener.log.exception.call_args_list
+    assert any("Failed to create lineage link from" in str(call) for call in calls)
+
+
+def test_handle_lineage_inlet_outlet_type_error(listener):
+ task = DummyTask(inlets=[123], outlets=[None])
+ ti = DummyTaskInstance(task)
+ listener._handle_lineage(ti, state="success")
+ listener.hook.get_object.assert_not_called()
+ listener.hook.create_lineage_link.assert_not_called()
diff --git a/providers/openlineage/tests/unit/openlineage/extractors/test_base.py b/providers/openlineage/tests/unit/openlineage/extractors/test_base.py
index 71ce953c835ec..ccffd2a93c059 100644
--- a/providers/openlineage/tests/unit/openlineage/extractors/test_base.py
+++ b/providers/openlineage/tests/unit/openlineage/extractors/test_base.py
@@ -341,26 +341,32 @@ def test_extraction_without_on_start():
def test_extractor_manager_calls_appropriate_extractor_method(
operator_class, task_state, expected_job_facets
):
- extractor_manager = ExtractorManager()
-
- ti = mock.MagicMock()
-
- metadata = extractor_manager.extract_metadata(
- dagrun=mock.MagicMock(run_id="dagrun_run_id"),
- task=operator_class(task_id="task_id"),
- task_instance_state=task_state,
- task_instance=ti,
- )
+ # Mocking get_hook_lineage_collector to prevent test pollution from other tests
+ # (e.g. test_serialization_deserialization_basic in common.io)
+ with mock.patch(
+ "airflow.providers.common.compat.lineage.hook.get_hook_lineage_collector"
+ ) as mock_collector:
+ mock_collector.return_value.has_collected = False
+ extractor_manager = ExtractorManager()
+
+ ti = mock.MagicMock()
+
+ metadata = extractor_manager.extract_metadata(
+ dagrun=mock.MagicMock(run_id="dagrun_run_id"),
+ task=operator_class(task_id="task_id"),
+ task_instance_state=task_state,
+ task_instance=ti,
+ )
- assert metadata.job_facets == expected_job_facets
- if not expected_job_facets: # Empty OperatorLineage() is expected
- assert not metadata.inputs
- assert not metadata.outputs
- assert not metadata.run_facets
- else:
- assert metadata.inputs == INPUTS
- assert metadata.outputs == OUTPUTS
- assert metadata.run_facets == RUN_FACETS
+ assert metadata.job_facets == expected_job_facets
+ if not expected_job_facets: # Empty OperatorLineage() is expected
+ assert not metadata.inputs
+ assert not metadata.outputs
+ assert not metadata.run_facets
+ else:
+ assert metadata.inputs == INPUTS
+ assert metadata.outputs == OUTPUTS
+ assert metadata.run_facets == RUN_FACETS
@mock.patch("airflow.providers.openlineage.conf.custom_extractors")
diff --git a/pyproject.toml b/pyproject.toml
index ccbc9a580c0f1..959ab2e07e0bd 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -252,6 +252,9 @@ packages = []
"influxdb" = [
"apache-airflow-providers-influxdb>=2.8.0"
]
+"informatica" = [
+ "apache-airflow-providers-informatica>=0.1.0" # Set from MIN_VERSION_OVERRIDE in update_airflow_pyproject_toml.py
+]
"jdbc" = [
"apache-airflow-providers-jdbc>=4.5.2"
]
@@ -445,6 +448,7 @@ packages = []
"apache-airflow-providers-http>=4.13.2",
"apache-airflow-providers-imap>=3.8.0",
"apache-airflow-providers-influxdb>=2.8.0",
+ "apache-airflow-providers-informatica>=0.1.0", # Set from MIN_VERSION_OVERRIDE in update_airflow_pyproject_toml.py
"apache-airflow-providers-jdbc>=4.5.2",
"apache-airflow-providers-jenkins>=3.7.2",
"apache-airflow-providers-keycloak>=0.0.1",
@@ -1150,6 +1154,8 @@ mypy_path = [
"$MYPY_CONFIG_FILE_DIR/providers/imap/tests",
"$MYPY_CONFIG_FILE_DIR/providers/influxdb/src",
"$MYPY_CONFIG_FILE_DIR/providers/influxdb/tests",
+ "$MYPY_CONFIG_FILE_DIR/providers/informatica/src",
+ "$MYPY_CONFIG_FILE_DIR/providers/informatica/tests",
"$MYPY_CONFIG_FILE_DIR/providers/jdbc/src",
"$MYPY_CONFIG_FILE_DIR/providers/jdbc/tests",
"$MYPY_CONFIG_FILE_DIR/providers/jenkins/src",
@@ -1421,6 +1427,7 @@ apache-airflow-providers-hashicorp = { workspace = true }
apache-airflow-providers-http = { workspace = true }
apache-airflow-providers-imap = { workspace = true }
apache-airflow-providers-influxdb = { workspace = true }
+apache-airflow-providers-informatica = { workspace = true }
apache-airflow-providers-jdbc = { workspace = true }
apache-airflow-providers-jenkins = { workspace = true }
apache-airflow-providers-keycloak = { workspace = true }
@@ -1550,6 +1557,7 @@ members = [
"providers/http",
"providers/imap",
"providers/influxdb",
+ "providers/informatica",
"providers/jdbc",
"providers/jenkins",
"providers/keycloak",
diff --git a/scripts/ci/docker-compose/remove-sources.yml b/scripts/ci/docker-compose/remove-sources.yml
index 245200fe76e44..bd08c89116b23 100644
--- a/scripts/ci/docker-compose/remove-sources.yml
+++ b/scripts/ci/docker-compose/remove-sources.yml
@@ -78,6 +78,7 @@ services:
- ../../../empty:/opt/airflow/providers/http/src
- ../../../empty:/opt/airflow/providers/imap/src
- ../../../empty:/opt/airflow/providers/influxdb/src
+ - ../../../empty:/opt/airflow/providers/informatica/src
- ../../../empty:/opt/airflow/providers/jdbc/src
- ../../../empty:/opt/airflow/providers/jenkins/src
- ../../../empty:/opt/airflow/providers/keycloak/src
diff --git a/scripts/ci/docker-compose/tests-sources.yml b/scripts/ci/docker-compose/tests-sources.yml
index e8f3c9a5fafab..eb9da3cacebf5 100644
--- a/scripts/ci/docker-compose/tests-sources.yml
+++ b/scripts/ci/docker-compose/tests-sources.yml
@@ -91,6 +91,7 @@ services:
- ../../../providers/http/tests:/opt/airflow/providers/http/tests
- ../../../providers/imap/tests:/opt/airflow/providers/imap/tests
- ../../../providers/influxdb/tests:/opt/airflow/providers/influxdb/tests
+ - ../../../providers/informatica/tests:/opt/airflow/providers/informatica/tests
- ../../../providers/jdbc/tests:/opt/airflow/providers/jdbc/tests
- ../../../providers/jenkins/tests:/opt/airflow/providers/jenkins/tests
- ../../../providers/keycloak/tests:/opt/airflow/providers/keycloak/tests
diff --git a/scripts/ci/prek/update_airflow_pyproject_toml.py b/scripts/ci/prek/update_airflow_pyproject_toml.py
index e3b5ec3d1b102..b3bf8369e0d97 100755
--- a/scripts/ci/prek/update_airflow_pyproject_toml.py
+++ b/scripts/ci/prek/update_airflow_pyproject_toml.py
@@ -78,6 +78,7 @@
"openlineage": parse_version("2.3.0"),
"git": parse_version("0.0.2"),
"common.messaging": parse_version("2.0.0"),
+ "informatica": parse_version("0.1.0"),
}