diff --git a/airflow-core/pyproject.toml b/airflow-core/pyproject.toml
index c5eda3c36b833..61b67d19c4504 100644
--- a/airflow-core/pyproject.toml
+++ b/airflow-core/pyproject.toml
@@ -63,6 +63,7 @@ version = "3.0.0"
 
 dependencies = [
     "a2wsgi>=1.10.8",
+    "aiosqlite>=0.20.0",
     # Alembic is important to handle our migrations in predictable and performant way. It is developed
     # together with SQLAlchemy. Our experience with Alembic is that it very stable in minor version
     # The 1.13.0 of alembic marked some migration code as SQLAlchemy 2+ only so we limit it to 1.13.1
@@ -139,7 +140,6 @@ dependencies = [
     "apache-airflow-providers-common-sql>=1.24.1",
     "apache-airflow-providers-smtp>=2.0.1",
     "apache-airflow-providers-standard>=0.2.0",
-    "apache-airflow-providers-sqlite>=4.0.1",
 ]
 
diff --git a/airflow-core/tests/unit/always/test_connection.py b/airflow-core/tests/unit/always/test_connection.py
index 2304542c53058..1a329dfef5ce4 100644
--- a/airflow-core/tests/unit/always/test_connection.py
+++ b/airflow-core/tests/unit/always/test_connection.py
@@ -31,7 +31,8 @@
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.models import Connection, crypto
-from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
+sqlite = pytest.importorskip("airflow.providers.sqlite.hooks.sqlite")
 
 from tests_common.test_utils.config import conf_vars
 from tests_common.test_utils.markers import skip_if_force_lowest_dependencies_marker
@@ -594,6 +595,8 @@ def test_from_json_special_characters(self, val, expected):
         },
     )
     def test_using_env_var(self):
+        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
         conn = SqliteHook.get_connection(conn_id="test_uri")
         assert conn.host == "ec2.compute.com"
         assert conn.schema == "the_database"
@@ -610,6 +613,8 @@ def test_using_env_var(self):
         },
     )
     def test_using_unix_socket_env_var(self):
+        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
         conn = SqliteHook.get_connection(conn_id="test_uri_no_creds")
         assert conn.host == "ec2.compute.com"
         assert conn.schema == "the_database"
@@ -634,6 +639,8 @@ def test_param_setup(self):
 
     @pytest.mark.db_test
     def test_env_var_priority(self):
+        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
         conn = SqliteHook.get_connection(conn_id="airflow_db")
         assert conn.host != "ec2.compute.com"
 
@@ -687,6 +694,8 @@ def test_dbapi_get_sqlalchemy_engine(self):
         },
     )
     def test_get_connections_env_var(self):
+        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
         conns = SqliteHook.get_connection(conn_id="test_uri")
         assert conns.host == "ec2.compute.com"
         assert conns.schema == "the_database"
diff --git a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py
index e766bc2bc27d7..502621169dbc4 100644
--- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py
+++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py
@@ -17,7 +17,8 @@
 from __future__ import annotations
 
 import os
-from unittest import mock
+from importlib.metadata import PackageNotFoundError, metadata
+from unittest import SkipTest, mock
 
 import pytest
 
@@ -828,6 +829,12 @@ def test_patch_should_response_200_redacted_password(
 
 
 class TestConnection(TestConnectionEndpoint):
+    def setup_method(self):
+        try:
+            metadata("apache-airflow-providers-sqlite")
+        except PackageNotFoundError:
+            raise SkipTest("The SQlite distribution package is not installed.")
+
     @mock.patch.dict(os.environ, {"AIRFLOW__CORE__TEST_CONNECTION": "Enabled"})
     @pytest.mark.parametrize(
         "body, message",
diff --git a/docker-stack-docs/docker-examples/extending/add-providers/Dockerfile b/docker-stack-docs/docker-examples/extending/add-providers/Dockerfile
index 40211564e7ffa..530d9db464961 100644
--- a/docker-stack-docs/docker-examples/extending/add-providers/Dockerfile
+++ b/docker-stack-docs/docker-examples/extending/add-providers/Dockerfile
@@ -25,5 +25,5 @@ RUN apt-get update \
     && rm -rf /var/lib/apt/lists/*
 USER airflow
 ENV JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64
-RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" apache-airflow-providers-apache-spark==2.1.3
+RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" apache-airflow-providers-apache-spark==5.1.1
 # [END Dockerfile]
diff --git a/providers/apache/livy/pyproject.toml b/providers/apache/livy/pyproject.toml
index e004ffc8bc821..ad19ca56bd2b6 100644
--- a/providers/apache/livy/pyproject.toml
+++ b/providers/apache/livy/pyproject.toml
@@ -72,6 +72,7 @@ dev = [
     "apache-airflow-providers-common-compat",
     "apache-airflow-providers-http",
     # Additional devel dependencies (do not remove this line and add extra development dependencies)
+    "apache-airflow-providers-openlineage",
 ]
 
 # To build docs:
diff --git a/providers/sqlite/README.rst b/providers/sqlite/README.rst
index 8245663d3f4d8..894574c715cfe 100644
--- a/providers/sqlite/README.rst
+++ b/providers/sqlite/README.rst
@@ -54,7 +54,6 @@ Requirements
 PIP package                             Version required
 ======================================= ==================
 ``apache-airflow``                      ``>=2.9.0``
-``aiosqlite``                           ``>=0.20.0``
 ``apache-airflow-providers-common-sql`` ``>=1.20.0``
 ======================================= ==================
 
diff --git a/providers/sqlite/pyproject.toml b/providers/sqlite/pyproject.toml
index d282893419a6c..a08d399be9332 100644
--- a/providers/sqlite/pyproject.toml
+++ b/providers/sqlite/pyproject.toml
@@ -58,7 +58,6 @@ requires-python = "~=3.9"
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.9.0",
-    "aiosqlite>=0.20.0",
     "apache-airflow-providers-common-sql>=1.20.0",
 ]
 
diff --git a/pyproject.toml b/pyproject.toml
index af4420d3566b7..551b2e89f45f5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -725,10 +725,11 @@ testing = ["dev", "providers.tests", "tests_common", "tests", "system", "unit",
 
 # All of the modules which have an extra license header (i.e. that we copy from another project) need to
 # ignore E402 -- module level import not at top level
-"scripts/ci/pre_commit/*.py" = ["E402"]
-"airflow-core/src/airflow/api/auth/backend/kerberos_auth.py" = ["E402"]
-"airflow-core/src/airflow/security/kerberos.py" = ["E402"]
-"airflow-core/src/airflow/security/utils.py" = ["E402"]
+"scripts/ci/pre_commit/*.py" = [ "E402" ]
+"airflow-core/src/airflow/api/auth/backend/kerberos_auth.py" = [ "E402" ]
+"airflow-core/src/airflow/security/kerberos.py" = [ "E402" ]
+"airflow-core/src/airflow/security/utils.py" = [ "E402" ]
+"airflow-core/tests/unit/always/test_connection.py" = [ "E402" ]
 "providers/google/tests/unit/google/cloud/hooks/vertex_ai/test_auto_ml.py" = [ "E402"]
 "providers/google/tests/unit/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = [ "E402"]
 "providers/google/tests/unit/google/cloud/hooks/vertex_ai/test_custom_job.py" = [ "E402"]