2 changes: 1 addition & 1 deletion airflow-core/pyproject.toml
@@ -63,6 +63,7 @@ version = "3.0.0"
 
 dependencies = [
     "a2wsgi>=1.10.8",
+    "aiosqlite>=0.20.0",
     # Alembic is important to handle our migrations in predictable and performant way. It is developed
     # together with SQLAlchemy. Our experience with Alembic is that it very stable in minor version
     # The 1.13.0 of alembic marked some migration code as SQLAlchemy 2+ only so we limit it to 1.13.1
@@ -139,7 +140,6 @@ dependencies = [
     "apache-airflow-providers-common-sql>=1.24.1",
     "apache-airflow-providers-smtp>=2.0.1",
     "apache-airflow-providers-standard>=0.2.0",
-    "apache-airflow-providers-sqlite>=4.0.1",
 ]


11 changes: 10 additions & 1 deletion airflow-core/tests/unit/always/test_connection.py
@@ -31,7 +31,8 @@
 from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.models import Connection, crypto
-from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
+sqlite = pytest.importorskip("airflow.providers.sqlite.hooks.sqlite")
 
 from tests_common.test_utils.config import conf_vars
 from tests_common.test_utils.markers import skip_if_force_lowest_dependencies_marker
@@ -594,6 +595,8 @@ def test_from_json_special_characters(self, val, expected):
         },
     )
     def test_using_env_var(self):
+        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
         conn = SqliteHook.get_connection(conn_id="test_uri")
         assert conn.host == "ec2.compute.com"
         assert conn.schema == "the_database"
@@ -610,6 +613,8 @@ def test_using_env_var(self):
         },
     )
     def test_using_unix_socket_env_var(self):
+        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
         conn = SqliteHook.get_connection(conn_id="test_uri_no_creds")
         assert conn.host == "ec2.compute.com"
         assert conn.schema == "the_database"
@@ -634,6 +639,8 @@ def test_param_setup(self):
 
     @pytest.mark.db_test
     def test_env_var_priority(self):
+        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
         conn = SqliteHook.get_connection(conn_id="airflow_db")
         assert conn.host != "ec2.compute.com"
 
@@ -687,6 +694,8 @@ def test_dbapi_get_sqlalchemy_engine(self):
         },
     )
     def test_get_connections_env_var(self):
+        from airflow.providers.sqlite.hooks.sqlite import SqliteHook
+
         conns = SqliteHook.get_connection(conn_id="test_uri")
         assert conns.host == "ec2.compute.com"
         assert conns.schema == "the_database"
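For reference, the pattern these hunks introduce can be reduced to a minimal standalone pytest module: pytest.importorskip skips the whole file at collection time when the optional sqlite provider is not installed, and the SqliteHook import is deferred into the test body so importing the module never fails. This is a sketch only; the test name, environment variable value, and assertion below are illustrative and not taken from the PR.

import pytest

# Skip every test in this module at collection time if the optional
# sqlite provider is missing; otherwise this returns the imported module.
sqlite = pytest.importorskip("airflow.providers.sqlite.hooks.sqlite")


def test_hook_reads_connection_from_env(monkeypatch):
    # Deferred import: safe because importorskip above already proved
    # the provider module is importable.
    from airflow.providers.sqlite.hooks.sqlite import SqliteHook

    # Illustrative connection URI resolved through the environment-variables
    # secrets backend (AIRFLOW_CONN_<CONN_ID>).
    monkeypatch.setenv("AIRFLOW_CONN_TEST_URI", "sqlite://ec2.compute.com/the_database")
    conn = SqliteHook.get_connection(conn_id="test_uri")
    assert conn.host == "ec2.compute.com"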
@@ -17,7 +17,8 @@
 from __future__ import annotations
 
 import os
-from unittest import mock
+from importlib.metadata import PackageNotFoundError, metadata
+from unittest import SkipTest, mock
 
 import pytest
 
@@ -828,6 +829,12 @@ def test_patch_should_response_200_redacted_password(
 
 
 class TestConnection(TestConnectionEndpoint):
+    def setup_method(self):
+        try:
+            metadata("apache-airflow-providers-sqlite")
+        except PackageNotFoundError:
+            raise SkipTest("The SQlite distribution package is not installed.")
+
     @mock.patch.dict(os.environ, {"AIRFLOW__CORE__TEST_CONNECTION": "Enabled"})
     @pytest.mark.parametrize(
         "body, message",
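Similarly, the setup_method guard added to TestConnection generalizes to any test class that depends on an optional distribution: importlib.metadata.metadata() raises PackageNotFoundError when the package is absent, and pytest converts a raised unittest.SkipTest into a skipped test. A self-contained sketch, with hypothetical class and test names:

from importlib.metadata import PackageNotFoundError, metadata
from unittest import SkipTest


class TestNeedsSqliteProvider:
    def setup_method(self):
        # Probe the installed distributions; metadata() raises
        # PackageNotFoundError when the package is not installed.
        try:
            metadata("apache-airflow-providers-sqlite")
        except PackageNotFoundError:
            raise SkipTest("apache-airflow-providers-sqlite is not installed")

    def test_only_runs_when_provider_installed(self):
        # Placeholder body; real tests would exercise the provider here.
        assert True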
@@ -25,5 +25,5 @@ RUN apt-get update \
     && rm -rf /var/lib/apt/lists/*
 USER airflow
 ENV JAVA_HOME=/usr/lib/jvm/java-17-openjdk-amd64
-RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" apache-airflow-providers-apache-spark==2.1.3
+RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" apache-airflow-providers-apache-spark==5.1.1
 # [END Dockerfile]
1 change: 1 addition & 0 deletions providers/apache/livy/pyproject.toml
@@ -72,6 +72,7 @@ dev = [
     "apache-airflow-providers-common-compat",
     "apache-airflow-providers-http",
     # Additional devel dependencies (do not remove this line and add extra development dependencies)
+    "apache-airflow-providers-openlineage",
 ]
 
 # To build docs:
1 change: 0 additions & 1 deletion providers/sqlite/README.rst
@@ -54,7 +54,6 @@ Requirements
 PIP package                              Version required
 =======================================  ==================
 ``apache-airflow``                       ``>=2.9.0``
-``aiosqlite``                            ``>=0.20.0``
 ``apache-airflow-providers-common-sql``  ``>=1.20.0``
 =======================================  ==================

1 change: 0 additions & 1 deletion providers/sqlite/pyproject.toml
@@ -58,7 +58,6 @@ requires-python = "~=3.9"
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
     "apache-airflow>=2.9.0",
-    "aiosqlite>=0.20.0",
     "apache-airflow-providers-common-sql>=1.20.0",
 ]

9 changes: 5 additions & 4 deletions pyproject.toml
@@ -725,10 +725,11 @@ testing = ["dev", "providers.tests", "tests_common", "tests", "system", "unit",
 
 # All of the modules which have an extra license header (i.e. that we copy from another project) need to
 # ignore E402 -- module level import not at top level
-"scripts/ci/pre_commit/*.py" = ["E402"]
-"airflow-core/src/airflow/api/auth/backend/kerberos_auth.py" = ["E402"]
-"airflow-core/src/airflow/security/kerberos.py" = ["E402"]
-"airflow-core/src/airflow/security/utils.py" = ["E402"]
+"scripts/ci/pre_commit/*.py" = [ "E402" ]
+"airflow-core/src/airflow/api/auth/backend/kerberos_auth.py" = [ "E402" ]
+"airflow-core/src/airflow/security/kerberos.py" = [ "E402" ]
+"airflow-core/src/airflow/security/utils.py" = [ "E402" ]
+"airflow-core/tests/unit/always/test_connection.py" = [ "E402" ]
 "providers/google/tests/unit/google/cloud/hooks/vertex_ai/test_auto_ml.py" = [ "E402"]
 "providers/google/tests/unit/google/cloud/hooks/vertex_ai/test_batch_prediction_job.py" = [ "E402"]
 "providers/google/tests/unit/google/cloud/hooks/vertex_ai/test_custom_job.py" = [ "E402"]
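The new per-file ignore for test_connection.py is needed because E402 ("module level import not at top of file", enforced here through Ruff's per-file-ignores) flags any import that follows module-level executable code, and the pytest.importorskip(...) call added to that file is exactly such code. A minimal reproduction, with sqlite3 standing in for the provider module so the snippet runs anywhere:

import pytest

# Module-level executable code (an assignment, not an import) ...
sqlite = pytest.importorskip("sqlite3")  # "sqlite3" is a stand-in for the provider module

# ... so the linter reports E402 on this later import unless the file
# is listed in per-file-ignores, as done in the hunk above.
import os

print("sqlite", sqlite.sqlite_version, "cwd", os.getcwd())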