Commit 73262bc

chore: Move test requirements to optional dependencies in pyproject.toml (#263)

* Chore: Move test requirements to optional dependencies in pyproject.toml

Signed-off-by: Simon Brugman <sfbbrugman@gmail.com>

* Chore: Refactor setup.py for static dependencies

Signed-off-by: Simon Brugman <sfbbrugman@gmail.com>

* Chore: Replace setup.py with pyproject.toml

Signed-off-by: Simon Brugman <sfbbrugman@gmail.com>

* chore: remove `requirements-fixer` from pre-commit

Signed-off-by: Simon Brugman <sfbbrugman@gmail.com>

---------

Signed-off-by: Simon Brugman <sfbbrugman@gmail.com>
sbrugman authored Jul 20, 2023
1 parent 31b311e commit 73262bc
Showing 15 changed files with 276 additions and 268 deletions.
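Every hunk below applies the same pattern: test dependencies move out of a standalone `test_requirements.txt` and into a `test` extra under `[project.optional-dependencies]`, so CI, the Makefile, and contributors all install them with `pip install ".[test]"` (quoted, because unquoted square brackets are glob characters in shells such as zsh). A minimal sketch of the pattern, abridged from the kedro-airflow hunk further down, with a placeholder version that is not taken from the commit:

```toml
# Sketch only: test tooling declared as an optional "test" extra.
# The name/version metadata here is illustrative, not the real project's.
[project]
name = "kedro-airflow"
version = "0.0.0"

[project.optional-dependencies]
test = [
    "behave",      # drives the E2E tests referenced in CONTRIBUTING.md
    "pytest",
    "pytest-cov",
]
```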
6 changes: 3 additions & 3 deletions .github/workflows/check-plugin.yml
@@ -42,7 +42,7 @@ jobs:
- name: Install dependencies
run: |
cd ${{ inputs.plugin }}
-pip install . -r test_requirements.txt # TODO(deepyaman): Define `test` extra and `pip install .[test]`
+pip install ".[test]"
- name: pip freeze
run: pip freeze
- name: Run unit tests for Linux / all plugins
@@ -84,7 +84,7 @@ jobs:
run: |
cd ${{ inputs.plugin }}
pip install git+https://github.com/kedro-org/kedro@main
-pip install . -r test_requirements.txt # TODO(deepyaman): Define `test` extra and `pip install .[test]`
+pip install ".[test]"
pip freeze
- name: Install pre-commit hooks
run: |
@@ -120,7 +120,7 @@ jobs:
run: |
cd ${{ inputs.plugin }}
pip install git+https://github.com/kedro-org/kedro@main
-pip install . -r test_requirements.txt # TODO(deepyaman): Define `test` extra and `pip install .[test]`
+pip install ".[test]"
- name: pip freeze
run: pip freeze
- name: Run end to end tests
1 change: 0 additions & 1 deletion .pre-commit-config.yaml
@@ -14,7 +14,6 @@ repos:
- id: check-case-conflict # Check for files that would conflict in case-insensitive filesystems
- id: check-merge-conflict # Check for files that contain merge conflict strings.
- id: debug-statements # Check for debugger imports and py37+ `breakpoint()` calls in python source.
-- id: requirements-txt-fixer # Sorts entries in requirements.txt
- id: flake8
files: ^(kedro-datasets/kedro_datasets/|kedro-airflow/kedro_airflow/|kedro-docker/kedro_docker/|kedro-telemetry/kedro_telemetry/)
args:
4 changes: 2 additions & 2 deletions Makefile
@@ -14,7 +14,7 @@ install-pip-setuptools:
python -m pip install -U pip setuptools wheel

lint:
-pre-commit run trailing-whitespace --all-files && pre-commit run end-of-file-fixer --all-files && pre-commit run check-yaml --all-files && pre-commit run check-added-large-files --all-files && pre-commit run check-case-conflict --all-files && pre-commit run check-merge-conflict --all-files && pre-commit run debug-statements --all-files && pre-commit run requirements-txt-fixer --all-files && pre-commit run flake8 --all-files && pre-commit run isort-$(plugin) --all-files --hook-stage manual && pre-commit run black-$(plugin) --all-files --hook-stage manual && pre-commit run secret_scan --all-files --hook-stage manual && pre-commit run bandit --all-files --hook-stage manual && pre-commit run pylint-$(plugin) --all-files --hook-stage manual && pre-commit run pylint-$(plugin)-features --all-files --hook-stage manual && pre-commit run pylint-$(plugin)-tests --all-files --hook-stage manual
+pre-commit run trailing-whitespace --all-files && pre-commit run end-of-file-fixer --all-files && pre-commit run check-yaml --all-files && pre-commit run check-added-large-files --all-files && pre-commit run check-case-conflict --all-files && pre-commit run check-merge-conflict --all-files && pre-commit run debug-statements --all-files && pre-commit run flake8 --all-files && pre-commit run isort-$(plugin) --all-files --hook-stage manual && pre-commit run black-$(plugin) --all-files --hook-stage manual && pre-commit run secret_scan --all-files --hook-stage manual && pre-commit run bandit --all-files --hook-stage manual && pre-commit run pylint-$(plugin) --all-files --hook-stage manual && pre-commit run pylint-$(plugin)-features --all-files --hook-stage manual && pre-commit run pylint-$(plugin)-tests --all-files --hook-stage manual

test:
cd $(plugin) && pytest tests --cov-config pyproject.toml --numprocesses 4 --dist loadfile
@@ -35,7 +35,7 @@ clean:
find . -regex ".*\.egg-info" -exec rm -rf {} +;\

install-test-requirements:
-cd $(plugin) && pip install -r test_requirements.txt
+cd $(plugin) && pip install ".[test]"

install-pre-commit: install-test-requirements
pre-commit install --install-hooks
2 changes: 1 addition & 1 deletion kedro-airflow/CONTRIBUTING.md
@@ -77,7 +77,7 @@ We use a branching model that helps us keep track of branches in a logical, cons
To run E2E tests, you need to install the test requirements, which include `behave`. Do this using the following command:

```bash
-pip install -r test_requirements.txt
+pip install ".[test]"
```

### Running checks locally
3 changes: 1 addition & 2 deletions kedro-airflow/features/environment.py
@@ -50,8 +50,7 @@ def call(cmd, print_output=False):

# pip install us
call([context.python, "-m", "pip", "install", "-U", "pip", "pip-tools"])
call([context.pip, "install", "-r", "test_requirements.txt"])
call([context.pip, "install", "."])
call([context.pip, "install", ".[test]"])

context.temp_dir = Path(tempfile.mkdtemp())

17 changes: 17 additions & 0 deletions kedro-airflow/pyproject.toml
@@ -22,6 +22,23 @@ Source = "https://github.com/kedro-org/kedro-plugins/tree/main/kedro-airflow"
Documentation = "https://github.com/kedro-org/kedro-plugins/blob/main/kedro-airflow/README.md"
Tracker = "https://github.com/kedro-org/kedro-plugins/issues"

+[project.optional-dependencies]
+test = [
+"apache-airflow<3.0",
+"bandit>=1.6.2, <2.0",
+"behave",
+"black~=22.0",
+"flake8",
+"pre-commit>=1.17.0, <2.0",
+"pylint>=2.5.2, <3.0",
+"pytest",
+"pytest-cov",
+"pytest-mock",
+"pytest-xdist",
+"trufflehog>=2.1.0, <3.0",
+"wheel"
+]

[project.entry-points."kedro.project_commands"]
airflow = "kedro_airflow.plugin:commands"

13 changes: 0 additions & 13 deletions kedro-airflow/test_requirements.txt

This file was deleted.

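The thirteen deleted requirement lines appear to map one-for-one onto the thirteen entries of the `test` extra added to `kedro-airflow/pyproject.toml` above; the migration is mechanical, with each requirement line becoming a quoted list entry:

```toml
# Illustrative mapping for one pin taken from the hunks above:
#   test_requirements.txt:  trufflehog>=2.1.0, <3.0
#   pyproject.toml:
[project.optional-dependencies]
test = [
    "trufflehog>=2.1.0, <3.0",
]
```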
228 changes: 215 additions & 13 deletions kedro-datasets/pyproject.toml
@@ -5,15 +5,212 @@ build-backend = "setuptools.build_meta"
[project]
name = "kedro-datasets"
authors = [
-{name = "Kedro"}
+{name = "Kedro"}
]
description = "Kedro-Datasets is where you can find all of Kedro's data connectors."
requires-python = ">=3.7, <3.11"
license = {text = "Apache Software License (Apache 2.0)"}
dependencies = [
"kedro>=0.16",
"kedro>=0.16"
]
dynamic = ["readme", "version"]

+[project.optional-dependencies]
+api = ["kedro-datasets[api.APIDataSet]"]
+api-apidataset = ["requests~=2.20"]
+biosequence = ["kedro-datasets[biosequence.BioSequenceDataSet]"]
+biosequence-biosequencedataset = ["biopython~=1.73"]
+dask = ["kedro-datasets[dask.ParquetDataSet]"]
+dask-parquetdataset = ["dask[complete]~=2021.10", "triad>=0.6.7, <1.0"]
+databricks = ["kedro-datasets[databricks.ManagedTableDataSet]"]
+databricks-managedtabledataset = ["kedro-datasets[spark-base,pandas-base,delta-base]"]
+delta-base = ["delta-spark~=1.2.1"]
+geopandas = ["kedro-datasets[geopandas.GeoJSONDataSet]"]
+geopandas-geojsondataset = ["geopandas>=0.6.0, <1.0", "pyproj~=3.0"]
+hdfs-base = ["hdfs>=2.5.8, <3.0"]
+holoviews = ["kedro-datasets[holoviews.HoloviewsWriter]"]
+holoviews-holoviewswriter = ["holoviews~=1.13.0"]
+matplotlib = ["kedro-datasets[matplotlib.MatplotlibWriter]"]
+matplotlib-matplotlibwriter = ["matplotlib>=3.0.3, <4.0"]
+networkx = ["kedro-datasets[networkx.NetworkXDataSet]"]
+networkx-networkxdataset = ["networkx~=2.4"]
+pandas = [
+"""kedro-datasets[\
+pandas.CSVDataSet,\
+pandas.ExcelDataSet,\
+pandas.FeatherDataSet,\
+pandas.GBQTableDataSet,\
+pandas.GBQQueryDataSet,\
+pandas.HDFDataSet,\
+pandas.JSONDataSet,\
+pandas.ParquetDataSet,\
+pandas.SQLTableDataSet,\
+pandas.SQLQueryDataSet,\
+pandas.XMLDataSet,pandas.GenericDataSet\
+]"""
+]
+pandas-base = ["pandas>=1.3, <3.0"]
+pandas-csvdataset = ["kedro-datasets[pandas-base]"]
+pandas-exceldataset = ["kedro-datasets[pandas-base]", "openpyxl>=3.0.6, <4.0"]
+pandas-featherdataset = ["kedro-datasets[pandas-base]"]
+pandas-gbqquerydataset = [
+"kedro-datasets[pandas-base]",
+"pandas-gbq>=0.12.0, <0.18.0"
+]
+pandas-gbqtabledataset = [
+"kedro-datasets[pandas-base]",
+"pandas-gbq>=0.12.0, <0.18.0"
+]
+pandas-genericdataset = ["kedro-datasets[pandas-base]"]
+pandas-hdfdataset = [
+"kedro-datasets[pandas-base]",
+"tables~=3.6.0; platform_system == 'Windows'",
+"tables~=3.6; platform_system != 'Windows'"
+]
+pandas-jsondataset = ["kedro-datasets[pandas-base]"]
+pandas-parquetdataset = ["kedro-datasets[pandas-base]", "pyarrow>=6.0"]
+pandas-sqlquerydataset = [
+"kedro-datasets[pandas-base]",
+"SQLAlchemy>=1.4, <3.0",
+"pyodbc~=4.0"
+]
+pandas-sqltabledataset = ["kedro-datasets[pandas-base]", "SQLAlchemy>=1.4, <3.0"]
+pandas-xmldataset = ["kedro-datasets[pandas-base]", "lxml~=4.6"]
+pickle = ["kedro-datasets[pickle.PickleDataSet]"]
+pickle-pickledataset = ["compress-pickle[lz4]~=2.1.0"]
+pillow = ["kedro-datasets[pillow.ImageDataSet]"]
+pillow-imagedataset = ["Pillow~=9.0"]
+plotly = ["kedro-datasets[plotly.PlotlyDataSet,plotly.JSONDataSet]"]
+plotly-base = ["plotly>=4.8.0, <6.0"]
+plotly-jsondataset = ["kedro-datasets[plotly-base]"]
+plotly-plotlydataset = ["kedro-datasets[pandas-base,plotly-base]"]
+polars = ["kedro-datasets[polars.CSVDataSet]"]
+polars-base = ["polars~=0.17.0"]
+polars-csvdataset = ["kedro-datasets[polars-base]"]
+redis = ["kedro-datasets[redis.PickleDataSet]"]
+redis-pickledataset = ["redis~=4.1"]
+s3fs-base = ["s3fs>=0.3.0, <0.5"]
+snowflake = ["kedro-datasets[snowflake.SnowparkTableDataSet]"]
+snowflake-snowparktabledataset = [
+"snowflake-snowpark-python~=1.0.0; python_version == '3.8'",
+"pyarrow~=8.0"
+]
+spark = [
+"kedro-datasets[spark.SparkDataSet,spark.SparkHiveDataSet,spark.SparkJDBCDataSet,spark.DeltaTableDataSet]"
+]
+spark-base = ["pyspark>=2.2, <4.0"]
+spark-deltatabledataset = [
+"kedro-datasets[spark-base,hdfs-base,s3fs-base]",
+"delta-spark>=1.0, <3.0"
+]
+spark-sparkdataset = ["kedro-datasets[spark-base,hdfs-base,s3fs-base]"]
+spark-sparkhivedataset = ["kedro-datasets[spark-base,hdfs-base,s3fs-base]"]
+spark-sparkjdbcdataset = ["kedro-datasets[spark-base,hdfs-base,s3fs-base]"]
+svmlight = ["kedro-datasets[svmlight.SVMLightDataSet]"]
+svmlight-svmlightdataset = ["scikit-learn~=1.0.2", "scipy~=1.7.3"]
+tensorflow = ["kedro-datasets[tensorflow.TensorFlowModelDataSet]"]
+tensorflow-tensorflowmodeldataset = [
+# currently only TensorFlow V2 supported for saving and loading.
+# V1 requires HDF5 and serialises differently
+"tensorflow~=2.0; platform_system != 'Darwin' or platform_machine != 'arm64'",
+# https://developer.apple.com/metal/tensorflow-plugin/
+"tensorflow-macos~=2.0; platform_system == 'Darwin' and platform_machine == 'arm64'"
+]
+video = ["kedro-datasets[video.VideoDataSet]"]
+video-videodataset = ["opencv-python~=4.5.5.64"]
+yaml = ["kedro-datasets[yaml.YAMLDataSet]"]
+yaml-yamldataset = ["kedro-datasets[pandas-base]", "PyYAML>=4.2, <7.0"]
+
+all = [
+"""kedro-datasets[\
+api,biosequence,dask,databricks,\
+geopandas,holoviews,matplotlib,\
+networkx,pickle,pillow,plotly,\
+polars,snowflake,redis,spark,svmlight,\
+tensorflow,video,yaml\
+]"""
+]
+docs = [
+# docutils>=0.17 changed the HTML
+# see https://github.com/readthedocs/sphinx_rtd_theme/issues/1115
+"docutils==0.16",
+"sphinx~=5.3.0",
+"sphinx_rtd_theme==1.2.0",
+# Regression on sphinx-autodoc-typehints 1.21
+# that creates some problematic docstrings
+"sphinx-autodoc-typehints==1.20.2",
+"sphinx_copybutton==0.3.1",
+"sphinx-notfound-page",
+"ipykernel>=5.3, <7.0",
+"sphinxcontrib-mermaid~=0.7.1",
+"myst-parser~=1.0.0",
+"Jinja2<3.1.0"
+]
+test = [
+"adlfs>=2021.7.1, <=2022.2",
+"bandit>=1.6.2, <2.0",
+"behave==1.2.6",
+"biopython~=1.73",
+"blacken-docs==1.9.2",
+"black~=22.0",
+"compress-pickle[lz4]~=1.2.0",
+"coverage[toml]",
+"dask[complete]",
+"delta-spark~=1.2.1",
+# 1.2.0 has a bug that breaks some of our tests: https://github.com/delta-io/delta/issues/1070
+"dill~=0.3.1",
+"filelock>=3.4.0, <4.0",
+"gcsfs>=2021.4, <=2022.1",
+"geopandas>=0.6.0, <1.0",
+"hdfs>=2.5.8, <3.0",
+"holoviews~=1.13.0",
+"import-linter[toml]==1.2.6",
+"ipython>=7.31.1, <8.0",
+"Jinja2<3.1.0",
+"joblib>=0.14",
+"jupyterlab~=3.0",
+"jupyter~=1.0",
+"lxml~=4.6",
+"matplotlib>=3.0.3, <3.4; python_version < '3.10'", # 3.4.0 breaks holoviews
+"matplotlib>=3.5, <3.6; python_version == '3.10'",
+"memory_profiler>=0.50.0, <1.0",
+"moto==1.3.7; python_version < '3.10'",
+"moto==3.0.4; python_version == '3.10'",
+"networkx~=2.4",
+"opencv-python~=4.5.5.64",
+"openpyxl>=3.0.3, <4.0",
+"pandas-gbq>=0.12.0, <0.18.0",
+"pandas>=1.3, <2", # 1.3 for read_xml/to_xml, <2 for compatibility with Spark < 3.4
+"Pillow~=9.0",
+"plotly>=4.8.0, <6.0",
+"polars~=0.15.13",
+"pre-commit>=2.9.2, <3.0", # The hook `mypy` requires pre-commit version 2.9.2.
+"psutil==5.8.0",
+"pyarrow~=8.0",
+"pylint>=2.5.2, <3.0",
+"pyodbc~=4.0.35",
+"pyproj~=3.0",
+"pyspark>=2.2, <4.0",
+"pytest-cov~=3.0",
+"pytest-mock>=1.7.1, <2.0",
+"pytest-xdist[psutil]~=2.2.1",
+"pytest~=7.2",
+"redis~=4.1",
+"requests-mock~=1.6",
+"requests~=2.20",
+"s3fs>=0.3.0, <0.5", # Needs to be at least 0.3.0 to make use of `cachable` attribute on S3FileSystem.
+"scikit-learn~=1.0.2",
+"scipy~=1.7.3",
+"snowflake-snowpark-python~=1.0.0; python_version == '3.8'",
+"SQLAlchemy>=1.4, <3.0",
+# The `Inspector.has_table()` method replaces the `Engine.has_table()` method in version 1.4.
+"tables~=3.7",
+"tensorflow-macos~=2.0; platform_system == 'Darwin' and platform_machine == 'arm64'",
+"tensorflow~=2.0; platform_system != 'Darwin' or platform_machine != 'arm64'",
+"triad>=0.6.7, <1.0",
+"trufflehog~=2.1",
+"xlsxwriter~=1.0"
+]
dynamic = ["readme", "version", "optional-dependencies"]

[project.urls]
Source = "https://github.com/kedro-org/kedro-plugins/tree/main/kedro-datasets"
@@ -34,28 +231,33 @@ profile = "black"
[tool.pylint.master]
ignore = "CVS"
load-plugins = [
"pylint.extensions.docparams",
"pylint.extensions.no_self_use"
"pylint.extensions.docparams",
"pylint.extensions.no_self_use"
]
extension-pkg-whitelist = "cv2"
unsafe-load-any-extension = false

[tool.pylint.messages_control]
disable = [
"ungrouped-imports",
"duplicate-code",
"too-many-instance-attributes"
"ungrouped-imports",
"duplicate-code",
"too-many-instance-attributes"
]
enable = ["useless-suppression"]

[tool.pylint.refactoring]
max-nested-blocks = 5

[tool.pylint.format]
-indent-after-paren=4
-indent-string=" "
+indent-after-paren = 4
+indent-string = " "

[tool.pylint.miscellaneous]
notes = [
"FIXME",
"XXX"
"FIXME",
"XXX"
]

[tool.pylint.design]
min-public-methods = 1

Expand All @@ -66,7 +268,7 @@ omit = ["tests/*", "kedro_datasets/holoviews/*", "kedro_datasets/snowflake/*"]
exclude_lines = ["pragma: no cover", "raise NotImplementedError"]

[tool.pytest.ini_options]
addopts="""
addopts = """
--cov-report xml:coverage.xml \
--cov-report term-missing \
--cov kedro_datasets \
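Unlike kedro-airflow's flat list, kedro-datasets composes its extras in layers: group extras such as `pandas` refer back to per-dataset extras of the same distribution, which in turn share the `*-base` pins, so each version bound is written exactly once. The dotted references work because extra names are normalized (PEP 685 lowercases them and collapses runs of `.`, `_`, and `-` to `-`), so `pandas.CSVDataSet` resolves to the extra declared as `pandas-csvdataset`. With the table now spelled out statically, `optional-dependencies` also drops out of `dynamic`, per the removed line above. A condensed sketch of the chaining, assuming a pip and setuptools recent enough to resolve self-referencing extras:

```toml
# Condensed from the hunk above: three layers of extras, one shared pin.
[project.optional-dependencies]
pandas-base = ["pandas>=1.3, <3.0"]                  # the only place the bound appears
pandas-csvdataset = ["kedro-datasets[pandas-base]"]  # per-dataset extra
pandas = ["kedro-datasets[pandas.CSVDataSet]"]       # group extra; full list elided
```

`pip install "kedro-datasets[pandas]"` then walks the chain, and the pandas version bound stays defined in one place.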