From 42f4487a1bd03cffc73a7f5505139d409ec0c987 Mon Sep 17 00:00:00 2001 From: Pranav Rathi <4427674+pranavrth@users.noreply.github.com> Date: Fri, 18 Oct 2024 12:14:58 +0530 Subject: [PATCH] Introduced pyproject.toml and moved static metadata from setup.py (#1592) Introduced pyproject.toml and moved static metadata from setup.py --- CHANGELOG.md | 1 + DEVELOPER.md | 8 +-- LICENSE.txt => LICENSE | 0 MANIFEST.in | 4 +- Makefile | 3 +- docs/conf.py | 8 --- examples/README.md | 8 +-- examples/docker/Dockerfile.alpine | 7 +- pyproject.toml | 67 +++++++++++++++++ requirements/requirements-all.txt | 9 +++ requirements/requirements-avro.txt | 4 ++ .../requirements-docs.txt | 0 .../requirements-examples.txt | 0 requirements/requirements-json.txt | 2 + requirements/requirements-protobuf.txt | 1 + requirements/requirements-schemaregistry.txt | 1 + .../requirements-soaktest.txt | 0 requirements/requirements-tests-install.txt | 5 ++ requirements/requirements-tests.txt | 8 +++ requirements/requirements.txt | 0 setup.py | 72 +------------------ src/confluent_kafka/avro/requirements.txt | 3 - src/confluent_kafka/requirements.txt | 2 - .../schema_registry/requirements.txt | 6 -- src/confluent_kafka/src/confluent_kafka.h | 2 +- tests/README.md | 4 +- tests/requirements.txt | 14 ---- tests/soak/bootstrap.sh | 4 +- tests/soak/build.sh | 2 - tools/RELEASE.md | 31 ++++---- tools/build-manylinux.sh | 2 +- tools/smoketest.sh | 2 +- tools/source-package-verification.sh | 9 +-- tools/wheels/build-wheels.bat | 2 +- tools/wheels/build-wheels.sh | 7 +- tools/windows-build.bat | 5 +- tox.ini | 15 +--- 37 files changed, 149 insertions(+), 169 deletions(-) rename LICENSE.txt => LICENSE (100%) create mode 100644 pyproject.toml create mode 100644 requirements/requirements-all.txt create mode 100644 requirements/requirements-avro.txt rename docs/requirements.txt => requirements/requirements-docs.txt (100%) rename examples/requirements.txt => requirements/requirements-examples.txt (100%) create mode 100644 requirements/requirements-json.txt create mode 100644 requirements/requirements-protobuf.txt create mode 100644 requirements/requirements-schemaregistry.txt rename tests/soak/requirements.txt => requirements/requirements-soaktest.txt (100%) create mode 100644 requirements/requirements-tests-install.txt create mode 100644 requirements/requirements-tests.txt create mode 100644 requirements/requirements.txt delete mode 100644 src/confluent_kafka/avro/requirements.txt delete mode 100644 src/confluent_kafka/requirements.txt delete mode 100644 src/confluent_kafka/schema_registry/requirements.txt delete mode 100644 tests/requirements.txt diff --git a/CHANGELOG.md b/CHANGELOG.md index 9be3b1499..7f013027c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ v2.6.1 is a maintenance release with the following fixes and enhancements: + - Migrated build system from `setup.py` to `pyproject.toml` in accordance with `PEP 517` and `PEP 518`, improving project configuration, build system requirements management, and compatibility with modern Python packaging tools like `pip` and `build`. 
- Added an example for OAUTH OIDC producer with support for confluent cloud (#1769, @sarwarbhuiyan) confluent-kafka-python is based on librdkafka v2.6.1, see the diff --git a/DEVELOPER.md b/DEVELOPER.md index 9c68b0767..8a50c7bf9 100644 --- a/DEVELOPER.md +++ b/DEVELOPER.md @@ -5,19 +5,19 @@ This document provides information useful to developers working on confluent-kaf ## Build - $ python setup.py build + $ python -m build If librdkafka is installed in a non-standard location provide the include and library directories with: - $ C_INCLUDE_PATH=/path/to/include LIBRARY_PATH=/path/to/lib python setup.py ... + $ C_INCLUDE_PATH=/path/to/include LIBRARY_PATH=/path/to/lib python -m build **Note**: On Windows the variables for Visual Studio are named INCLUDE and LIB ## Generate Documentation -Install sphinx and sphinx_rtd_theme packages: +Install docs dependencies: - $ pip install sphinx sphinx_rtd_theme + $ pip install .[docs] Build HTML docs: diff --git a/LICENSE.txt b/LICENSE similarity index 100% rename from LICENSE.txt rename to LICENSE diff --git a/MANIFEST.in b/MANIFEST.in index 4590b1dba..2f7a4818d 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ include README.md -include LICENSE.txt -include test-requirements.txt include src/confluent_kafka/src/*.[ch] +prune tests +prune docs \ No newline at end of file diff --git a/Makefile b/Makefile index 83fdd30c3..3615e2b93 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,8 @@ all: clean: - python setup.py clean + pip cache purge + rm -rf dist make -C docs clean .PHONY: docs diff --git a/docs/conf.py b/docs/conf.py index 5740337b0..7af81f36e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,14 +13,6 @@ # serve to show the default. import sphinx_rtd_theme -import sys -import os -from glob import glob - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path[:0] = [os.path.abspath(x) for x in glob('../build/lib.*')]
 # -- General configuration ------------------------------------------------
diff --git a/examples/README.md b/examples/README.md
index 32a248a1d..b68fb8886 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -30,8 +30,8 @@ To setup a venv with the latest release version of confluent-kafka and dependenc
 ```
 $ python3 -m venv venv_examples
 $ source venv_examples/bin/activate
-$ cd examples
-$ pip install -r requirements.txt
+$ pip install confluent_kafka
+$ pip install -r requirements/requirements-examples.txt
 ```
 To setup a venv that uses the current source tree version of confluent_kafka, you
@@ -42,9 +42,7 @@ need to have a C compiler and librdkafka installed
 ```
 $ python3 -m venv venv_examples
 $ source venv_examples/bin/activate
-$ python setup.py develop
-$ cd examples
-$ pip install -r requirements.txt
+$ pip install .[examples]
 ```
 When you're finished with the venv:
diff --git a/examples/docker/Dockerfile.alpine b/examples/docker/Dockerfile.alpine
index b3af04a60..4dfc57bd3 100644
--- a/examples/docker/Dockerfile.alpine
+++ b/examples/docker/Dockerfile.alpine
@@ -73,9 +73,10 @@ RUN \
 mkdir -p /usr/src/confluent-kafka-python && \
 cd /usr/src/confluent-kafka-python && \
 rm -rf build && \
- python3 setup.py clean -a && \
- python3 setup.py build && \
- python3 setup.py install && \
+ rm -rf dist && \
+ python3 -m pip install build && \
+ python3 -m build && \
+ python3 -m pip install dist/confluent_kafka*whl && \
 cd / && \
 rm -rf /usr/src/confluent-kafka-python
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 000000000..8931f8f65
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,67 @@
+[build-system]
+requires = [ "setuptools>=62", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "confluent-kafka"
+version = "2.6.0.post1.dev1"
+description = "Confluent's Python client for Apache Kafka"
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Topic :: Software Development :: Libraries :: Python Modules"]
+readme = "README.md"
+license = { file = "LICENSE" }
+requires-python = ">=3.7"
+dynamic = ["dependencies", "optional-dependencies"]
+
+[[project.authors]]
+name = "Confluent Inc."
+email = "support@confluent.io" + +[project.urls] +Homepage = "https://github.com/confluentinc/confluent-kafka-python" + +[tool.setuptools] +include-package-data = false + +[tool.setuptools.dynamic] +dependencies = {file = ["requirements/requirements.txt"]} +optional-dependencies.schemaregistry = { file = ["requirements/requirements-schemaregistry.txt"] } +optional-dependencies.avro = { file = ["requirements/requirements-avro.txt", "requirements/requirements-schemaregistry.txt"] } +optional-dependencies.json = { file = ["requirements/requirements-json.txt", "requirements/requirements-schemaregistry.txt"] } +optional-dependencies.protobuf = { file = ["requirements/requirements-protobuf.txt", "requirements/requirements-schemaregistry.txt"] } +optional-dependencies.dev = { file = [ + "requirements/requirements-docs.txt", + "requirements/requirements-examples.txt", + "requirements/requirements-tests.txt", + "requirements/requirements-schemaregistry.txt", + "requirements/requirements-avro.txt", + "requirements/requirements-json.txt", + "requirements/requirements-protobuf.txt"] } +optional-dependencies.docs = { file = [ + "requirements/requirements-docs.txt", + "requirements/requirements-schemaregistry.txt", + "requirements/requirements-avro.txt", + "requirements/requirements-json.txt", + "requirements/requirements-protobuf.txt"] } +optional-dependencies.tests = { file = [ + "requirements/requirements-tests.txt", + "requirements/requirements-schemaregistry.txt", + "requirements/requirements-avro.txt", + "requirements/requirements-json.txt", + "requirements/requirements-protobuf.txt"] } +optional-dependencies.examples = { file = ["requirements/requirements-examples.txt"] } +optional-dependencies.soaktest = { file = ["requirements/requirements-soaktest.txt"] } +optional-dependencies.all = { file = [ + "requirements/requirements-soaktest.txt", + "requirements/requirements-docs.txt", + "requirements/requirements-examples.txt", + "requirements/requirements-tests.txt", + "requirements/requirements-schemaregistry.txt", + "requirements/requirements-avro.txt", + "requirements/requirements-json.txt", + "requirements/requirements-protobuf.txt"] } diff --git a/requirements/requirements-all.txt b/requirements/requirements-all.txt new file mode 100644 index 000000000..6877fa4fc --- /dev/null +++ b/requirements/requirements-all.txt @@ -0,0 +1,9 @@ +-r requirements.txt +-r requirements-schemaregistry.txt +-r requirements-avro.txt +-r requirements-protobuf.txt +-r requirements-json.txt +-r requirements-examples.txt +-r requirements-tests.txt +-r requirements-docs.txt +-r requirements-soaktest.txt \ No newline at end of file diff --git a/requirements/requirements-avro.txt b/requirements/requirements-avro.txt new file mode 100644 index 000000000..ccb70d0c4 --- /dev/null +++ b/requirements/requirements-avro.txt @@ -0,0 +1,4 @@ +fastavro < 1.8.0; python_version == "3.7" +fastavro < 2; python_version > "3.7" +requests +avro>=1.11.1,<2 \ No newline at end of file diff --git a/docs/requirements.txt b/requirements/requirements-docs.txt similarity index 100% rename from docs/requirements.txt rename to requirements/requirements-docs.txt diff --git a/examples/requirements.txt b/requirements/requirements-examples.txt similarity index 100% rename from examples/requirements.txt rename to requirements/requirements-examples.txt diff --git a/requirements/requirements-json.txt b/requirements/requirements-json.txt new file mode 100644 index 000000000..b2dfe5e73 --- /dev/null +++ b/requirements/requirements-json.txt @@ -0,0 +1,2 @@ 
+pyrsistent +jsonschema \ No newline at end of file diff --git a/requirements/requirements-protobuf.txt b/requirements/requirements-protobuf.txt new file mode 100644 index 000000000..b0c79cc0e --- /dev/null +++ b/requirements/requirements-protobuf.txt @@ -0,0 +1 @@ +protobuf diff --git a/requirements/requirements-schemaregistry.txt b/requirements/requirements-schemaregistry.txt new file mode 100644 index 000000000..f2293605c --- /dev/null +++ b/requirements/requirements-schemaregistry.txt @@ -0,0 +1 @@ +requests diff --git a/tests/soak/requirements.txt b/requirements/requirements-soaktest.txt similarity index 100% rename from tests/soak/requirements.txt rename to requirements/requirements-soaktest.txt diff --git a/requirements/requirements-tests-install.txt b/requirements/requirements-tests-install.txt new file mode 100644 index 000000000..3ea242f93 --- /dev/null +++ b/requirements/requirements-tests-install.txt @@ -0,0 +1,5 @@ +-r requirements-tests.txt +-r requirements-schemaregistry.txt +-r requirements-avro.txt +-r requirements-protobuf.txt +-r requirements-json.txt \ No newline at end of file diff --git a/requirements/requirements-tests.txt b/requirements/requirements-tests.txt new file mode 100644 index 000000000..225dde9f4 --- /dev/null +++ b/requirements/requirements-tests.txt @@ -0,0 +1,8 @@ +# core test requirements +urllib3<2.0.0;python_version<="3.7" +urllib3 >= 2.0.0,<3; python_version > "3.7" +flake8 +pytest +pytest-timeout +requests-mock +trivup>=0.8.3 diff --git a/requirements/requirements.txt b/requirements/requirements.txt new file mode 100644 index 000000000..e69de29bb diff --git a/setup.py b/setup.py index ab6efb0bb..c401e6ad3 100755 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ #!/usr/bin/env python import os -from setuptools import setup, find_packages +from setuptools import setup from distutils.core import Extension import platform @@ -9,33 +9,6 @@ mod_dir = os.path.join(work_dir, 'src', 'confluent_kafka') ext_dir = os.path.join(mod_dir, 'src') -INSTALL_REQUIRES = [ - 'futures;python_version<"3.2"', - 'enum34;python_version<"3.4"', -] - -TEST_REQUIRES = [ - 'pytest==4.6.4;python_version<"3.0"', - 'pytest;python_version>="3.0"', - 'pytest-timeout', - 'flake8' -] - -DOC_REQUIRES = ['sphinx', 'sphinx-rtd-theme'] - -SCHEMA_REGISTRY_REQUIRES = ['requests'] - -AVRO_REQUIRES = ['fastavro>=0.23.0,<1.0;python_version<"3.0"', - 'fastavro>=1.0;python_version>"3.0"', - 'avro>=1.11.1,<2', - ] + SCHEMA_REGISTRY_REQUIRES - -JSON_REQUIRES = ['pyrsistent==0.16.1;python_version<"3.0"', - 'pyrsistent;python_version>"3.0"', - 'jsonschema'] + SCHEMA_REGISTRY_REQUIRES - -PROTO_REQUIRES = ['protobuf'] + SCHEMA_REGISTRY_REQUIRES - # On Un*x the library is linked as -lrdkafka, # while on windows we need the full librdkafka name. 
if platform.system() == 'Windows': @@ -52,45 +25,4 @@ os.path.join(ext_dir, 'AdminTypes.c'), os.path.join(ext_dir, 'Admin.c')]) - -def get_install_requirements(path): - content = open(os.path.join(os.path.dirname(__file__), path)).read() - return [ - req - for req in content.split("\n") - if req != '' and not req.startswith('#') - ] - - -trove_classifiers = [ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Topic :: Software Development :: Libraries :: Python Modules', -] - -setup(name='confluent-kafka', - # Make sure to bump CFL_VERSION* in confluent_kafka/src/confluent_kafka.h - # and version in docs/conf.py. - version='2.6.0', - description='Confluent\'s Python client for Apache Kafka', - author='Confluent Inc', - author_email='support@confluent.io', - url='https://github.com/confluentinc/confluent-kafka-python', - ext_modules=[module], - packages=find_packages('src'), - package_dir={'': 'src'}, - data_files=[('', [os.path.join(work_dir, 'LICENSE.txt')])], - install_requires=INSTALL_REQUIRES, - classifiers=trove_classifiers, - extras_require={ - 'schema-registry': SCHEMA_REGISTRY_REQUIRES, - 'avro': AVRO_REQUIRES, - 'json': JSON_REQUIRES, - 'protobuf': PROTO_REQUIRES, - 'dev': TEST_REQUIRES + AVRO_REQUIRES, - 'doc': DOC_REQUIRES + AVRO_REQUIRES - }) +setup(ext_modules=[module]) diff --git a/src/confluent_kafka/avro/requirements.txt b/src/confluent_kafka/avro/requirements.txt deleted file mode 100644 index e34a65dd8..000000000 --- a/src/confluent_kafka/avro/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -fastavro>=0.23.0 -requests -avro>=1.11.1,<2 diff --git a/src/confluent_kafka/requirements.txt b/src/confluent_kafka/requirements.txt deleted file mode 100644 index c329b5c54..000000000 --- a/src/confluent_kafka/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -enum34;python_version<"3.4" -futures;python_version<"3.2" diff --git a/src/confluent_kafka/schema_registry/requirements.txt b/src/confluent_kafka/schema_registry/requirements.txt deleted file mode 100644 index 83c8d721e..000000000 --- a/src/confluent_kafka/schema_registry/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -fastavro>=0.23.0 -pyrsistent==0.16.1;python_version<"3.0" -pyrsistent;python_version>"3.0" -jsonschema -protobuf -requests diff --git a/src/confluent_kafka/src/confluent_kafka.h b/src/confluent_kafka/src/confluent_kafka.h index c1fac64e9..f86cefa4e 100644 --- a/src/confluent_kafka/src/confluent_kafka.h +++ b/src/confluent_kafka/src/confluent_kafka.h @@ -36,7 +36,7 @@ /** - * @brief confluent-kafka-python version, must match that of setup.py. + * @brief confluent-kafka-python version, must match that of pyproject.toml. 
 *
 * Hex version representation:
 *  0xMMmmRRPP
diff --git a/tests/README.md b/tests/README.md
index e3b2fe30f..956d67907 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -18,9 +18,7 @@ A python3 env suitable for running tests:
 $ python3 -m venv venv_test
 $ source venv_test/bin/activate
- $ pip install -r tests/requirements.txt
- $ python setup.py build
- $ python setup.py install
+ $ python3 -m pip install .[tests]
 When you're finished with it:
diff --git a/tests/requirements.txt b/tests/requirements.txt
deleted file mode 100644
index b5e20efb2..000000000
--- a/tests/requirements.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-urllib3<2.0.0;python_version<="3.7"
-urllib3
-flake8
-pytest==4.6.9;python_version<="3.0"
-pytest>=6.0.0;python_version>="3.0"
-pytest-timeout
-requests-mock
-trivup>=0.8.3
-fastavro<1.8.0;python_version=="3.7"
-fastavro>=1.8.4;python_version>"3.7"
-fastavro
-avro>=1.11.1,<2
-jsonschema
-protobuf
diff --git a/tests/soak/bootstrap.sh b/tests/soak/bootstrap.sh
index e110fabeb..1900c97ec 100755
--- a/tests/soak/bootstrap.sh
+++ b/tests/soak/bootstrap.sh
@@ -45,9 +45,7 @@ source $venv/bin/activate
 pip install -U pip
-pip install -v .
-
-pip install -r tests/soak/requirements.txt
+pip install -v .[soaktest]
 popd # ..python
diff --git a/tests/soak/build.sh b/tests/soak/build.sh
index 795dcfb69..279ecf458 100755
--- a/tests/soak/build.sh
+++ b/tests/soak/build.sh
@@ -32,8 +32,6 @@ set -u
 pushd confluent-kafka-python
 git fetch --tags
 git checkout $cflpy_version
-python3 setup.py clean -a
-python3 setup.py build
 python3 -m pip install .
 popd
diff --git a/tools/RELEASE.md b/tools/RELEASE.md
index 72bc622d2..c9c007ea4 100644
--- a/tools/RELEASE.md
+++ b/tools/RELEASE.md
@@ -140,11 +140,11 @@ RCs, so it only needs to be set once for each release.
 * `src/confluent_kafka/src/confluent_kafka.h`
 update both `CFL_VERSION` and `CFL_VERSION_STR`.
 * `docs/conf.py` - change `version` variable.
- * `setup.py` - change `version` argument to `setup()`.
+ * `pyproject.toml` - change `version` field.
 Commit these changes with a commit-message containing the version:
- $ git commit -m "Version v0.11.4rc1" src/confluent_kafka/src/confluent_kafka.c docs/conf.py setup.py
+ $ git commit -m "Version v0.11.4rc1" src/confluent_kafka/src/confluent_kafka.h docs/conf.py pyproject.toml
 ## 5. Tag, CI build, wheel verification, upload
@@ -290,30 +290,29 @@ With the PR merged to master, check out and update master:
 Now go back to 5.1 and start the final RELEASE ITERATION.
-### 5.6. Upload wheel packages to PyPi
+### 5.6. Create source distribution
-**CANDIDATE ITERATION:** To upload binary packages to test.pypi.org, use:
+When creating the source packages make sure to have checked out the correct tag
+and that you do not have any uncommitted modifications and that the `dist/`
+directory is empty.
- $ twine upload -r test dl-v0.11.4rc1/*
+ $ python -m build -s
-**RELEASE ITERATION:** To upload binary packages to the proper pypi.org (WARNING!), use:
+The above command will create the necessary source distribution. Move this
+generated sdist file to the correct `tools/dl-` folder
- $ twine upload dl-v0.11.4rc1/*
+ $ mv dist/confluent-kafka-0.11.4rc1.tar.gz tools/dl-v0.11.4rc1/
-### 5.7. Upload source packages to PyPi
+### 5.7. Upload wheel packages and sdist to PyPi
-When uploading source packages make sure to have checked out the correct tag
-and that you do not have any uncommited modifications and that the `build/`
-directory is empty.
- -**CANDIDATE ITERATION:** Upload source packages to test.pypi.org: +**CANDIDATE ITERATION:** To upload binary packages to test.pypi.org, use: - $ python setup.py sdist upload -r test + $ twine upload -r test tools/dl-v0.11.4rc1/* -**RELEASE ITERATION:** Upload source packages to the proper pypi.org (WARNING!): +**RELEASE ITERATION:** To upload binary packages to the proper pypi.org (WARNING!), use: - $ python setup.py sdist upload + $ twine upload tools/dl-v0.11.4rc1/* ### 5.8. Verify installation from PyPi diff --git a/tools/build-manylinux.sh b/tools/build-manylinux.sh index f3a596a73..c39857c9b 100755 --- a/tools/build-manylinux.sh +++ b/tools/build-manylinux.sh @@ -15,7 +15,7 @@ # docker run -t -v $(pwd):/io quay.io/pypa/manylinux2010_x86_64:latest /io/tools/build-manylinux.sh LIBRDKAFKA_VERSION=$1 -PYTHON_VERSIONS=("cp36" "cp37" "cp38" "cp39" "cp310" "cp311" "cp312" "cp313") +PYTHON_VERSIONS=("cp37" "cp38" "cp39" "cp310" "cp311" "cp312" "cp313") if [[ -z "$LIBRDKAFKA_VERSION" ]]; then echo "Usage: $0 " diff --git a/tools/smoketest.sh b/tools/smoketest.sh index af2afd6fe..2140b5a8d 100755 --- a/tools/smoketest.sh +++ b/tools/smoketest.sh @@ -60,7 +60,7 @@ for py in 3.9 ; do hash -r pip install -U pip pkginfo - pip install -r tests/requirements.txt + pip install -r requirements/requirements-tests-install.txt # Get the packages version so we can pin the install # command to this version (which hopefully loads it from the wheeldir diff --git a/tools/source-package-verification.sh b/tools/source-package-verification.sh index eb7506061..09830ef5b 100755 --- a/tools/source-package-verification.sh +++ b/tools/source-package-verification.sh @@ -5,9 +5,8 @@ # set -e -pip install -r docs/requirements.txt -pip install -U protobuf -pip install -r tests/requirements.txt +pip install -r requirements/requirements-tests-install.txt +pip install -U build lib_dir=dest/runtimes/$OS_NAME-$ARCH/native tools/wheels/install-librdkafka.sh "${LIBRDKAFKA_VERSION#v}" dest @@ -16,9 +15,11 @@ export LDFLAGS="$LDFLAGS -L${PWD}/${lib_dir}" export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$PWD/$lib_dir" export DYLD_LIBRARY_PATH="$DYLD_LIBRARY_PATH:$PWD/$lib_dir" -python setup.py build && python setup.py install +python3 -m pip install . + if [[ $OS_NAME == linux && $ARCH == x64 ]]; then flake8 --exclude ./_venv,*_pb2.py + pip install -r requirements/requirements-docs.txt make docs python -m pytest --timeout 1200 --ignore=dest else diff --git a/tools/wheels/build-wheels.bat b/tools/wheels/build-wheels.bat index d23ef4311..ddb1bd15b 100644 --- a/tools/wheels/build-wheels.bat +++ b/tools/wheels/build-wheels.bat @@ -13,7 +13,7 @@ set WHEELHOUSE=%4 if [%WHEELHOUSE%]==[] goto usage echo on -set CIBW_BUILD=cp36-%BW_ARCH% cp37-%BW_ARCH% cp38-%BW_ARCH% cp39-%BW_ARCH% cp310-%BW_ARCH% cp311-%BW_ARCH% cp312-%BW_ARCH% cp313-%BW_ARCH% +set CIBW_BUILD=cp37-%BW_ARCH% cp38-%BW_ARCH% cp39-%BW_ARCH% cp310-%BW_ARCH% cp311-%BW_ARCH% cp312-%BW_ARCH% cp313-%BW_ARCH% set CIBW_BEFORE_BUILD=python -m pip install delvewheel==1.1.4 set CIBW_TEST_REQUIRES=pytest set CIBW_TEST_COMMAND=pytest {project}\tests\test_Producer.py diff --git a/tools/wheels/build-wheels.sh b/tools/wheels/build-wheels.sh index 5ccf0b0b1..73e2d4858 100755 --- a/tools/wheels/build-wheels.sh +++ b/tools/wheels/build-wheels.sh @@ -8,7 +8,7 @@ this_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" # Skip PyPy, Python2, old Python3 versions, musl, and x86 builds. 
-export CIBW_SKIP="pp* cp27-* cp35-* *i686 *musllinux* $CIBW_SKIP" +export CIBW_SKIP="pp* cp27-* cp35-* cp36-* *i686 *musllinux* $CIBW_SKIP" # Run a simple test suite export CIBW_TEST_REQUIRES="pytest" export CIBW_TEST_COMMAND="pytest {project}/tests/test_Producer.py" @@ -33,14 +33,13 @@ ARCH=${ARCH:-x64} case $OSTYPE in linux*) os=linux - # Need to set up env vars (in docker) so that setup.py - # finds librdkafka. + # Need to set up env vars (in docker) so that cibuildwheel finds librdkafka. lib_dir=dest/runtimes/linux-$ARCH/native export CIBW_ENVIRONMENT="CFLAGS=-I\$PWD/dest/build/native/include LDFLAGS=-L\$PWD/$lib_dir LD_LIBRARY_PATH=\$LD_LIBRARY_PATH:\$PWD/$lib_dir" ;; darwin*) os=macos - # Need to set up env vars so that setup.py finds librdkafka. + # Need to set up env vars so that cibuildwheel finds librdkafka. lib_dir=dest/runtimes/osx-$ARCH/native export CFLAGS="-I${PWD}/dest/build/native/include" export LDFLAGS="-L${PWD}/$lib_dir" diff --git a/tools/windows-build.bat b/tools/windows-build.bat index 0a770b885..497c99473 100644 --- a/tools/windows-build.bat +++ b/tools/windows-build.bat @@ -10,7 +10,7 @@ set rem Download and install librdkafka from NuGet. call tools\windows-install-librdkafka.bat %LIBRDKAFKA_NUGET_VERSION% dest || exit /b 1 -pip install -U -r tests/requirements.txt -r src/confluent_kafka/avro/requirements.txt +pip install -r requirements\requirements-tests-install.txt pip install cibuildwheel==0.12.0 || exit /b 1 rem Build wheels (without tests) @@ -51,8 +51,7 @@ for %%W in (wheelhouse\confluent_kafka-*cp%PYTHON_SHORTVER%*win*%PYTHON_ARCH%.wh python -c "import struct; print(struct.calcsize('P') * 8)" 7z l %%~W pip install %%~W || exit /b 1 - pip install -r src\confluent_kafka\requirements.txt - pip install -r src\confluent_kafka\avro\requirements.txt + pip install -r requirements\requirements-tests-install.txt python -c "from confluent_kafka import libversion ; print(libversion())" || exit /b 1 diff --git a/tox.ini b/tox.ini index e2a1c68a4..504584fad 100644 --- a/tox.ini +++ b/tox.ini @@ -1,18 +1,13 @@ [tox] -envlist = flake8,py37,py38,py39,py310 +envlist = flake8,py37,py38,py39,py310,py311,py312,py313 [testenv] -setenv = - CPPFLAGS=-I{toxinidir}/tmp-build/include - LDFLAGS=-L{toxinidir}/tmp-build/lib - C_INCLUDE_PATH={toxinidir}/tmp-build/include - LD_LIBRARY_PATH={toxinidir}/tmp-build/lib passenv = #http://tox.readthedocs.io/en/latest/config.html#confval-passenv=SPACE-SEPARATED-GLOBNAMES * commands = # Install main package and all sub-packages - pip install . .[avro] .[schema-registry] .[json] .[protobuf] + pip install .[tests] # Early verification that module is loadable python -c 'import confluent_kafka ; print(confluent_kafka.version())' # Run tests (large timeout to allow docker image downloads) @@ -20,10 +15,6 @@ commands = # See tests/README.md for additional notes on testing #python tests/integration/integration_test.py -deps = - # https://docs.pytest.org/en/latest/changelog.html#id53 - -rtests/requirements.txt - [testenv:flake8] deps = flake8 commands = flake8 @@ -34,6 +25,6 @@ testpaths = tests norecursedirs = tests/integration/*/java [flake8] -exclude = venv*,.venv*,env,.env,.tox,.toxenv,.git,build,docs,tools,tmp-build,*_pb2.py +exclude = venv*,.venv*,env,.env,.tox,.toxenv,.git,build,docs,tools,tmp-build,*_pb2.py,tmp-KafkaCluster/* max-line-length = 119 accept-encodings = utf-8
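
For reference, below is a minimal sketch of the developer workflow implied by the new `pyproject.toml` layout, assembled from the commands this patch introduces in DEVELOPER.md, tests/README.md, examples/README.md and Dockerfile.alpine. The venv name and the exact wheel filename under `dist/` are placeholders and will vary by version and platform.

```
$ python3 -m venv venv && source venv/bin/activate
$ pip install -U pip build

# Build the sdist and wheel into dist/ via the build backend declared in pyproject.toml
$ python3 -m build

# Either install the built wheel, or install from source with an extras group
$ python3 -m pip install dist/confluent_kafka-*.whl
$ python3 -m pip install .[tests]    # also available: .[avro], .[examples], .[docs], .[all]

# If librdkafka lives in a non-standard location, point the build at it
$ C_INCLUDE_PATH=/path/to/include LIBRARY_PATH=/path/to/lib python3 -m build
```

The extras resolve from the files under `requirements/` through `[tool.setuptools.dynamic]`, so `pip install .[tests]` pulls in the same set of dependencies that CI installs via `requirements/requirements-tests-install.txt`.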