forked from scrapy/scrapy
-
Notifications
You must be signed in to change notification settings - Fork 0
/
setup.py
97 lines (88 loc) · 3.2 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
from pathlib import Path
from pkg_resources import parse_version
from setuptools import __version__ as setuptools_version
from setuptools import find_packages, setup
# Single source of truth for the release version: the plain-text
# scrapy/VERSION file shipped next to this setup script (read relative
# to this file, not the current working directory).
version = (Path(__file__).parent / "scrapy/VERSION").read_text("ascii").strip()
def has_environment_marker_platform_impl_support():
    """Return True if the installed setuptools understands environment
    markers that use range operators (``platform_python_implementation``).

    Adapted from 'pytest/setup.py':
    https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31
    The first release with this support is setuptools 18.5, see:
    https://setuptools.readthedocs.io/en/latest/history.html#id235
    """
    minimum_supported = parse_version("18.5")
    return parse_version(setuptools_version) >= minimum_supported
# Unconditional runtime dependencies, each pinned to the minimum
# version Scrapy supports.  Implementation-specific dispatcher
# packages are handled separately below via extras_require.
install_requires = [
    "Twisted>=18.9.0",
    "cryptography>=3.4.6",
    "cssselect>=0.9.1",
    "itemloaders>=1.0.1",
    "parsel>=1.5.0",
    "pyOpenSSL>=21.0.0",
    "queuelib>=1.4.2",
    "service_identity>=18.1.0",
    "w3lib>=1.17.0",
    "zope.interface>=5.1.0",
    "protego>=0.1.15",
    "itemadapter>=0.1.0",
    "setuptools",
    "packaging",
    "tldextract",
    "lxml>=4.3.0",
]
# The dispatcher backend differs between Python implementations:
# PyDispatcher on CPython, PyPyDispatcher on PyPy.
extras_require = {}
cpython_dependencies = [
    "PyDispatcher>=2.0.5",
]
pypy_dependencies = [
    "PyPyDispatcher>=2.1.0",
]
if has_environment_marker_platform_impl_support():
    # Modern setuptools (>=18.5): express the per-implementation split
    # declaratively through environment markers.
    extras_require[
        ':platform_python_implementation == "CPython"'
    ] = cpython_dependencies
    extras_require[
        ':platform_python_implementation == "PyPy"'
    ] = pypy_dependencies
else:
    # Older setuptools cannot evaluate the markers; fall back to
    # installing the CPython dispatcher unconditionally.
    install_requires.extend(cpython_dependencies)
setup(
    name="Scrapy",
    version=version,
    url="https://scrapy.org",
    project_urls={
        "Documentation": "https://docs.scrapy.org/",
        "Source": "https://github.com/scrapy/scrapy",
        "Tracker": "https://github.com/scrapy/scrapy/issues",
    },
    description="A high-level Web Crawling and Web Scraping framework",
    # Read the README __file__-relative (consistent with the VERSION read
    # above) and via Path.read_text, which closes the file — the previous
    # bare open() leaked the handle and depended on the current directory.
    long_description=(Path(__file__).parent / "README.rst").read_text("utf-8"),
    author="Scrapy developers",
    author_email="pablo@pablohoffman.com",
    maintainer="Pablo Hoffman",
    maintainer_email="pablo@pablohoffman.com",
    license="BSD",
    packages=find_packages(exclude=("tests", "tests.*")),
    include_package_data=True,
    zip_safe=False,
    # Installs the `scrapy` command-line tool.
    entry_points={"console_scripts": ["scrapy = scrapy.cmdline:execute"]},
    classifiers=[
        "Framework :: Scrapy",
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Software Development :: Libraries :: Application Frameworks",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    python_requires=">=3.7",
    install_requires=install_requires,
    extras_require=extras_require,
)