
Commit

Revert "get_spider_list() (used by Schedule and ListSpiders) supports…
Browse files Browse the repository at this point in the history
… the [settings] section. Add Root._config (hack) #526" 94e87da
jpmckinney committed Jul 22, 2024
1 parent e0907c0 commit 88e2d87
Showing 11 changed files with 18 additions and 29 deletions.
docs/api.rst (2 changes: 1 addition & 1 deletion)

@@ -201,7 +201,7 @@ listspiders.json

Get the spiders in a version of a project.

-.. note:: If :ref:`the project is in a Python module rather than a Python egg<config-settings>`, don't set the ``version`` parameter.
+.. note:: If the project is configured via a :ref:`scrapy.cfg<config-settings>` file rather than uploaded via the :ref:`addversion.json` webservice, don't set the ``version`` parameter.

Supported request methods
``GET``
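For context, the call this note describes can be exercised as in the sketch below (hypothetical host and project name; the requests library is an assumed extra, not part of Scrapyd). The version parameter is simply omitted, as the note advises for scrapy.cfg-based projects.

# Sketch only: query listspiders.json for a project configured via scrapy.cfg.
import requests  # assumed third-party dependency

response = requests.get(
    "http://localhost:6800/listspiders.json",  # hypothetical local Scrapyd instance
    params={"project": "localproject"},  # no version parameter for scrapy.cfg-based projects
)
print(response.json())  # e.g. {"status": "ok", "spiders": [...]}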
docs/news.rst (2 changes: 0 additions & 2 deletions)

@@ -36,8 +36,6 @@ Web UI
API
^^^

-- The :ref:`schedule.json` and :ref:`listspiders.json` webservices support Scrapy projects stored as Python modules, using the previously undocumented :ref:`[settings]<config-settings>` section.
-- The :ref:`addversion.json` webservice errors if a Python egg is uploaded with the same project name as a Python module, to avoid confusion as to which code is used.
- Clarify error messages, for example:

  - ``'project' parameter is required``, instead of ``'project'`` (KeyError)
pyproject.toml (4 changes: 0 additions & 4 deletions)

@@ -30,10 +30,6 @@ ignore = [
"DTZ005", # `datetime.datetime.now()` called without a `tz` argument
"DTZ006", # `datetime.datetime.fromtimestamp()` called without a `tz` argument
"DTZ007", # Naive datetime constructed using `datetime.datetime.strptime()` without %z

# https://github.com/scrapy/scrapyd/issues/526
"FIX002",
"SLF001",
]

[tool.ruff.lint.flake8-builtins]
scrapyd/runner.py (6 changes: 5 additions & 1 deletion)

@@ -38,7 +38,7 @@ def project_environment(project):
    sanitized_version, egg = eggstorage.get(project, eggversion)

    tmp = None
-    # egg can be None if the project is not in eggstorage, but is defined in the [settings] configuration section.
+    # egg can be None if the project is not in egg storage: for example, if Scrapyd is invoked within a Scrapy project.
    if egg:
        try:
            if hasattr(egg, "name"):  # for example, FileIO
@@ -64,6 +64,10 @@ def main():
    with project_environment(project):
        from scrapy.cmdline import execute

+        # This calls scrapy.utils.project.get_project_settings(). It uses SCRAPY_SETTINGS_MODULE if set. Otherwise, it
+        # calls scrapy.utils.conf.init_env(), which reads Scrapy's configuration sources, looks for a project matching
+        # SCRAPY_PROJECT in the [settings] section, and uses its value for SCRAPY_SETTINGS_MODULE.
+        # https://docs.scrapy.org/en/latest/topics/commands.html#configuration-settings
        execute()


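For reference, the resolution order described in the new comment can be sketched roughly as follows. This is a simplification for illustration only, not Scrapy's actual implementation (the real logic lives in scrapy.utils.project.get_project_settings() and scrapy.utils.conf.init_env()).

# Rough sketch of the resolution order: SCRAPY_SETTINGS_MODULE wins; otherwise the
# [settings] section of scrapy.cfg is searched for a key matching SCRAPY_PROJECT
# (falling back to "default").
import configparser
import os


def resolve_settings_module(cfg_path="scrapy.cfg"):
    if "SCRAPY_SETTINGS_MODULE" in os.environ:
        return os.environ["SCRAPY_SETTINGS_MODULE"]
    cfg = configparser.ConfigParser()
    cfg.read(cfg_path)  # a missing file is silently ignored
    project = os.environ.get("SCRAPY_PROJECT", "default")
    if cfg.has_option("settings", project):
        return cfg.get("settings", project)
    return None


print(resolve_settings_module())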
scrapyd/utils.py (2 changes: 1 addition & 1 deletion)

@@ -39,7 +39,7 @@ def sqlite_connection_string(config, database):

def get_project_list(config):
    """Get list of projects by inspecting the eggs storage and the ones defined in
-    the scrapyd.conf [settings] section
+    the scrapy.cfg [settings] section
    """

    # The poller and scheduler use this function (via get_spider_queues), and they aren't initialized with the
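The docstring above describes merging two sources of project names. A rough, self-contained sketch of that idea (the helper name and inputs are illustrative, not Scrapyd's API):

# Illustrative only: combine project names found in egg storage with project names
# that appear as keys in a [settings] section (for example, in scrapy.cfg).
import configparser


def list_projects(egg_projects, cfg_path="scrapy.cfg"):
    cfg = configparser.ConfigParser()
    cfg.read(cfg_path)  # a missing file is silently ignored
    projects = list(egg_projects)
    if cfg.has_section("settings"):
        for name in cfg.options("settings"):
            if name not in projects:
                projects.append(name)
    return projects


print(list_projects(["uploadedproject"]))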
scrapyd/webservice.py (5 changes: 0 additions & 5 deletions)

@@ -231,11 +231,6 @@ def render_POST(self, txrequest, project, version, egg):
code=http.OK, message=b"egg is not a ZIP file (if using curl, use egg=@path not egg=path)"
)

if any(p for p, _ in self.root._config.items("settings", default=[]) if project == p):
raise error.Error(
code=http.OK, message=b"project '%b' already configured in the [settings] section" % project.encode()
)

self.root.eggstorage.put(BytesIO(egg), project, version)
self.root.update_projects()

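With the check above removed, an egg upload is expected to succeed even when the project name also appears in a [settings] section (see the test change further down). A hedged example of such an upload, using the documented addversion.json parameters against an assumed local instance:

# Sketch only: upload an egg via addversion.json (project, version, egg).
import requests  # assumed third-party dependency

with open("quotesbot.egg", "rb") as f:  # hypothetical path to a project egg
    response = requests.post(
        "http://localhost:6800/addversion.json",  # hypothetical local Scrapyd instance
        data={"project": "localproject", "version": "0.1"},
        files={"egg": f},
    )
print(response.json())  # e.g. {"status": "ok", "spiders": 2}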
scrapyd/website.py (5 changes: 1 addition & 4 deletions)

@@ -86,7 +86,7 @@ def _getFilesAndDirectories(self, directory):

        for path in directory:
            if isinstance(path, bytes):
-                path = path.decode("utf8")  # noqa: PLW2901 from Twisted
+                path = path.decode()  # noqa: PLW2901 from Twisted

            url = quote(path, "/")
            escaped_path = escape(path)
@@ -133,9 +133,6 @@ def __init__(self, config, app):
        items_dir = config.get("items_dir")

        self.app = app
-        # TODO(jpmckinney): Make Config a Component
-        # https://github.com/scrapy/scrapyd/issues/526
-        self._config = config
        self.debug = config.getboolean("debug", False)
        self.runner = config.get("runner", "scrapyd.runner")
        self.prefix_header = config.get("prefix_header")
tests/__init__.py (4 changes: 2 additions & 2 deletions)

@@ -1,4 +1,5 @@
import io
+import os.path
import pkgutil


@@ -7,8 +8,7 @@ def get_egg_data(basename):


def has_settings(root):
-    # https://github.com/scrapy/scrapyd/issues/526
-    return root._config.cp.has_section("settings")
+    return os.path.exists("scrapy.cfg")


def root_add_version(root, project, version, basename):
tests/conftest.py (11 changes: 5 additions & 6 deletions)

@@ -38,17 +38,16 @@ def chdir(monkeypatch, tmpdir):
    params=[
        None,
        (Config.SECTION, "items_dir", "items"),
-        ("settings", "localproject", "localproject.settings"),
+        "scrapy.cfg",
    ],
    ids=["default", "items_dir", "settings"],
)
def root(request, chdir):
+    if request.param == "scrapy.cfg":
+        shutil.copytree(os.path.join(BASEDIR, "fixtures", "filesystem"), os.path.join(chdir), dirs_exist_ok=True)
+
    config = Config()
-    if request.param:
-        if request.param[0] == "settings":
-            config.cp.add_section(request.param[0])
-            # Copy the local files to be in the Python path.
-            shutil.copytree(os.path.join(BASEDIR, "fixtures", "filesystem"), os.path.join(chdir), dirs_exist_ok=True)
+    if isinstance(request.param, tuple):
        config.cp.set(*request.param)

    return Root(config, application(config))
tests/fixtures/filesystem/scrapy.cfg (2 changes: 1 addition & 1 deletion)

@@ -4,7 +4,7 @@
# https://scrapyd.readthedocs.io/en/latest/deploy.html

[settings]
-default = localproject.settings
+localproject = localproject.settings

[deploy]
#url = http://localhost:6800/
tests/test_webservice.py (4 changes: 2 additions & 2 deletions)

@@ -442,8 +442,8 @@ def test_add_version_settings(txrequest, root):
        pytest.skip("[settings] section is not set")

    args = {b"project": [b"localproject"], b"version": [b"0.1"], b"egg": [get_egg_data("quotesbot")]}
-    message = b"project 'localproject' already configured in the [settings] section"
-    assert_error(txrequest, root, "POST", "addversion", args, message)
+    expected = {"project": "localproject", "spiders": 2, "version": "0.1"}
+    assert_content(txrequest, root, "POST", "addversion", args, expected)


def test_add_version_invalid(txrequest, root):
