Merge pull request #504 from shubhbapna/use-url
allow use of URL for requirements, constraints, and graph files
mergify[bot] authored Dec 4, 2024
2 parents 6235e8c + 43a9c06 commit f1bfa8b
Showing 13 changed files with 98 additions and 47 deletions.
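
In short: the requirements, constraints, and graph file inputs now accept http(s) URLs (and file:// URIs) in addition to local paths. A minimal sketch of the lower-level calls involved, assuming a hypothetical https://example.com host:

from fromager import constraints, requirements_file

# Requirements and constraints may now be given as local paths or remote URLs.
reqs = list(requirements_file.parse_requirements_file(
    "https://example.com/requirements.txt"
))
cons = constraints.load("https://example.com/constraints.txt")
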
4 changes: 2 additions & 2 deletions src/fromager/__main__.py
@@ -107,7 +107,7 @@
@click.option(
"-c",
"--constraints-file",
type=clickext.ClickPath(),
type=str,
help="location of the constraints file",
)
@click.option(
@@ -142,7 +142,7 @@ def main(
patches_dir: pathlib.Path,
settings_file: pathlib.Path,
settings_dir: pathlib.Path,
constraints_file: pathlib.Path,
constraints_file: str,
cleanup: bool,
variant: str,
jobs: int | None,
12 changes: 5 additions & 7 deletions src/fromager/commands/bootstrap.py
@@ -1,13 +1,11 @@
import logging
import pathlib
import typing

import click
from packaging.requirements import Requirement

from .. import (
bootstrapper,
clickext,
context,
dependency_graph,
metrics,
@@ -28,7 +26,7 @@

def _get_requirements_from_args(
toplevel: typing.Iterable[str],
req_files: typing.Iterable[pathlib.Path],
req_files: typing.Iterable[str],
) -> list[Requirement]:
parsed_req: list[str] = []
parsed_req.extend(toplevel)
@@ -59,14 +57,14 @@ def _get_requirements_from_args(
"--requirements-file",
"requirements_files",
multiple=True,
type=clickext.ClickPath(),
type=str,
help="pip requirements file",
)
@click.option(
"-p",
"--previous-bootstrap-file",
"previous_bootstrap_file",
type=clickext.ClickPath(),
type=str,
help="graph file produced from a previous bootstrap",
)
@click.option(
@@ -79,8 +77,8 @@ def _get_requirements_from_args(
@click.pass_obj
def bootstrap(
wkctx: context.WorkContext,
requirements_files: list[pathlib.Path],
previous_bootstrap_file: pathlib.Path | None,
requirements_files: list[str],
previous_bootstrap_file: str | None,
cache_wheel_server_url: str | None,
toplevel: list[str],
) -> None:
3 changes: 2 additions & 1 deletion src/fromager/commands/build.py
@@ -25,6 +25,7 @@
metrics,
overrides,
progress,
read,
server,
sources,
wheels,
@@ -137,7 +138,7 @@ def build_sequence(
entries: list[BuildSequenceEntry] = []

logger.info("reading build order from %s", build_order_file)
with open(build_order_file, "r") as f:
with read.open_file_or_url(build_order_file) as f:
for entry in progress.progress(json.load(f)):
dist_name = entry["dist"]
resolved_version = Version(entry["version"])
4 changes: 2 additions & 2 deletions src/fromager/commands/download_sequence.py
@@ -8,7 +8,7 @@
from packaging.requirements import Requirement
from packaging.version import Version

from .. import context, progress, sources, wheels
from .. import context, progress, read, sources, wheels

logger = logging.getLogger(__name__)

@@ -57,7 +57,7 @@ def download_sequence(
wheel_servers = [sdist_server_url]

logger.info("reading build order from %s", build_order_file)
with open(build_order_file, "r") as f:
with read.open_file_or_url(build_order_file) as f:
build_order = json.load(f)

def download_one(entry: dict[str, typing.Any]):
16 changes: 7 additions & 9 deletions src/fromager/commands/graph.py
@@ -36,12 +36,10 @@ def graph():
)
@click.argument(
"graph-file",
type=clickext.ClickPath(),
type=str,
)
@click.pass_obj
def to_constraints(
wkctx: context.WorkContext, graph_file: pathlib.Path, output: pathlib.Path
):
def to_constraints(wkctx: context.WorkContext, graph_file: str, output: pathlib.Path):
"Convert a graph file to a constraints file."
graph = DependencyGraph.from_file(graph_file)
if output:
@@ -59,10 +57,10 @@ def to_constraints(
)
@click.argument(
"graph-file",
type=clickext.ClickPath(),
type=str,
)
@click.pass_obj
def to_dot(wkctx, graph_file: pathlib.Path, output: pathlib.Path):
def to_dot(wkctx: context.WorkContext, graph_file: str, output: pathlib.Path):
"Convert a graph file to a DOT file suitable to pass to graphviz."
graph = DependencyGraph.from_file(graph_file)
if output:
@@ -114,7 +112,7 @@ def get_node_id(node):
@graph.command()
@click.argument(
"graph-file",
type=clickext.ClickPath(),
type=str,
)
@click.pass_obj
def explain_duplicates(wkctx, graph_file):
@@ -183,13 +181,13 @@ def explain_duplicates(wkctx, graph_file):
)
@click.argument(
"graph-file",
type=clickext.ClickPath(),
type=str,
)
@click.argument("package-name", type=str)
@click.pass_obj
def why(
wkctx: context.WorkContext,
graph_file: pathlib.Path,
graph_file: str,
package_name: str,
version: list[Version],
depth: int,
13 changes: 4 additions & 9 deletions src/fromager/constraints.py
@@ -40,14 +40,9 @@ def _parse(content: typing.Iterable[str]) -> Constraints:
return Constraints(constraints)


def load(filename: pathlib.Path | None) -> Constraints:
if not filename:
def load(constraints_file: str | pathlib.Path | None) -> Constraints:
if not constraints_file:
return Constraints({})
filepath = pathlib.Path(filename)
if not filepath.exists():
raise FileNotFoundError(
f"constraints file {filepath.absolute()} does not exist, ignoring"
)
logger.info("loading constraints from %s", filepath.absolute())
parsed_req_file = requirements_file.parse_requirements_file(filename)
logger.info("loading constraints from %s", constraints_file)
parsed_req_file = requirements_file.parse_requirements_file(constraints_file)
return _parse(parsed_req_file)
28 changes: 17 additions & 11 deletions src/fromager/context.py
@@ -8,11 +8,7 @@
from packaging.utils import NormalizedName, canonicalize_name
from packaging.version import Version

from . import (
constraints,
dependency_graph,
packagesettings,
)
from . import constraints, dependency_graph, packagesettings, request_session

logger = logging.getLogger(__name__)

@@ -25,7 +21,7 @@ class WorkContext:
def __init__(
self,
active_settings: packagesettings.Settings | None,
constraints_file: pathlib.Path | None,
constraints_file: str | None,
patches_dir: pathlib.Path,
sdists_repo: pathlib.Path,
wheels_repo: pathlib.Path,
@@ -45,12 +41,12 @@ def __init__(
max_jobs=max_jobs,
)
self.settings = active_settings
self.input_constraints_file: pathlib.Path | None
self.input_constraints_uri: str | None
if constraints_file is not None:
self.input_constraints_file = constraints_file.absolute()
self.input_constraints_uri = constraints_file
self.constraints = constraints.load(constraints_file)
else:
self.input_constraints_file = None
self.input_constraints_uri = None
self.constraints = constraints.Constraints({})
self.sdists_repo = pathlib.Path(sdists_repo).absolute()
self.sdists_downloads = self.sdists_repo / "downloads"
@@ -88,9 +84,19 @@ def pip_wheel_server_args(self) -> list[str]:

@property
def pip_constraint_args(self) -> list[str]:
if not self.input_constraints_file:
if not self.input_constraints_uri:
return []
return ["--constraint", os.fspath(self.input_constraints_file)]

if self.input_constraints_uri.startswith(("https://", "http://", "file://")):
path_to_constraints_file = self.work_dir / "input-constraints.txt"
if not path_to_constraints_file.exists():
response = request_session.session.get(self.input_constraints_uri)
path_to_constraints_file.write_text(response.text)
else:
path_to_constraints_file = pathlib.Path(self.input_constraints_uri)

path_to_constraints_file = path_to_constraints_file.absolute()
return ["--constraint", os.fspath(path_to_constraints_file)]

def write_to_graph_to_file(self):
with open(self.work_dir / "graph.json", "w") as f:
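
pip's --constraint option still needs a local file, so when the constraints input is a URL the property above downloads it once into work_dir/input-constraints.txt and reuses that copy on later calls (the merged code also routes file:// URIs through the same branch). A standalone sketch of the pattern, with hypothetical names and plain requests in place of the shared request_session.session:

import pathlib

import requests


def constraint_args(work_dir: pathlib.Path, constraints_uri: str | None) -> list[str]:
    """Build pip --constraint arguments from a local path or a remote URL."""
    if not constraints_uri:
        return []
    if constraints_uri.startswith(("https://", "http://")):
        cached = work_dir / "input-constraints.txt"
        if not cached.exists():
            # Fetch once; later calls reuse the cached local copy.
            cached.write_text(requests.get(constraints_uri).text)
        local_path = cached
    else:
        local_path = pathlib.Path(constraints_uri)
    return ["--constraint", str(local_path.absolute())]
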
5 changes: 3 additions & 2 deletions src/fromager/dependency_graph.py
@@ -7,6 +7,7 @@
from packaging.utils import NormalizedName, canonicalize_name
from packaging.version import Version

from .read import open_file_or_url
from .requirements_file import RequirementType

logger = logging.getLogger(__name__)
@@ -122,9 +123,9 @@ def __init__(self) -> None:
@classmethod
def from_file(
cls,
graph_file: pathlib.Path,
graph_file: pathlib.Path | str,
) -> "DependencyGraph":
with open(graph_file) as f:
with open_file_or_url(graph_file) as f:
# TODO: add JSON validation to ensure it is a parsable graph json
raw_graph = typing.cast(dict[str, dict], json.load(f))
return cls.from_dict(raw_graph)
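
Because from_file now goes through open_file_or_url, the graph commands above can be pointed at a hosted graph file as well as a local one. A minimal sketch, assuming a hypothetical URL:

from fromager.dependency_graph import DependencyGraph

# Accepts a local path, a file:// URI, or an http(s) URL.
graph = DependencyGraph.from_file("https://example.com/graph.json")
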
26 changes: 26 additions & 0 deletions src/fromager/read.py
@@ -0,0 +1,26 @@
import io
import pathlib
import typing
from contextlib import contextmanager
from urllib.parse import urlparse

from .request_session import session


@contextmanager
def open_file_or_url(
path_or_url: str | pathlib.Path,
) -> typing.Generator[io.TextIOWrapper, typing.Any, None]:
location = str(path_or_url)
if location.startswith("file://"):
location = urlparse(location).path

if location.startswith(("https://", "http://")):
response = session.get(location)
yield io.StringIO(response.text)
else:
f = open(location, "r")
try:
yield f
finally:
f.close()
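
This helper is the common path behind the file-reading changes above: a local path (or file:// URI) is opened like a regular file, while an http(s) URL is fetched with the shared requests session and exposed as an in-memory text stream. A small usage sketch, assuming a hypothetical URL and a requirements.txt in the current directory:

from fromager import read

# A local path (str or pathlib.Path) is opened directly.
with read.open_file_or_url("requirements.txt") as f:
    for line in f:
        print(line.rstrip())

# An http(s) URL is downloaded and wrapped in an in-memory buffer.
with read.open_file_or_url("https://example.com/requirements.txt") as f:
    print(f.read())
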
6 changes: 4 additions & 2 deletions src/fromager/requirements_file.py
@@ -6,6 +6,8 @@
from packaging import markers
from packaging.requirements import Requirement

from .read import open_file_or_url

logger = logging.getLogger(__name__)


@@ -40,11 +42,11 @@ class SourceType(StrEnum):


def parse_requirements_file(
req_file: pathlib.Path,
req_file: str | pathlib.Path,
) -> typing.Iterable[str]:
logger.debug("reading requirements file %s", req_file)
lines = []
with open(req_file, "r") as f:
with open_file_or_url(req_file) as f:
for line in f:
useful, _, _ = line.partition("#")
useful = useful.strip()
2 changes: 1 addition & 1 deletion tests/test_bootstrap.py
@@ -22,7 +22,7 @@ def test_get_requirements_args_and_file(tmp_path: pathlib.Path):
requirements_file = tmp_path / "requirements.txt"
requirements_file.write_text("c\n")
requirements = bootstrap._get_requirements_from_args(
["a", "b"], [requirements_file]
["a", "b"], [str(requirements_file)]
)
assert [
Requirement("a"),
2 changes: 1 addition & 1 deletion tests/test_context.py
@@ -8,7 +8,7 @@ def test_pip_constraints_args(tmp_path):
constraints_file.write_text("\n") # the file has to exist
ctx = context.WorkContext(
active_settings=None,
constraints_file=constraints_file,
constraints_file=str(constraints_file),
patches_dir=tmp_path / "overrides/patches",
sdists_repo=tmp_path / "sdists-repo",
wheels_repo=tmp_path / "wheels-repo",
24 changes: 24 additions & 0 deletions tests/test_read.py
@@ -0,0 +1,24 @@
import pathlib

import requests_mock

from fromager import read


def test_read_from_file(tmp_path: pathlib.Path):
file = tmp_path / "test"
text = ["hello", "world"]
file.write_text("\n".join(text))
with read.open_file_or_url(file) as f:
for index, line in enumerate(f):
assert line.strip() == text[index]


def test_read_from_url():
url = "https://someurl.com"
text = ["hello", "world"]
with requests_mock.Mocker() as r:
r.get(url, text="\n".join(text))
with read.open_file_or_url(url) as f:
for index, line in enumerate(f):
assert line.strip() == text[index]
