From a6ef85d4e35951d3e4a3cc9e1b96d1672f36af73 Mon Sep 17 00:00:00 2001 From: bloodearnest Date: Fri, 22 Mar 2024 14:29:10 +0000 Subject: [PATCH] Update to jobrunner v2.73.0, including pipeline --- opensafely/_vendor/jobrunner/actions.py | 3 + opensafely/_vendor/jobrunner/cli/add_job.py | 13 +- opensafely/_vendor/jobrunner/cli/kill_job.py | 12 +- opensafely/_vendor/jobrunner/config.py | 23 +-- .../jobrunner/create_or_update_jobs.py | 35 ++-- .../_vendor/jobrunner/executors/local.py | 37 ++++- opensafely/_vendor/jobrunner/job_executor.py | 1 + opensafely/_vendor/jobrunner/lib/commands.py | 19 --- .../_vendor/jobrunner/lib/docker_stats.py | 1 + opensafely/_vendor/jobrunner/models.py | 25 ++- opensafely/_vendor/jobrunner/record_stats.py | 155 +++++++++++++++++- .../_vendor/jobrunner/reusable_actions.py | 5 +- opensafely/_vendor/jobrunner/sync.py | 7 +- opensafely/_vendor/jobrunner/tracing.py | 48 ++++-- .../opensafely_jobrunner-2.72.0.dist-info.pyi | 1 - .../direct_url.json | 1 - .../opensafely_jobrunner-2.73.0.dist-info.pyi | 1 + .../INSTALLER | 0 .../LICENSE | 0 .../METADATA | 4 +- .../RECORD | 45 +++-- .../REQUESTED | 0 .../WHEEL | 2 +- .../direct_url.json | 1 + .../entry_points.txt | 0 .../top_level.txt | 0 ...ly_pipeline-2023.11.6.145820.dist-info.pyi | 1 - .../RECORD | 20 --- .../direct_url.json | 1 - ...ly_pipeline-2024.3.19.153938.dist-info.pyi | 1 + .../INSTALLER | 0 .../LICENSE | 0 .../METADATA | 2 +- .../RECORD | 20 +++ .../REQUESTED | 0 .../WHEEL | 2 +- .../direct_url.json | 1 + .../top_level.txt | 0 opensafely/_vendor/pipeline/__main__.py | 1 + opensafely/_vendor/pipeline/constants.py | 1 - opensafely/_vendor/pipeline/models.py | 45 +++-- opensafely/_vendor/pipeline/types.py | 1 + vendor.in | 4 +- vendor.txt | 44 ++--- 44 files changed, 399 insertions(+), 184 deletions(-) delete mode 100644 opensafely/_vendor/jobrunner/lib/commands.py delete mode 100644 opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info.pyi delete mode 100644 opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/direct_url.json create mode 100644 opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info.pyi rename opensafely/_vendor/{opensafely_jobrunner-2.72.0.dist-info => opensafely_jobrunner-2.73.0.dist-info}/INSTALLER (100%) rename opensafely/_vendor/{opensafely_jobrunner-2.72.0.dist-info => opensafely_jobrunner-2.73.0.dist-info}/LICENSE (100%) rename opensafely/_vendor/{opensafely_jobrunner-2.72.0.dist-info => opensafely_jobrunner-2.73.0.dist-info}/METADATA (99%) rename opensafely/_vendor/{opensafely_jobrunner-2.72.0.dist-info => opensafely_jobrunner-2.73.0.dist-info}/RECORD (59%) rename opensafely/_vendor/{opensafely_jobrunner-2.72.0.dist-info => opensafely_jobrunner-2.73.0.dist-info}/REQUESTED (100%) rename opensafely/_vendor/{opensafely_jobrunner-2.72.0.dist-info => opensafely_jobrunner-2.73.0.dist-info}/WHEEL (65%) create mode 100644 opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/direct_url.json rename opensafely/_vendor/{opensafely_jobrunner-2.72.0.dist-info => opensafely_jobrunner-2.73.0.dist-info}/entry_points.txt (100%) rename opensafely/_vendor/{opensafely_jobrunner-2.72.0.dist-info => opensafely_jobrunner-2.73.0.dist-info}/top_level.txt (100%) delete mode 100644 opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info.pyi delete mode 100644 opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/RECORD delete mode 100644 opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json create mode 100644 
opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info.pyi rename opensafely/_vendor/{opensafely_pipeline-2023.11.6.145820.dist-info => opensafely_pipeline-2024.3.19.153938.dist-info}/INSTALLER (100%) rename opensafely/_vendor/{opensafely_pipeline-2023.11.6.145820.dist-info => opensafely_pipeline-2024.3.19.153938.dist-info}/LICENSE (100%) rename opensafely/_vendor/{opensafely_pipeline-2023.11.6.145820.dist-info => opensafely_pipeline-2024.3.19.153938.dist-info}/METADATA (98%) create mode 100644 opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/RECORD rename opensafely/_vendor/{opensafely_pipeline-2023.11.6.145820.dist-info => opensafely_pipeline-2024.3.19.153938.dist-info}/REQUESTED (100%) rename opensafely/_vendor/{opensafely_pipeline-2023.11.6.145820.dist-info => opensafely_pipeline-2024.3.19.153938.dist-info}/WHEEL (65%) create mode 100644 opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/direct_url.json rename opensafely/_vendor/{opensafely_pipeline-2023.11.6.145820.dist-info => opensafely_pipeline-2024.3.19.153938.dist-info}/top_level.txt (100%) diff --git a/opensafely/_vendor/jobrunner/actions.py b/opensafely/_vendor/jobrunner/actions.py index bf0c6e9d..01772871 100644 --- a/opensafely/_vendor/jobrunner/actions.py +++ b/opensafely/_vendor/jobrunner/actions.py @@ -4,6 +4,7 @@ from typing import Dict, List from opensafely._vendor.pipeline.exceptions import ProjectValidationError +from opensafely._vendor.pipeline.models import Action from opensafely._vendor.pipeline.outputs import get_output_dirs from opensafely._vendor.jobrunner.lib.path_utils import ensure_unix_path @@ -19,6 +20,7 @@ class ActionSpecification: run: str needs: List[str] outputs: Dict[str, Dict[str, str]] + action: Action def get_action_specification(config, action_id, using_dummy_data_backend=False): @@ -80,6 +82,7 @@ def get_action_specification(config, action_id, using_dummy_data_backend=False): run=run_command, needs=action_spec.needs, outputs=action_spec.outputs.dict(exclude_unset=True), + action=action_spec, ) diff --git a/opensafely/_vendor/jobrunner/cli/add_job.py b/opensafely/_vendor/jobrunner/cli/add_job.py index 93830573..ab6a994e 100644 --- a/opensafely/_vendor/jobrunner/cli/add_job.py +++ b/opensafely/_vendor/jobrunner/cli/add_job.py @@ -5,6 +5,7 @@ import argparse import dataclasses import pprint +import sys import textwrap from pathlib import Path from urllib.parse import urlparse @@ -36,6 +37,7 @@ def main( requested_actions=actions, force_run_dependencies=force_run_dependencies, cancelled_actions=[], + codelists_ok=True, ) ) print("Submitting JobRequest:\n") @@ -46,6 +48,8 @@ def main( for job in jobs: display_obj(job) + return job_request, jobs + def display_obj(obj): if hasattr(obj, "asdict"): @@ -57,7 +61,10 @@ def display_obj(obj): print() -def run(): +def run(argv=None): + if argv is None: + argv = sys.argv[1:] + configure_logging() parser = argparse.ArgumentParser(description=__doc__.partition("\n\n")[0]) parser.add_argument("repo_url", help="URL (or local path) of git repository") @@ -82,8 +89,8 @@ def run(): ) parser.add_argument("-f", "--force-run-dependencies", action="store_true") - args = parser.parse_args() - main(**vars(args)) + args = parser.parse_args(argv) + return main(**vars(args)) if __name__ == "__main__": diff --git a/opensafely/_vendor/jobrunner/cli/kill_job.py b/opensafely/_vendor/jobrunner/cli/kill_job.py index 0fa3dccc..e6f0f4a9 100644 --- a/opensafely/_vendor/jobrunner/cli/kill_job.py +++ b/opensafely/_vendor/jobrunner/cli/kill_job.py 
@@ -4,6 +4,7 @@ import argparse from opensafely._vendor.jobrunner.executors import local +from opensafely._vendor.jobrunner.job_executor import JobResults from opensafely._vendor.jobrunner.lib import database, docker from opensafely._vendor.jobrunner.models import Job, State, StatusCode from opensafely._vendor.jobrunner.run import job_to_job_definition, mark_job_as_failed @@ -32,7 +33,16 @@ def main(partial_job_ids, cleanup=False): ) if container_metadata: job = job_to_job_definition(job) - metadata = local.get_job_metadata(job, {}, container_metadata) + # create a dummy JobResults with just the message we want + results = JobResults( + outputs=None, + unmatched_patterns=None, + unmatched_outputs=None, + exit_code=container_metadata["State"]["ExitCode"], + image_id=container_metadata["Image"], + message="job killed by OpenSAFELY administrator", + ) + metadata = local.get_job_metadata(job, {}, container_metadata, results) local.write_job_logs(job, metadata, copy_log_to_workspace=False) if cleanup: diff --git a/opensafely/_vendor/jobrunner/config.py b/opensafely/_vendor/jobrunner/config.py index 4ae5270f..9d1d4014 100644 --- a/opensafely/_vendor/jobrunner/config.py +++ b/opensafely/_vendor/jobrunner/config.py @@ -6,6 +6,8 @@ from multiprocessing import cpu_count from pathlib import Path +from opensafely._vendor import pipeline + class ConfigException(Exception): pass @@ -46,6 +48,7 @@ def _is_valid_backend_name(name): WORKDIR = Path(os.environ.get("WORKDIR", default_work_dir)).resolve() DATABASE_FILE = WORKDIR / "db.sqlite" +METRICS_FILE = WORKDIR / "metrics.sqlite" GIT_REPO_DIR = WORKDIR / "repos" # valid archive formats @@ -143,25 +146,7 @@ def database_urls_from_env(env): ) # 16mb -# TODO: we might want to take this list from pipeline if we implement it there. 
-LEVEL4_FILE_TYPES = [ - # tables - ".csv", - ".tsv", - # images - ".jpg", - ".jpeg", - ".png", - ".svg", - ".svgz", - # reports - ".html", - ".pdf", - ".txt", - ".log", - ".json", - ".md", -] +LEVEL4_FILE_TYPES = pipeline.constants.LEVEL4_FILE_TYPES STATA_LICENSE = os.environ.get("STATA_LICENSE") STATA_LICENSE_REPO = os.environ.get( diff --git a/opensafely/_vendor/jobrunner/create_or_update_jobs.py b/opensafely/_vendor/jobrunner/create_or_update_jobs.py index 1ea710ac..9933c69b 100644 --- a/opensafely/_vendor/jobrunner/create_or_update_jobs.py +++ b/opensafely/_vendor/jobrunner/create_or_update_jobs.py @@ -36,6 +36,10 @@ class JobRequestError(Exception): pass +class StaleCodelistError(JobRequestError): + pass + + class NothingToDoError(JobRequestError): pass @@ -62,10 +66,10 @@ def create_or_update_jobs(job_request): JobRequestError, ) as e: log.info(f"JobRequest failed:\n{e}") - create_failed_job(job_request, e) + create_job_from_exception(job_request, e) except Exception: log.exception("Uncaught error while creating jobs") - create_failed_job(job_request, JobRequestError("Internal error")) + create_job_from_exception(job_request, JobRequestError("Internal error")) else: if job_request.cancelled_actions: log.debug("Cancelling actions: %s", job_request.cancelled_actions) @@ -114,7 +118,8 @@ def create_jobs(job_request): def validate_job_request(job_request): - if config.ALLOWED_GITHUB_ORGS: + # http prefix allows local git repos, useful for tests + if job_request.repo_url.startswith("http") and config.ALLOWED_GITHUB_ORGS: validate_repo_url(job_request.repo_url, config.ALLOWED_GITHUB_ORGS) if not job_request.requested_actions: raise JobRequestError("At least one action must be supplied") @@ -238,6 +243,7 @@ def recursively_build_jobs(jobs_by_action, job_request, pipeline_config, action) commit=job_request.commit, workspace=job_request.workspace, database_name=job_request.database_name, + requires_db=action_spec.action.is_database_action, action=action, wait_for_job_ids=wait_for_job_ids, requires_outputs_from=action_spec.needs, @@ -311,12 +317,12 @@ def assert_codelists_ok(job_request, new_jobs): # Codelists are out of date; fail the entire job request if any job # requires database access if job.requires_db: - raise JobRequestError( + raise StaleCodelistError( f"Codelists are out of date (required by action {job.action})" ) -def create_failed_job(job_request, exception): +def create_job_from_exception(job_request, exception): """ Sometimes we want to say to the job-server (and the user): your JobRequest was broken so we weren't able to create any jobs for it. But the only way @@ -327,19 +333,25 @@ def create_failed_job(job_request, exception): This is a bit of a hack, but it keeps the sync protocol simple. 
""" + action = "__error__" + error = exception + state = State.FAILED + status_message = str(exception) + # Special case for the NothingToDoError which we treat as a success if isinstance(exception, NothingToDoError): state = State.SUCCEEDED code = StatusCode.SUCCEEDED - status_message = "All actions have already run" action = job_request.requested_actions[0] error = None + # StaleCodelistError is a failure but not an INTERNAL_ERROR + elif isinstance(exception, StaleCodelistError): + code = StatusCode.STALE_CODELISTS else: - state = State.FAILED code = StatusCode.INTERNAL_ERROR + # include exception name in message to aid debugging status_message = f"{type(exception).__name__}: {exception}" - action = "__error__" - error = exception + now = time.time() job = Job( job_request_id=job_request.id, @@ -379,7 +391,10 @@ def set_cancelled_flag_for_actions(job_request_id, actions): # working. update_where( Job, - {"cancelled": True}, + { + "cancelled": True, + "completed_at": int(time.time()), + }, job_request_id=job_request_id, action__in=actions, ) diff --git a/opensafely/_vendor/jobrunner/executors/local.py b/opensafely/_vendor/jobrunner/executors/local.py index 18671317..f5871e53 100644 --- a/opensafely/_vendor/jobrunner/executors/local.py +++ b/opensafely/_vendor/jobrunner/executors/local.py @@ -11,7 +11,7 @@ from opensafely._vendor.pipeline.legacy import get_all_output_patterns_from_project_file -from opensafely._vendor.jobrunner import config +from opensafely._vendor.jobrunner import config, record_stats from opensafely._vendor.jobrunner.executors import volumes from opensafely._vendor.jobrunner.job_executor import ( ExecutorAPI, @@ -241,16 +241,24 @@ def get_status(self, job_definition, timeout=15): f"docker timed out after {timeout}s inspecting container {name}" ) + metrics = record_stats.read_job_metrics(job_definition.id) + if container is None: # container doesn't exist if job_definition.cancelled: if volumes.get_volume_api(job_definition).volume_exists(job_definition): # jobs prepared but not running do not need to finalize, so we # proceed directly to the FINALIZED state here return JobStatus( - ExecutorState.FINALIZED, "Prepared job was cancelled" + ExecutorState.FINALIZED, + "Prepared job was cancelled", + metrics=metrics, ) else: - return JobStatus(ExecutorState.UNKNOWN, "Pending job was cancelled") + return JobStatus( + ExecutorState.UNKNOWN, + "Pending job was cancelled", + metrics=metrics, + ) # timestamp file presence means we have finished preparing timestamp_ns = volumes.get_volume_api(job_definition).read_timestamp( @@ -261,24 +269,31 @@ def get_status(self, job_definition, timeout=15): # re-prepare it anyway. if timestamp_ns is None: # we are Jon Snow - return JobStatus(ExecutorState.UNKNOWN) + return JobStatus(ExecutorState.UNKNOWN, metrics={}) else: # we've finish preparing - return JobStatus(ExecutorState.PREPARED, timestamp_ns=timestamp_ns) + return JobStatus( + ExecutorState.PREPARED, timestamp_ns=timestamp_ns, metrics=metrics + ) if container["State"]["Running"]: timestamp_ns = datestr_to_ns_timestamp(container["State"]["StartedAt"]) - return JobStatus(ExecutorState.EXECUTING, timestamp_ns=timestamp_ns) + return JobStatus( + ExecutorState.EXECUTING, timestamp_ns=timestamp_ns, metrics=metrics + ) elif job_definition.id in RESULTS: return JobStatus( ExecutorState.FINALIZED, timestamp_ns=RESULTS[job_definition.id].timestamp_ns, + metrics=metrics, ) else: # container present but not running, i.e. finished # Nb. 
this does not include prepared jobs, as they have a volume but not a container timestamp_ns = datestr_to_ns_timestamp(container["State"]["FinishedAt"]) - return JobStatus(ExecutorState.EXECUTED, timestamp_ns=timestamp_ns) + return JobStatus( + ExecutorState.EXECUTED, timestamp_ns=timestamp_ns, metrics=metrics + ) def get_results(self, job_definition): if job_definition.id not in RESULTS: @@ -409,7 +424,9 @@ def finalize_job(job_definition): base_revision=labels.get("org.opensafely.base.vcs-ref", "unknown"), base_created=labels.get("org.opencontainers.base.build-date", "unknown"), ) - job_metadata = get_job_metadata(job_definition, outputs, container_metadata) + job_metadata = get_job_metadata( + job_definition, outputs, container_metadata, results + ) if job_definition.cancelled: write_job_logs(job_definition, job_metadata, copy_log_to_workspace=False) @@ -426,7 +443,7 @@ def finalize_job(job_definition): return results -def get_job_metadata(job_definition, outputs, container_metadata): +def get_job_metadata(job_definition, outputs, container_metadata, results): # job_metadata is a big dict capturing everything we know about the state # of the job job_metadata = dict() @@ -437,6 +454,7 @@ def get_job_metadata(job_definition, outputs, container_metadata): job_metadata["docker_image_id"] = container_metadata["Image"] # convert exit code to str so 0 exit codes get logged job_metadata["exit_code"] = str(container_metadata["State"]["ExitCode"]) + job_metadata["status_message"] = results.message job_metadata["container_metadata"] = container_metadata job_metadata["outputs"] = outputs job_metadata["commit"] = job_definition.study.commit @@ -679,6 +697,7 @@ def write_log_file(job_definition, job_metadata, filename, excluded): "commit", "docker_image_id", "exit_code", + "status_message", "created_at", "completed_at", "database_name", diff --git a/opensafely/_vendor/jobrunner/job_executor.py b/opensafely/_vendor/jobrunner/job_executor.py index 2ee0039d..da135bae 100644 --- a/opensafely/_vendor/jobrunner/job_executor.py +++ b/opensafely/_vendor/jobrunner/job_executor.py @@ -67,6 +67,7 @@ class JobStatus: timestamp_ns: int = ( None # timestamp this JobStatus occurred, in integer nanoseconds ) + metrics: dict = field(default_factory=dict) @dataclass diff --git a/opensafely/_vendor/jobrunner/lib/commands.py b/opensafely/_vendor/jobrunner/lib/commands.py deleted file mode 100644 index b0ce50dc..00000000 --- a/opensafely/_vendor/jobrunner/lib/commands.py +++ /dev/null @@ -1,19 +0,0 @@ -def requires_db_access(args): - """ - By default actions do not have database access, but certain trusted actions require it - """ - valid_commands = { - "cohortextractor": ("generate_cohort", "generate_codelist_report"), - "databuilder": ("generate-dataset",), - "ehrql": ("generate-dataset", "generate-measures"), - "sqlrunner": None, # all commands are valid - } - if len(args) <= 1: - return False - - image, command = args[0], args[1] - image = image.split(":")[0] - if image in valid_commands: - if valid_commands[image] is None or command in valid_commands[image]: - return True - return False diff --git a/opensafely/_vendor/jobrunner/lib/docker_stats.py b/opensafely/_vendor/jobrunner/lib/docker_stats.py index 95daa5f8..aed1a573 100644 --- a/opensafely/_vendor/jobrunner/lib/docker_stats.py +++ b/opensafely/_vendor/jobrunner/lib/docker_stats.py @@ -30,6 +30,7 @@ def get_container_stats(timeout=DEFAULT_TIMEOUT): removeprefix(row["Name"], "os-job-"): { "cpu_percentage": float(row["CPUPerc"].rstrip("%")), "memory_used": 
_parse_size(row["MemUsage"].split()[0]), + "container_id": row["Container"], } for row in data if row["Name"].startswith("os-job-") diff --git a/opensafely/_vendor/jobrunner/models.py b/opensafely/_vendor/jobrunner/models.py index 56a7e640..8b3958ec 100644 --- a/opensafely/_vendor/jobrunner/models.py +++ b/opensafely/_vendor/jobrunner/models.py @@ -14,8 +14,8 @@ import secrets import shlex from enum import Enum +from functools import total_ordering -from opensafely._vendor.jobrunner.lib.commands import requires_db_access from opensafely._vendor.jobrunner.lib.database import databaseclass, migration from opensafely._vendor.jobrunner.lib.string_utils import slugify @@ -36,6 +36,7 @@ class State(Enum): # affordances in the web, cli and telemetry. +@total_ordering class StatusCode(Enum): # PENDING states # @@ -72,11 +73,16 @@ class StatusCode(Enum): UNMATCHED_PATTERNS = "unmatched_patterns" INTERNAL_ERROR = "internal_error" KILLED_BY_ADMIN = "killed_by_admin" + STALE_CODELISTS = "stale_codelists" @property def is_final_code(self): return self in StatusCode._FINAL_STATUS_CODES + def __lt__(self, other): + order = list(self.__class__) + return order.index(self) < order.index(other) + # used for tracing to know if a state is final or not StatusCode._FINAL_STATUS_CODES = [ @@ -87,6 +93,7 @@ def is_final_code(self): StatusCode.UNMATCHED_PATTERNS, StatusCode.INTERNAL_ERROR, StatusCode.KILLED_BY_ADMIN, + StatusCode.STALE_CODELISTS, ] @@ -158,6 +165,7 @@ class Job: trace_context TEXT, status_code_updated_at INT, level4_excluded_files TEXT, + requires_db BOOLEAN, PRIMARY KEY (id) ); @@ -187,6 +195,13 @@ class Job: """, ) + migration( + 3, + """ + ALTER TABLE job ADD COLUMN requires_db BOOLEAN; + """, + ) + id: str = None # noqa: A003 job_request_id: str = None state: State = None @@ -245,8 +260,12 @@ class Job: # used to track the OTel trace context for this job trace_context: dict = None + # map of file -> error level4_excluded_files: dict = None + # does the job require db access + requires_db: bool = False + # used to cache the job_request json by the tracing code _job_request = None @@ -316,10 +335,6 @@ def action_args(self): else: return [] - @property - def requires_db(self): - return requires_db_access(self.action_args) - def deterministic_id(seed): digest = hashlib.sha1(seed.encode("utf-8")).digest() diff --git a/opensafely/_vendor/jobrunner/record_stats.py b/opensafely/_vendor/jobrunner/record_stats.py index 8dd7e375..108c2485 100644 --- a/opensafely/_vendor/jobrunner/record_stats.py +++ b/opensafely/_vendor/jobrunner/record_stats.py @@ -1,10 +1,15 @@ """ Super crude docker/system stats logger """ +import json import logging +import sqlite3 import subprocess import sys +import threading import time +from collections import defaultdict +from pathlib import Path from opensafely._vendor.opentelemetry import trace @@ -17,12 +22,99 @@ log = logging.getLogger(__name__) tracer = trace.get_tracer("ticks") +# Simplest possible table. 
We're only storing aggregate data +DDL = """ +CREATE TABLE IF NOT EXISTS jobs ( + id TEXT, + metrics TEXT, + PRIMARY KEY (id) +) +""" + +CONNECTION_CACHE = threading.local() + + +def get_connection(readonly=True): + db_file = config.METRICS_FILE + + # developer check against using memory dbs, which cannot be used with this + # function, as we need to set mode ourselves + assert isinstance(db_file, Path), "config.METRICS_FILE db must be file path" + assert not str(db_file).startswith( + "file:" + ), "config.METRICS_FILE db must be file path, not url" + + if readonly: + db = f"file:{db_file}?mode=ro" + else: + db = f"file:{db_file}?mode=rwc" + + cache = CONNECTION_CACHE.__dict__ + if db not in cache: + try: + conn = sqlite3.connect(db, uri=True) + except sqlite3.OperationalError as exc: + # if it's readonly, we cannot create the file, so fail gracefully. + # Caller should check for conn being None. + if readonly and "unable to open" in str(exc).lower(): + return None + raise + + # manual transactions + conn.isolation_level = None + # Support dict-like access to rows + conn.row_factory = sqlite3.Row + + if not readonly: + conn.execute("PRAGMA journal_mode = WAL") + conn.execute(DDL) + + cache[db] = conn + + return cache[db] + + +def read_job_metrics(job_id): + conn = get_connection(readonly=True) + + raw_metrics = None + + if conn is not None: + try: + raw_metrics = conn.execute( + "SELECT metrics FROM jobs WHERE id = ?", + (job_id,), + ).fetchone() + except sqlite3.OperationalError as exc: + if "no such table" not in str(exc).lower(): + raise + + if raw_metrics is None: + metrics = {} + else: + metrics = json.loads(raw_metrics["metrics"]) + return defaultdict(float, metrics) + + +def write_job_metrics(job_id, metrics): + raw_metrics = json.dumps(metrics) + get_connection(readonly=False).execute( + """ + INSERT INTO jobs (id, metrics) VALUES (?, ?) + ON CONFLICT(id) DO UPDATE set metrics = ? + """, + (job_id, raw_metrics, raw_metrics), + ) + def main(): last_run = None while True: before = time.time() - last_run = record_tick_trace(last_run) + active_jobs = database.find_where( + models.Job, state__in=[models.State.PENDING, models.State.RUNNING] + ) + last_run = record_tick_trace(last_run, active_jobs) # record_tick_trace might have taken a while, so sleep the remaining interval # enforce a minimum time of 3s to ensure we don't hammer honeycomb or @@ -31,7 +123,7 @@ def main(): time.sleep(max(2, config.STATS_POLL_INTERVAL - elapsed)) -def record_tick_trace(last_run): +def record_tick_trace(last_run, active_jobs): """Record a periodic tick trace of current jobs.
This will give us more realtime information than the job traces, which only @@ -69,10 +161,7 @@ def record_tick_trace(last_run): # every span has the same timings start_time = last_run end_time = now - - active_jobs = database.find_where( - models.Job, state__in=[models.State.PENDING, models.State.RUNNING] - ) + duration_s = int((end_time - start_time) / 1e9) with tracer.start_as_current_span( "TICK", start_time=start_time, attributes=trace_attrs @@ -82,22 +171,72 @@ def record_tick_trace(last_run): root.add_event("stats_error", attributes=error_attrs, timestamp=start_time) for job in active_jobs: - span = tracer.start_span(job.status_code.name, start_time=start_time) + # we are using seconds for our metric calculations + + metrics = stats.get(job.id, {}) # set up attributes job_span_attrs = {} job_span_attrs.update(trace_attrs) - metrics = stats.get(job.id, {}) job_span_attrs["has_metrics"] = metrics != {} job_span_attrs.update(metrics) + # this means the job is running + if metrics: + runtime_s = int(now / 1e9) - job.started_at + # protect against unexpected runtimes + if runtime_s > 0: + job_metrics = update_job_metrics( + job, + metrics, + duration_s, + runtime_s, + ) + job_span_attrs.update(job_metrics) + else: + job_span_attrs.set("bad_tick_runtime", runtime_s) + # record span + span = tracer.start_span(job.status_code.name, start_time=start_time) tracing.set_span_metadata(span, job, **job_span_attrs) span.end(end_time) return end_time +def update_job_metrics(job, raw_metrics, duration_s, runtime_s): + """Update and persist per-job aggregate stats in the metrics db""" + + job_metrics = read_job_metrics(job.id) + + # If the job has been restarted so it's now running in a new container then we need + # to zero out all the previous stats. + if ( + # This check is only needed for smooth deployment as previous metrics dicts + # won't have the container_id populated yet + "container_id" in job_metrics + and job_metrics["container_id"] != raw_metrics["container_id"] + ): + job_metrics = defaultdict(float) + + cpu = raw_metrics["cpu_percentage"] + mem_mb = raw_metrics["memory_used"] / (1024.0 * 1024.0) + + job_metrics["cpu_sample"] = cpu + job_metrics["cpu_cumsum"] += duration_s * cpu + job_metrics["cpu_mean"] = job_metrics["cpu_cumsum"] / runtime_s + job_metrics["cpu_peak"] = max(job_metrics["cpu_peak"], cpu) + job_metrics["mem_mb_sample"] = mem_mb + job_metrics["mem_mb_cumsum"] += duration_s * mem_mb + job_metrics["mem_mb_mean"] = job_metrics["mem_mb_cumsum"] / runtime_s + job_metrics["mem_mb_peak"] = max(job_metrics["mem_mb_peak"], mem_mb) + job_metrics["container_id"] = raw_metrics["container_id"] + + write_job_metrics(job.id, job_metrics) + + return job_metrics + + if __name__ == "__main__": configure_logging() diff --git a/opensafely/_vendor/jobrunner/reusable_actions.py b/opensafely/_vendor/jobrunner/reusable_actions.py index 0be5532a..19f28541 100644 --- a/opensafely/_vendor/jobrunner/reusable_actions.py +++ b/opensafely/_vendor/jobrunner/reusable_actions.py @@ -2,9 +2,10 @@ import shlex import textwrap +from opensafely._vendor.pipeline.models import is_database_action + from opensafely._vendor.jobrunner import config from opensafely._vendor.jobrunner.lib import git -from opensafely._vendor.jobrunner.lib.commands import requires_db_access from opensafely._vendor.jobrunner.lib.github_validators import ( GithubValidationError, validate_branch_and_commit, @@ -182,7 +183,7 @@ def apply_reusable_action(run_args, reusable_action): action_image, action_tag = action_run_args[0].split(":") if 
action_image not in config.ALLOWED_IMAGES: raise ReusableActionError(f"Unrecognised runtime: {action_image}") - if requires_db_access(action_run_args): + if is_database_action(action_run_args): raise ReusableActionError( "Re-usable actions cannot run commands which access the database" ) diff --git a/opensafely/_vendor/jobrunner/sync.py b/opensafely/_vendor/jobrunner/sync.py index 16c98d25..81ec4941 100644 --- a/opensafely/_vendor/jobrunner/sync.py +++ b/opensafely/_vendor/jobrunner/sync.py @@ -9,7 +9,7 @@ from opensafely._vendor import requests -from opensafely._vendor.jobrunner import config, queries +from opensafely._vendor.jobrunner import config, queries, record_stats from opensafely._vendor.jobrunner.create_or_update_jobs import create_or_update_jobs from opensafely._vendor.jobrunner.lib.database import find_where, select_values from opensafely._vendor.jobrunner.lib.log_utils import configure_logging, set_log_context @@ -143,19 +143,22 @@ def job_to_remote_format(job): Convert our internal representation of a Job into whatever format the job-server expects """ + return { "identifier": job.id, "job_request_id": job.job_request_id, "action": job.action, "run_command": job.run_command, "status": job.state.value, - "status_code": job.status_code.value if job.status_code else "", + "status_code": job.status_code.value, "status_message": job.status_message or "", "created_at": job.created_at_isoformat, "updated_at": job.updated_at_isoformat, "started_at": job.started_at_isoformat, "completed_at": job.completed_at_isoformat, "trace_context": job.trace_context, + "metrics": record_stats.read_job_metrics(job.id), + "requires_db": job.requires_db, } diff --git a/opensafely/_vendor/jobrunner/tracing.py b/opensafely/_vendor/jobrunner/tracing.py index c10ce54b..4c5d9ec0 100644 --- a/opensafely/_vendor/jobrunner/tracing.py +++ b/opensafely/_vendor/jobrunner/tracing.py @@ -4,6 +4,7 @@ from opensafely._vendor.opentelemetry import trace from opensafely._vendor.opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opensafely._vendor.opentelemetry.sdk.resources import Resource from opensafely._vendor.opentelemetry.sdk.trace import TracerProvider from opensafely._vendor.opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter from opensafely._vendor.opentelemetry.trace import propagation @@ -15,11 +16,21 @@ logger = logging.getLogger(__name__) -provider = TracerProvider() -trace.set_tracer_provider(provider) -def add_exporter(exporter, processor=BatchSpanProcessor): +def get_provider(): + # https://github.com/open-telemetry/semantic-conventions/tree/main/docs/resource#service + resource = Resource.create( + attributes={ + "service.name": os.environ.get("OTEL_SERVICE_NAME", "jobrunner"), + "service.namespace": os.environ.get("BACKEND", "unknown"), + "service.version": config.VERSION, + } + ) + return TracerProvider(resource=resource) + + +def add_exporter(provider, exporter, processor=BatchSpanProcessor): """Utility method to add an exporter. 
We use the BatchSpanProcessor by default, which is the default for @@ -33,20 +44,30 @@ def add_exporter(exporter, processor=BatchSpanProcessor): provider.add_span_processor(processor(exporter)) -def setup_default_tracing(): +def setup_default_tracing(set_global=True): """Inspect environment variables and set up exporters accordingly.""" + + provider = get_provider() + if "OTEL_EXPORTER_OTLP_HEADERS" in os.environ: - if "OTEL_SERVICE_NAME" not in os.environ: - raise Exception( - "OTEL_EXPORTER_OTLP_HEADERS is configured, but missing OTEL_SERVICE_NAME" - ) + # workaround for env file parsing issues + cleaned_headers = os.environ["OTEL_EXPORTER_OTLP_HEADERS"].strip("\"'") + # put back into env to be parsed properly + os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = cleaned_headers + if "OTEL_EXPORTER_OTLP_ENDPOINT" not in os.environ: os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://api.honeycomb.io" - add_exporter(OTLPSpanExporter()) + # now we can created OTLP exporter + add_exporter(provider, OTLPSpanExporter()) if "OTEL_EXPORTER_CONSOLE" in os.environ: - add_exporter(ConsoleSpanExporter()) + add_exporter(provider, ConsoleSpanExporter()) + + if set_global: + trace.set_tracer_provider(provider) + + return provider @warn_assertions @@ -211,7 +232,7 @@ def complete_job(job, timestamp_ns, error=None, results=None, **attrs): # to send it. # this effectively starts a new trace - root_span = tracer.start_span("JOB", start_time=job_start_time) + root_span = tracer.start_span("JOB", context={}, start_time=job_start_time) # replace the context with the one from the original root span root_span._context = root_ctx @@ -278,9 +299,10 @@ def trace_attributes(job, results=None): orgs=",".join(job._job_request.get("orgs", [])), state=job.state.name, message=job.status_message, + # convert float seconds to ns integer + created_at=int(job.created_at * 1e9), + started_at=int(job.started_at * 1e9) if job.started_at else None, requires_db=job.requires_db, - jobrunner_version=config.VERSION, - jobrunner_sha=config.GIT_SHA, ) # local_run jobs don't have a commit diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info.pyi b/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info.pyi deleted file mode 100644 index 57091450..00000000 --- a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info.pyi +++ /dev/null @@ -1 +0,0 @@ -from opensafely_jobrunner-2.72.0.dist-info import * \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/direct_url.json b/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/direct_url.json deleted file mode 100644 index b77914cc..00000000 --- a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/direct_url.json +++ /dev/null @@ -1 +0,0 @@ -{"url": "https://github.com/opensafely-core/job-runner", "vcs_info": {"commit_id": "04e7cb910058be1f831a3615a0a8e0083591a42a", "requested_revision": "v2.72.0", "vcs": "git"}} \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info.pyi b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info.pyi new file mode 100644 index 00000000..d3014dc1 --- /dev/null +++ b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info.pyi @@ -0,0 +1 @@ +from opensafely_jobrunner-2.73.0.dist-info import * \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/INSTALLER b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/INSTALLER similarity index 100% rename from 
opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/INSTALLER rename to opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/INSTALLER diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/LICENSE b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/LICENSE similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/LICENSE rename to opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/LICENSE diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/METADATA b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/METADATA similarity index 99% rename from opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/METADATA rename to opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/METADATA index 875cf937..0aaf482e 100644 --- a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/METADATA +++ b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: opensafely-jobrunner -Version: 2.72.0 +Version: 2.73.0 Summary: OpenSAFELY job scheduling and executor Author-email: OpenSAFELY License: OpenSAFELY Job Runner @@ -26,7 +26,7 @@ Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Requires-Python: >=3.8 Description-Content-Type: text/markdown License-File: LICENSE -Requires-Dist: opensafely-pipeline @ git+https://github.com/opensafely-core/pipeline@v2023.11.06.145820 +Requires-Dist: opensafely-pipeline @ git+https://github.com/opensafely-core/pipeline@v2024.03.19.153938 Requires-Dist: ruyaml Requires-Dist: requests Requires-Dist: opentelemetry-exporter-otlp-proto-http diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/RECORD b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/RECORD similarity index 59% rename from opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/RECORD rename to opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/RECORD index 17518485..1a6b31c0 100644 --- a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/RECORD +++ b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/RECORD @@ -6,27 +6,26 @@ ../../bin/prepare_for_reboot,sha256=qaOUP298vP2oxYKLLO4wEgaS50kvzVkmALvRq8NurpU,263 ../../bin/retry_job,sha256=aO9Pjf1BgdxuNqLdPnTfstZoerlZvUWsc2IitKR5fNg,254 jobrunner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jobrunner/actions.py,sha256=eyzCccC_ApLJAbRhCIGq0uwm9e4flFmAvyAOC9lTDQE,3046 +jobrunner/actions.py,sha256=bGcLGnsgTKW9mHkZSiN5HdauFADCiUu6v_VlsWhNAOc,3128 jobrunner/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jobrunner/cli/add_job.py,sha256=12Fm5VffTh2QFFa96g88rheKGq8eJLp_V46FpeHQVj0,2826 +jobrunner/cli/add_job.py,sha256=hbOFNJx4bUTXch3IriMOwUG66lfOHIbyGnTdPYnCvmA,2968 jobrunner/cli/flags.py,sha256=6dFPWab0vB6CGCLg5yMAp9iQJo8Y6rN5cfQ7bEXhgqs,2704 -jobrunner/cli/kill_job.py,sha256=nThDIJ-Lbys04_QsAiPuXkHUfoFEbKxAyDGb_75GtSQ,2862 +jobrunner/cli/kill_job.py,sha256=_cTNi-JjirvCVfK2vSs2SQYREkCiyj2jLeNG4yYaZAc,3333 jobrunner/cli/local_run.py,sha256=G1UQ-NEijyB4dAiC18fqTbPI4k18BGyPfj-DnOGyc6c,25679 jobrunner/cli/migrate.py,sha256=V2cI3Kee67DNhqJUiDnALNvzHlP4mx_myKQycD4E9uI,609 jobrunner/cli/prepare_for_reboot.py,sha256=Zdajs1cnkCCsKklFjg3mynU2NJqd5n0zFct3SdA9Mig,1493 jobrunner/cli/retry_job.py,sha256=qDTiYwxc59QYZBLfgv_t-CAa6kmmhiCKh0sLpv5xhwA,2458 -jobrunner/config.py,sha256=LmaJd9KakqQ4Jej-tWyuGoaF-kpkHQFEp3buje0SzPE,9647 -jobrunner/create_or_update_jobs.py,sha256=5ugZzBjmVzXhyAlrZ6XaRr-RFes_PRElSh2md_jMQIE,14843 
+jobrunner/config.py,sha256=qge3J3ldp_Wdjk2z8EI6HWcb1lcIDCJI_Zsuo_d6Rbc,9460 +jobrunner/create_or_update_jobs.py,sha256=eA-wK0VwWwRFF5_t8Qfz4ddCOL5COnNyAAXCZWKuTgY,15344 jobrunner/executors/__init__.py,sha256=4-eu9LwIzhALtsq1LDC9NQ_5nbcjsPDdIEGvRvZwIbo,283 -jobrunner/executors/local.py,sha256=fLPbUQQx_tGu-LaBWsoKYEDNcC3KQ6ntWXGYyB0w93c,31665 +jobrunner/executors/local.py,sha256=WD9UXttTWQLBEmyoL-hTaV3Bq0UvHAnZEpX4ZI13tXw,32226 jobrunner/executors/logging.py,sha256=iCISXFR8sbtCrp-E3jaQlC1Kw6Huf65b-dqomrJzywI,2104 jobrunner/executors/volumes.py,sha256=H8lISCydAyi9-g3p344KkwVhNhrqfWO5RO-NZRFVM5c,7102 -jobrunner/job_executor.py,sha256=X6SyJ8zAdr8Rtidla2k1RLIB1rtQivnK4ZPkff9VLr4,13870 +jobrunner/job_executor.py,sha256=JnUfIKwz9gptljbsgVo5gSqQfZiCwByEaNKFL5qOAvs,13918 jobrunner/lib/__init__.py,sha256=Lv8p-FcwvRSjDZoDjXaNvnb4QjjKgGB0gqFbg3UeuLs,2775 -jobrunner/lib/commands.py,sha256=t8vNZ1KMbkIEpFs-3t_W4EEzw8nlSvEB8wNefVw51EA,667 jobrunner/lib/database.py,sha256=tAa650LomUvf4Gc0q0A-2cXswqoSSGPKmc1fh9SURFQ,12272 jobrunner/lib/docker.py,sha256=C2fp3quN4vkaqg2MvMNC_k6Zbz8awN-oFtFuSLqA6xY,15825 -jobrunner/lib/docker_stats.py,sha256=Wj1Tg7W2gVTk_Znik_IT0Qvlw9-n4lIoIFGozMGRVgk,1311 +jobrunner/lib/docker_stats.py,sha256=PBx1eU7Rax-Q-fRLKXGSvFv-UD6IYIEENqH6_hoWpKU,1357 jobrunner/lib/git.py,sha256=5Bw3bRk4EJaNEmcOADFk8Ww_NHeF5GtqDpJ5rR3KYFA,13145 jobrunner/lib/github_validators.py,sha256=3YW04zbYz15lnGXjQ3XHrsaH1VyRX_kmd6lF4vyTKM8,2412 jobrunner/lib/log_utils.py,sha256=xy86246BLYDjt1VR5SPGggrvkFVbxN1UaqsMUZQGd8k,5660 @@ -35,20 +34,20 @@ jobrunner/lib/path_utils.py,sha256=559GU8YpHq71ib95i4iOsEAume6xh0fNC3Ox9eD-W5c,1 jobrunner/lib/string_utils.py,sha256=GAyAEQxyUiaufzphnXCYuuyDeU9uB1QMAaIm79dP3OA,1336 jobrunner/lib/subprocess_utils.py,sha256=RdoHzJzO4xA5SCBa-Cl9jcuE7SJNmsB8B1qa3wkOFvU,996 jobrunner/lib/yaml_utils.py,sha256=H6FU_e2uQYp3qxq_QXn-oyK5cuQatDSq-q5HBv5Me3k,1747 -jobrunner/models.py,sha256=eZAwJv0cnBiQ24EtMhnYb7OkTmyyDympGwpu1TnO8Z0,11722 +jobrunner/models.py,sha256=AwtbptqMJNg-K25vi43jLXhCXZPuCk7iFtMqG_cOhxo,12062 jobrunner/queries.py,sha256=EsBtoRsFr_dCqHOKu44Sgxjdgmfnzd0dfOQYogNITYo,2225 -jobrunner/record_stats.py,sha256=jW9wUWRGENvXMcn6_eWh0ox6eoZUIdHw6IccaFYy0KY,3135 -jobrunner/reusable_actions.py,sha256=Am1Ju-lbtnIrnqWxVvhiBP0QPkNsxnBTKIQ-rsPvvBU,7689 +jobrunner/record_stats.py,sha256=etCjpOyv0zT2gxagDnaovbiAuUfLUwildDWDtL3I_RE,7567 +jobrunner/reusable_actions.py,sha256=yt9qSKXUPIPxI-2wM7tgUFJdQlOMneqZqqT8JUpVQow,7683 jobrunner/run.py,sha256=TGTWvN6d56D_wm6ZfE5G9UAawKg1FkAif1l8LBc_kTw,27509 jobrunner/service.py,sha256=MhppSwuGiDTrkcduxGfmHLoUpD1Ao0fRI2lfuQkb11Y,4182 -jobrunner/sync.py,sha256=UBHlebp6bn-5itIxRr_YRM-RZ3AaTZbY-wSYPBUFNZY,5041 -jobrunner/tracing.py,sha256=C7c6jCirfeBUYUhfSdhc6E46Lafqrs-jElZtQVOyiuo,12266 -opensafely_jobrunner-2.72.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -opensafely_jobrunner-2.72.0.dist-info/LICENSE,sha256=F5fS3mizkbW4yOk3XP--G0oDJbZAovAKuSIZShtkCw4,671 -opensafely_jobrunner-2.72.0.dist-info/METADATA,sha256=glIDIRu0dFdOdHteXdiraE2vIWGSNIrma5q_NUuusvc,8212 -opensafely_jobrunner-2.72.0.dist-info/RECORD,, -opensafely_jobrunner-2.72.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -opensafely_jobrunner-2.72.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 -opensafely_jobrunner-2.72.0.dist-info/direct_url.json,sha256=oxhn5U8mEq_q_wYxXndiXYluP6N1OLu9yVnnOOFox_4,174 -opensafely_jobrunner-2.72.0.dist-info/entry_points.txt,sha256=hat6DNe6ZtwPqk0GIs5BOzd-18yfWfwJrouA1YAmBJY,298 
-opensafely_jobrunner-2.72.0.dist-info/top_level.txt,sha256=dHLIHTr12iPEGMfrfPkXrkh8qGsw52DE0cbpHQVbiic,10 +jobrunner/sync.py,sha256=nRyHluwAxQjSNw36xpq5sJXhJNLmtREOHjFjhCT7P7A,5127 +jobrunner/tracing.py,sha256=O9CGARNrGYpEqxze2hJYqXJYVb8DJxfasJZFQ4NmdbI,13043 +opensafely_jobrunner-2.73.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +opensafely_jobrunner-2.73.0.dist-info/LICENSE,sha256=F5fS3mizkbW4yOk3XP--G0oDJbZAovAKuSIZShtkCw4,671 +opensafely_jobrunner-2.73.0.dist-info/METADATA,sha256=guesaqshUVh2giVzV8xXA3OzFyYYGRTGR2wDVPC9H2g,8212 +opensafely_jobrunner-2.73.0.dist-info/RECORD,, +opensafely_jobrunner-2.73.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opensafely_jobrunner-2.73.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +opensafely_jobrunner-2.73.0.dist-info/direct_url.json,sha256=RarHrgvUk3F9C6SXo4aG4nCd7zKmLRrIWr_nox3gxd0,174 +opensafely_jobrunner-2.73.0.dist-info/entry_points.txt,sha256=hat6DNe6ZtwPqk0GIs5BOzd-18yfWfwJrouA1YAmBJY,298 +opensafely_jobrunner-2.73.0.dist-info/top_level.txt,sha256=dHLIHTr12iPEGMfrfPkXrkh8qGsw52DE0cbpHQVbiic,10 diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/REQUESTED b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/REQUESTED similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/REQUESTED rename to opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/REQUESTED diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/WHEEL b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/WHEEL similarity index 65% rename from opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/WHEEL rename to opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/WHEEL index ba48cbcf..bab98d67 100644 --- a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/WHEEL +++ b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) +Generator: bdist_wheel (0.43.0) Root-Is-Purelib: true Tag: py3-none-any diff --git a/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/direct_url.json b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/direct_url.json new file mode 100644 index 00000000..1dac7d6c --- /dev/null +++ b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/direct_url.json @@ -0,0 +1 @@ +{"url": "https://github.com/opensafely-core/job-runner", "vcs_info": {"commit_id": "22f9fd5eb25280061d386178304d8de9e0174f83", "requested_revision": "v2.73.0", "vcs": "git"}} \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/entry_points.txt b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/entry_points.txt similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/entry_points.txt rename to opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/entry_points.txt diff --git a/opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/top_level.txt b/opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/top_level.txt similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.72.0.dist-info/top_level.txt rename to opensafely/_vendor/opensafely_jobrunner-2.73.0.dist-info/top_level.txt diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info.pyi b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info.pyi deleted file mode 100644 index 3a242202..00000000 --- 
a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info.pyi +++ /dev/null @@ -1 +0,0 @@ -from opensafely_pipeline-2023.11.6.145820.dist-info import * \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/RECORD b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/RECORD deleted file mode 100644 index 9c2ea350..00000000 --- a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/RECORD +++ /dev/null @@ -1,20 +0,0 @@ -opensafely_pipeline-2023.11.6.145820.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -opensafely_pipeline-2023.11.6.145820.dist-info/LICENSE,sha256=3dYRqvpnIRI1ISbzwG_EKRHulT5qzYLacVDM09Ehn5Y,675 -opensafely_pipeline-2023.11.6.145820.dist-info/METADATA,sha256=QJM6KCDHxG36v4rOhWDvKEvVfBV_tJENFCZYkYqEVOs,1830 -opensafely_pipeline-2023.11.6.145820.dist-info/RECORD,, -opensafely_pipeline-2023.11.6.145820.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -opensafely_pipeline-2023.11.6.145820.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 -opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json,sha256=PgG1h8Y9fD2xAmM-gatCkYnQztx9Nx7sszSHN60D43k,183 -opensafely_pipeline-2023.11.6.145820.dist-info/top_level.txt,sha256=Qdc1eKrvhKK_o9CPbdooOdDt7g3ZSXZDrNXHmUGl94Q,9 -pipeline/__init__.py,sha256=OXi7WT9uT8zmpPCJY5mh7DCBiSdRg7D6EFOT-wNTulM,236 -pipeline/__main__.py,sha256=K81Vj0UTwIrEjAhjZuNV0giNUabkuHjUkbbAB5_Q5W8,470 -pipeline/constants.py,sha256=8ji9shMtzIXpUb72ah78nG3DZHH2k3N-EpltmMsyMQs,386 -pipeline/exceptions.py,sha256=AzXfyBEHPp3gypXIVoowgstBFG2bnNcjeUBVwhzlows,151 -pipeline/features.py,sha256=IIPT4buZH7VZ4Q54oXug8cDdFJWRzKm0JhZmT9nBWCk,573 -pipeline/legacy.py,sha256=yw5sC8gyYVgRm_Oq67F8AlBppwVZu_9nnG130lThhvY,413 -pipeline/loading.py,sha256=Knlu2ddYV4tByaHu4VeH1fQbScSPzvb2kd8IhXy9EVY,2753 -pipeline/main.py,sha256=AeJWX7csBOen9zXHr_pi0GWSEIgFAQORGdb8JHa7Dtg,993 -pipeline/models.py,sha256=xzuiGvdZwcFQMHsUB5mTiMlvzkwDA1IHfFfg2ID_9ok,11866 -pipeline/outputs.py,sha256=P2TzmZoi_FB9pM_PgUyz4AI3eFvQSXUCg8L8vGjcvBM,781 -pipeline/types.py,sha256=8Sx4Kfr9UwLpQFWVS1VvpsCAIjzD4t8hLnaIc7ha8No,729 -pipeline/validation.py,sha256=21NRtPcVNqgkZwu6kgyXl3lpKFhWDqBxY6ISJ4-8dzg,4354 diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json deleted file mode 100644 index 74d97d3c..00000000 --- a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json +++ /dev/null @@ -1 +0,0 @@ -{"url": "https://github.com/opensafely-core/pipeline", "vcs_info": {"commit_id": "8c706d663fb2fcae279bc0f15c1b1b3bd4314e99", "requested_revision": "v2023.11.06.145820", "vcs": "git"}} \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info.pyi b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info.pyi new file mode 100644 index 00000000..884de2f6 --- /dev/null +++ b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info.pyi @@ -0,0 +1 @@ +from opensafely_pipeline-2024.3.19.153938.dist-info import * \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/INSTALLER b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/INSTALLER similarity index 100% rename from opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/INSTALLER rename to 
opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/INSTALLER diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/LICENSE b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/LICENSE similarity index 100% rename from opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/LICENSE rename to opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/LICENSE diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/METADATA b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/METADATA similarity index 98% rename from opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/METADATA rename to opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/METADATA index fb712877..b22eac4e 100644 --- a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/METADATA +++ b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: opensafely-pipeline -Version: 2023.11.6.145820 +Version: 2024.3.19.153938 Summary: OpenSAFELY pipeline configuration parsing library Author-email: OpenSAFELY License: ${GITHUB_REPOSITORY_NAME} diff --git a/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/RECORD b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/RECORD new file mode 100644 index 00000000..bcef7724 --- /dev/null +++ b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/RECORD @@ -0,0 +1,20 @@ +opensafely_pipeline-2024.3.19.153938.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +opensafely_pipeline-2024.3.19.153938.dist-info/LICENSE,sha256=3dYRqvpnIRI1ISbzwG_EKRHulT5qzYLacVDM09Ehn5Y,675 +opensafely_pipeline-2024.3.19.153938.dist-info/METADATA,sha256=x0LGd0bWMC86GoVL4Av7Ye1z-2FtCfeQbX-uPMbXwO8,1830 +opensafely_pipeline-2024.3.19.153938.dist-info/RECORD,, +opensafely_pipeline-2024.3.19.153938.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opensafely_pipeline-2024.3.19.153938.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +opensafely_pipeline-2024.3.19.153938.dist-info/direct_url.json,sha256=qGGI-0l5jJJgZRhbzIR_0HVbtEHolohEl85AADcmpHw,183 +opensafely_pipeline-2024.3.19.153938.dist-info/top_level.txt,sha256=Qdc1eKrvhKK_o9CPbdooOdDt7g3ZSXZDrNXHmUGl94Q,9 +pipeline/__init__.py,sha256=OXi7WT9uT8zmpPCJY5mh7DCBiSdRg7D6EFOT-wNTulM,236 +pipeline/__main__.py,sha256=5hAi8GJDuS0ufV6IA9TP91SPQphoJQjdBTtBHzPeIQQ,471 +pipeline/constants.py,sha256=GrlfnUSPTss2H_G_GtP5jyICBwRVCSJJxdPa9zYXUio,370 +pipeline/exceptions.py,sha256=AzXfyBEHPp3gypXIVoowgstBFG2bnNcjeUBVwhzlows,151 +pipeline/features.py,sha256=IIPT4buZH7VZ4Q54oXug8cDdFJWRzKm0JhZmT9nBWCk,573 +pipeline/legacy.py,sha256=yw5sC8gyYVgRm_Oq67F8AlBppwVZu_9nnG130lThhvY,413 +pipeline/loading.py,sha256=Knlu2ddYV4tByaHu4VeH1fQbScSPzvb2kd8IhXy9EVY,2753 +pipeline/main.py,sha256=AeJWX7csBOen9zXHr_pi0GWSEIgFAQORGdb8JHa7Dtg,993 +pipeline/models.py,sha256=SVEwdeuecIKVLkBsTC2zycFV_wqMmfHOnJKxniV5RXk,12023 +pipeline/outputs.py,sha256=P2TzmZoi_FB9pM_PgUyz4AI3eFvQSXUCg8L8vGjcvBM,781 +pipeline/types.py,sha256=k5xxdWdNLMSeqysPNpP8fq33NZ6bEzyDBMVoG9CjuBc,730 +pipeline/validation.py,sha256=21NRtPcVNqgkZwu6kgyXl3lpKFhWDqBxY6ISJ4-8dzg,4354 diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/REQUESTED b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/REQUESTED similarity index 100% rename from 
opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/REQUESTED rename to opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/REQUESTED diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/WHEEL b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/WHEEL similarity index 65% rename from opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/WHEEL rename to opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/WHEEL index ba48cbcf..bab98d67 100644 --- a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/WHEEL +++ b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) +Generator: bdist_wheel (0.43.0) Root-Is-Purelib: true Tag: py3-none-any diff --git a/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/direct_url.json b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/direct_url.json new file mode 100644 index 00000000..1580a729 --- /dev/null +++ b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/direct_url.json @@ -0,0 +1 @@ +{"url": "https://github.com/opensafely-core/pipeline", "vcs_info": {"commit_id": "0a5688aa777754ea045ff290fbcc99d50b84db4b", "requested_revision": "v2024.03.19.153938", "vcs": "git"}} \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/top_level.txt b/opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/top_level.txt similarity index 100% rename from opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/top_level.txt rename to opensafely/_vendor/opensafely_pipeline-2024.3.19.153938.dist-info/top_level.txt diff --git a/opensafely/_vendor/pipeline/__main__.py b/opensafely/_vendor/pipeline/__main__.py index 72dd4a9b..7ff51b8e 100644 --- a/opensafely/_vendor/pipeline/__main__.py +++ b/opensafely/_vendor/pipeline/__main__.py @@ -1,6 +1,7 @@ """ This module is for testing in local development. """ + import sys from devtools import debug diff --git a/opensafely/_vendor/pipeline/constants.py b/opensafely/_vendor/pipeline/constants.py index 728dcd1f..03a077aa 100644 --- a/opensafely/_vendor/pipeline/constants.py +++ b/opensafely/_vendor/pipeline/constants.py @@ -5,7 +5,6 @@ [ # tables ".csv", - ".tsv", # images ".jpg", ".jpeg", diff --git a/opensafely/_vendor/pipeline/models.py b/opensafely/_vendor/pipeline/models.py index 12cc5e00..4ce496c1 100644 --- a/opensafely/_vendor/pipeline/models.py +++ b/opensafely/_vendor/pipeline/models.py @@ -20,21 +20,34 @@ cohortextractor_pat = re.compile(r"cohortextractor:\S+ generate_cohort") databuilder_pat = re.compile(r"databuilder|ehrql:\S+ generate[-_]dataset") -database_action_pat = re.compile( - r""" - # image name - ^\b(?:cohortextractor|databuilder|ehrql)\b - # : (v0, latest etc) - :.+ - # command; for cohortextractor, only generate_cohort is a database action - # For ehrql (and legacy databuilder), generate-dataset and generate-measures - # are both database actions. Happily cohortextractor uses generate_measures as - # its measures command, so we can excluded cohortextractor measures - # actions with this regex. 
- \b(?:generate_cohort|generate-dataset|generate-measures) - """, - flags=re.X, -) +# orderd by most common, going forwards +DB_COMMANDS = { + "ehrql": ("generate-dataset", "generate-measures"), + "sqlrunner": "*", # all commands are valid + "cohortextractor": ("generate_cohort", "generate_codelist_report"), + "databuilder": ("generate-dataset",), +} + + +def is_database_action(args: List[str]) -> bool: + """ + By default actions do not have database access, but certain trusted actions require it + """ + image = args[0] + image = image.split(":")[0] + db_commands = DB_COMMANDS.get(image) + if db_commands is None: + return False + + if db_commands == "*": + return True + + # no command specified + if len(args) == 1: + return False + + # 1st arg is command + return args[1] in db_commands class Expectations(BaseModel): @@ -131,7 +144,7 @@ def parse_run_string(cls, run: str) -> Command: @property def is_database_action(self) -> bool: - return database_action_pat.match(self.run.raw) is not None + return is_database_action(self.run.parts) class PartiallyValidatedPipeline(TypedDict): diff --git a/opensafely/_vendor/pipeline/types.py b/opensafely/_vendor/pipeline/types.py index 51d8f343..aa359e56 100644 --- a/opensafely/_vendor/pipeline/types.py +++ b/opensafely/_vendor/pipeline/types.py @@ -5,6 +5,7 @@ validate. Some of our validation is done via custom methods using the raw dictionary data. """ + from __future__ import annotations import pathlib diff --git a/vendor.in b/vendor.in index 4d22abf8..9f3a6c61 100644 --- a/vendor.in +++ b/vendor.in @@ -1,2 +1,2 @@ ---requirement https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt -git+https://github.com/opensafely-core/job-runner@v2.72.0 +--requirement https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt +git+https://github.com/opensafely-core/job-runner@v2.73.0 diff --git a/vendor.txt b/vendor.txt index 947fa951..e6e24b43 100644 --- a/vendor.txt +++ b/vendor.txt @@ -6,90 +6,90 @@ # backoff==2.1.2 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http certifi==2020.11.8 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # requests chardet==3.0.4 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # requests deprecated==1.2.13 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opentelemetry-api distro==1.8.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # ruyaml googleapis-common-protos==1.56.4 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http idna==2.10 # via - # -r 
https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # requests -opensafely-jobrunner @ git+https://github.com/opensafely-core/job-runner@v2.72.0 +opensafely-jobrunner @ git+https://github.com/opensafely-core/job-runner@v2.73.0 # via -r vendor.in -opensafely-pipeline @ git+https://github.com/opensafely-core/pipeline@v2023.11.06.145820 +opensafely-pipeline @ git+https://github.com/opensafely-core/pipeline@v2024.03.19.153938 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opensafely-jobrunner opentelemetry-api==1.12.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk opentelemetry-exporter-otlp-proto-http==1.12.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opensafely-jobrunner opentelemetry-proto==1.12.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http opentelemetry-sdk==1.12.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http opentelemetry-semantic-conventions==0.33b0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opentelemetry-sdk protobuf==3.20.2 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # googleapis-common-protos # opentelemetry-proto pydantic==1.10.12 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opensafely-pipeline requests==2.25.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opensafely-jobrunner # opentelemetry-exporter-otlp-proto-http ruyaml==0.91.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opensafely-jobrunner # opensafely-pipeline typing-extensions==4.7.1 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # opentelemetry-sdk # pydantic urllib3==1.26.5 # via - # -r 
https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # requests wrapt==1.14.1 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.72.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.73.0/requirements.prod.txt # deprecated # The following packages are considered to be unsafe in a requirements file:
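Illustrative sketch (not part of the patch): the record_stats.py hunks above make update_job_metrics keep, per job, the latest sample, a running peak, and a time-weighted mean built from the *_cumsum counters. The snippet below mirrors that arithmetic for two ticks of a single job; the record_tick helper, the tick durations and the CPU percentages are invented for illustration and are not the vendored code itself.

from collections import defaultdict

# Mimics the cpu_* bookkeeping in update_job_metrics() across two ticks.
job_metrics = defaultdict(float)

def record_tick(job_metrics, cpu, duration_s, runtime_s):
    # duration_s = seconds since the previous tick; runtime_s = total job runtime so far
    job_metrics["cpu_sample"] = cpu
    job_metrics["cpu_cumsum"] += duration_s * cpu
    job_metrics["cpu_mean"] = job_metrics["cpu_cumsum"] / runtime_s
    job_metrics["cpu_peak"] = max(job_metrics["cpu_peak"], cpu)

record_tick(job_metrics, cpu=80.0, duration_s=30, runtime_s=30)  # mean 80.0, peak 80.0
record_tick(job_metrics, cpu=20.0, duration_s=30, runtime_s=60)  # mean 50.0, peak 80.0
print(dict(job_metrics))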
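A second sketch, for the database-access check this update moves from jobrunner/lib/commands.py into the vendored pipeline package (see the pipeline/models.py and reusable_actions.py hunks). It shows how is_database_action classifies a run command against the DB_COMMANDS table; the import path is the one the patch itself adds in reusable_actions.py, while the image tags and script paths are invented examples.

from opensafely._vendor.pipeline.models import is_database_action

# ehrql generate-dataset and generate-measures are trusted database actions
print(is_database_action(["ehrql:v1", "generate-dataset", "--output", "output/dataset.csv"]))  # True
# every sqlrunner command counts as a database action ("*" in DB_COMMANDS)
print(is_database_action(["sqlrunner:latest", "analysis/query.sql"]))  # True
# cohortextractor generate_measures is deliberately not a database action
print(is_database_action(["cohortextractor:latest", "generate_measures"]))  # False
# ordinary study code never gets database access
print(is_database_action(["python:latest", "analysis/model.py"]))  # False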