diff --git a/opensafely/_vendor/chardet-3.0.4.dist-info/RECORD b/opensafely/_vendor/chardet-3.0.4.dist-info/RECORD index 162a8de..a0d18c4 100644 --- a/opensafely/_vendor/chardet-3.0.4.dist-info/RECORD +++ b/opensafely/_vendor/chardet-3.0.4.dist-info/RECORD @@ -1,4 +1,4 @@ -../../bin/chardetect,sha256=k6iJnsNSUeMlNutkq2v__zPIQpuglvel238-vfYDBlA,283 +../../bin/chardetect,sha256=Iim50Xo_A-6jNKXhng0od1YWXEr5OEzrv2MBb1TfLes,256 chardet-3.0.4.dist-info/DESCRIPTION.rst,sha256=PQ4sBsMyKFZkjC6QpmbpLn0UtCNyeb-ZqvCGEgyZMGk,2174 chardet-3.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 chardet-3.0.4.dist-info/METADATA,sha256=RV_2I4B1Z586DL8oVO5Kp7X5bUdQ5EuKAvNoAEF8wSw,3239 diff --git a/opensafely/_vendor/distro-1.8.0.dist-info/RECORD b/opensafely/_vendor/distro-1.8.0.dist-info/RECORD index 3963ad8..3ae948f 100644 --- a/opensafely/_vendor/distro-1.8.0.dist-info/RECORD +++ b/opensafely/_vendor/distro-1.8.0.dist-info/RECORD @@ -1,4 +1,4 @@ -../../bin/distro,sha256=24TzW2cWStxsIbDGwqb31a4yu0LPhBq-sA9gV1wIgWs,274 +../../bin/distro,sha256=LL7TkGdbIp5yO7jgBe9K5tlwiCJXMlG9hKhj5qmlETo,247 distro-1.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 distro-1.8.0.dist-info/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325 distro-1.8.0.dist-info/METADATA,sha256=NhYw94UPXb78_Z3_VtLxTJ1zQgUUKoTndg10uKJX800,6915 diff --git a/opensafely/_vendor/jobrunner/cli/local_run.py b/opensafely/_vendor/jobrunner/cli/local_run.py index 2c543fb..bcb831d 100644 --- a/opensafely/_vendor/jobrunner/cli/local_run.py +++ b/opensafely/_vendor/jobrunner/cli/local_run.py @@ -450,9 +450,6 @@ def create_job_request_and_jobs(project_dir, actions, force_run_dependencies): workspace=project_dir.name, database_name="dummy", force_run_dependencies=force_run_dependencies, - # The default behaviour of refusing to run if a dependency has failed - # makes for an awkward workflow when iterating in development - force_run_failed=True, branch="", original={"created_by": getuser()}, ) diff --git a/opensafely/_vendor/jobrunner/config.py b/opensafely/_vendor/jobrunner/config.py index fe5d5d6..4ae5270 100644 --- a/opensafely/_vendor/jobrunner/config.py +++ b/opensafely/_vendor/jobrunner/config.py @@ -86,11 +86,21 @@ def _is_valid_backend_name(name): DOCKER_REGISTRY = os.environ.get("DOCKER_REGISTRY", "ghcr.io/opensafely-core") -DATABASE_URLS = { - "full": os.environ.get("FULL_DATABASE_URL"), - "slice": os.environ.get("SLICE_DATABASE_URL"), - "dummy": os.environ.get("DUMMY_DATABASE_URL"), -} + +def database_urls_from_env(env): + db_names = ["default", "include_t1oo"] + return { + db_name: db_url + for db_name, db_url in [ + (db_name, env.get(f"{db_name.upper()}_DATABASE_URL")) + for db_name in db_names + ] + if db_url + } + + +DATABASE_URLS = database_urls_from_env(os.environ) + TEMP_DATABASE_NAME = os.environ.get("TEMP_DATABASE_NAME") @@ -128,6 +138,31 @@ def _is_valid_backend_name(name): MAX_RETRIES = int(os.environ.get("MAX_RETRIES", 0)) +LEVEL4_MAX_FILESIZE = int( + os.environ.get("LEVEL4_MAX_FILESIZE", 16 * 1024 * 1024) +) # 16mb + + +# TODO: we might want to take this list from pipeline if we implement it there. 
+LEVEL4_FILE_TYPES = [ + # tables + ".csv", + ".tsv", + # images + ".jpg", + ".jpeg", + ".png", + ".svg", + ".svgz", + # reports + ".html", + ".pdf", + ".txt", + ".log", + ".json", + ".md", +] + STATA_LICENSE = os.environ.get("STATA_LICENSE") STATA_LICENSE_REPO = os.environ.get( "STATA_LICENSE_REPO", diff --git a/opensafely/_vendor/jobrunner/create_or_update_jobs.py b/opensafely/_vendor/jobrunner/create_or_update_jobs.py index a6d5231..2386392 100644 --- a/opensafely/_vendor/jobrunner/create_or_update_jobs.py +++ b/opensafely/_vendor/jobrunner/create_or_update_jobs.py @@ -265,15 +265,9 @@ def job_should_be_rerun(job_request, job): # Otherwise if it succeeded last time there's no need to run again if job.state == State.SUCCEEDED: return False - # If it failed last time ... + # If it failed last time, re-run it by default elif job.state == State.FAILED: - # ... and we're forcing failed jobs to re-run then re-run it - if job_request.force_run_failed: - return True - # Otherwise it's an error condition - raise JobRequestError( - f"{job.action} failed on a previous run and must be re-run" - ) + return True else: raise ValueError(f"Invalid state: {job}") @@ -289,7 +283,7 @@ def assert_new_jobs_created(job_request, new_jobs, current_jobs): # is treated as a successful outcome because we've already done everything that was # requested. if RUN_ALL_COMMAND in job_request.requested_actions: - raise NothingToDoError() + raise NothingToDoError("All actions have already completed succesfully") # The other reason is that every requested action is already running or pending, # this is considered a user error. @@ -298,7 +292,7 @@ def assert_new_jobs_created(job_request, new_jobs, current_jobs): current_job_states.get(action) for action in job_request.requested_actions } if requested_action_states <= {State.PENDING, State.RUNNING}: - raise JobRequestError("All requested actions were already scheduled to run") + raise NothingToDoError("All requested actions were already scheduled to run") # But if we get here then we've somehow failed to schedule new jobs despite the fact # that some of the actions we depend on have failed, which is a bug. diff --git a/opensafely/_vendor/jobrunner/executors/local.py b/opensafely/_vendor/jobrunner/executors/local.py index 76bd6f3..c24ecbb 100644 --- a/opensafely/_vendor/jobrunner/executors/local.py +++ b/opensafely/_vendor/jobrunner/executors/local.py @@ -1,3 +1,4 @@ +import csv import datetime import json import logging @@ -86,20 +87,6 @@ def workspace_is_archived(workspace): return False -def was_oomkilled(container): - # Nb. this flag has been observed to be unreliable on some versions of Linux - return container["State"]["ExitCode"] == 137 and container["State"]["OOMKilled"] - - -def oomkilled_message(container): - message = "Job ran out of memory" - memory_limit = container.get("HostConfig", {}).get("Memory", 0) - if memory_limit > 0: - gb_limit = memory_limit / (1024**3) - message += f" (limit was {gb_limit:.2f}GB)" - return message - - class LocalDockerAPI(ExecutorAPI): """ExecutorAPI implementation using local docker service.""" @@ -291,19 +278,6 @@ def get_status(self, job_definition, timeout=15): else: # container present but not running, i.e. finished # Nb. 
this does not include prepared jobs, as they have a volume but not a container - if job_definition.cancelled: - return JobStatus( - ExecutorState.EXECUTED, - f"Job cancelled by {job_definition.cancelled}", - ) - if was_oomkilled(container): - return JobStatus(ExecutorState.ERROR, oomkilled_message(container)) - if container["State"]["ExitCode"] == 137: - return JobStatus( - ExecutorState.ERROR, - "Job either ran out of memory or was killed by an admin", - ) - timestamp_ns = datestr_to_ns_timestamp(container["State"]["FinishedAt"]) return JobStatus(ExecutorState.EXECUTED, timestamp_ns=timestamp_ns) @@ -321,16 +295,20 @@ def delete_files(self, workspace, privacy, files): else: raise Exception(f"unknown privacy of {privacy}") - errors = [] - for name in files: - path = root / name - try: - path.unlink(missing_ok=True) - except Exception: - log.exception(f"Could not delete {path}") - errors.append(name) + return delete_files_from_directory(root, files) + + +def delete_files_from_directory(directory, files): + errors = [] + for name in files: + path = directory / name + try: + path.unlink(missing_ok=True) + except Exception: + log.exception(f"Could not delete {path}") + errors.append(name) - return errors + return errors def prepare_job(job_definition): @@ -405,8 +383,16 @@ def finalize_job(job_definition): if exit_code == 137 and job_definition.cancelled: message = f"Job cancelled by {job_definition.cancelled}" - elif was_oomkilled(container_metadata): - message = oomkilled_message(container_metadata) + # Nb. this flag has been observed to be unreliable on some versions of Linux + elif ( + container_metadata["State"]["ExitCode"] == 137 + and container_metadata["State"]["OOMKilled"] + ): + message = "Job ran out of memory" + memory_limit = container_metadata.get("HostConfig", {}).get("Memory", 0) + if memory_limit > 0: + gb_limit = memory_limit / (1024**3) + message += f" (limit was {gb_limit:.2f}GB)" else: message = config.DOCKER_EXIT_CODES.get(exit_code) @@ -429,10 +415,17 @@ def finalize_job(job_definition): if job_definition.cancelled: write_job_logs(job_definition, job_metadata, copy_log_to_workspace=False) else: - write_job_logs(job_definition, job_metadata, copy_log_to_workspace=True) - persist_outputs(job_definition, results.outputs, job_metadata) + excluded = persist_outputs(job_definition, results.outputs, job_metadata) + write_job_logs( + job_definition, job_metadata, copy_log_to_workspace=True, excluded=excluded + ) + results.level4_excluded_files.update(**excluded) + RESULTS[job_definition.id] = results + # for ease of testing + return results + def get_job_metadata(job_definition, outputs, container_metadata): # job_metadata is a big dict capturing everything we know about the state @@ -448,14 +441,17 @@ def get_job_metadata(job_definition, outputs, container_metadata): job_metadata["container_metadata"] = container_metadata job_metadata["outputs"] = outputs job_metadata["commit"] = job_definition.study.commit + job_metadata["database_name"] = job_definition.database_name return job_metadata -def write_job_logs(job_definition, job_metadata, copy_log_to_workspace=True): +def write_job_logs( + job_definition, job_metadata, copy_log_to_workspace=True, excluded=None +): """Copy logs to log dir and workspace.""" # Dump useful info in log directory log_dir = get_log_dir(job_definition) - write_log_file(job_definition, job_metadata, log_dir / "logs.txt") + write_log_file(job_definition, job_metadata, log_dir / "logs.txt", excluded) with open(log_dir / "metadata.json", "w") as f: 
json.dump(job_metadata, f, indent=2) @@ -480,30 +476,137 @@ def persist_outputs(job_definition, outputs, job_metadata): # Extract outputs to workspace workspace_dir = get_high_privacy_workspace(job_definition.workspace) + excluded_files = {} + + sizes = {} for filename in outputs.keys(): log.info(f"Extracting output file: {filename}") - volumes.get_volume_api(job_definition).copy_from_volume( + size = volumes.get_volume_api(job_definition).copy_from_volume( job_definition, filename, workspace_dir / filename ) + sizes[filename] = size # Copy out medium privacy files medium_privacy_dir = get_medium_privacy_workspace(job_definition.workspace) if medium_privacy_dir: for filename, privacy_level in outputs.items(): if privacy_level == "moderately_sensitive": - volumes.copy_file( - workspace_dir / filename, medium_privacy_dir / filename + ok, job_msg, file_msg = check_l4_file( + job_definition, filename, sizes[filename], workspace_dir ) + message_file = medium_privacy_dir / (filename + ".txt") + + if ok: + volumes.copy_file( + workspace_dir / filename, medium_privacy_dir / filename + ) + # if it previously had a too big notice, delete it + delete_files_from_directory(medium_privacy_dir, [message_file]) + else: + excluded_files[filename] = job_msg + message_file.parent.mkdir(exist_ok=True, parents=True) + message_file.write_text(file_msg) + # this can be removed once osrelease is dead write_manifest_file( medium_privacy_dir, { - "repo": job_definition.study.git_repo_url, + # this currently needs to exist, but is not used + "repo": None, "workspace": job_definition.workspace, }, ) + return excluded_files + + +MAX_SIZE_MSG = """ +The file: + +{filename} + +was {size}Mb, which is above the limit for moderately_sensitive files of +{limit}Mb. + +As such, it has *not* been copied to Level 4 storage. Please double check that +{filename} contains only aggregate information, and is an appropriate size to +be able to be output checked. +""" + +INVALID_FILE_TYPE_MSG = """ +The file: + +{filename} + +is of type {suffix}. This is not a valid file type for moderately_sensitive files. + +Level 4 files should be aggregate information easily viewable by output checkers. + +See available list of file types here: https://docs.opensafely.org/releasing-files/#allowed-file-types +""" + +PATIENT_ID = """ +The file: + +{filename} + +has not been made available in level 4 because it has a `patient_id` column. + +Patient level data is not allowed by policy in level 4. + +You should change this file's privacy to `highly_sensitive` in your +project.yaml. Or, if is aggregrate data, you should remove the patient_id +column from your data. 
+ +""" + + +def check_l4_file(job_definition, filename, size, workspace_dir): + def mb(b): + return round(b / (1024 * 1024), 2) + + job_msgs = [] + file_msgs = [] + + suffix = Path(filename).suffix + if suffix not in config.LEVEL4_FILE_TYPES: + job_msgs.append(f"File type of {suffix} is not valid level 4 file") + file_msgs.append(INVALID_FILE_TYPE_MSG.format(filename=filename, suffix=suffix)) + + elif suffix == ".csv": + + # note: this assumes the local executor can directly access the long term storage on disk + # this may need to be abstracted in future + actual_file = workspace_dir / filename + try: + with actual_file.open() as f: + reader = csv.DictReader(f) + headers = reader.fieldnames + except Exception: + pass + else: + if headers and "patient_id" in headers: + job_msgs.append("File has patient_id column") + file_msgs.append(PATIENT_ID.format(filename=filename)) + + if size > job_definition.level4_max_filesize: + job_msgs.append( + f"File size of {mb(size)}Mb is larger that limit of {mb(job_definition.level4_max_filesize)}Mb." + ) + file_msgs.append( + MAX_SIZE_MSG.format( + filename=filename, + size=mb(size), + limit=mb(job_definition.level4_max_filesize), + ) + ) + + if job_msgs: + return False, ",".join(job_msgs), "\n\n".join(file_msgs) + else: + return True, None, None + def find_matching_outputs(job_definition): """ @@ -544,7 +647,7 @@ def get_unmatched_outputs(job_definition, outputs): return [filename for filename in all_outputs if filename not in outputs] -def write_log_file(job_definition, job_metadata, filename): +def write_log_file(job_definition, job_metadata, filename, excluded): """ This dumps the (timestamped) Docker logs for a job to disk, followed by some useful metadata about the job and its outputs @@ -560,6 +663,10 @@ def write_log_file(job_definition, job_metadata, filename): f.write(f"{key}: {job_metadata[key]}\n") f.write("\noutputs:\n") f.write(tabulate(outputs, separator=" - ", indent=2, empty="(no outputs)")) + if excluded: + f.write("\nexcluded files:\n") + for excluded_file, msg in excluded.items(): + f.write(f"{excluded_file}: {msg}") # Keys of fields to log in manifest.json and log file @@ -571,6 +678,7 @@ def write_log_file(job_definition, job_metadata, filename): "exit_code", "created_at", "completed_at", + "database_name", ] @@ -676,7 +784,7 @@ def redact_environment_variables(container_metadata): def write_manifest_file(workspace_dir, manifest): manifest_file = workspace_dir / METADATA_DIR / MANIFEST_FILE - manifest_file.parent.mkdir(exist_ok=True) + manifest_file.parent.mkdir(exist_ok=True, parents=True) manifest_file_tmp = manifest_file.with_suffix(".tmp") manifest_file_tmp.write_text(json.dumps(manifest, indent=2)) manifest_file_tmp.replace(manifest_file) diff --git a/opensafely/_vendor/jobrunner/executors/volumes.py b/opensafely/_vendor/jobrunner/executors/volumes.py index 8348b9a..0d9d070 100644 --- a/opensafely/_vendor/jobrunner/executors/volumes.py +++ b/opensafely/_vendor/jobrunner/executors/volumes.py @@ -26,6 +26,8 @@ def copy_file(source, dest, follow_symlinks=True): with atomic_writer(dest) as tmp: shutil.copy(source, tmp, follow_symlinks=follow_symlinks) + return dest.stat().st_size + def docker_volume_name(job): return f"os-volume-{job.id}" @@ -50,7 +52,7 @@ def copy_to_volume(job, src, dst, timeout=None): docker.copy_to_volume(docker_volume_name(job), src, dst, timeout) def copy_from_volume(job, src, dst, timeout=None): - docker.copy_from_volume(docker_volume_name(job), src, dst, timeout) + return 
docker.copy_from_volume(docker_volume_name(job), src, dst, timeout) def delete_volume(job): docker.delete_volume(docker_volume_name(job)) @@ -140,7 +142,7 @@ def copy_to_volume(job, src, dst, timeout=None): def copy_from_volume(job, src, dst, timeout=None): # this is only used to copy final outputs/logs. path = host_volume_path(job) / src - copy_file(path, dst) + return copy_file(path, dst) def delete_volume(job): diff --git a/opensafely/_vendor/jobrunner/job_executor.py b/opensafely/_vendor/jobrunner/job_executor.py index 657b5c1..2ee0039 100644 --- a/opensafely/_vendor/jobrunner/job_executor.py +++ b/opensafely/_vendor/jobrunner/job_executor.py @@ -1,4 +1,4 @@ -from dataclasses import dataclass +from dataclasses import dataclass, field from enum import Enum from typing import List, Mapping, Optional @@ -30,20 +30,33 @@ class JobDefinition: str, str ] # the files that the job should produce (globs mapped to privacy levels) allow_database_access: bool # whether this job should have access to the database + # our internal name for the database this job uses (actual connection details are + # passed in `env`) + database_name: str = None cpu_count: str = None # number of CPUs to be allocated memory_limit: str = None # memory limit to apply + level4_max_filesize: int = 16 * 1024 * 1024 + level4_file_types: list = field(default_factory=lambda: [".csv"]) # if a job has been cancelled, the name of the canceller - either "user" or "admin" cancelled: str = None class ExecutorState(Enum): + # Job is currently preparing to run: creating volumes,copying files, etc PREPARING = "preparing" + # Job volume is prepared and ready to run PREPARED = "prepared" + # Job currently executing EXECUTING = "executing" + # Job process has finished executing, and has an exit code EXECUTED = "executed" + # Job is currently being inspected and finalized FINALIZING = "finalizing" + # Job has finished finalization FINALIZED = "finalized" + # Executor doesn't know anything about this job (it only tracks active jobs) UNKNOWN = "unknown" + # There was an error with the executor (*not* the same thing as an error with job) ERROR = "error" @@ -67,6 +80,9 @@ class JobResults: # timestamp these results were finalized, in integer nanoseconds timestamp_ns: int = None + # files not copied to level 4 (too big or similar reason) + level4_excluded_files: Mapping[str, str] = field(default_factory=dict) + # to be extracted from the image labels action_version: str = "unknown" action_revision: str = "unknown" diff --git a/opensafely/_vendor/jobrunner/lib/docker.py b/opensafely/_vendor/jobrunner/lib/docker.py index da13a42..8f03069 100644 --- a/opensafely/_vendor/jobrunner/lib/docker.py +++ b/opensafely/_vendor/jobrunner/lib/docker.py @@ -276,6 +276,8 @@ def copy_from_volume(volume_name, source, dest, timeout=None): timeout=timeout, ) + return dest.stat().st_size + def glob_volume_files(volume_name, glob_patterns): """ diff --git a/opensafely/_vendor/jobrunner/lib/docker_stats.py b/opensafely/_vendor/jobrunner/lib/docker_stats.py index a47016f..95daa5f 100644 --- a/opensafely/_vendor/jobrunner/lib/docker_stats.py +++ b/opensafely/_vendor/jobrunner/lib/docker_stats.py @@ -6,6 +6,13 @@ DEFAULT_TIMEOUT = 10 +# backport of 3.9s removeprefix +def removeprefix(s, prefix): + if s.startswith(prefix): + return s[len(prefix) :] + return s + + def get_job_stats(timeout=DEFAULT_TIMEOUT): # TODO: add volume sizes return get_container_stats(DEFAULT_TIMEOUT) @@ -20,7 +27,7 @@ def get_container_stats(timeout=DEFAULT_TIMEOUT): ) data = [json.loads(line) 
for line in response.stdout.splitlines()] return { - row["Name"].lstrip("os-job-"): { + removeprefix(row["Name"], "os-job-"): { "cpu_percentage": float(row["CPUPerc"].rstrip("%")), "memory_used": _parse_size(row["MemUsage"].split()[0]), } diff --git a/opensafely/_vendor/jobrunner/models.py b/opensafely/_vendor/jobrunner/models.py index 4fe80ea..ac68d6b 100644 --- a/opensafely/_vendor/jobrunner/models.py +++ b/opensafely/_vendor/jobrunner/models.py @@ -103,7 +103,6 @@ class JobRequest: workspace: str database_name: str force_run_dependencies: bool = False - force_run_failed: bool = False branch: str = None original: dict = None diff --git a/opensafely/_vendor/jobrunner/record_stats.py b/opensafely/_vendor/jobrunner/record_stats.py index 9bff6d9..8dd7e37 100644 --- a/opensafely/_vendor/jobrunner/record_stats.py +++ b/opensafely/_vendor/jobrunner/record_stats.py @@ -46,12 +46,22 @@ def record_tick_trace(last_run): if last_run is None: return time.time_ns() + trace_attrs = {"stats_timeout": False, "stats_error": False} + stats = {} + error_attrs = {} + try: stats = get_job_stats() except subprocess.TimeoutExpired: log.exception("Getting docker stats timed out") - # no metrics for this tick - stats = {} + trace_attrs["stats_timeout"] = True + except subprocess.CalledProcessError as exc: + log.exception("Error getting docker stats") + trace_attrs["stats_error"] = True + + error_attrs["cmd"] = " ".join(exc.cmd) + error_attrs["exit_code"] = exc.returncode + error_attrs["output"] = exc.stderr + "\n\n" + exc.output # record time once stats call has completed, as it can take a while now = time.time_ns() @@ -64,11 +74,25 @@ def record_tick_trace(last_run): models.Job, state__in=[models.State.PENDING, models.State.RUNNING] ) - with tracer.start_as_current_span("TICK", start_time=start_time): + with tracer.start_as_current_span( + "TICK", start_time=start_time, attributes=trace_attrs + ) as root: + # add error event so we can see the error from the docker command + if error_attrs: + root.add_event("stats_error", attributes=error_attrs, timestamp=start_time) + for job in active_jobs: span = tracer.start_span(job.status_code.name, start_time=start_time) + + # set up attributes + job_span_attrs = {} + job_span_attrs.update(trace_attrs) metrics = stats.get(job.id, {}) - tracing.set_span_metadata(span, job, **metrics) + job_span_attrs["has_metrics"] = metrics != {} + job_span_attrs.update(metrics) + + # record span + tracing.set_span_metadata(span, job, **job_span_attrs) span.end(end_time) return end_time diff --git a/opensafely/_vendor/jobrunner/run.py b/opensafely/_vendor/jobrunner/run.py index 3605947..1ba467d 100644 --- a/opensafely/_vendor/jobrunner/run.py +++ b/opensafely/_vendor/jobrunner/run.py @@ -82,6 +82,8 @@ def handle_jobs(api: Optional[ExecutorAPI]): # workspace. This gives a fairer allocation of capacity among # workspaces. running_for_workspace[job.workspace], + # DB jobs are more important than cpu jobs + 0 if job.requires_db else 1, # Finally use job age as a tie-breaker job.created_at, ) @@ -463,6 +465,9 @@ def save_results(job, job_definition, results): code = StatusCode.SUCCEEDED message = "Completed successfully" + if results.level4_excluded_files: + message += f", but {len(results.level4_excluded_files)} file(s) marked as moderately_sensitive were excluded. See job log for details." 
+ set_code(job, code, message, error=error, results=results) @@ -543,10 +548,13 @@ def job_to_job_definition(job): inputs=input_files, output_spec=outputs, allow_database_access=allow_database_access, + database_name=job.database_name if allow_database_access else None, # in future, these may come from the JobRequest, but for now, we have # config defaults. cpu_count=config.DEFAULT_JOB_CPU_COUNT, memory_limit=config.DEFAULT_JOB_MEMORY_LIMIT, + level4_max_filesize=config.LEVEL4_MAX_FILESIZE, + level4_file_types=config.LEVEL4_FILE_TYPES, cancelled=job_definition_cancelled, ) @@ -566,7 +574,7 @@ def mark_job_as_failed(job, code, message, error=None, **attrs): if error is None: error = True - set_code(job, code, message, error=error, attrs=attrs) + set_code(job, code, message, error=error, **attrs) def set_code( diff --git a/opensafely/_vendor/jobrunner/service.py b/opensafely/_vendor/jobrunner/service.py index b7d42ee..ecb5939 100644 --- a/opensafely/_vendor/jobrunner/service.py +++ b/opensafely/_vendor/jobrunner/service.py @@ -105,7 +105,7 @@ def maintenance_mode(): "--current-mode", str(current), ], - env={"DATABASE_URL": config.DATABASE_URLS["full"]}, + env={"DATABASE_URL": config.DATABASE_URLS["default"]}, check=True, capture_output=True, text=True, diff --git a/opensafely/_vendor/jobrunner/sync.py b/opensafely/_vendor/jobrunner/sync.py index 7792b2c..fd9a9d6 100644 --- a/opensafely/_vendor/jobrunner/sync.py +++ b/opensafely/_vendor/jobrunner/sync.py @@ -126,7 +126,12 @@ def job_request_from_remote_format(job_request): requested_actions=job_request["requested_actions"], cancelled_actions=job_request["cancelled_actions"], workspace=job_request["workspace"]["name"], - database_name=job_request["workspace"]["db"], + # Transitional code while we move the location of the `database_name` attribute + database_name=( + job_request["database_name"] + if "database_name" in job_request + else job_request["workspace"]["db"] + ), force_run_dependencies=job_request["force_run_dependencies"], original=job_request, ) diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info.pyi b/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info.pyi deleted file mode 100644 index 3ef940a..0000000 --- a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info.pyi +++ /dev/null @@ -1 +0,0 @@ -from opensafely_jobrunner-2.69.0.dist-info import * \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/RECORD b/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/RECORD deleted file mode 100644 index f623a99..0000000 --- a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/RECORD +++ /dev/null @@ -1,54 +0,0 @@ -../../bin/add_job,sha256=EPY9NvEeq1nVam3--aTnu9uQtp2kXVFYusNwNMa3GMQ,280 -../../bin/flags,sha256=bFnpg5jD1QgvhKIpoffbzwqeNRP8DDVWo_pScl0GyP4,278 -../../bin/kill_job,sha256=0R1TbCJnNK9kExLnb0rZTMWd0iBMWCcsrSX0l1tFajY,281 -../../bin/local_run,sha256=Fpi3Q7j4R0fEO0v62AHOWspYgVtDlI44cNEtitDkcsU,282 -../../bin/migrate,sha256=_gXb4x11ZMp6OLyaujSgkU0FyUVv5jWxLnfbwGdAIFg,280 -../../bin/prepare_for_reboot,sha256=_gorxnyrf3SprQ0kjuu-ooNPmrrdjEH8iN5oOK9Xcuk,291 -../../bin/retry_job,sha256=x9Z18hjkt0F5cq3L0w-gI0nSggcLIag6NiaRY5H2OhQ,282 -jobrunner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jobrunner/actions.py,sha256=eyzCccC_ApLJAbRhCIGq0uwm9e4flFmAvyAOC9lTDQE,3046 -jobrunner/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jobrunner/cli/add_job.py,sha256=12Fm5VffTh2QFFa96g88rheKGq8eJLp_V46FpeHQVj0,2826 
-jobrunner/cli/flags.py,sha256=6dFPWab0vB6CGCLg5yMAp9iQJo8Y6rN5cfQ7bEXhgqs,2704 -jobrunner/cli/kill_job.py,sha256=nThDIJ-Lbys04_QsAiPuXkHUfoFEbKxAyDGb_75GtSQ,2862 -jobrunner/cli/local_run.py,sha256=Y6r8BtyOIQu-_dpJsQ_mshXdS5sycUu1j-AdrdoiZd0,25609 -jobrunner/cli/migrate.py,sha256=V2cI3Kee67DNhqJUiDnALNvzHlP4mx_myKQycD4E9uI,609 -jobrunner/cli/prepare_for_reboot.py,sha256=Zdajs1cnkCCsKklFjg3mynU2NJqd5n0zFct3SdA9Mig,1493 -jobrunner/cli/retry_job.py,sha256=qDTiYwxc59QYZBLfgv_t-CAa6kmmhiCKh0sLpv5xhwA,2458 -jobrunner/config.py,sha256=Dj-mAHSZCoZdudGQ2ni63K5Om43odzY6iqZqd7rASO4,9078 -jobrunner/create_or_update_jobs.py,sha256=ySA28VNhD85DDd1TTQCzHZfLYA-x8-GoBBS_wOmd2_I,14495 -jobrunner/executors/__init__.py,sha256=4-eu9LwIzhALtsq1LDC9NQ_5nbcjsPDdIEGvRvZwIbo,283 -jobrunner/executors/local.py,sha256=sVc1qZhC_ce_nXPP2TBxmVu3OkPERO2q33tj0jFxJaQ,28290 -jobrunner/executors/logging.py,sha256=iCISXFR8sbtCrp-E3jaQlC1Kw6Huf65b-dqomrJzywI,2104 -jobrunner/executors/volumes.py,sha256=K3hqJnh6nJen3So5D3YRZiVy2mR2wffpSdjz4e9RT7g,7059 -jobrunner/job_executor.py,sha256=0ygUHCEV5orVdGwhdt9J_XjjRIWR_Voafv4CEZAp86I,12995 -jobrunner/lib/__init__.py,sha256=Lv8p-FcwvRSjDZoDjXaNvnb4QjjKgGB0gqFbg3UeuLs,2775 -jobrunner/lib/commands.py,sha256=t8vNZ1KMbkIEpFs-3t_W4EEzw8nlSvEB8wNefVw51EA,667 -jobrunner/lib/database.py,sha256=bxggzSaljus-qNHXslgnBBop-1wB0pqbNHlCzyPh3eI,12273 -jobrunner/lib/docker.py,sha256=WLZm1LwTSNztgZnMu6WxsWzaj7w6j2U_WcN4kMCEyns,15793 -jobrunner/lib/docker_stats.py,sha256=PLhb6WjGTVHA5aszUmJNQBvdRz0f5zU5iw1HCzNTsrI,1167 -jobrunner/lib/git.py,sha256=5Bw3bRk4EJaNEmcOADFk8Ww_NHeF5GtqDpJ5rR3KYFA,13145 -jobrunner/lib/github_validators.py,sha256=3YW04zbYz15lnGXjQ3XHrsaH1VyRX_kmd6lF4vyTKM8,2412 -jobrunner/lib/log_utils.py,sha256=-_F4p-MsCkBy--SzWbtElnUMTdUKuRysQ6hYoHg4ado,5661 -jobrunner/lib/lru_dict.py,sha256=I-LuTBo8QxyDAnfkOoc92xMAUQp50W5oRwsHdbihHz8,824 -jobrunner/lib/path_utils.py,sha256=559GU8YpHq71ib95i4iOsEAume6xh0fNC3Ox9eD-W5c,1611 -jobrunner/lib/string_utils.py,sha256=GAyAEQxyUiaufzphnXCYuuyDeU9uB1QMAaIm79dP3OA,1336 -jobrunner/lib/subprocess_utils.py,sha256=RdoHzJzO4xA5SCBa-Cl9jcuE7SJNmsB8B1qa3wkOFvU,996 -jobrunner/lib/yaml_utils.py,sha256=H6FU_e2uQYp3qxq_QXn-oyK5cuQatDSq-q5HBv5Me3k,1747 -jobrunner/models.py,sha256=tvmP5ab6nssq0WLKKLamXbPPuYSUBx2nIeJ3I8nG190,11534 -jobrunner/queries.py,sha256=EsBtoRsFr_dCqHOKu44Sgxjdgmfnzd0dfOQYogNITYo,2225 -jobrunner/record_stats.py,sha256=3QhhsUmDb5iAOv1y7VDPW3gP8A9uoACNWE00cQhEv_Q,2257 -jobrunner/reusable_actions.py,sha256=Am1Ju-lbtnIrnqWxVvhiBP0QPkNsxnBTKIQ-rsPvvBU,7689 -jobrunner/run.py,sha256=UNG4sGc7fAnzeDV1t3UX3GNwk2ugaIhdofilPoLGkMs,26975 -jobrunner/service.py,sha256=slmjnTFbOJ0U6SSkVha1ykBHF9paLFydU6tTSF8m478,4179 -jobrunner/sync.py,sha256=mFiMWLPQcoBmpeuwtXekXzVYdmYQh0WgnV3mCaXtCkM,4787 -jobrunner/tracing.py,sha256=C7c6jCirfeBUYUhfSdhc6E46Lafqrs-jElZtQVOyiuo,12266 -opensafely_jobrunner-2.69.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -opensafely_jobrunner-2.69.0.dist-info/LICENSE,sha256=F5fS3mizkbW4yOk3XP--G0oDJbZAovAKuSIZShtkCw4,671 -opensafely_jobrunner-2.69.0.dist-info/METADATA,sha256=arL5JBUyArVICKaFAzINfwCtbnC770qYPf9sjPRFCjQ,8212 -opensafely_jobrunner-2.69.0.dist-info/RECORD,, -opensafely_jobrunner-2.69.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -opensafely_jobrunner-2.69.0.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 -opensafely_jobrunner-2.69.0.dist-info/direct_url.json,sha256=1_v3OTShyJlHqGlcKuP7s0v6DN8hlOoXroTZmn0XPvU,174 
-opensafely_jobrunner-2.69.0.dist-info/entry_points.txt,sha256=hat6DNe6ZtwPqk0GIs5BOzd-18yfWfwJrouA1YAmBJY,298 -opensafely_jobrunner-2.69.0.dist-info/top_level.txt,sha256=dHLIHTr12iPEGMfrfPkXrkh8qGsw52DE0cbpHQVbiic,10 diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/direct_url.json b/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/direct_url.json deleted file mode 100644 index a5a0de9..0000000 --- a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/direct_url.json +++ /dev/null @@ -1 +0,0 @@ -{"url": "https://github.com/opensafely-core/job-runner", "vcs_info": {"commit_id": "447440de850b97309cefbe80a0bc60bda0f14be6", "requested_revision": "v2.69.0", "vcs": "git"}} \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info.pyi b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info.pyi new file mode 100644 index 0000000..4b557a2 --- /dev/null +++ b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info.pyi @@ -0,0 +1 @@ +from opensafely_jobrunner-2.71.0.dist-info import * \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/INSTALLER b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/INSTALLER similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/INSTALLER rename to opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/INSTALLER diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/LICENSE b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/LICENSE similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/LICENSE rename to opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/LICENSE diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/METADATA b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/METADATA similarity index 99% rename from opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/METADATA rename to opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/METADATA index 6b9c122..d5a53b2 100644 --- a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/METADATA +++ b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: opensafely-jobrunner -Version: 2.69.0 +Version: 2.71.0 Summary: OpenSAFELY job scheduling and executor Author-email: OpenSAFELY License: OpenSAFELY Job Runner @@ -26,7 +26,7 @@ Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Requires-Python: >=3.8 Description-Content-Type: text/markdown License-File: LICENSE -Requires-Dist: opensafely-pipeline @ git+https://github.com/opensafely-core/pipeline@v2023.08.09.102223 +Requires-Dist: opensafely-pipeline @ git+https://github.com/opensafely-core/pipeline@v2023.11.06.145820 Requires-Dist: ruyaml Requires-Dist: requests Requires-Dist: opentelemetry-exporter-otlp-proto-http diff --git a/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/RECORD b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/RECORD new file mode 100644 index 0000000..a88f4bc --- /dev/null +++ b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/RECORD @@ -0,0 +1,54 @@ +../../bin/add_job,sha256=toLR2KSS5cK3diLpL2Ajp_8tgNMruAyHs0StB_zG8F8,253 +../../bin/flags,sha256=2k4O7Xrn-VLneAhXopy6EwaiVZFNGvZbNGRWEfbisdo,251 +../../bin/kill_job,sha256=mJAcqJU9vpGr_ELZ9SeFlx8nIc3tPcfqFoQ0LOZd-g4,254 +../../bin/local_run,sha256=jkMxn9s6xP_cEcjcRGwJatLY4WRjegxb8Px1HS45COQ,255 
+../../bin/migrate,sha256=HrGrHt548NRrLzHGT6nypdb_APyAh3gtX1Rs43PESCE,253 +../../bin/prepare_for_reboot,sha256=V4JSzjPsMR5Jv6gO1u1EsjZwZ48J3i2IhrX07op-yUc,264 +../../bin/retry_job,sha256=w3ThBEgbjoJMaJCmiyIs6tPrzGT1sZS6UrhJ19QrjzE,255 +jobrunner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jobrunner/actions.py,sha256=eyzCccC_ApLJAbRhCIGq0uwm9e4flFmAvyAOC9lTDQE,3046 +jobrunner/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jobrunner/cli/add_job.py,sha256=12Fm5VffTh2QFFa96g88rheKGq8eJLp_V46FpeHQVj0,2826 +jobrunner/cli/flags.py,sha256=6dFPWab0vB6CGCLg5yMAp9iQJo8Y6rN5cfQ7bEXhgqs,2704 +jobrunner/cli/kill_job.py,sha256=nThDIJ-Lbys04_QsAiPuXkHUfoFEbKxAyDGb_75GtSQ,2862 +jobrunner/cli/local_run.py,sha256=jAi2tp4sCbybkGA7_g11-3P1ojnDZdzsGR6xurOG8zQ,25430 +jobrunner/cli/migrate.py,sha256=V2cI3Kee67DNhqJUiDnALNvzHlP4mx_myKQycD4E9uI,609 +jobrunner/cli/prepare_for_reboot.py,sha256=Zdajs1cnkCCsKklFjg3mynU2NJqd5n0zFct3SdA9Mig,1493 +jobrunner/cli/retry_job.py,sha256=qDTiYwxc59QYZBLfgv_t-CAa6kmmhiCKh0sLpv5xhwA,2458 +jobrunner/config.py,sha256=LmaJd9KakqQ4Jej-tWyuGoaF-kpkHQFEp3buje0SzPE,9647 +jobrunner/create_or_update_jobs.py,sha256=3h5MDFXXg7RdAriMSNz90l5EIQ6H-4z1QybDY0vLlmE,14291 +jobrunner/executors/__init__.py,sha256=4-eu9LwIzhALtsq1LDC9NQ_5nbcjsPDdIEGvRvZwIbo,283 +jobrunner/executors/local.py,sha256=mb4kvPORtF1brio1ycPNhTx65UfEX2FDneiUubjwQOE,31479 +jobrunner/executors/logging.py,sha256=iCISXFR8sbtCrp-E3jaQlC1Kw6Huf65b-dqomrJzywI,2104 +jobrunner/executors/volumes.py,sha256=T34DfRACH3RbX0v1b1Zo_XScr8h7UFQHExx3oQ8Kq9w,7105 +jobrunner/job_executor.py,sha256=X6SyJ8zAdr8Rtidla2k1RLIB1rtQivnK4ZPkff9VLr4,13870 +jobrunner/lib/__init__.py,sha256=Lv8p-FcwvRSjDZoDjXaNvnb4QjjKgGB0gqFbg3UeuLs,2775 +jobrunner/lib/commands.py,sha256=t8vNZ1KMbkIEpFs-3t_W4EEzw8nlSvEB8wNefVw51EA,667 +jobrunner/lib/database.py,sha256=bxggzSaljus-qNHXslgnBBop-1wB0pqbNHlCzyPh3eI,12273 +jobrunner/lib/docker.py,sha256=C2fp3quN4vkaqg2MvMNC_k6Zbz8awN-oFtFuSLqA6xY,15825 +jobrunner/lib/docker_stats.py,sha256=Wj1Tg7W2gVTk_Znik_IT0Qvlw9-n4lIoIFGozMGRVgk,1311 +jobrunner/lib/git.py,sha256=5Bw3bRk4EJaNEmcOADFk8Ww_NHeF5GtqDpJ5rR3KYFA,13145 +jobrunner/lib/github_validators.py,sha256=3YW04zbYz15lnGXjQ3XHrsaH1VyRX_kmd6lF4vyTKM8,2412 +jobrunner/lib/log_utils.py,sha256=-_F4p-MsCkBy--SzWbtElnUMTdUKuRysQ6hYoHg4ado,5661 +jobrunner/lib/lru_dict.py,sha256=I-LuTBo8QxyDAnfkOoc92xMAUQp50W5oRwsHdbihHz8,824 +jobrunner/lib/path_utils.py,sha256=559GU8YpHq71ib95i4iOsEAume6xh0fNC3Ox9eD-W5c,1611 +jobrunner/lib/string_utils.py,sha256=GAyAEQxyUiaufzphnXCYuuyDeU9uB1QMAaIm79dP3OA,1336 +jobrunner/lib/subprocess_utils.py,sha256=RdoHzJzO4xA5SCBa-Cl9jcuE7SJNmsB8B1qa3wkOFvU,996 +jobrunner/lib/yaml_utils.py,sha256=H6FU_e2uQYp3qxq_QXn-oyK5cuQatDSq-q5HBv5Me3k,1747 +jobrunner/models.py,sha256=LSJurgCG9Ai7aplXdREP1EJmCXzUtcyGrcXQDIa0BNo,11499 +jobrunner/queries.py,sha256=EsBtoRsFr_dCqHOKu44Sgxjdgmfnzd0dfOQYogNITYo,2225 +jobrunner/record_stats.py,sha256=jW9wUWRGENvXMcn6_eWh0ox6eoZUIdHw6IccaFYy0KY,3135 +jobrunner/reusable_actions.py,sha256=Am1Ju-lbtnIrnqWxVvhiBP0QPkNsxnBTKIQ-rsPvvBU,7689 +jobrunner/run.py,sha256=-Ar1kkHfJ_5robtnu57zk9ak26sxTWFYXaMgS5CDJl0,27450 +jobrunner/service.py,sha256=MhppSwuGiDTrkcduxGfmHLoUpD1Ao0fRI2lfuQkb11Y,4182 +jobrunner/sync.py,sha256=0K7hbXMxWsblFK4GPnIzt-cktWHk0bSlQzBgUghh410,4991 +jobrunner/tracing.py,sha256=C7c6jCirfeBUYUhfSdhc6E46Lafqrs-jElZtQVOyiuo,12266 +opensafely_jobrunner-2.71.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+opensafely_jobrunner-2.71.0.dist-info/LICENSE,sha256=F5fS3mizkbW4yOk3XP--G0oDJbZAovAKuSIZShtkCw4,671 +opensafely_jobrunner-2.71.0.dist-info/METADATA,sha256=pn58W_YpVxJZRqB7GyQV4HWXH8GYmhk7UR4-A4jF35M,8212 +opensafely_jobrunner-2.71.0.dist-info/RECORD,, +opensafely_jobrunner-2.71.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opensafely_jobrunner-2.71.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 +opensafely_jobrunner-2.71.0.dist-info/direct_url.json,sha256=Q7Dd2w0zlDDmAFwUCKJ1Iascqvn-JXW34jix31E2s88,174 +opensafely_jobrunner-2.71.0.dist-info/entry_points.txt,sha256=hat6DNe6ZtwPqk0GIs5BOzd-18yfWfwJrouA1YAmBJY,298 +opensafely_jobrunner-2.71.0.dist-info/top_level.txt,sha256=dHLIHTr12iPEGMfrfPkXrkh8qGsw52DE0cbpHQVbiic,10 diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/REQUESTED b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/REQUESTED similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/REQUESTED rename to opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/REQUESTED diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/WHEEL b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/WHEEL similarity index 65% rename from opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/WHEEL rename to opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/WHEEL index 2c08da0..ba48cbc 100644 --- a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/WHEEL +++ b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.1) +Generator: bdist_wheel (0.41.3) Root-Is-Purelib: true Tag: py3-none-any diff --git a/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/direct_url.json b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/direct_url.json new file mode 100644 index 0000000..b51438f --- /dev/null +++ b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/direct_url.json @@ -0,0 +1 @@ +{"url": "https://github.com/opensafely-core/job-runner", "vcs_info": {"commit_id": "59ab3b735d59976582b5e34388bcae7426714bb5", "requested_revision": "v2.71.0", "vcs": "git"}} \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/entry_points.txt b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/entry_points.txt similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/entry_points.txt rename to opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/entry_points.txt diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/top_level.txt b/opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/top_level.txt similarity index 100% rename from opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/top_level.txt rename to opensafely/_vendor/opensafely_jobrunner-2.71.0.dist-info/top_level.txt diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info.pyi b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info.pyi new file mode 100644 index 0000000..3a24220 --- /dev/null +++ b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info.pyi @@ -0,0 +1 @@ +from opensafely_pipeline-2023.11.6.145820.dist-info import * \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/INSTALLER b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/INSTALLER similarity index 100% rename 
from opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/INSTALLER rename to opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/INSTALLER diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/LICENSE b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/LICENSE similarity index 100% rename from opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/LICENSE rename to opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/LICENSE diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/METADATA b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/METADATA similarity index 98% rename from opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/METADATA rename to opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/METADATA index af68ad2..fb71287 100644 --- a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/METADATA +++ b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: opensafely-pipeline -Version: 2023.8.9.102223 +Version: 2023.11.6.145820 Summary: OpenSAFELY pipeline configuration parsing library Author-email: OpenSAFELY License: ${GITHUB_REPOSITORY_NAME} diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/RECORD b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/RECORD new file mode 100644 index 0000000..9c2ea35 --- /dev/null +++ b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/RECORD @@ -0,0 +1,20 @@ +opensafely_pipeline-2023.11.6.145820.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +opensafely_pipeline-2023.11.6.145820.dist-info/LICENSE,sha256=3dYRqvpnIRI1ISbzwG_EKRHulT5qzYLacVDM09Ehn5Y,675 +opensafely_pipeline-2023.11.6.145820.dist-info/METADATA,sha256=QJM6KCDHxG36v4rOhWDvKEvVfBV_tJENFCZYkYqEVOs,1830 +opensafely_pipeline-2023.11.6.145820.dist-info/RECORD,, +opensafely_pipeline-2023.11.6.145820.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +opensafely_pipeline-2023.11.6.145820.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 +opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json,sha256=PgG1h8Y9fD2xAmM-gatCkYnQztx9Nx7sszSHN60D43k,183 +opensafely_pipeline-2023.11.6.145820.dist-info/top_level.txt,sha256=Qdc1eKrvhKK_o9CPbdooOdDt7g3ZSXZDrNXHmUGl94Q,9 +pipeline/__init__.py,sha256=OXi7WT9uT8zmpPCJY5mh7DCBiSdRg7D6EFOT-wNTulM,236 +pipeline/__main__.py,sha256=K81Vj0UTwIrEjAhjZuNV0giNUabkuHjUkbbAB5_Q5W8,470 +pipeline/constants.py,sha256=8ji9shMtzIXpUb72ah78nG3DZHH2k3N-EpltmMsyMQs,386 +pipeline/exceptions.py,sha256=AzXfyBEHPp3gypXIVoowgstBFG2bnNcjeUBVwhzlows,151 +pipeline/features.py,sha256=IIPT4buZH7VZ4Q54oXug8cDdFJWRzKm0JhZmT9nBWCk,573 +pipeline/legacy.py,sha256=yw5sC8gyYVgRm_Oq67F8AlBppwVZu_9nnG130lThhvY,413 +pipeline/loading.py,sha256=Knlu2ddYV4tByaHu4VeH1fQbScSPzvb2kd8IhXy9EVY,2753 +pipeline/main.py,sha256=AeJWX7csBOen9zXHr_pi0GWSEIgFAQORGdb8JHa7Dtg,993 +pipeline/models.py,sha256=xzuiGvdZwcFQMHsUB5mTiMlvzkwDA1IHfFfg2ID_9ok,11866 +pipeline/outputs.py,sha256=P2TzmZoi_FB9pM_PgUyz4AI3eFvQSXUCg8L8vGjcvBM,781 +pipeline/types.py,sha256=8Sx4Kfr9UwLpQFWVS1VvpsCAIjzD4t8hLnaIc7ha8No,729 +pipeline/validation.py,sha256=21NRtPcVNqgkZwu6kgyXl3lpKFhWDqBxY6ISJ4-8dzg,4354 diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/REQUESTED b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/REQUESTED 
similarity index 100% rename from opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/REQUESTED rename to opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/REQUESTED diff --git a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/WHEEL b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/WHEEL similarity index 65% rename from opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/WHEEL rename to opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/WHEEL index 2c08da0..ba48cbc 100644 --- a/opensafely/_vendor/opensafely_jobrunner-2.69.0.dist-info/WHEEL +++ b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.1) +Generator: bdist_wheel (0.41.3) Root-Is-Purelib: true Tag: py3-none-any diff --git a/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json new file mode 100644 index 0000000..74d97d3 --- /dev/null +++ b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/direct_url.json @@ -0,0 +1 @@ +{"url": "https://github.com/opensafely-core/pipeline", "vcs_info": {"commit_id": "8c706d663fb2fcae279bc0f15c1b1b3bd4314e99", "requested_revision": "v2023.11.06.145820", "vcs": "git"}} \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/top_level.txt b/opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/top_level.txt similarity index 100% rename from opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/top_level.txt rename to opensafely/_vendor/opensafely_pipeline-2023.11.6.145820.dist-info/top_level.txt diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info.pyi b/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info.pyi deleted file mode 100644 index 136a3ce..0000000 --- a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info.pyi +++ /dev/null @@ -1 +0,0 @@ -from opensafely_pipeline-2023.8.9.102223.dist-info import * \ No newline at end of file diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/RECORD b/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/RECORD deleted file mode 100644 index 6dbc172..0000000 --- a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/RECORD +++ /dev/null @@ -1,20 +0,0 @@ -opensafely_pipeline-2023.8.9.102223.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -opensafely_pipeline-2023.8.9.102223.dist-info/LICENSE,sha256=3dYRqvpnIRI1ISbzwG_EKRHulT5qzYLacVDM09Ehn5Y,675 -opensafely_pipeline-2023.8.9.102223.dist-info/METADATA,sha256=kL0OGRscOIKA3cQzFXsQgSUs3C-uwe900mTZCnnK7rU,1829 -opensafely_pipeline-2023.8.9.102223.dist-info/RECORD,, -opensafely_pipeline-2023.8.9.102223.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -opensafely_pipeline-2023.8.9.102223.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 -opensafely_pipeline-2023.8.9.102223.dist-info/direct_url.json,sha256=KbO3BCALMCO2sfUor6EBymBi5y8cP6f3uD4CIV8egpw,183 -opensafely_pipeline-2023.8.9.102223.dist-info/top_level.txt,sha256=Qdc1eKrvhKK_o9CPbdooOdDt7g3ZSXZDrNXHmUGl94Q,9 -pipeline/__init__.py,sha256=OXi7WT9uT8zmpPCJY5mh7DCBiSdRg7D6EFOT-wNTulM,236 -pipeline/__main__.py,sha256=K81Vj0UTwIrEjAhjZuNV0giNUabkuHjUkbbAB5_Q5W8,470 -pipeline/constants.py,sha256=HZrpkwBZmMAvJa5zYeYgkpQgx_MzS0jcKjrMa1YjuXo,83 
-pipeline/exceptions.py,sha256=AzXfyBEHPp3gypXIVoowgstBFG2bnNcjeUBVwhzlows,151 -pipeline/features.py,sha256=IIPT4buZH7VZ4Q54oXug8cDdFJWRzKm0JhZmT9nBWCk,573 -pipeline/legacy.py,sha256=yw5sC8gyYVgRm_Oq67F8AlBppwVZu_9nnG130lThhvY,413 -pipeline/loading.py,sha256=Knlu2ddYV4tByaHu4VeH1fQbScSPzvb2kd8IhXy9EVY,2753 -pipeline/main.py,sha256=AeJWX7csBOen9zXHr_pi0GWSEIgFAQORGdb8JHa7Dtg,993 -pipeline/models.py,sha256=yKAB_WmdyKctE09iCVh60OwTcUCrZJEG9h_uLjem7s4,11150 -pipeline/outputs.py,sha256=P2TzmZoi_FB9pM_PgUyz4AI3eFvQSXUCg8L8vGjcvBM,781 -pipeline/types.py,sha256=8Sx4Kfr9UwLpQFWVS1VvpsCAIjzD4t8hLnaIc7ha8No,729 -pipeline/validation.py,sha256=0J4vP1En7UU6x1o5YiAg6vfEzIMa5xJjXdFL0ZX6aHk,3998 diff --git a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/direct_url.json b/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/direct_url.json deleted file mode 100644 index 58dec3f..0000000 --- a/opensafely/_vendor/opensafely_pipeline-2023.8.9.102223.dist-info/direct_url.json +++ /dev/null @@ -1 +0,0 @@ -{"url": "https://github.com/opensafely-core/pipeline", "vcs_info": {"commit_id": "ea8d0d2a2d5e36d7fcc85bf2508288ec39ccb48a", "requested_revision": "v2023.08.09.102223", "vcs": "git"}} \ No newline at end of file diff --git a/opensafely/_vendor/pipeline/constants.py b/opensafely/_vendor/pipeline/constants.py index 0934d19..728dcd1 100644 --- a/opensafely/_vendor/pipeline/constants.py +++ b/opensafely/_vendor/pipeline/constants.py @@ -1,2 +1,23 @@ # The magic action name which means "run every action" RUN_ALL_COMMAND = "run_all" + +LEVEL4_FILE_TYPES = set( + [ + # tables + ".csv", + ".tsv", + # images + ".jpg", + ".jpeg", + ".png", + ".svg", + ".svgz", + # reports + ".html", + ".pdf", + ".txt", + ".log", + ".json", + ".md", + ] +) diff --git a/opensafely/_vendor/pipeline/models.py b/opensafely/_vendor/pipeline/models.py index 7f7038f..12cc5e0 100644 --- a/opensafely/_vendor/pipeline/models.py +++ b/opensafely/_vendor/pipeline/models.py @@ -20,6 +20,22 @@ cohortextractor_pat = re.compile(r"cohortextractor:\S+ generate_cohort") databuilder_pat = re.compile(r"databuilder|ehrql:\S+ generate[-_]dataset") +database_action_pat = re.compile( + r""" + # image name + ^\b(?:cohortextractor|databuilder|ehrql)\b + # : (v0, latest etc) + :.+ + # command; for cohortextractor, only generate_cohort is a database action + # For ehrql (and legacy databuilder), generate-dataset and generate-measures + # are both database actions. Happily cohortextractor uses generate_measures as + # its measures command, so we can excluded cohortextractor measures + # actions with this regex. 
+ \b(?:generate_cohort|generate-dataset|generate-measures) + """, + flags=re.X, +) + class Expectations(BaseModel): population_size: int @@ -60,9 +76,9 @@ def validate_output_filenames_are_valid(cls, outputs: RawOutputs) -> RawOutputs: for privacy_level, output in outputs.items(): for output_id, filename in output.items(): try: - assert_valid_glob_pattern(filename) + assert_valid_glob_pattern(filename, privacy_level) except InvalidPatternError as e: - raise ValueError(f"Output path {filename} is not permitted: {e}") + raise ValueError(f"Output path {filename} is invalid: {e}") return outputs @@ -113,6 +129,10 @@ def parse_run_string(cls, run: str) -> Command: return Command(raw=run) + @property + def is_database_action(self) -> bool: + return database_action_pat.match(self.run.raw) is not None + class PartiallyValidatedPipeline(TypedDict): """ diff --git a/opensafely/_vendor/pipeline/validation.py b/opensafely/_vendor/pipeline/validation.py index 79c1ccf..e74ae93 100644 --- a/opensafely/_vendor/pipeline/validation.py +++ b/opensafely/_vendor/pipeline/validation.py @@ -1,9 +1,10 @@ from __future__ import annotations import posixpath -from pathlib import PurePosixPath, PureWindowsPath +from pathlib import Path, PurePosixPath, PureWindowsPath from typing import TYPE_CHECKING +from .constants import LEVEL4_FILE_TYPES from .exceptions import InvalidPatternError from .outputs import get_first_output_file, get_output_dirs @@ -12,7 +13,7 @@ from .models import Action -def assert_valid_glob_pattern(pattern: str) -> None: +def assert_valid_glob_pattern(pattern: str, privacy_level: str) -> None: """ These patterns get converted into regular expressions and matched with a `find` command so there shouldn't be any possibility of a path @@ -31,11 +32,19 @@ def assert_valid_glob_pattern(pattern: str) -> None: f"contains '{expr}' (only the * wildcard character is supported)" ) - if pattern.endswith("/"): + path = Path(pattern) + + if path.suffix == "" or path.suffix.endswith("*"): raise InvalidPatternError( - "looks like a directory (only files should be specified)" + "output paths must have a file type extension at the end" ) + if privacy_level == "moderately_sensitive": + if path.suffix not in LEVEL4_FILE_TYPES: + raise InvalidPatternError( + f"{path} is not an allowed file type for moderately_sensitive outputs" + ) + # Check that the path is in normal form if posixpath.normpath(pattern) != pattern: raise InvalidPatternError( diff --git a/opensafely/_vendor/wrapt-1.14.1.dist-info/RECORD b/opensafely/_vendor/wrapt-1.14.1.dist-info/RECORD index 50d04db..a1749ad 100644 --- a/opensafely/_vendor/wrapt-1.14.1.dist-info/RECORD +++ b/opensafely/_vendor/wrapt-1.14.1.dist-info/RECORD @@ -3,10 +3,10 @@ wrapt-1.14.1.dist-info/LICENSE,sha256=d9KpFZz_4SRz9TmnGj50HzURfmG_0AXr1gw6F3jiwf wrapt-1.14.1.dist-info/METADATA,sha256=2cBKYpezO05RkaYqJqK2LTbnB2txRteKtBRjx3RLY5s,6739 wrapt-1.14.1.dist-info/RECORD,, wrapt-1.14.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -wrapt-1.14.1.dist-info/WHEEL,sha256=Ly3Etn219k3CPJw_u_O2JJStO3-SwhzjwofZN3Au6Uw,108 +wrapt-1.14.1.dist-info/WHEEL,sha256=Qde73y9i2oDu_CV3UfJjbD0-upkMJblIn_RvcIzi-D4,105 wrapt-1.14.1.dist-info/top_level.txt,sha256=Jf7kcuXtwjUJMwOL0QzALDg2WiSiXiH9ThKMjN64DW0,6 wrapt/__init__.py,sha256=Bh0h33Iapc_qaoLWsWfaXK5xJz9KJExF7gQKIWYdSsg,1200 -wrapt/_wrappers.cpython-38-darwin.so,sha256=pFqKeYWjTuIyHc267C7cKY75pJGwE0ZJqQVwnyGTh28,90699 +wrapt/_wrappers.cpython-310-x86_64-linux-gnu.so,sha256=8HdtnY18QpFA7-OOkkIJht1FmumstOWpRmgtjiRsWxI,192360 
wrapt/arguments.py,sha256=RF0nTEdPzPIewJ-jnSY42i4JSzK3ctjPABV1SJxLymg,1746 wrapt/decorators.py,sha256=gNy1PVq9NNVDAB9tujaAVhb0xtVKSSzqT-hdGFeWM34,21332 wrapt/importer.py,sha256=yxFgVg6-lRTbSVJ2oZbw1TPCtB98fIF4A_qi_Dh2JRc,9981 diff --git a/opensafely/_vendor/wrapt-1.14.1.dist-info/WHEEL b/opensafely/_vendor/wrapt-1.14.1.dist-info/WHEEL index 6430a9f..081be6b 100644 --- a/opensafely/_vendor/wrapt-1.14.1.dist-info/WHEEL +++ b/opensafely/_vendor/wrapt-1.14.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) +Generator: bdist_wheel (0.38.4) Root-Is-Purelib: false -Tag: cp38-cp38-macosx_13_0_arm64 +Tag: cp310-cp310-linux_x86_64 diff --git a/vendor.in b/vendor.in index b82bb90..568b531 100644 --- a/vendor.in +++ b/vendor.in @@ -1,2 +1,2 @@ ---requirement https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt -git+https://github.com/opensafely-core/job-runner@v2.69.0 +--requirement https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt +git+https://github.com/opensafely-core/job-runner@v2.71.0 diff --git a/vendor.txt b/vendor.txt index b8dcac5..5fd1758 100644 --- a/vendor.txt +++ b/vendor.txt @@ -1,95 +1,95 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile vendor.in # backoff==2.1.2 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http certifi==2020.11.8 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # requests chardet==3.0.4 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # requests deprecated==1.2.13 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opentelemetry-api distro==1.8.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # ruyaml googleapis-common-protos==1.56.4 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http idna==2.10 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # requests -opensafely-jobrunner @ git+https://github.com/opensafely-core/job-runner@v2.69.0 +opensafely-jobrunner @ git+https://github.com/opensafely-core/job-runner@v2.71.0 # via -r vendor.in -opensafely-pipeline @ git+https://github.com/opensafely-core/pipeline@v2023.08.09.102223 +opensafely-pipeline @ git+https://github.com/opensafely-core/pipeline@v2023.11.06.145820 # via - # -r 
https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opensafely-jobrunner opentelemetry-api==1.12.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk opentelemetry-exporter-otlp-proto-http==1.12.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opensafely-jobrunner opentelemetry-proto==1.12.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http opentelemetry-sdk==1.12.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opentelemetry-exporter-otlp-proto-http opentelemetry-semantic-conventions==0.33b0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opentelemetry-sdk protobuf==3.20.2 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # googleapis-common-protos # opentelemetry-proto pydantic==1.10.12 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opensafely-pipeline requests==2.25.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opensafely-jobrunner # opentelemetry-exporter-otlp-proto-http ruyaml==0.91.0 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opensafely-jobrunner # opensafely-pipeline typing-extensions==4.7.1 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # opentelemetry-sdk # pydantic urllib3==1.26.5 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # requests wrapt==1.14.1 # via - # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.69.0/requirements.prod.txt + # -r https://raw.githubusercontent.com/opensafely-core/job-runner/v2.71.0/requirements.prod.txt # deprecated # The following packages are considered to be unsafe in a requirements file:
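
The largest functional addition in this diff is the level 4 output checking in jobrunner/executors/local.py: moderately_sensitive outputs are only copied to the medium-privacy workspace if their file type is in the LEVEL4_FILE_TYPES allowlist, they are no larger than LEVEL4_MAX_FILESIZE (16MB by default), and, for CSV files, they do not contain a patient_id column; excluded files get an explanatory .txt message instead. The sketch below is illustrative only, not the vendored implementation: it mirrors the check_l4_file logic, with the allowlist and default size limit taken from the diff above, but returns a simplified list of problems rather than the (ok, job_msg, file_msg) tuple used in the real code.

    import csv
    from pathlib import Path

    # Allowlist copied from the LEVEL4_FILE_TYPES constant added in this update
    LEVEL4_FILE_TYPES = {
        ".csv", ".tsv",                                     # tables
        ".jpg", ".jpeg", ".png", ".svg", ".svgz",           # images
        ".html", ".pdf", ".txt", ".log", ".json", ".md",    # reports
    }

    def check_level4_output(path, max_filesize=16 * 1024 * 1024):
        """Return a list of reasons why `path` may not be copied to level 4 storage."""
        path = Path(path)
        problems = []

        if path.suffix not in LEVEL4_FILE_TYPES:
            problems.append(
                f"file type {path.suffix} is not allowed for moderately_sensitive outputs"
            )
        elif path.suffix == ".csv":
            # As in the vendored code, reject patient-level data by checking the header row
            with path.open() as f:
                headers = csv.DictReader(f).fieldnames
            if headers and "patient_id" in headers:
                problems.append(
                    "CSV has a patient_id column; patient-level data is not allowed at level 4"
                )

        if path.stat().st_size > max_filesize:
            problems.append(
                f"file is larger than the {max_filesize / (1024 * 1024):.0f}MB level 4 limit"
            )

        return problems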