From 552309c95db912acdf26716f816258ed6d116b23 Mon Sep 17 00:00:00 2001 From: bloodearnest Date: Wed, 22 Nov 2023 17:12:22 +0000 Subject: [PATCH] Add a new version of the python image. This new version is based on 22.04 and python3.10, and has an up to date version of all python packages. The versioning scheme is arbitrary major version, e.g. v1, v2, etc. The previous image is retained as v1, and the new image is v2. This move from maintaining 1 to 2 images requires some refactoring. Each major version has its own directory, and associated configuration files to track dependencies. They both share the same parameterised `Dockerfile` and `docker-compose.yaml`. In the process, I have also improved the local development tooling: - move from Makefile to parameterised justfile - some small reordering of Dockerfile steps for efficiency and reuse - a proper solution for using the docker images themselves to add new dependencies and/or upgrade existing ones. This means you a) do not need all the python versions installed on your machine and b) this can be built on macOS, in theory. 
--- Dockerfile | 56 +- Makefile | 35 - docker-compose.yml | 24 +- justfile | 40 ++ tests/.test_import.py.swp | Bin 0 -> 12288 bytes tests/test_import.py | 34 +- .../build-dependencies.txt | 0 dependencies.txt => v1/dependencies.txt | 0 v1/env | 2 + requirements.in => v1/requirements.in | 0 requirements.txt => v1/requirements.txt | 61 +- v2/build-dependencies.txt | 10 + v2/dependencies.txt | 15 + v2/env | 2 + v2/requirements.in | 39 ++ v2/requirements.txt | 643 ++++++++++++++++++ 16 files changed, 864 insertions(+), 97 deletions(-) delete mode 100644 Makefile create mode 100644 justfile create mode 100644 tests/.test_import.py.swp rename build-dependencies.txt => v1/build-dependencies.txt (100%) rename dependencies.txt => v1/dependencies.txt (100%) create mode 100644 v1/env rename requirements.in => v1/requirements.in (100%) rename requirements.txt => v1/requirements.txt (88%) create mode 100644 v2/build-dependencies.txt create mode 100644 v2/dependencies.txt create mode 100644 v2/env create mode 100644 v2/requirements.in create mode 100644 v2/requirements.txt diff --git a/Dockerfile b/Dockerfile index 6de34b7..5ccef52 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,34 +8,48 @@ # and b) we specifically always want to build on the latest base image, by # design. 
# +ARG BASE # hadolint ignore=DL3007 -FROM ghcr.io/opensafely-core/base-action:latest as base-python -COPY dependencies.txt /root/dependencies.txt +FROM ghcr.io/opensafely-core/base-action:$BASE as base-python + +RUN mkdir /workspace +WORKDIR /workspace + +ARG MAJOR_VERSION +# ACTION_EXEC sets the default executable for the entrypoint in the base-docker image +ENV ACTION_EXEC=python MAJOR_VERSION=${MAJOR_VERSION} + +COPY ${MAJOR_VERSION}/dependencies.txt /root/dependencies.txt # use space efficient utility from base image RUN /root/docker-apt-install.sh /root/dependencies.txt +# now we have python, set up a venv to install packages to, for isolation from +# system python libraries +# hadolint ignore=DL3059 +RUN python3 -m venv /opt/venv +# "activate" the venv +ENV VIRTUAL_ENV=/opt/venv/ PATH="/opt/venv/bin:$PATH" +# We ensure up-to-date build tools (which why we ignore DL3013) +# hadolint ignore=DL3013,DL3042 +RUN --mount=type=cache,target=/root/.cache python -m pip install -U pip setuptools wheel pip-tools + + ################################################# # # Next, use the base-docker-plus-python image to create a build image FROM base-python as builder +ARG MAJOR_VERSION # install build time dependencies -COPY build-dependencies.txt /root/build-dependencies.txt +COPY ${MAJOR_VERSION}/build-dependencies.txt /root/build-dependencies.txt RUN /root/docker-apt-install.sh /root/build-dependencies.txt -# install everything in venv for isolation from system python libraries -# hadolint ignore=DL3059 -RUN python3 -m venv /opt/venv -ENV VIRTUAL_ENV=/opt/venv/ PATH="/opt/venv/bin:$PATH" LLVM_CONFIG=/usr/bin/llvm-config-10 - -COPY requirements.txt /root/requirements.txt -# We ensure up-to-date build tools (which why we ignore DL3013) +COPY ${MAJOR_VERSION}/requirements.txt /root/requirements.txt # Note: the mount command does two things: 1) caches across builds to speed up # local development and 2) ensures the pip cache does not get committed to the # layer (which is 
why we ignore DL3042). -# hadolint ignore=DL3013,DL3042 +# hadolint ignore=DL3042 RUN --mount=type=cache,target=/root/.cache \ - python -m pip install -U pip setuptools wheel && \ python -m pip install --requirement /root/requirements.txt ################################################ @@ -43,26 +57,26 @@ RUN --mount=type=cache,target=/root/.cache \ # Finally, build the actual image from the base-python image FROM base-python as python + +ARG MAJOR_VERSION # Some static metadata for this specific image, as defined by: # https://github.com/opencontainers/image-spec/blob/master/annotations.md#pre-defined-annotation-keys # The org.opensafely.action label is used by the jobrunner to indicate this is # an approved action image to run. -LABEL org.opencontainers.image.title="python" \ +LABEL org.opencontainers.image.title="python:${MAJOR_VERSION}" \ org.opencontainers.image.description="Python action for opensafely.org" \ org.opencontainers.image.source="https://github.com/opensafely-core/python-docker" \ - org.opensafely.action="python" + org.opensafely.action="python:${MAJOR_VERSION}" # copy venv over from builder image COPY --from=builder /opt/venv /opt/venv -# ACTION_EXEC sets the default executable for the entrypoint in the base-docker image -ENV VIRTUAL_ENV=/opt/venv/ PATH="/opt/venv/bin:$PATH" ACTION_EXEC=python - -RUN mkdir /workspace -WORKDIR /workspace -# tag with build info as the very last step, as it will never be cached +# tag with build info as the very last step, as it will never be cacheable ARG BUILD_DATE ARG REVISION +ARG BUILD_NUMBER # RFC 3339. 
LABEL org.opencontainers.image.created=$BUILD_DATE \ - org.opencontainers.image.revision=$REVISION + org.opencontainers.image.revision=$REVISION \ + org.opencontainers.image.build=$BUILD_NUMBER \ + org.opencontainers.image.version=$MAJOR_VERSION.$BUILD_NUMBER diff --git a/Makefile b/Makefile deleted file mode 100644 index 3816bc8..0000000 --- a/Makefile +++ /dev/null @@ -1,35 +0,0 @@ -INTERACTIVE:=$(shell [ -t 0 ] && echo 1) -export DOCKER_BUILDKIT=1 -export BUILD_DATE=$(shell date +'%y-%m-%dT%H:%M:%S.%3NZ') -export REVISION=$(shell git rev-parse --short HEAD) - -.PHONY: build -build: - docker-compose build --pull python - - -.PHONY: test -test: - docker-compose run --rm -v $(PWD):/workspace python pytest tests -v - -# test basic python invocation -functional-test: - docker-compose run --rm python -c '' - docker-compose run --rm python python -c '' - - -.PHONY: lint -lint: - @docker pull hadolint/hadolint:v2.8.0 - @docker run --rm -i hadolint/hadolint:v2.8.0 < Dockerfile - -requirements.txt: requirements.in venv/bin/pip-compile - venv/bin/pip-compile requirements.in - -venv/bin/pip-compile: | venv - venv/bin/pip install pip-tools - -venv: - virtualenv -p python3 venv - - diff --git a/docker-compose.yml b/docker-compose.yml index a9751fa..cff1564 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,18 +1,26 @@ services: - # used to build the production image - python: - image: python + base: + init: true + image: python:${MAJOR_VERSION}-base build: context: . 
- target: python + target: base-python cache_from: # should speed up the build in CI, where we have a cold cache - - ghcr.io/opensafely-core/base-docker - - ghcr.io/opensafely-core/python + - ghcr.io/opensafely-core/base-action:${BASE} + - ghcr.io/opensafely-core/python:${MAJOR_VERSION} args: # this makes the image work for later cache_from: usage - BUILDKIT_INLINE_CACHE=1 # env vars supplied by make/just + - BUILD_NUMBER - BUILD_DATE - REVISION - - VERSION - init: true + - BASE + - MAJOR_VERSION + + python: + extends: + service: base + image: python:${MAJOR_VERSION} + build: + target: python diff --git a/justfile b/justfile new file mode 100644 index 0000000..310f12e --- /dev/null +++ b/justfile @@ -0,0 +1,40 @@ +export DOCKER_BUILDKIT := "1" +export BUILD_DATE := `date +'%y-%m-%dT%H:%M:%S.%3NZ'` +export REVISION := `git rev-parse --short HEAD` + +# TODO: calculate this +export BUILD_NUMBER := "1234" + +build version target="python" *args="": + docker-compose --env-file {{ version }}/env build --pull {{ args }} {{ target }} + +test version *args="tests -v": + docker-compose --env-file {{ version }}/env run --rm -v $PWD:/workspace python pytest {{ args }} + +update version *args="": + docker-compose --env-file {{ version }}/env run --rm -v $PWD:/workspace base pip-compile {{ args }} {{ version }}/requirements.in -o {{ version }}/requirements.txt + +check: + @docker pull hadolint/hadolint:v2.12.0 + @docker run --rm -i hadolint/hadolint:v2.12.0 < Dockerfile + +publish version: + #!/bin/bash + set -euxo pipefail + docker tag python:{{ version }} ghcr.io/opensafely-core/python:{{ version }} + echo docker push ghcr.io/opensafely-core/python:{{ version }} + + if test "{{ version }}" = "v1"; then + # jupyter is only alias for v1 + docker tag python:{{ version }} ghcr.io/opensafely-core/jupyter:{{ version }} + echo docker push ghcr.io/opensafely-core/jupyter:{{ version }} + + # v1 is also known as latest, at least until we transition fully + docker tag python:{{ version }} 
ghcr.io/opensafely-core/python:latest + docker tag python:{{ version }} ghcr.io/opensafely-core/jupyter:latest + echo docker push ghcr.io/opensafely-core/python:latest + echo docker push ghcr.io/opensafely-core/jupyter:latest + fi + + + diff --git a/tests/.test_import.py.swp b/tests/.test_import.py.swp new file mode 100644 index 0000000000000000000000000000000000000000..bdbad6b252fbf7e2bfe833f4f2de2c01de974e0f GIT binary patch literal 12288 zcmeI2O=~1Y7{_b(q^_%?U!b;g>49wLP1q0!__nMGW>+HS5<^on{q&@qp02j4Cdrr$ z`W3th-c|4gM7;YE1pNqtcvNvOd+}dY-N_{AF0!XUHTN2U=2TCilS@t?UI zH%i;AnLcW19E$`ZfhQp_vE|Lx>uhCtY0;UPOK$L&UVQgSEXK8wKqL?eL;{gOBoGNi z0+B!@aLEYR<_h}=F<))tUTN=FF5KIn@fHb00+B!@5D7#Akw7F62}A;sKqL?eL;{zP zfG8OI_$kKrpGNTb|9|)Q|AXfl`xW{T`U3hK`V=al0n~?HgC0J|*l*Cc&=E9*R-wP} z0>SUlH_!=mA9@3N@GN70LtjEu=m>&U zjC;D~huxH5GY=-ya0`pAp02TiO74Pb-@wdUZ>e)O&)ey1?3Fg;vCyNel$9lYD0D6B zp-B@ttd%B&6znFmYm<3Cq?)v_)VXlOCRHY*`ao(`?RYUq_zj9JnO|Ra;q4=-hxTeNmkSjv%}xV`0i1O1qyXbIAqAerD^^Am1hp zK%EDdh95@~WfKB+X1JygP&B)rsfOyb=jz{$cSA_yZOll=;(ev_1JVYm^!Nf#Fk>P$ zjj6UKvqv`Z2_#8!r>z-$8p~CQo@!`dsc2amZWZqn7yXLd*Y^Egtn?lA?6S3e`cDR3f|J>CN>fnw2s_@Io=lqQfhZ>!3#nM z6jTqU*tlYZ;?$gq$pBlg;KKs#D&H!P&H2Lg;Ipquj2b0t z+cx(c+Na^(Y?q4Pfn1`;PD_fq@V*9 zU?s_0N)#N_c~>&m)TKoGs0q$xi(d3+tnc?=!$+B#UUt^>7tWThirD6ig|i%W)nwE} z52!QAXJU4T7FRV{sH!T BW2OKA literal 0 HcmV?d00001 diff --git a/tests/test_import.py b/tests/test_import.py index 09d4a4a..f877650 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -1,21 +1,44 @@ +import os import subprocess from importlib import import_module from pathlib import Path +import re import pytest from pkg_resources import Requirement, get_provider +# packages that have no way to detect their importable name +BAD_PACKAGES = { + "beautifulsoup4": "bs4", + "protobuf": None, # AARRRRGG + "qtpy": None, # required dependency of jupyter-lab +} + def get_module_names(pkg_name): """Load pkg metadata to find out its importable module name(s).""" + # remove any extras + pkg_name = 
re.sub(r'\[.*\]', '', pkg_name) modules = set() provider = get_provider(Requirement.parse(pkg_name)) # top level package name is typically all we need - if provider.has_metadata("top_level.txt"): - modules |= set(provider.get_metadata_lines("top_level.txt")) + if pkg_name in BAD_PACKAGES: + name = BAD_PACKAGES[pkg_name] + if name is None: # unimportably package + return [] + modules.add(BAD_PACKAGES[pkg_name]) + elif provider.has_metadata("top_level.txt"): + first_line = list(provider.get_metadata_lines("top_level.txt"))[0] + modules.add(first_line) else: # badly packaged dependency, make an educated guess - modules.add(pkg_name.replace("-", "_")) + name = pkg_name + if pkg_name.endswith("-cffi"): + name = pkg_name[:-5] + elif pkg_name.endswith("-py"): + name = pkg_name[:-3] + + modules.add(name.replace("-", "_")) if provider.has_metadata("namespace_packages.txt"): modules |= set(provider.get_metadata_lines("namespace_packages.txt")) @@ -24,8 +47,9 @@ def get_module_names(pkg_name): return [n for n in modules if n[0] != "_"] -def generate_import_names(req_path): +def generate_import_names(major_version): """Generate list of expected modules to be able to import.""" + req_path = Path(major_version) / "requirements.txt" with req_path.open() as fp: for line in fp: line = line.strip() @@ -38,7 +62,7 @@ def generate_import_names(req_path): @pytest.mark.parametrize( - "name, module", generate_import_names(Path("requirements.txt")) + "name, module", generate_import_names(os.environ["MAJOR_VERSION"]) ) @pytest.mark.filterwarnings("ignore") def test_import_package(name, module): diff --git a/build-dependencies.txt b/v1/build-dependencies.txt similarity index 100% rename from build-dependencies.txt rename to v1/build-dependencies.txt diff --git a/dependencies.txt b/v1/dependencies.txt similarity index 100% rename from dependencies.txt rename to v1/dependencies.txt diff --git a/v1/env b/v1/env new file mode 100644 index 0000000..ea122d4 --- /dev/null +++ b/v1/env @@ -0,0 +1,2 
@@ +MAJOR_VERSION=v1 +BASE=20.04 diff --git a/requirements.in b/v1/requirements.in similarity index 100% rename from requirements.in rename to v1/requirements.in diff --git a/requirements.txt b/v1/requirements.txt similarity index 88% rename from requirements.txt rename to v1/requirements.txt index 18935e4..a00935f 100644 --- a/requirements.txt +++ b/v1/requirements.txt @@ -1,8 +1,8 @@ # -# This file is autogenerated by pip-compile with python 3.8 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.8 +# by the following command: # -# pip-compile +# pip-compile --output-file=v1/requirements.txt v1/requirements.in # astor==0.8.1 # via formulaic @@ -20,7 +20,7 @@ autograd-gamma==0.5.0 backcall==0.1.0 # via ipython bash-kernel==0.7.2 - # via -r requirements.in + # via -r v1/requirements.in bleach==3.1.2 # via nbconvert cachetools==4.0.0 @@ -28,7 +28,7 @@ cachetools==4.0.0 cairocffi==1.4.0 # via cairosvg cairosvg==2.5.2 - # via -r requirements.in + # via -r v1/requirements.in certifi==2019.11.28 # via requests cffi==1.15.1 @@ -37,6 +37,8 @@ chardet==3.0.4 # via requests click==7.0 # via + # click-plugins + # cligj # fiona # pip-tools click-plugins==1.1.1 @@ -120,7 +122,7 @@ ipython-genutils==0.2.0 # traitlets ipywidgets==7.5.1 # via - # -r requirements.in + # -r v1/requirements.in # jupyter jedi==0.16.0 # via ipython @@ -139,7 +141,7 @@ jsonschema==3.2.0 # jupyterlab-server # nbformat jupyter==1.0.0 - # via -r requirements.in + # via -r v1/requirements.in jupyter-client==5.3.4 # via # ipykernel @@ -157,17 +159,17 @@ jupyter-core==4.6.1 # notebook # qtconsole jupyterlab==1.2.6 - # via -r requirements.in + # via -r v1/requirements.in jupyterlab-server==1.0.6 # via jupyterlab jupytext==1.3.3 - # via -r requirements.in + # via -r v1/requirements.in kaleido==0.2.1 - # via -r requirements.in + # via -r v1/requirements.in kiwisolver==1.1.0 # via matplotlib lifelines==0.26.4 - # via -r requirements.in + # via -r v1/requirements.in llvmlite==0.34.0 # 
via numba lz4==3.1.3 @@ -176,7 +178,7 @@ markupsafe==1.1.1 # via jinja2 matplotlib==3.1.3 # via - # -r requirements.in + # -r v1/requirements.in # descartes # lifelines # seaborn @@ -200,7 +202,7 @@ nbformat==5.0.4 # nbval # notebook nbval==0.9.4 - # via -r requirements.in + # via -r v1/requirements.in notebook==6.0.3 # via # jupyter @@ -208,10 +210,10 @@ notebook==6.0.3 # jupyterlab-server # widgetsnbextension numba==0.51.2 - # via -r requirements.in + # via -r v1/requirements.in numpy==1.18.1 # via - # -r requirements.in + # -r v1/requirements.in # autograd # formulaic # lifelines @@ -229,12 +231,12 @@ oauthlib==3.1.0 opensafely-cohort-extractor==1.88.0 # via -r requirements.in opensafely-matching==0.2.0 - # via -r requirements.in + # via -r v1/requirements.in packaging==20.1 # via pytest pandas==1.0.1 # via - # -r requirements.in + # -r v1/requirements.in # ebmdatalab # formulaic # geopandas @@ -247,7 +249,7 @@ pandas==1.0.1 # upsetplot pandas-gbq==0.13.0 # via - # -r requirements.in + # -r v1/requirements.in # ebmdatalab pandocfilters==1.4.2 # via nbconvert @@ -265,12 +267,12 @@ pickleshare==0.7.5 # via ipython pillow==8.1.0 # via - # -r requirements.in + # -r v1/requirements.in # cairosvg pip-tools==6.2.0 - # via -r requirements.in + # via -r v1/requirements.in plotly==4.5.0 - # via -r requirements.in + # via -r v1/requirements.in pluggy==0.13.1 # via pytest prometheus-client==0.7.1 @@ -283,6 +285,7 @@ protobuf==3.11.3 # via # google-api-core # google-cloud-bigquery + # googleapis-common-protos ptyprocess==0.6.0 # via # pexpect @@ -293,7 +296,7 @@ py==1.8.1 # retry pyarrow==3.0.0 # via - # -r requirements.in + # -r v1/requirements.in # opensafely-cohort-extractor pyasn1==0.4.8 # via @@ -321,7 +324,7 @@ pyrsistent==0.15.7 # via jsonschema pytest==5.3.5 # via - # -r requirements.in + # -r v1/requirements.in # nbval python-dateutil==2.8.1 # via @@ -356,10 +359,10 @@ retrying==1.3.3 rsa==4.0 # via google-auth scikit-learn==0.24.1 - # via -r requirements.in + # via 
-r v1/requirements.in scipy==1.4.1 # via - # -r requirements.in + # -r v1/requirements.in # autograd-gamma # formulaic # lifelines @@ -384,11 +387,13 @@ six==1.14.0 # google-cloud-bigquery # google-resumable-media # jsonschema + # munch # nbval # packaging # patsy # plotly # protobuf + # pyrsistent # python-dateutil # retrying # traitlets @@ -420,7 +425,7 @@ tornado==6.0.3 # notebook # terminado tqdm==4.42.1 - # via -r requirements.in + # via -r v1/requirements.in traitlets==4.3.3 # via # ipykernel @@ -433,11 +438,11 @@ traitlets==4.3.3 # notebook # qtconsole upsetplot==0.6.1 - # via -r requirements.in + # via -r v1/requirements.in urllib3==1.25.8 # via requests venn==0.1.3 - # via -r requirements.in + # via -r v1/requirements.in wcwidth==0.1.8 # via # prompt-toolkit diff --git a/v2/build-dependencies.txt b/v2/build-dependencies.txt new file mode 100644 index 0000000..d5c2de8 --- /dev/null +++ b/v2/build-dependencies.txt @@ -0,0 +1,10 @@ +# build time dependencies +build-essential +gcc +python3-dev +python3-venv +python3-wheel +# for numba +#llvm-10-dev +# for cairosvg +libffi-dev diff --git a/v2/dependencies.txt b/v2/dependencies.txt new file mode 100644 index 0000000..9fb4f06 --- /dev/null +++ b/v2/dependencies.txt @@ -0,0 +1,15 @@ +# run time dependencies +# ensure fully working base python3 installation +# see: https://gist.github.com/tiran/2dec9e03c6f901814f6d1e8dad09528e +python3 +python3-venv +python3-pip +python3-distutils +tzdata +ca-certificates + +# for cairosvg +libcairo2 + +# Some jupyter notebooks rely on using git to get current version of repo +git diff --git a/v2/env b/v2/env new file mode 100644 index 0000000..7cd7324 --- /dev/null +++ b/v2/env @@ -0,0 +1,2 @@ +MAJOR_VERSION=v2 +BASE=22.04 diff --git a/v2/requirements.in b/v2/requirements.in new file mode 100644 index 0000000..a89dd8b --- /dev/null +++ b/v2/requirements.in @@ -0,0 +1,39 @@ +# Basic requirements for notebook infrastructure provided in base +# docker image +pip-tools +jupyter 
+jupyterlab +jupytext +bash_kernel +nbval +opensafely-matching + +# Commonly-used packages provided in base docker image +pandas-gbq +pandas +numpy +ebmdatalab +matplotlib +scipy +tqdm +numba +opensafely-cohort-extractor +pyarrow +venn +kaleido +scikit-learn +lifelines + +# Both these required for plotly+notebooks +plotly +ipywidgets + +# Add extra per-notebook packages here +pillow +cairosvg + +# Allow for projects to run tests inside the container +pytest + +# For visualisation of set overlaps +upsetplot diff --git a/v2/requirements.txt b/v2/requirements.txt new file mode 100644 index 0000000..aecf474 --- /dev/null +++ b/v2/requirements.txt @@ -0,0 +1,643 @@ +# +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: +# +# pip-compile requirements-3.10.in +# +anyio==4.0.0 + # via jupyter-server +argon2-cffi==23.1.0 + # via jupyter-server +argon2-cffi-bindings==21.2.0 + # via argon2-cffi +arrow==1.3.0 + # via isoduration +astor==0.8.1 + # via formulaic +asttokens==2.4.1 + # via stack-data +async-lru==2.0.4 + # via jupyterlab +attrs==23.1.0 + # via + # fiona + # jsonschema + # referencing +autograd==1.6.2 + # via + # autograd-gamma + # lifelines +autograd-gamma==0.5.0 + # via lifelines +babel==2.13.1 + # via jupyterlab-server +bash-kernel==0.9.1 + # via -r requirements-3.10.in +beautifulsoup4==4.12.2 + # via nbconvert +bleach==6.1.0 + # via nbconvert +build==1.0.3 + # via pip-tools +cachetools==5.3.2 + # via google-auth +cairocffi==1.6.1 + # via cairosvg +cairosvg==2.7.1 + # via -r requirements-3.10.in +certifi==2023.11.17 + # via + # fiona + # pyproj + # requests +cffi==1.16.0 + # via + # argon2-cffi-bindings + # cairocffi +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via + # click-plugins + # cligj + # fiona + # pip-tools +click-plugins==1.1.1 + # via fiona +cligj==0.7.2 + # via fiona +comm==0.2.0 + # via + # ipykernel + # ipywidgets +contourpy==1.2.0 + # via matplotlib +coverage==7.3.2 + # via nbval 
+cssselect2==0.7.0 + # via cairosvg +cycler==0.12.1 + # via matplotlib +db-dtypes==1.1.1 + # via pandas-gbq +debugpy==1.8.0 + # via ipykernel +decorator==5.1.1 + # via + # ipython + # retry +defusedxml==0.7.1 + # via + # cairosvg + # nbconvert +descartes==1.1.0 + # via ebmdatalab +ebmdatalab==0.0.30 + # via -r requirements-3.10.in +exceptiongroup==1.2.0 + # via + # anyio + # ipython + # pytest +executing==2.0.1 + # via stack-data +fastjsonschema==2.19.0 + # via nbformat +fiona==1.9.5 + # via geopandas +fonttools==4.45.0 + # via matplotlib +formulaic==0.6.6 + # via lifelines +fqdn==1.5.1 + # via jsonschema +future==0.18.3 + # via autograd +geopandas==0.14.1 + # via ebmdatalab +google-api-core[grpc]==2.14.0 + # via + # google-cloud-bigquery + # google-cloud-bigquery-storage + # google-cloud-core + # pandas-gbq +google-auth==2.23.4 + # via + # google-api-core + # google-auth-oauthlib + # google-cloud-core + # pandas-gbq + # pydata-google-auth +google-auth-oauthlib==1.1.0 + # via + # pandas-gbq + # pydata-google-auth +google-cloud-bigquery==3.13.0 + # via pandas-gbq +google-cloud-bigquery-storage==2.22.0 + # via pandas-gbq +google-cloud-core==2.3.3 + # via google-cloud-bigquery +google-crc32c==1.5.0 + # via google-resumable-media +google-resumable-media==2.6.0 + # via google-cloud-bigquery +googleapis-common-protos==1.61.0 + # via + # google-api-core + # grpcio-status +grpcio==1.59.3 + # via + # google-api-core + # google-cloud-bigquery + # grpcio-status +grpcio-status==1.59.3 + # via google-api-core +idna==3.4 + # via + # anyio + # jsonschema + # requests +iniconfig==2.0.0 + # via pytest +interface-meta==1.3.0 + # via formulaic +ipykernel==6.27.0 + # via + # bash-kernel + # jupyter + # jupyter-console + # jupyterlab + # nbval + # qtconsole +ipython==8.17.2 + # via + # ipykernel + # ipywidgets + # jupyter-console +ipywidgets==8.1.1 + # via + # -r requirements-3.10.in + # jupyter +isoduration==20.11.0 + # via jsonschema +jedi==0.19.1 + # via ipython +jinja2==3.1.2 + # 
via + # jupyter-server + # jupyterlab + # jupyterlab-server + # nbconvert +joblib==1.3.2 + # via scikit-learn +json5==0.9.14 + # via jupyterlab-server +jsonpointer==2.4 + # via jsonschema +jsonschema[format-nongpl]==4.20.0 + # via + # jupyter-events + # jupyterlab-server + # nbformat +jsonschema-specifications==2023.11.1 + # via jsonschema +jupyter==1.0.0 + # via -r requirements-3.10.in +jupyter-client==8.6.0 + # via + # ipykernel + # jupyter-console + # jupyter-server + # nbclient + # nbval + # qtconsole +jupyter-console==6.6.3 + # via jupyter +jupyter-core==5.5.0 + # via + # ipykernel + # jupyter-client + # jupyter-console + # jupyter-server + # jupyterlab + # nbclient + # nbconvert + # nbformat + # qtconsole +jupyter-events==0.9.0 + # via jupyter-server +jupyter-lsp==2.2.0 + # via jupyterlab +jupyter-server==2.11.0 + # via + # jupyter-lsp + # jupyterlab + # jupyterlab-server + # notebook + # notebook-shim +jupyter-server-terminals==0.4.4 + # via jupyter-server +jupyterlab==4.0.9 + # via + # -r requirements-3.10.in + # notebook +jupyterlab-pygments==0.2.2 + # via nbconvert +jupyterlab-server==2.25.2 + # via + # jupyterlab + # notebook +jupyterlab-widgets==3.0.9 + # via ipywidgets +jupytext==1.15.2 + # via -r requirements-3.10.in +kaleido==0.2.1 + # via -r requirements-3.10.in +kiwisolver==1.4.5 + # via matplotlib +lifelines==0.27.8 + # via -r requirements-3.10.in +llvmlite==0.41.1 + # via numba +lz4==4.3.2 + # via opensafely-cohort-extractor +markdown-it-py==3.0.0 + # via + # jupytext + # mdit-py-plugins +markupsafe==2.1.3 + # via + # jinja2 + # nbconvert +matplotlib==3.8.2 + # via + # -r requirements-3.10.in + # descartes + # lifelines + # seaborn + # upsetplot + # venn +matplotlib-inline==0.1.6 + # via + # ipykernel + # ipython +mdit-py-plugins==0.4.0 + # via jupytext +mdurl==0.1.2 + # via markdown-it-py +mistune==3.0.2 + # via nbconvert +nbclient==0.9.0 + # via nbconvert +nbconvert==7.11.0 + # via + # jupyter + # jupyter-server +nbformat==5.9.2 + # via + # 
jupyter-server + # jupytext + # nbclient + # nbconvert + # nbval +nbval==0.10.0 + # via -r requirements-3.10.in +nest-asyncio==1.5.8 + # via ipykernel +notebook==7.0.6 + # via jupyter +notebook-shim==0.2.3 + # via + # jupyterlab + # notebook +numba==0.58.1 + # via -r requirements-3.10.in +numpy==1.26.2 + # via + # -r requirements-3.10.in + # autograd + # contourpy + # db-dtypes + # formulaic + # lifelines + # matplotlib + # numba + # pandas + # pandas-gbq + # patsy + # pyarrow + # scikit-learn + # scipy + # seaborn + # shapely + # statsmodels +oauthlib==3.2.2 + # via requests-oauthlib +opensafely-cohort-extractor==1.90.0 + # via -r requirements-3.10.in +opensafely-matching==0.2.0 + # via -r requirements-3.10.in +overrides==7.4.0 + # via jupyter-server +packaging==23.2 + # via + # build + # db-dtypes + # geopandas + # google-cloud-bigquery + # ipykernel + # jupyter-server + # jupyterlab + # jupyterlab-server + # matplotlib + # nbconvert + # plotly + # pytest + # qtconsole + # qtpy + # statsmodels +pandas==2.1.3 + # via + # -r requirements-3.10.in + # db-dtypes + # ebmdatalab + # formulaic + # geopandas + # lifelines + # opensafely-cohort-extractor + # opensafely-matching + # pandas-gbq + # seaborn + # statsmodels + # upsetplot +pandas-gbq==0.19.2 + # via + # -r requirements-3.10.in + # ebmdatalab +pandocfilters==1.5.0 + # via nbconvert +parso==0.8.3 + # via jedi +patsy==0.5.3 + # via statsmodels +pexpect==4.8.0 + # via + # bash-kernel + # ipython +pillow==10.1.0 + # via + # -r requirements-3.10.in + # cairosvg + # matplotlib +pip-tools==7.3.0 + # via -r requirements-3.10.in +platformdirs==4.0.0 + # via jupyter-core +plotly==5.18.0 + # via -r requirements-3.10.in +pluggy==1.3.0 + # via pytest +prometheus-client==0.19.0 + # via jupyter-server +prompt-toolkit==3.0.41 + # via + # ipython + # jupyter-console +proto-plus==1.22.3 + # via + # google-cloud-bigquery + # google-cloud-bigquery-storage +protobuf==4.25.1 + # via + # google-api-core + # google-cloud-bigquery + # 
google-cloud-bigquery-storage + # googleapis-common-protos + # grpcio-status + # proto-plus +psutil==5.9.6 + # via ipykernel +ptyprocess==0.7.0 + # via + # pexpect + # terminado +pure-eval==0.2.2 + # via stack-data +py==1.11.0 + # via retry +pyarrow==14.0.1 + # via + # -r requirements-3.10.in + # db-dtypes + # opensafely-cohort-extractor + # pandas-gbq +pyasn1==0.5.1 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.3.0 + # via google-auth +pycparser==2.21 + # via cffi +pydata-google-auth==1.8.2 + # via pandas-gbq +pygments==2.17.1 + # via + # ipython + # jupyter-console + # nbconvert + # qtconsole +pyparsing==3.1.1 + # via matplotlib +pyproj==3.6.1 + # via geopandas +pyproject-hooks==1.0.0 + # via build +pytest==7.4.3 + # via + # -r requirements-3.10.in + # nbval +python-dateutil==2.8.2 + # via + # arrow + # google-cloud-bigquery + # jupyter-client + # matplotlib + # pandas +python-json-logger==2.0.7 + # via jupyter-events +pytz==2023.3.post1 + # via pandas +pyyaml==6.0.1 + # via + # jupyter-events + # jupytext + # opensafely-cohort-extractor +pyzmq==25.1.1 + # via + # ipykernel + # jupyter-client + # jupyter-console + # jupyter-server + # qtconsole +qtconsole==5.5.1 + # via jupyter +qtpy==2.4.1 + # via qtconsole +referencing==0.31.0 + # via + # jsonschema + # jsonschema-specifications + # jupyter-events +requests==2.31.0 + # via + # google-api-core + # google-cloud-bigquery + # jupyterlab-server + # opensafely-cohort-extractor + # requests-oauthlib +requests-oauthlib==1.3.1 + # via google-auth-oauthlib +retry==0.9.2 + # via opensafely-cohort-extractor +rfc3339-validator==0.1.4 + # via + # jsonschema + # jupyter-events +rfc3986-validator==0.1.1 + # via + # jsonschema + # jupyter-events +rpds-py==0.13.1 + # via + # jsonschema + # referencing +rsa==4.9 + # via google-auth +scikit-learn==1.3.2 + # via -r requirements-3.10.in +scipy==1.11.4 + # via + # -r requirements-3.10.in + # autograd-gamma + # formulaic + # lifelines + # scikit-learn + # statsmodels 
+seaborn==0.13.0 + # via + # ebmdatalab + # opensafely-cohort-extractor +send2trash==1.8.2 + # via jupyter-server +shapely==2.0.2 + # via geopandas +six==1.16.0 + # via + # asttokens + # bleach + # fiona + # patsy + # python-dateutil + # rfc3339-validator +sniffio==1.3.0 + # via anyio +soupsieve==2.5 + # via beautifulsoup4 +sqlparse==0.4.4 + # via opensafely-cohort-extractor +stack-data==0.6.3 + # via ipython +statsmodels==0.14.0 + # via ebmdatalab +structlog==23.2.0 + # via opensafely-cohort-extractor +tabulate==0.9.0 + # via opensafely-cohort-extractor +tenacity==8.2.3 + # via plotly +terminado==0.18.0 + # via + # jupyter-server + # jupyter-server-terminals +threadpoolctl==3.2.0 + # via scikit-learn +tinycss2==1.2.1 + # via + # cairosvg + # cssselect2 + # nbconvert +toml==0.10.2 + # via jupytext +tomli==2.0.1 + # via + # build + # jupyterlab + # pip-tools + # pyproject-hooks + # pytest +tornado==6.3.3 + # via + # ipykernel + # jupyter-client + # jupyter-server + # jupyterlab + # notebook + # terminado +tqdm==4.66.1 + # via -r requirements-3.10.in +traitlets==5.13.0 + # via + # comm + # ipykernel + # ipython + # ipywidgets + # jupyter-client + # jupyter-console + # jupyter-core + # jupyter-events + # jupyter-server + # jupyterlab + # matplotlib-inline + # nbclient + # nbconvert + # nbformat + # qtconsole +types-python-dateutil==2.8.19.14 + # via arrow +typing-extensions==4.8.0 + # via + # async-lru + # formulaic +tzdata==2023.3 + # via pandas +upsetplot==0.8.0 + # via -r requirements-3.10.in +uri-template==1.3.0 + # via jsonschema +urllib3==2.1.0 + # via requests +venn==0.1.3 + # via -r requirements-3.10.in +wcwidth==0.2.11 + # via prompt-toolkit +webcolors==1.13 + # via jsonschema +webencodings==0.5.1 + # via + # bleach + # cssselect2 + # tinycss2 +websocket-client==1.6.4 + # via jupyter-server +wheel==0.41.3 + # via pip-tools +widgetsnbextension==4.0.9 + # via ipywidgets +wrapt==1.16.0 + # via formulaic + +# The following packages are considered to be unsafe in 
a requirements file: +# pip +# setuptools