From d6373cb67e11e665acca55ea37bab78cf87e6fdb Mon Sep 17 00:00:00 2001
From: hayleyschi
Date: Fri, 5 Jul 2024 11:59:20 +0100
Subject: [PATCH] Initial commit

---
 .gitattributes            |   3 +
 .gitignore                |  26 ++
 DEVELOPERS.md             | 117 ++++++++
 Dockerfile                |  35 +++
 INSTALLATION_GUIDE.md     |  79 ++++++
 README.md                 |  46 ++++
 data/.gitkeep             |   0
 jupyter-config/.gitignore |   3 +
 jupyter-lab-mac-os        |  14 +
 jupyter-lab.sh            | 164 +++++++++++
 notebooks/.gitkeep        |   0
 packages.txt              |   1 +
 postinstall.sh            |   3 +
 requirements.in           |  20 ++
 requirements.txt          | 560 ++++++++++++++++++++++++++++++++++++++
 15 files changed, 1071 insertions(+)
 create mode 100644 .gitattributes
 create mode 100644 .gitignore
 create mode 100644 DEVELOPERS.md
 create mode 100644 Dockerfile
 create mode 100644 INSTALLATION_GUIDE.md
 create mode 100644 README.md
 create mode 100644 data/.gitkeep
 create mode 100644 jupyter-config/.gitignore
 create mode 100755 jupyter-lab-mac-os
 create mode 100755 jupyter-lab.sh
 create mode 100644 notebooks/.gitkeep
 create mode 100644 packages.txt
 create mode 100755 postinstall.sh
 create mode 100644 requirements.in
 create mode 100644 requirements.txt

diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..648e07c
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,3 @@
+# Don't allow windows checkouts to convert `\n` to `\r\n`, as this
+# breaks stuff that is meant to be run in linux-in-docker
+* text=auto eol=lf
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..cdaf06b
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,26 @@
+# Credentials for accessing BigQuery
+bq-service-account.json
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# pyenv
+.python-version
+
+# jupyter
+.ipynb_checkpoints
+.ipython/
+.jupyter/
+.local/
+
+# sublime text/pycharm
+.idea/
+.DS_Store
+
+# Emacs
+*~
+
+# Linux trash directories
+.Trash-*/
diff --git a/DEVELOPERS.md b/DEVELOPERS.md
new file mode 100644
index 0000000..efd872c
--- /dev/null
+++ b/DEVELOPERS.md
@@ -0,0 +1,117 @@
+# The Bennett Institute's default notebook environment
+
+
+## Running Jupyter Lab
+
+You will need to have installed Git and Docker; please see the
+[`INSTALLATION_GUIDE.md`](INSTALLATION_GUIDE.md) for further details.
+
+Windows and Linux users should double-click the `jupyter-lab` file.
+Users on macOS should double-click `jupyter-lab-mac-os` instead.
+
+This will build a Docker image with all software requirements installed,
+start a new Jupyter Lab server, and then provide a link to access this
+server.
+
+The first time you run this command it may take some time to download
+and install the necessary software. Subsequent runs should be much
+faster.
+
+
+## Adding or updating Python packages
+
+To install a new package:
+
+ * Add it to the bottom of the `requirements.in` file.
+ * From the Jupyter Lab Launcher page, choose "Terminal" (in the
+   "Other" section).
+ * Run:
+   ```sh
+   pip-compile -v
+   ```
+   This will automatically update your `requirements.txt` file to
+   include the new package. (The `-v` just means "verbose" so you can
+   see progress, as this command can take a while to run.)
+ * Shut down the Jupyter server and re-run the `jupyter-lab` launcher
+   script.
+ * Docker should automatically install the new package before starting
+   the server. A worked example is sketched below.
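+
+For example, here is a minimal sketch of the whole cycle, using the
+hypothetical package `openpyxl` purely for illustration:
+
+```sh
+# 1. Declare the new dependency (or edit requirements.in in the Jupyter editor)
+echo "openpyxl" >> requirements.in
+
+# 2. From a Jupyter "Terminal", pin it and its dependencies into requirements.txt
+pip-compile -v
+
+# 3. Confirm the pin was added, e.g. "openpyxl==<version>  # via -r requirements.in"
+grep openpyxl requirements.txt
+```
+
+Then shut down the server and re-run the launcher script so that Docker
+rebuilds the image with the new package included.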
+
+To update an existing package the process is the same as above, except
+that instead of running `pip-compile -v` you should run:
+```sh
+pip-compile -v --upgrade-package <package-name>
+```
+
+To update _all_ packages you can run:
+```sh
+pip-compile -v --upgrade
+```
+
+
+## Importing from `lib`
+
+We used to have configuration which made Python files in the top-level
+`lib` directory importable. However, this did not work reliably and users
+developed a variety of different workarounds. We now no longer make any
+changes to Python's default import behaviour. Depending on what
+workarounds you already have in place, this may make no difference to
+you, or it may break your imports.
+
+If you find your imports no longer work and you have imports of the
+form:
+```python
+from lib import my_custom_library
+```
+then you should move the `lib` directory to be inside `notebooks` and it
+should work.
+
+If your imports no longer work and they are of the form:
+```python
+import my_custom_library
+```
+then you can move `lib/my_custom_library.py` to
+`notebooks/my_custom_library.py`.
+
+
+## Diffing notebook files
+
+By default, changes to `.ipynb` files do not produce easily readable
+diffs in GitHub. One solution is to enable the "[Rich Jupyter Notebook
+Diffs][richdiff]" preview feature. You can find this by clicking your
+account icon in the top right of the GitHub interface, choosing "Feature
+preview", then "Rich Jupyter Notebook Diffs" and then "Enable".
+
+[richdiff]: https://github.blog/changelog/2023-03-01-feature-preview-rich-jupyter-notebook-diffs/
+
+Another option is to use [Jupytext][jupytext], which we have pre-added to the
+list of installed packages. You can use either the `percent` or
+`markdown` formats to create notebooks which have naturally readable
+diffs, at the cost of not being able to save the outputs of cells within
+the notebook.
+
+[jupytext]: https://jupytext.readthedocs.io/en/latest/
+
+To use the "paired" format, in which a traditional `.ipynb` file is saved
+alongside a pure-Python variant inside a `diffable_python` directory,
+add a file called `jupytext.toml` to the root of your repo containing
+these lines:
+```toml
+[formats]
+"notebooks/" = "ipynb"
+"notebooks/diffable_python/" = "py:percent"
+```
+
+To prevent `.ipynb` files from showing in GitHub diffs, add these lines
+to the bottom of the `.gitattributes` file:
+```
+# Don't show notebook files when diffing in GitHub
+notebooks/**/*ipynb linguist-generated=true
+```
+
+
+## How to invite people to cite
+
+Once a project is completed, please use the instructions
+[here](https://guides.github.com/activities/citable-code/) to deposit a copy
+of your code with Zenodo. You will need a free Zenodo account to do this.
+This creates a DOI. Once you have the DOI, please add it to the README.
+
+If there is a paper associated with this code, please change the "How to cite"
+section to the citation and DOI for the paper. This allows us to build up
+citation credit.
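+
+As a footnote to the "Diffing notebook files" section above: if you want to see
+what the pure-Python `percent` representation of an existing notebook looks like
+before committing to the paired setup, Jupytext can convert a single file from
+the Jupyter terminal. A minimal sketch (the notebook name is hypothetical):
+
+```sh
+# Write notebooks/analysis.py in the percent format, alongside the .ipynb file
+jupytext --to py:percent notebooks/analysis.ipynb
+
+# The result is plain Python with "# %%" cell markers, so its diffs stay readable
+head notebooks/analysis.py
+```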
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..e55fe64
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,35 @@
+# syntax=docker/dockerfile:1.2
+FROM python:3.12-bookworm
+
+# Install apt packages, using the host cache
+COPY packages.txt /tmp/packages.txt
+RUN --mount=target=/var/lib/apt/lists,type=cache,sharing=locked \
+    --mount=target=/var/cache/apt,type=cache,sharing=locked \
+    rm -f /etc/apt/apt.conf.d/docker-clean \
+    && apt-get update \
+    && sed 's/#.*//' /tmp/packages.txt \
+    | xargs apt-get -y --no-install-recommends install
+
+# Install Python packages, using the host cache
+COPY requirements.txt /tmp/requirements.txt
+RUN --mount=type=cache,target=/root/.cache \
+    python -m pip install --no-deps --requirement /tmp/requirements.txt
+
+# Without this, the Jupyter terminal defaults to /bin/sh which is much less
+# usable
+ENV SHELL=/bin/bash
+# Jupyter writes various runtime files to $HOME so we need that to be writable
+# regardless of which user we run as
+ENV HOME=/tmp
+# Allow Jupyter to be configured from within the workspace
+ENV JUPYTER_CONFIG_DIR=/workspace/jupyter-config
+# This variable is only needed for the `ebmdatalab` package:
+# https://pypi.org/project/ebmdatalab/
+ENV EBMDATALAB_BQ_CREDENTIALS_PATH=/workspace/bq-service-account.json
+
+# Run any necessary post-installation tasks
+COPY postinstall.sh /tmp/postinstall.sh
+RUN /tmp/postinstall.sh
+
+RUN mkdir /workspace
+WORKDIR /workspace
diff --git a/INSTALLATION_GUIDE.md b/INSTALLATION_GUIDE.md
new file mode 100644
index 0000000..fefc17c
--- /dev/null
+++ b/INSTALLATION_GUIDE.md
@@ -0,0 +1,79 @@
+## Docker environment
+
+### Why Docker?
+
+Software engineers and developers need to collaborate on software. In our team, we use Jupyter
+Notebooks to carry out research. Our work requires the use of existing software packages. A common problem
+is that different team members have different versions of these packages on their machines and work on
+different operating systems. This means there are sometimes problems with running shared code. This is
+particularly a problem when using a Windows machine.
+
+Docker allows you to run identical software on all platforms. It creates containers that are guaranteed
+to be identical on any system that can run Docker. The exact specification of the environment is
+recorded in the `Dockerfile`, and distributing this file guarantees that all team members
+have the same set-up. Because each container is its own entity, team members can have multiple projects
+on their machine at the same time without creating clashes between different versions of a package.
+
+### Installation
+
+Windows and Macs have different installation processes. Regardless of machine, you will have to install
+Docker and make an account on the [Docker website](https://docs.docker.com/).
+
+Please follow the installation instructions on the [Docker website](https://docs.docker.com/install/) to complete this step.
+Docker Desktop is preferred over Docker Toolbox: it offers native support via Hyper-V containers, but requires
+Windows 10 64-bit Pro, Enterprise, or Education (Build 15063 or later), and the Hyper-V and Containers
+Windows features must be enabled (all of which are the case on our standard university laptop
+installs; if Hyper-V has not been enabled, [follow the instructions here](https://docs.microsoft.com/en-us/virtualization/hyper-v-on-windows/quick-start/enable-hyper-v)).
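+
+Whichever variant you end up installing, you can confirm that Docker is working
+from a terminal (Command Prompt, PowerShell or git-bash). A minimal check,
+assuming Docker has finished starting up:
+
+```sh
+# Should print a block of diagnostics about the Docker daemon
+docker info
+
+# Should pull and run a tiny test image that prints a greeting
+docker run --rm hello-world
+```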
+
+Docker Toolbox runs Docker inside a Linux VirtualBox VM via Docker Machine, and therefore offers a
+functional but sub-optimal experience.
+
+
+#### Windows
+
+First install Docker Desktop onto your machine. Windows users who log into an Active Directory domain
+(i.e. a network login) may find they lack permissions to start Docker correctly. If
+so, follow [these instructions](https://github.com/docker/for-win/issues/785#issuecomment-344805180).
+
+It is best to install using the default settings. You may be asked to enable Hyper-V and Containers,
+which you should do. At least one user found the box already ticked on the screen but had to untick
+and re-tick it to get this to enable correctly (detailed in issue
+[#4](https://github.com/ebmdatalab/custom-docker/issues/4)).
+
+When starting Docker, it takes a while to actually start up - up to 5 minutes. While it's doing so,
+an animation runs in the notification area:
+
+![image](https://user-images.githubusercontent.com/211271/72052991-14a8c000-32be-11ea-948f-575a3c84bc3b.png)
+
+Another notification appears when it's finished.
+
+"Running" means there's a Docker service running on your computer, to which you can connect using the
+command line. You can check it's up and running by opening a Command Prompt and entering `docker info`,
+which should output a load of diagnostics.
+
+To be able to access the Windows filesystem from the Docker container (and therefore do development
+inside Jupyter with results appearing in a place visible to Git), you must explicitly share your hard
+drive in the Docker settings (click the system-tray Docker icon; select "Settings"; select "Shared Drives").
+
+##### Network login issues
+
+When installing from the office while logged in as a network user, there have been permission problems.
+These have been solved by adding the special "Authenticated Users" group to the `docker-users` group,
+per [this comment](https://github.com/docker/for-win/issues/785#issuecomment-327237998) (screenshot of
+the place to do it [here](https://github.com/docker/for-win/issues/785#issuecomment-344805180)).
+
+Finally, note that when authentication changes (e.g. different logins), you sometimes have to
+reauthorise Docker's "Shared Drives" (click the system-tray Docker icon; select "Settings"; select
+"Shared Drives"; click "Reset credentials"; re-tick the drive to share; click "Apply").
+
+#### Macs
+
+Follow the instructions from the Docker website. You may have to restart your computer during installation.
+
+Once you have Docker installed, you will need to log in. Docker can be opened from the Applications
+folder, and once you have logged in, you should see the Docker icon in the menu bar at the top of the
+screen (i.e. next to the battery icon, etc.).
+
+![image](https://user-images.githubusercontent.com/25401512/75257439-dff4b780-57dc-11ea-9ae8-592e1570bc71.png)
+
+Once this is running, you should be able to use Docker.
+
+#### Gotchas
+
+- The first time you use Docker or use a new Docker template, please be aware that it takes a long time to complete the build.
+It is easy to think that it has frozen, but it will take quite a while to get going.
+ + If this is the case, look at this cat whilst you wait: + +![Alt Text](https://media.giphy.com/media/vFKqnCdLPNOKc/giphy.gif) diff --git a/README.md b/README.md new file mode 100644 index 0000000..25ff487 --- /dev/null +++ b/README.md @@ -0,0 +1,46 @@ +# The Bennett Institute's skeleton notebook environment + + +## Getting started with this skeleton project + +This is a skeleton project for creating a reproducible, cross-platform +analysis notebook, using Docker. + +Developers and analysts using this skeleton for new development should +refer to [`DEVELOPERS.md`](DEVELOPERS.md) for instructions on getting +started. Update this `README.md` so it is a suitable introduction to +your project. + + +## Running Jupyter Lab + +You will need to have installed Git and Docker, please see the +[`INSTALLATION_GUIDE.md`](INSTALLATION_GUIDE.md) for further details. + +Windows and Linux users should double-click the `jupyter-lab` file. +Users on macOS should double-click `jupyter-lab-mac-os` instead. + +Note: if double-clicking the `jupyter-lab` file opens the file in VS Code, you +should instead right-click on the file and open it with Git for Windows. + +This will build a Docker image with all software requirements installed, +start a new Jupyter Lab server, and then provide a link to access this +server. + +The first time you run this command it may take some time to download +and install the necessary software. Subsequent runs should be much +faster. + +Note: if running the command fails with: + +``` +docker: Error response from daemon: user declined directory sharing C:\path\to\directory +``` + +you should open the Docker dashboard, and then under Settings -> Resources -> +FileSharing, add the appropriate path. + + +## How to cite + +XXX Please change to either a paper (if published) or the repo. You may find it helpful to use Zenodo DOI (see [`DEVELOPERS.md`](DEVELOPERS.md#how-to-invite-people-to-cite) for further information) diff --git a/data/.gitkeep b/data/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/jupyter-config/.gitignore b/jupyter-config/.gitignore new file mode 100644 index 0000000..75d81b9 --- /dev/null +++ b/jupyter-config/.gitignore @@ -0,0 +1,3 @@ +# Ignore runtime-generated config +/lab +/labconfig diff --git a/jupyter-lab-mac-os b/jupyter-lab-mac-os new file mode 100755 index 0000000..49d0660 --- /dev/null +++ b/jupyter-lab-mac-os @@ -0,0 +1,14 @@ +#!/bin/bash + +# We want launcher shell scripts which can be directly executed from the file +# manager GUI without requiring a terminal. On Windows this requires an +# extension of ".sh", on macOS this requires either no extension or the +# extension ".command". There's no way to jointly satisfy these requirements so +# we need two launchers with different extensions, one of which just +# immediately executes the other. + +# Unset CDPATH to prevent `cd` potentially behaving unexpectedly +unset CDPATH +cd "$( dirname "${BASH_SOURCE[0]}")" + +exec ./jupyter-lab.sh diff --git a/jupyter-lab.sh b/jupyter-lab.sh new file mode 100755 index 0000000..2afe241 --- /dev/null +++ b/jupyter-lab.sh @@ -0,0 +1,164 @@ +#!/bin/bash + +# ***************************************************************************** +# +# This script is intended as a cross-platform launcher which starts Jupyter Lab +# running inside a Docker container. We use Bash because we already require git +# to be installed and on Windows that brings with it git-bash (and Linux and +# macOS have Bash already). 
+# +# ***************************************************************************** + +set -euo pipefail + + +# ERROR HANDLER +# +# We expect this script to often get run by double-clicking in a file manager, +# in which case the console window will disappear on exit. But when exiting +# with an error we'd like the window to stick around so the user can read the +# message, so we set up an error handler which waits for a keypress. +error_handler() { + echo + echo "Jupyter Lab failed to start, press any key to exit." + read +} + +trap "error_handler" ERR + + +# CHANGE INTO SCRIPT DIRECTORY +# +# Unset CDPATH to prevent `cd` potentially behaving unexpectedly +unset CDPATH +cd "$( dirname "${BASH_SOURCE[0]}")" + + +# GENERATE DOCKER IMAGE NAME +# +# We want a Docker image name which is: +# (a) stable so repeated runs of this script use the same image; +# (b) unique to this specific project; +# (c) reasonably easy to identify by eye in a list of image names. +# +# So we use the naming scheme: +# +# jupyter-- +# +# e.g. "jupyter-docker-notebook-8cfe31c1" +# +dirname="$(basename "$PWD")" +path_hash=$(echo "$PWD" | shasum | head -c 8) +image_name="jupyter-$dirname-$path_hash" + +# Generate a short random suffix so that we can set a meaningful name for the +# container but still ensure uniqueness (with sufficiently high probability) +container_suffix=$(head -c 6 /dev/urandom | base64 | tr '+/' '01') +container_name="$image_name-$container_suffix" + + +# BUILD IMAGE +# +# We explicitly specify the platform so that when running on Apple silicon we +# still get the `amd64` image rather than the `arm64` image. Not all the Python +# packages we want to install have `arm64` wheels, and we don't always have the +# headers we need to compile them. Insisting on `amd64` gives us cross-platform +# consistency. +docker build --platform linux/amd64 --tag "$image_name" . + + +# SET OS-SPECIFIC CONFIG +# +# On Linux, where the ownership of mounted files maps directly through to the +# host filesystem, we want the Docker user ID to match the current user ID so +# files end up with the right owner. On Windows/macOS files inside the +# container will appear owned as root, so we want to run as root. +if [[ "$(docker info -f '{{.OSType}}')" == "linux" ]]; then + docker_user="$UID:$(id -g)" +else + docker_user="root" +fi +# The git-bash terminal (which most of our Windows users will run this under) +# does not provide a TTY unless we run the command via the `winpty` tool. So we +# try to detect when we are running in git-bash and get the path to `winpty` if +# we are. +if [[ -z "${MSYSTEM:-}" ]]; then + winpty_path="" +else + winpty_path="$(command -v winpty || true)" +fi + + +# GENERATE SERVER URL +# +# Generate a random token with which to authenticate to Jupyter. Jupyter can +# generate this for us, but it massively simplifies things to generate it +# ourselves and pass it in, rather than try to extract the token Jupyter has +# generated. We use `base64` as it's universally available (unlike `base32`) +# and replace any URL-problematic characters. +token=$(head -c 12 /dev/urandom | base64 | tr '+/' '01') + +# Likewise, we want to tell Jupyter what port to bind to rather than let it +# choose. We find a free port by asking to bind to port 0 and then seeing what +# port we're given. This is obviously race-unsafe in the sense that the port +# might no longer be free at the point we want to use it, but that's seems +# unlikely on a local workstation. 
+# +# We shell out to Perl as we can assume the presence of git and git implies the +# presence of Perl. +port=$( + perl -e ' + use IO::Socket::INET; + + print( + IO::Socket::INET->new( + Proto => "tcp", LocalAddr => "127.0.0.1" + ) + ->sockport() + ); + ' +) + +server_url="http://localhost:$port/?token=$token" + + +echo +echo ' -> Connect to notebook with URL:' +echo +echo " $server_url" +echo +echo ' Tip: to open in browser, triple-click the URL, right-click, choose "Open"' +echo + + +# START JUPYTER LAB IN DOCKER + +docker_args=( + run + --rm + --interactive + --tty + --name "$container_name" + --user "$docker_user" + + # The leading slash before PWD here is needed when running on Windows to stop + # git-bash mangling the path + --volume "/$PWD:/workspace" + --publish "$port:$port" + + "$image_name" + + jupyter lab + --ip=0.0.0.0 + --port="$port" + --IdentityProvider.token="$token" + --ServerApp.custom_display_url="$server_url" + --no-browser + --allow-root +) + +if [[ -z "$winpty_path" ]]; then + docker "${docker_args[@]}" +else + "$winpty_path" -- docker "${docker_args[@]}" +fi diff --git a/notebooks/.gitkeep b/notebooks/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/packages.txt b/packages.txt new file mode 100644 index 0000000..572f265 --- /dev/null +++ b/packages.txt @@ -0,0 +1 @@ +# APT PACKAGES TO BE INSTALLED diff --git a/postinstall.sh b/postinstall.sh new file mode 100755 index 0000000..1ed1326 --- /dev/null +++ b/postinstall.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +# This script is run after installing apt and Python packages diff --git a/requirements.in b/requirements.in new file mode 100644 index 0000000..8645319 --- /dev/null +++ b/requirements.in @@ -0,0 +1,20 @@ +# Basic requirements for notebook infrastructure +pip-tools +jupyter +jupyterlab +jupytext +bash_kernel + +# Commonly-used packages within the Bennett Institute +pandas-gbq +pandas +numpy +ebmdatalab +matplotlib +tqdm + +# Both these required for plotly+notebooks +ipywidgets +dash + +# Add extra per-notebook packages here diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..4c2761d --- /dev/null +++ b/requirements.txt @@ -0,0 +1,560 @@ +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile +# +anyio==4.3.0 + # via + # httpx + # jupyter-server +argon2-cffi==23.1.0 + # via jupyter-server +argon2-cffi-bindings==21.2.0 + # via argon2-cffi +arrow==1.3.0 + # via isoduration +asttokens==2.4.1 + # via stack-data +async-lru==2.0.4 + # via jupyterlab +attrs==23.2.0 + # via + # fiona + # jsonschema + # referencing +babel==2.14.0 + # via jupyterlab-server +bash-kernel==0.9.3 + # via -r requirements.in +beautifulsoup4==4.12.3 + # via nbconvert +bleach==6.1.0 + # via nbconvert +blinker==1.7.0 + # via flask +build==1.2.1 + # via pip-tools +cachetools==5.3.3 + # via google-auth +certifi==2024.2.2 + # via + # fiona + # httpcore + # httpx + # pyproj + # requests +cffi==1.16.0 + # via argon2-cffi-bindings +charset-normalizer==3.3.2 + # via requests +click==8.1.7 + # via + # click-plugins + # cligj + # fiona + # flask + # pip-tools +click-plugins==1.1.1 + # via fiona +cligj==0.7.2 + # via fiona +comm==0.2.2 + # via + # ipykernel + # ipywidgets +contourpy==1.2.1 + # via matplotlib +cycler==0.12.1 + # via matplotlib +dash==2.16.1 + # via -r requirements.in +dash-core-components==2.0.0 + # via dash +dash-html-components==2.0.0 + # via dash +dash-table==5.0.0 + # via dash +db-dtypes==1.2.0 + # via pandas-gbq +debugpy==1.8.1 + # via 
ipykernel +decorator==5.1.1 + # via ipython +defusedxml==0.7.1 + # via nbconvert +descartes==1.1.0 + # via ebmdatalab +ebmdatalab==0.0.31 + # via -r requirements.in +executing==2.0.1 + # via stack-data +fastjsonschema==2.19.1 + # via nbformat +fiona==1.9.6 + # via geopandas +flask==3.0.3 + # via dash +fonttools==4.51.0 + # via matplotlib +fqdn==1.5.1 + # via jsonschema +geopandas==0.14.3 + # via ebmdatalab +google-api-core[grpc]==2.18.0 + # via + # google-cloud-bigquery + # google-cloud-core + # pandas-gbq +google-auth==2.29.0 + # via + # google-api-core + # google-auth-oauthlib + # google-cloud-bigquery + # google-cloud-core + # pandas-gbq + # pydata-google-auth +google-auth-oauthlib==1.2.0 + # via + # pandas-gbq + # pydata-google-auth +google-cloud-bigquery==3.21.0 + # via pandas-gbq +google-cloud-core==2.4.1 + # via google-cloud-bigquery +google-crc32c==1.5.0 + # via google-resumable-media +google-resumable-media==2.7.0 + # via google-cloud-bigquery +googleapis-common-protos==1.63.0 + # via + # google-api-core + # grpcio-status +grpcio==1.62.2 + # via + # google-api-core + # grpcio-status +grpcio-status==1.62.2 + # via google-api-core +h11==0.14.0 + # via httpcore +httpcore==1.0.5 + # via httpx +httpx==0.27.0 + # via jupyterlab +idna==3.7 + # via + # anyio + # httpx + # jsonschema + # requests +importlib-metadata==7.1.0 + # via dash +ipykernel==6.29.4 + # via + # bash-kernel + # jupyter + # jupyter-console + # jupyterlab + # qtconsole +ipython==8.23.0 + # via + # ipykernel + # ipywidgets + # jupyter-console +ipywidgets==8.1.2 + # via + # -r requirements.in + # jupyter +isoduration==20.11.0 + # via jsonschema +itsdangerous==2.2.0 + # via flask +jedi==0.19.1 + # via ipython +jinja2==3.1.3 + # via + # flask + # jupyter-server + # jupyterlab + # jupyterlab-server + # nbconvert +json5==0.9.25 + # via jupyterlab-server +jsonpointer==2.4 + # via jsonschema +jsonschema[format-nongpl]==4.21.1 + # via + # jupyter-events + # jupyterlab-server + # nbformat +jsonschema-specifications==2023.12.1 + # via jsonschema +jupyter==1.0.0 + # via -r requirements.in +jupyter-client==8.6.1 + # via + # ipykernel + # jupyter-console + # jupyter-server + # nbclient + # qtconsole +jupyter-console==6.6.3 + # via jupyter +jupyter-core==5.7.2 + # via + # ipykernel + # jupyter-client + # jupyter-console + # jupyter-server + # jupyterlab + # nbclient + # nbconvert + # nbformat + # qtconsole +jupyter-events==0.10.0 + # via jupyter-server +jupyter-lsp==2.2.5 + # via jupyterlab +jupyter-server==2.14.0 + # via + # jupyter-lsp + # jupyterlab + # jupyterlab-server + # notebook + # notebook-shim +jupyter-server-terminals==0.5.3 + # via jupyter-server +jupyterlab==4.1.6 + # via + # -r requirements.in + # notebook +jupyterlab-pygments==0.3.0 + # via nbconvert +jupyterlab-server==2.27.1 + # via + # jupyterlab + # notebook +jupyterlab-widgets==3.0.10 + # via ipywidgets +jupytext==1.16.1 + # via -r requirements.in +kiwisolver==1.4.5 + # via matplotlib +markdown-it-py==3.0.0 + # via + # jupytext + # mdit-py-plugins +markupsafe==2.1.5 + # via + # jinja2 + # nbconvert + # werkzeug +matplotlib==3.8.4 + # via + # -r requirements.in + # descartes + # seaborn +matplotlib-inline==0.1.7 + # via + # ipykernel + # ipython +mdit-py-plugins==0.4.0 + # via jupytext +mdurl==0.1.2 + # via markdown-it-py +mistune==3.0.2 + # via nbconvert +nbclient==0.10.0 + # via nbconvert +nbconvert==7.16.3 + # via + # jupyter + # jupyter-server +nbformat==5.10.4 + # via + # jupyter-server + # jupytext + # nbclient + # nbconvert +nest-asyncio==1.6.0 + # via + # 
dash + # ipykernel +notebook==7.1.3 + # via jupyter +notebook-shim==0.2.4 + # via + # jupyterlab + # notebook +numpy==1.26.4 + # via + # -r requirements.in + # contourpy + # db-dtypes + # matplotlib + # pandas + # pandas-gbq + # patsy + # pyarrow + # scipy + # seaborn + # shapely + # statsmodels +oauthlib==3.2.2 + # via requests-oauthlib +overrides==7.7.0 + # via jupyter-server +packaging==24.0 + # via + # build + # db-dtypes + # geopandas + # google-cloud-bigquery + # ipykernel + # jupyter-server + # jupyterlab + # jupyterlab-server + # jupytext + # matplotlib + # nbconvert + # pandas-gbq + # plotly + # qtconsole + # qtpy + # statsmodels +pandas==2.2.2 + # via + # -r requirements.in + # db-dtypes + # ebmdatalab + # geopandas + # pandas-gbq + # seaborn + # statsmodels +pandas-gbq==0.22.0 + # via + # -r requirements.in + # ebmdatalab +pandocfilters==1.5.1 + # via nbconvert +parso==0.8.4 + # via jedi +patsy==0.5.6 + # via statsmodels +pexpect==4.9.0 + # via + # bash-kernel + # ipython +pillow==10.3.0 + # via matplotlib +pip-tools==7.4.1 + # via -r requirements.in +platformdirs==4.2.0 + # via jupyter-core +plotly==5.21.0 + # via dash +prometheus-client==0.20.0 + # via jupyter-server +prompt-toolkit==3.0.43 + # via + # ipython + # jupyter-console +proto-plus==1.23.0 + # via google-api-core +protobuf==4.25.3 + # via + # google-api-core + # googleapis-common-protos + # grpcio-status + # proto-plus +psutil==5.9.8 + # via ipykernel +ptyprocess==0.7.0 + # via + # pexpect + # terminado +pure-eval==0.2.2 + # via stack-data +pyarrow==16.0.0 + # via + # db-dtypes + # pandas-gbq +pyasn1==0.6.0 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.0 + # via google-auth +pycparser==2.22 + # via cffi +pydata-google-auth==1.8.2 + # via pandas-gbq +pygments==2.17.2 + # via + # ipython + # jupyter-console + # nbconvert + # qtconsole +pyparsing==3.1.2 + # via matplotlib +pyproj==3.6.1 + # via geopandas +pyproject-hooks==1.0.0 + # via + # build + # pip-tools +python-dateutil==2.9.0.post0 + # via + # arrow + # google-cloud-bigquery + # jupyter-client + # matplotlib + # pandas +python-json-logger==2.0.7 + # via jupyter-events +pytz==2024.1 + # via pandas +pyyaml==6.0.1 + # via + # jupyter-events + # jupytext +pyzmq==26.0.2 + # via + # ipykernel + # jupyter-client + # jupyter-console + # jupyter-server + # qtconsole +qtconsole==5.5.1 + # via jupyter +qtpy==2.4.1 + # via qtconsole +referencing==0.34.0 + # via + # jsonschema + # jsonschema-specifications + # jupyter-events +requests==2.31.0 + # via + # dash + # google-api-core + # google-cloud-bigquery + # jupyterlab-server + # requests-oauthlib +requests-oauthlib==2.0.0 + # via google-auth-oauthlib +retrying==1.3.4 + # via dash +rfc3339-validator==0.1.4 + # via + # jsonschema + # jupyter-events +rfc3986-validator==0.1.1 + # via + # jsonschema + # jupyter-events +rpds-py==0.18.0 + # via + # jsonschema + # referencing +rsa==4.9 + # via google-auth +scipy==1.13.0 + # via statsmodels +seaborn==0.13.2 + # via ebmdatalab +send2trash==1.8.3 + # via jupyter-server +shapely==2.0.4 + # via geopandas +six==1.16.0 + # via + # asttokens + # bleach + # fiona + # patsy + # python-dateutil + # retrying + # rfc3339-validator +sniffio==1.3.1 + # via + # anyio + # httpx +soupsieve==2.5 + # via beautifulsoup4 +stack-data==0.6.3 + # via ipython +statsmodels==0.14.2 + # via ebmdatalab +tenacity==8.2.3 + # via plotly +terminado==0.18.1 + # via + # jupyter-server + # jupyter-server-terminals +tinycss2==1.2.1 + # via nbconvert +toml==0.10.2 + # via jupytext +tornado==6.4 + # via + # 
ipykernel + # jupyter-client + # jupyter-server + # jupyterlab + # notebook + # terminado +tqdm==4.66.2 + # via -r requirements.in +traitlets==5.14.3 + # via + # comm + # ipykernel + # ipython + # ipywidgets + # jupyter-client + # jupyter-console + # jupyter-core + # jupyter-events + # jupyter-server + # jupyterlab + # matplotlib-inline + # nbclient + # nbconvert + # nbformat + # qtconsole +types-python-dateutil==2.9.0.20240316 + # via arrow +typing-extensions==4.11.0 + # via dash +tzdata==2024.1 + # via pandas +uri-template==1.3.0 + # via jsonschema +urllib3==2.2.1 + # via requests +wcwidth==0.2.13 + # via prompt-toolkit +webcolors==1.13 + # via jsonschema +webencodings==0.5.1 + # via + # bleach + # tinycss2 +websocket-client==1.7.0 + # via jupyter-server +werkzeug==3.0.2 + # via + # dash + # flask +wheel==0.43.0 + # via pip-tools +widgetsnbextension==4.0.10 + # via ipywidgets +zipp==3.18.1 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools