diff --git a/.github/workflows/startos-iso.yaml b/.github/workflows/startos-iso.yaml index 60b642e193..184c2b0c78 100644 --- a/.github/workflows/startos-iso.yaml +++ b/.github/workflows/startos-iso.yaml @@ -12,9 +12,6 @@ on: - dev - unstable - dev-unstable - - docker - - dev-docker - - dev-unstable-docker runner: type: choice description: Runner @@ -82,9 +79,12 @@ jobs: with: node-version: ${{ env.NODEJS_VERSION }} - - name: Set up QEMU + - name: Set up docker QEMU uses: docker/setup-qemu-action@v2 + - name: Set up system QEMU + run: sudo apt-get update && sudo apt-get install -y qemu-user-static + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 @@ -172,6 +172,8 @@ jobs: - name: Prevent rebuild of compiled artifacts run: | mkdir -p web/dist/raw + touch core/startos/bindings + mkdir -p container-runtime/dist PLATFORM=${{ matrix.platform }} make -t compiled-${{ env.ARCH }}.tar - name: Run iso build diff --git a/.gitignore b/.gitignore index d33151e912..1df3692ee7 100644 --- a/.gitignore +++ b/.gitignore @@ -28,4 +28,5 @@ secrets.db /dpkg-workdir /compiled.tar /compiled-*.tar -/firmware \ No newline at end of file +/firmware +/tmp \ No newline at end of file diff --git a/Makefile b/Makefile index 65d4d79dd4..b0dcf58301 100644 --- a/Makefile +++ b/Makefile @@ -6,17 +6,17 @@ BASENAME := $(shell ./basename.sh) PLATFORM := $(shell if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi) ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi) IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi) -BINS := core/target/$(ARCH)-unknown-linux-gnu/release/startbox core/target/aarch64-unknown-linux-musl/release/container-init core/target/x86_64-unknown-linux-musl/release/container-init +BINS := core/target/$(ARCH)-unknown-linux-musl/release/startbox core/target/$(ARCH)-unknown-linux-musl/release/containerbox WEB_UIS := web/dist/raw/ui web/dist/raw/setup-wizard web/dist/raw/diagnostic-ui web/dist/raw/install-wizard FIRMWARE_ROMS := ./firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json) -BUILD_SRC := $(shell git ls-files build) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS) +BUILD_SRC := $(shell git ls-files build) build/lib/depends build/lib/conflicts container-runtime/rootfs.$(ARCH).squashfs $(FIRMWARE_ROMS) DEBIAN_SRC := $(shell git ls-files debian/) IMAGE_RECIPE_SRC := $(shell git ls-files image-recipe/) STARTD_SRC := core/startos/startd.service $(BUILD_SRC) COMPAT_SRC := $(shell git ls-files system-images/compat/) UTILS_SRC := $(shell git ls-files system-images/utils/) BINFMT_SRC := $(shell git ls-files system-images/binfmt/) -CORE_SRC := $(shell git ls-files core) $(shell git ls-files --recurse-submodules patch-db) web/dist/static web/patchdb-ui-seed.json $(GIT_HASH_FILE) +CORE_SRC := $(shell git ls-files -- core ':!:core/startos/bindings/*') $(shell git ls-files --recurse-submodules patch-db) web/dist/static web/patchdb-ui-seed.json $(GIT_HASH_FILE) WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules web/config.json patch-db/client/dist web/patchdb-ui-seed.json WEB_UI_SRC := $(shell git ls-files web/projects/ui) WEB_SETUP_WIZARD_SRC := $(shell git ls-files web/projects/setup-wizard) @@ -25,8 +25,8 @@ WEB_INSTALL_WIZARD_SRC := $(shell git ls-files 
web/projects/install-wizard) PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client) GZIP_BIN := $(shell which pigz || which gzip) TAR_BIN := $(shell which gtar || which tar) -COMPILED_TARGETS := $(BINS) system-images/compat/docker-images/$(ARCH).tar system-images/utils/docker-images/$(ARCH).tar system-images/binfmt/docker-images/$(ARCH).tar -ALL_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo cargo-deps/aarch64-unknown-linux-gnu/release/pi-beep; fi) $(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then echo cargo-deps/$(ARCH)-unknown-linux-gnu/release/tokio-console; fi') $(PLATFORM_FILE) +COMPILED_TARGETS := $(BINS) system-images/compat/docker-images/$(ARCH).tar system-images/utils/docker-images/$(ARCH).tar system-images/binfmt/docker-images/$(ARCH).tar container-runtime/rootfs.$(ARCH).squashfs +ALL_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo cargo-deps/aarch64-unknown-linux-musl/release/pi-beep; fi) $(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then echo cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console; fi') $(PLATFORM_FILE) sdk/lib/test ifeq ($(REMOTE),) mkdir = mkdir -p $1 @@ -49,10 +49,13 @@ endif .DELETE_ON_ERROR: -.PHONY: all metadata install clean format sdk snapshots uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole test +.PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole test all: $(ALL_TARGETS) +touch: + touch $(ALL_TARGETS) + metadata: $(VERSION_FILE) $(PLATFORM_FILE) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) sudo: @@ -74,6 +77,11 @@ clean: rm -rf image-recipe/deb rm -rf results rm -rf build/lib/firmware + rm -rf container-runtime/dist + rm -rf container-runtime/node_modules + rm -f container-runtime/*.squashfs + rm -rf sdk/dist + rm -rf sdk/node_modules rm -f ENVIRONMENT.txt rm -f PLATFORM.txt rm -f GIT_HASH.txt @@ -82,11 +90,13 @@ clean: format: cd core && cargo +nightly fmt -test: $(CORE_SRC) $(ENVIRONMENT_FILE) - cd core && cargo build && cargo test +test: $(CORE_SRC) $(ENVIRONMENT_FILE) + (cd core && cargo build && cargo test) + npm --prefix sdk exec -- prettier -w ./core/startos/bindings/*.ts + (cd sdk && make test) -sdk: - cd core && ./install-sdk.sh +cli: + cd core && ./install-cli.sh deb: results/$(BASENAME).deb @@ -104,17 +114,15 @@ results/$(BASENAME).$(IMAGE_TYPE) results/$(BASENAME).squashfs: $(IMAGE_RECIPE_S ./image-recipe/run-local-build.sh "results/$(BASENAME).deb" # For creating os images. 
DO NOT USE -install: $(ALL_TARGETS) +install: $(ALL_TARGETS) $(call mkdir,$(DESTDIR)/usr/bin) - $(call cp,core/target/$(ARCH)-unknown-linux-gnu/release/startbox,$(DESTDIR)/usr/bin/startbox) + $(call cp,core/target/$(ARCH)-unknown-linux-musl/release/startbox,$(DESTDIR)/usr/bin/startbox) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/startd) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-cli) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-sdk) - $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-deno) - $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/avahi-alias) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/embassy-cli) - if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,cargo-deps/aarch64-unknown-linux-gnu/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi - if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then $(call cp,cargo-deps/$(ARCH)-unknown-linux-gnu/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); fi + if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,cargo-deps/aarch64-unknown-linux-musl/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi + if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then $(call cp,cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); fi $(call mkdir,$(DESTDIR)/lib/systemd/system) $(call cp,core/startos/startd.service,$(DESTDIR)/lib/systemd/system/startd.service) @@ -122,16 +130,14 @@ install: $(ALL_TARGETS) $(call mkdir,$(DESTDIR)/usr/lib) $(call rm,$(DESTDIR)/usr/lib/startos) $(call cp,build/lib,$(DESTDIR)/usr/lib/startos) + $(call mkdir,$(DESTDIR)/usr/lib/startos/container-runtime) + $(call cp,container-runtime/rootfs.$(ARCH).squashfs,$(DESTDIR)/usr/lib/startos/container-runtime/rootfs.squashfs) $(call cp,PLATFORM.txt,$(DESTDIR)/usr/lib/startos/PLATFORM.txt) $(call cp,ENVIRONMENT.txt,$(DESTDIR)/usr/lib/startos/ENVIRONMENT.txt) $(call cp,GIT_HASH.txt,$(DESTDIR)/usr/lib/startos/GIT_HASH.txt) $(call cp,VERSION.txt,$(DESTDIR)/usr/lib/startos/VERSION.txt) - $(call mkdir,$(DESTDIR)/usr/lib/startos/container) - $(call cp,core/target/aarch64-unknown-linux-musl/release/container-init,$(DESTDIR)/usr/lib/startos/container/container-init.arm64) - $(call cp,core/target/x86_64-unknown-linux-musl/release/container-init,$(DESTDIR)/usr/lib/startos/container/container-init.amd64) - $(call mkdir,$(DESTDIR)/usr/lib/startos/system-images) $(call cp,system-images/compat/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/compat.tar) $(call cp,system-images/utils/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/utils.tar) @@ -148,8 +154,9 @@ update-overlay: $(ALL_TARGETS) $(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) PLATFORM=$(PLATFORM) $(call ssh,"sudo systemctl start startd") -wormhole: core/target/$(ARCH)-unknown-linux-gnu/release/startbox - @wormhole send core/target/$(ARCH)-unknown-linux-gnu/release/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }' +wormhole: core/target/$(ARCH)-unknown-linux-musl/release/startbox + @echo "Paste the following command into the shell of your start-os server:" + @wormhole send core/target/$(ARCH)-unknown-linux-musl/release/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }' update: 
$(ALL_TARGETS) @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi @@ -166,13 +173,39 @@ emulate-reflash: $(ALL_TARGETS) upload-ota: results/$(BASENAME).squashfs TARGET=$(TARGET) KEY=$(KEY) ./upload-ota.sh +container-runtime/alpine.$(ARCH).squashfs: + ARCH=$(ARCH) ./container-runtime/download-base-image.sh + +container-runtime/node_modules: container-runtime/package.json container-runtime/package-lock.json sdk/dist + npm --prefix container-runtime ci + touch container-runtime/node_modules + +core/startos/bindings: $(shell git ls-files -- core ':!:core/startos/bindings/*') $(ENVIRONMENT_FILE) + rm -rf core/startos/bindings + (cd core/ && cargo test --features=test) + npm --prefix sdk exec -- prettier -w ./core/startos/bindings/*.ts + +sdk/dist: $(shell git ls-files sdk) core/startos/bindings + (cd sdk && make bundle) + +# TODO: make container-runtime its own makefile? +container-runtime/dist/index.js: container-runtime/node_modules $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json + npm --prefix container-runtime run build + +container-runtime/dist/node_modules container-runtime/dist/package.json container-runtime/dist/package-lock.json: container-runtime/package.json container-runtime/package-lock.json sdk/dist container-runtime/install-dist-deps.sh + ./container-runtime/install-dist-deps.sh + touch container-runtime/dist/node_modules + +container-runtime/rootfs.$(ARCH).squashfs: container-runtime/alpine.$(ARCH).squashfs container-runtime/containerRuntime.rc container-runtime/update-image.sh container-runtime/dist/index.js container-runtime/dist/node_modules core/target/$(ARCH)-unknown-linux-musl/release/containerbox | sudo + ARCH=$(ARCH) ./container-runtime/update-image.sh + build/lib/depends build/lib/conflicts: build/dpkg-deps/* build/dpkg-deps/generate.sh $(FIRMWARE_ROMS): build/lib/firmware.json download-firmware.sh $(PLATFORM_FILE) ./download-firmware.sh $(PLATFORM) -system-images/compat/docker-images/$(ARCH).tar: $(COMPAT_SRC) core/Cargo.lock +system-images/compat/docker-images/$(ARCH).tar: $(COMPAT_SRC) cd system-images/compat && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar system-images/utils/docker-images/$(ARCH).tar: $(UTILS_SRC) @@ -181,15 +214,12 @@ system-images/utils/docker-images/$(ARCH).tar: $(UTILS_SRC) system-images/binfmt/docker-images/$(ARCH).tar: $(BINFMT_SRC) cd system-images/binfmt && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar -snapshots: core/snapshot-creator/Cargo.toml - cd core/ && ARCH=aarch64 ./build-v8-snapshot.sh - cd core/ && ARCH=x86_64 ./build-v8-snapshot.sh - $(BINS): $(CORE_SRC) $(ENVIRONMENT_FILE) cd core && ARCH=$(ARCH) ./build-prod.sh touch $(BINS) -web/node_modules: web/package.json +web/node_modules: web/package.json sdk/dist + (cd sdk && make bundle) npm --prefix web ci web/dist/raw/ui: $(WEB_UI_SRC) $(WEB_SHARED_SRC) @@ -231,8 +261,8 @@ uis: $(WEB_UIS) # this is a convenience step to build the UI ui: web/dist/raw/ui -cargo-deps/aarch64-unknown-linux-gnu/release/pi-beep: +cargo-deps/aarch64-unknown-linux-musl/release/pi-beep: ARCH=aarch64 ./build-cargo-dep.sh pi-beep -cargo-deps/$(ARCH)-unknown-linux-gnu/release/tokio-console: +cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console: ARCH=$(ARCH) ./build-cargo-dep.sh tokio-console \ No newline at end of file diff --git a/build-cargo-dep.sh b/build-cargo-dep.sh index f3cb8e9691..9e20f0cafe 100755 --- a/build-cargo-dep.sh +++ b/build-cargo-dep.sh @@ -18,8 +18,8 @@ if [ -z 
"$ARCH" ]; then fi mkdir -p cargo-deps -alias 'rust-arm64-builder'='docker run $USE_TTY --rm -v "$HOME/.cargo/registry":/usr/local/cargo/registry -v "$(pwd)"/cargo-deps:/home/rust/src -P start9/rust-arm-cross:aarch64' +alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$(pwd)"/cargo-deps:/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl' -rust-arm64-builder cargo install "$1" --target-dir /home/rust/src --target=$ARCH-unknown-linux-gnu +rust-musl-builder cargo install "$1" --target-dir /home/rust/src --target=$ARCH-unknown-linux-musl sudo chown -R $USER cargo-deps sudo chown -R $USER ~/.cargo \ No newline at end of file diff --git a/build/.gitignore b/build/.gitignore index 357c0e49f8..497f4b9137 100644 --- a/build/.gitignore +++ b/build/.gitignore @@ -1,2 +1,2 @@ -lib/depends -lib/conflicts \ No newline at end of file +/lib/depends +/lib/conflicts \ No newline at end of file diff --git a/build/dpkg-deps/depends b/build/dpkg-deps/depends index a712d4a522..5438432e4b 100644 --- a/build/dpkg-deps/depends +++ b/build/dpkg-deps/depends @@ -20,12 +20,12 @@ httpdirfs iotop iw jq -libavahi-client3 libyajl2 linux-cpupower lm-sensors lshw lvm2 +lxc magic-wormhole man-db ncdu diff --git a/build/dpkg-deps/docker.depends b/build/dpkg-deps/docker.depends deleted file mode 100644 index dd78be8a1e..0000000000 --- a/build/dpkg-deps/docker.depends +++ /dev/null @@ -1,5 +0,0 @@ -+ containerd.io -+ docker-ce -+ docker-ce-cli -+ docker-compose-plugin -- podman \ No newline at end of file diff --git a/build/lib/firmware.json b/build/lib/firmware.json index 9637aa70a8..07def2c1eb 100644 --- a/build/lib/firmware.json +++ b/build/lib/firmware.json @@ -1,13 +1,13 @@ [ { - "id": "pureboot-librem_mini_v2-basic_usb_autoboot_blob_jail-Release-28.3", + "id": "pureboot-librem_mini_v2-basic_usb_autoboot_blob_jail-Release-29", "platform": ["x86_64"], "system-product-name": "librem_mini_v2", "bios-version": { "semver-prefix": "PureBoot-Release-", - "semver-range": "<28.3" + "semver-range": "<29" }, - "url": "https://source.puri.sm/firmware/releases/-/raw/master/librem_mini_v2/custom/pureboot-librem_mini_v2-basic_usb_autoboot_blob_jail-Release-28.3.rom.gz", - "shasum": "5019bcf53f7493c7aa74f8ef680d18b5fc26ec156c705a841433aaa2fdef8f35" + "url": "https://source.puri.sm/firmware/releases/-/raw/master/librem_mini_v2/custom/pureboot-librem_mini_v2-basic_usb_autoboot_blob_jail-Release-29.rom.gz", + "shasum": "96ec04f21b1cfe8e28d9a2418f1ff533efe21f9bbbbf16e162f7c814761b068b" } ] diff --git a/container-runtime/.gitignore b/container-runtime/.gitignore new file mode 100644 index 0000000000..8aa4208b71 --- /dev/null +++ b/container-runtime/.gitignore @@ -0,0 +1,8 @@ +node_modules/ +dist/ +bundle.js +startInit.js +service/ +service.js +*.squashfs +/tmp \ No newline at end of file diff --git a/container-runtime/Dockerfile b/container-runtime/Dockerfile new file mode 100644 index 0000000000..f936ee11b9 --- /dev/null +++ b/container-runtime/Dockerfile @@ -0,0 +1,4 @@ +FROM node:18-alpine + +ADD ./startInit.js /usr/local/lib/startInit.js +ADD ./entrypoint.sh /usr/local/bin/entrypoint.sh \ No newline at end of file diff --git a/container-runtime/RPCSpec.md b/container-runtime/RPCSpec.md new file mode 100644 index 0000000000..6796716149 --- /dev/null +++ b/container-runtime/RPCSpec.md @@ -0,0 +1,59 @@ +# Container RPC SERVER Specification + +## Methods + +### init +initialize runtime (mount `/proc`, `/sys`, `/dev`, and `/run` to each image 
in `/media/images`) + +called after the OS has mounted the JS bundle and images into the container +#### args +`[]` +#### response +`null` + +### exit +shutdown runtime +#### args +`[]` +#### response +`null` + +### start +run main method if not already running +#### args +`[]` +#### response +`null` + +### stop +stop main method by sending SIGTERM to child processes, and SIGKILL after timeout +#### args +`{ timeout: millis }` +#### response +`null` + +### execute +run a specific package procedure +#### args +```ts +{ + procedure: JsonPath, + input: any, + timeout: millis, +} +``` +#### response +`any` + +### sandbox +run a specific package procedure in sandbox mode +#### args +```ts +{ + procedure: JsonPath, + input: any, + timeout: millis, +} +``` +#### response +`any` diff --git a/container-runtime/containerRuntime.rc b/container-runtime/containerRuntime.rc new file mode 100644 index 0000000000..203b996592 --- /dev/null +++ b/container-runtime/containerRuntime.rc @@ -0,0 +1,10 @@ +#!/sbin/openrc-run + +name=containerRuntime +#cfgfile="/etc/containerRuntime/containerRuntime.conf" +command="/usr/bin/node" +command_args="--experimental-detect-module --unhandled-rejections=warn /usr/lib/startos/init/index.js" +pidfile="/run/containerRuntime.pid" +command_background="yes" +output_log="/var/log/containerRuntime.log" +error_log="/var/log/containerRuntime.err" diff --git a/container-runtime/download-base-image.sh b/container-runtime/download-base-image.sh new file mode 100755 index 0000000000..23a140ea52 --- /dev/null +++ b/container-runtime/download-base-image.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +cd "$(dirname "${BASH_SOURCE[0]}")" + +set -e + +DISTRO=alpine +VERSION=3.19 +ARCH=${ARCH:-$(uname -m)} +FLAVOR=default + +_ARCH=$ARCH +if [ "$_ARCH" = "x86_64" ]; then + _ARCH=amd64 +elif [ "$_ARCH" = "aarch64" ]; then + _ARCH=arm64 +fi + +curl https://images.linuxcontainers.org/$(curl --silent https://images.linuxcontainers.org/meta/1.0/index-system | grep "^$DISTRO;$VERSION;$_ARCH;$FLAVOR;" | head -n1 | sed 's/^.*;//g')/rootfs.squashfs --output alpine.${ARCH}.squashfs \ No newline at end of file diff --git a/container-runtime/install-dist-deps.sh b/container-runtime/install-dist-deps.sh new file mode 100755 index 0000000000..d155ed4f2b --- /dev/null +++ b/container-runtime/install-dist-deps.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +cd "$(dirname "${BASH_SOURCE[0]}")" + +set -e + +cat ./package.json | sed 's/file:\.\([.\/]\)/file:..\/.\1/g' > ./dist/package.json +cat ./package-lock.json | sed 's/"\.\([.\/]\)/"..\/.\1/g' > ./dist/package-lock.json + +npm --prefix dist ci --omit=dev \ No newline at end of file diff --git a/container-runtime/mkcontainer.sh b/container-runtime/mkcontainer.sh new file mode 100644 index 0000000000..90de546718 --- /dev/null +++ b/container-runtime/mkcontainer.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +set -e + +IMAGE=$1 + +if [ -z "$IMAGE" ]; then + >&2 echo "usage: $0 <image>" + exit 1 +fi + +if !
[ -d "/media/images/$IMAGE" ]; then + >&2 echo "image does not exist" + exit 1 +fi + +container=$(mktemp -d) +mkdir -p $container/rootfs $container/upper $container/work +mount -t overlay -olowerdir=/media/images/$IMAGE,upperdir=$container/upper,workdir=$container/work overlay $container/rootfs + +rootfs=$container/rootfs + +for special in dev sys proc run; do + mkdir -p $rootfs/$special + mount --bind /$special $rootfs/$special +done + +echo $rootfs \ No newline at end of file diff --git a/container-runtime/package-lock.json b/container-runtime/package-lock.json new file mode 100644 index 0000000000..74551217ed --- /dev/null +++ b/container-runtime/package-lock.json @@ -0,0 +1,4897 @@ +{ + "name": "start-init", + "version": "0.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "start-init", + "version": "0.0.0", + "dependencies": { + "@iarna/toml": "^2.2.5", + "@start9labs/start-sdk": "file:../sdk/dist", + "esbuild-plugin-resolve": "^2.0.0", + "filebrowser": "^1.0.0", + "isomorphic-fetch": "^3.0.0", + "node-fetch": "^3.1.0", + "ts-matches": "^5.4.1", + "tslib": "^2.5.3", + "typescript": "^5.1.3", + "yaml": "^2.3.1" + }, + "devDependencies": { + "@swc/cli": "^0.1.62", + "@swc/core": "^1.3.65", + "@types/node": "^20.11.13", + "esbuild": "^0.20.0", + "prettier": "^3.2.5", + "typescript": ">5.2" + } + }, + "../sdk/dist": { + "name": "@start9labs/start-sdk", + "version": "0.4.0-rev0.lib0.rc8.beta7", + "license": "MIT", + "dependencies": { + "@iarna/toml": "^2.2.5", + "isomorphic-fetch": "^3.0.0", + "ts-matches": "^5.4.1", + "yaml": "^2.2.2" + }, + "devDependencies": { + "@types/jest": "^29.4.0", + "jest": "^29.4.3", + "ts-jest": "^29.0.5", + "ts-node": "^10.9.1", + "tsx": "^4.7.1", + "typescript": "^5.0.4" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.0.tgz", + "integrity": "sha512-fGFDEctNh0CcSwsiRPxiaqX0P5rq+AqE0SRhYGZ4PX46Lg1FNR6oCxJghf8YgY0WQEgQuh3lErUFE4KxLeRmmw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.0.tgz", + "integrity": "sha512-3bMAfInvByLHfJwYPJRlpTeaQA75n8C/QKpEaiS4HrFWFiJlNI0vzq/zCjBrhAYcPyVPG7Eo9dMrcQXuqmNk5g==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.0.tgz", + "integrity": "sha512-aVpnM4lURNkp0D3qPoAzSG92VXStYmoVPOgXveAUoQBWRSuQzt51yvSju29J6AHPmwY1BjH49uR29oyfH1ra8Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.0.tgz", + "integrity": "sha512-uK7wAnlRvjkCPzh8jJ+QejFyrP8ObKuR5cBIsQZ+qbMunwR8sbd8krmMbxTLSrDhiPZaJYKQAU5Y3iMDcZPhyQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.0.tgz", + "integrity": 
"sha512-AjEcivGAlPs3UAcJedMa9qYg9eSfU6FnGHJjT8s346HSKkrcWlYezGE8VaO2xKfvvlZkgAhyvl06OJOxiMgOYQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.0.tgz", + "integrity": "sha512-bsgTPoyYDnPv8ER0HqnJggXK6RyFy4PH4rtsId0V7Efa90u2+EifxytE9pZnsDgExgkARy24WUQGv9irVbTvIw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.0.tgz", + "integrity": "sha512-kQ7jYdlKS335mpGbMW5tEe3IrQFIok9r84EM3PXB8qBFJPSc6dpWfrtsC/y1pyrz82xfUIn5ZrnSHQQsd6jebQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.0.tgz", + "integrity": "sha512-uG8B0WSepMRsBNVXAQcHf9+Ko/Tr+XqmK7Ptel9HVmnykupXdS4J7ovSQUIi0tQGIndhbqWLaIL/qO/cWhXKyQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.0.tgz", + "integrity": "sha512-2ezuhdiZw8vuHf1HKSf4TIk80naTbP9At7sOqZmdVwvvMyuoDiZB49YZKLsLOfKIr77+I40dWpHVeY5JHpIEIg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.0.tgz", + "integrity": "sha512-uTtyYAP5veqi2z9b6Gr0NUoNv9F/rOzI8tOD5jKcCvRUn7T60Bb+42NDBCWNhMjkQzI0qqwXkQGo1SY41G52nw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.0.tgz", + "integrity": "sha512-c88wwtfs8tTffPaoJ+SQn3y+lKtgTzyjkD8NgsyCtCmtoIC8RDL7PrJU05an/e9VuAke6eJqGkoMhJK1RY6z4w==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.0.tgz", + "integrity": "sha512-lR2rr/128/6svngnVta6JN4gxSXle/yZEZL3o4XZ6esOqhyR4wsKyfu6qXAL04S4S5CgGfG+GYZnjFd4YiG3Aw==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.0.tgz", + "integrity": "sha512-9Sycc+1uUsDnJCelDf6ZNqgZQoK1mJvFtqf2MUz4ujTxGhvCWw+4chYfDLPepMEvVL9PDwn6HrXad5yOrNzIsQ==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.0.tgz", + 
"integrity": "sha512-CoWSaaAXOZd+CjbUTdXIJE/t7Oz+4g90A3VBCHLbfuc5yUQU/nFDLOzQsN0cdxgXd97lYW/psIIBdjzQIwTBGw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.0.tgz", + "integrity": "sha512-mlb1hg/eYRJUpv8h/x+4ShgoNLL8wgZ64SUr26KwglTYnwAWjkhR2GpoKftDbPOCnodA9t4Y/b68H4J9XmmPzA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.0.tgz", + "integrity": "sha512-fgf9ubb53xSnOBqyvWEY6ukBNRl1mVX1srPNu06B6mNsNK20JfH6xV6jECzrQ69/VMiTLvHMicQR/PgTOgqJUQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.0.tgz", + "integrity": "sha512-H9Eu6MGse++204XZcYsse1yFHmRXEWgadk2N58O/xd50P9EvFMLJTQLg+lB4E1cF2xhLZU5luSWtGTb0l9UeSg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.0.tgz", + "integrity": "sha512-lCT675rTN1v8Fo+RGrE5KjSnfY0x9Og4RN7t7lVrN3vMSjy34/+3na0q7RIfWDAj0e0rCh0OL+P88lu3Rt21MQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.0.tgz", + "integrity": "sha512-HKoUGXz/TOVXKQ+67NhxyHv+aDSZf44QpWLa3I1lLvAwGq8x1k0T+e2HHSRvxWhfJrFxaaqre1+YyzQ99KixoA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.0.tgz", + "integrity": "sha512-GDwAqgHQm1mVoPppGsoq4WJwT3vhnz/2N62CzhvApFD1eJyTroob30FPpOZabN+FgCjhG+AgcZyOPIkR8dfD7g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.0.tgz", + "integrity": "sha512-0vYsP8aC4TvMlOQYozoksiaxjlvUcQrac+muDqj1Fxy6jh9l9CZJzj7zmh8JGfiV49cYLTorFLxg7593pGldwQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.0.tgz", + "integrity": "sha512-p98u4rIgfh4gdpV00IqknBD5pC84LCub+4a3MO+zjqvU5MVXOc3hqR2UgT2jI2nh3h8s9EQxmOsVI3tyzv1iFg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.0.tgz", + "integrity": 
"sha512-NgJnesu1RtWihtTtXGFMU5YSE6JyyHPMxCwBZK7a6/8d31GuSo9l0Ss7w1Jw5QnKUawG6UEehs883kcXf5fYwg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@iarna/toml": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz", + "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==" + }, + "node_modules/@mole-inc/bin-wrapper": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mole-inc/bin-wrapper/-/bin-wrapper-8.0.1.tgz", + "integrity": "sha512-sTGoeZnjI8N4KS+sW2AN95gDBErhAguvkw/tWdCjeM8bvxpz5lqrnd0vOJABA1A+Ic3zED7PYoLP/RANLgVotA==", + "dev": true, + "dependencies": { + "bin-check": "^4.1.0", + "bin-version-check": "^5.0.0", + "content-disposition": "^0.5.4", + "ext-name": "^5.0.0", + "file-type": "^17.1.6", + "filenamify": "^5.0.2", + "got": "^11.8.5", + "os-filter-obj": "^2.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@sindresorhus/is": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", + "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, + "node_modules/@start9labs/start-sdk": { + "resolved": "../sdk/dist", + "link": true + }, + "node_modules/@swc/cli": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@swc/cli/-/cli-0.1.65.tgz", + "integrity": "sha512-4NcgsvJVHhA7trDnMmkGLLvWMHu2kSy+qHx6QwRhhJhdiYdNUrhdp+ERxen73sYtaeEOYeLJcWrQ60nzKi6rpg==", + "dev": true, + "dependencies": { + "@mole-inc/bin-wrapper": "^8.0.1", + "commander": "^7.1.0", + "fast-glob": "^3.2.5", + "minimatch": "^9.0.3", + "semver": "^7.3.8", + "slash": "3.0.0", + "source-map": "^0.7.3" + }, + "bin": { + "spack": "bin/spack.js", + "swc": "bin/swc.js", + "swcx": "bin/swcx.js" + }, + "engines": { + "node": ">= 12.13" + }, + "peerDependencies": { + "@swc/core": "^1.2.66", + "chokidar": "^3.5.1" + }, + "peerDependenciesMeta": { + "chokidar": { + "optional": true + } + } + }, + "node_modules/@swc/core": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.1.tgz", + "integrity": 
"sha512-3y+Y8js+e7BbM16iND+6Rcs3jdiL28q3iVtYsCviYSSpP2uUVKkp5sJnCY4pg8AaVvyN7CGQHO7gLEZQ5ByozQ==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "@swc/counter": "^0.1.2", + "@swc/types": "^0.1.5" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/swc" + }, + "optionalDependencies": { + "@swc/core-darwin-arm64": "1.4.1", + "@swc/core-darwin-x64": "1.4.1", + "@swc/core-linux-arm-gnueabihf": "1.4.1", + "@swc/core-linux-arm64-gnu": "1.4.1", + "@swc/core-linux-arm64-musl": "1.4.1", + "@swc/core-linux-x64-gnu": "1.4.1", + "@swc/core-linux-x64-musl": "1.4.1", + "@swc/core-win32-arm64-msvc": "1.4.1", + "@swc/core-win32-ia32-msvc": "1.4.1", + "@swc/core-win32-x64-msvc": "1.4.1" + }, + "peerDependencies": { + "@swc/helpers": "^0.5.0" + }, + "peerDependenciesMeta": { + "@swc/helpers": { + "optional": true + } + } + }, + "node_modules/@swc/core-darwin-arm64": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.1.tgz", + "integrity": "sha512-ePyfx0348UbR4DOAW24TedeJbafnzha8liXFGuQ4bdXtEVXhLfPngprrxKrAddCuv42F9aTxydlF6+adD3FBhA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-darwin-x64": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.1.tgz", + "integrity": "sha512-eLf4JSe6VkCMdDowjM8XNC5rO+BrgfbluEzAVtKR8L2HacNYukieumN7EzpYCi0uF1BYwu1ku6tLyG2r0VcGxA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm-gnueabihf": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.1.tgz", + "integrity": "sha512-K8VtTLWMw+rkN/jDC9o/Q9SMmzdiHwYo2CfgkwVT29NsGccwmNhCQx6XoYiPKyKGIFKt4tdQnJHKUFzxUqQVtQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-gnu": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.1.tgz", + "integrity": "sha512-0e8p4g0Bfkt8lkiWgcdiENH3RzkcqKtpRXIVNGOmVc0OBkvc2tpm2WTx/eoCnes2HpTT4CTtR3Zljj4knQ4Fvw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-arm64-musl": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.1.tgz", + "integrity": "sha512-b/vWGQo2n7lZVUnSQ7NBq3Qrj85GrAPPiRbpqaIGwOytiFSk8VULFihbEUwDe0rXgY4LDm8z8wkgADZcLnmdUA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-gnu": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.1.tgz", + "integrity": "sha512-AFMQlvkKEdNi1Vk2GFTxxJzbICttBsOQaXa98kFTeWTnFFIyiIj2w7Sk8XRTEJ/AjF8ia8JPKb1zddBWr9+bEQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-linux-x64-musl": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.1.tgz", + "integrity": 
"sha512-QX2MxIECX1gfvUVZY+jk528/oFkS9MAl76e3ZRvG2KC/aKlCQL0KSzcTSm13mOxkDKS30EaGRDRQWNukGpMeRg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-arm64-msvc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.1.tgz", + "integrity": "sha512-OklkJYXXI/tntD2zaY8i3iZldpyDw5q+NAP3k9OlQ7wXXf37djRsHLV0NW4+ZNHBjE9xp2RsXJ0jlOJhfgGoFA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-ia32-msvc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.1.tgz", + "integrity": "sha512-MBuc3/QfKX9FnLOU7iGN+6yHRTQaPQ9WskiC8s8JFiKQ+7I2p25tay2RplR9dIEEGgVAu6L7auv96LbNTh+FaA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/core-win32-x64-msvc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.1.tgz", + "integrity": "sha512-lu4h4wFBb/bOK6N2MuZwg7TrEpwYXgpQf5R7ObNSXL65BwZ9BG8XRzD+dLJmALu8l5N08rP/TrpoKRoGT4WSxw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", + "dev": true + }, + "node_modules/@swc/types": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", + "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", + "dev": true + }, + "node_modules/@szmarczak/http-timer": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", + "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", + "dev": true, + "dependencies": { + "defer-to-connect": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@tokenizer/token": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", + "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", + "dev": true + }, + "node_modules/@types/cacheable-request": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", + "integrity": "sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==", + "dev": true, + "dependencies": { + "@types/http-cache-semantics": "*", + "@types/keyv": "^3.1.4", + "@types/node": "*", + "@types/responselike": "^1.0.0" + } + }, + "node_modules/@types/http-cache-semantics": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", + "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", + "dev": true + }, + "node_modules/@types/keyv": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz", + "integrity": 
"sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "20.11.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.17.tgz", + "integrity": "sha512-QmgQZGWu1Yw9TDyAP9ZzpFJKynYNeOvwMJmaxABfieQoVoiVOS6MN1WSpqpRcbeA5+RW82kraAVxCCJg+780Qw==", + "dev": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/responselike": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.3.tgz", + "integrity": "sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/arch": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/bin-check": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bin-check/-/bin-check-4.1.0.tgz", + "integrity": "sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA==", + "dev": true, + "dependencies": { + "execa": "^0.7.0", + "executable": "^4.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/bin-version": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/bin-version/-/bin-version-6.0.0.tgz", + "integrity": "sha512-nk5wEsP4RiKjG+vF+uG8lFsEn4d7Y6FVDamzzftSunXOoOcOOkzcWdKVlGgFFwlUQCj63SgnUkLLGF8v7lufhw==", + "dev": true, + "dependencies": { + "execa": "^5.0.0", + "find-versions": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bin-version-check": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/bin-version-check/-/bin-version-check-5.1.0.tgz", + "integrity": "sha512-bYsvMqJ8yNGILLz1KP9zKLzQ6YpljV3ln1gqhuLkUtyfGi3qXKGuK2p+U4NAvjVFzDFiBBtOpCOSFNuYYEGZ5g==", + "dev": true, + "dependencies": { + "bin-version": "^6.0.0", + "semver": "^7.5.3", + "semver-truncate": "^3.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/bin-version/node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/bin-version/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/bin-version/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bin-version/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bin-version/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/bin-version/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/bin-version/node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/bin-version/node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/bin-version/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/body-parser": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", + "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.1", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/cacheable-lookup": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", + "integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==", + "dev": true, + "engines": { + "node": ">=10.6.0" + } + }, + "node_modules/cacheable-request": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.4.tgz", + "integrity": "sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==", + "dev": true, + "dependencies": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^4.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^6.0.1", + "responselike": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cacheable-request/node_modules/get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/clone-response": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz", + "integrity": "sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==", + "dev": true, + "dependencies": { + "mimic-response": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + }, + "node_modules/cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", + "dev": true, + "dependencies": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "node_modules/data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "engines": { + "node": ">= 12" + } + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decompress-response/node_modules/mimic-response": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.0.tgz", + "integrity": "sha512-6iwE3Y2RVYCME1jLpBqq7LQWK3MW6vjV2bZy6gt/WrqkY+WE74Spyc0ThAOYpMtITvnjX09CrC6ym7A/m9mebA==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" 
+ }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.20.0", + "@esbuild/android-arm": "0.20.0", + "@esbuild/android-arm64": "0.20.0", + "@esbuild/android-x64": "0.20.0", + "@esbuild/darwin-arm64": "0.20.0", + "@esbuild/darwin-x64": "0.20.0", + "@esbuild/freebsd-arm64": "0.20.0", + "@esbuild/freebsd-x64": "0.20.0", + "@esbuild/linux-arm": "0.20.0", + "@esbuild/linux-arm64": "0.20.0", + "@esbuild/linux-ia32": "0.20.0", + "@esbuild/linux-loong64": "0.20.0", + "@esbuild/linux-mips64el": "0.20.0", + "@esbuild/linux-ppc64": "0.20.0", + "@esbuild/linux-riscv64": "0.20.0", + "@esbuild/linux-s390x": "0.20.0", + "@esbuild/linux-x64": "0.20.0", + "@esbuild/netbsd-x64": "0.20.0", + "@esbuild/openbsd-x64": "0.20.0", + "@esbuild/sunos-x64": "0.20.0", + "@esbuild/win32-arm64": "0.20.0", + "@esbuild/win32-ia32": "0.20.0", + "@esbuild/win32-x64": "0.20.0" + } + }, + "node_modules/esbuild-plugin-resolve": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/esbuild-plugin-resolve/-/esbuild-plugin-resolve-2.0.0.tgz", + "integrity": "sha512-eJy9B8yDW5X/J48eWtR1uVmv+DKfHvYYnrrcqQoe/nUkVHVOTZlJnSevkYyGOz6hI90t036Y5QIPDrGzmppxfg==" + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": "sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==", + "dev": true, + "dependencies": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dev": true, + "dependencies": { + "pify": "^2.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/express": { + "version": "4.18.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", + "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.1", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.5.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.2.0", + "fresh": "0.5.2", + "http-errors": "2.0.0", + 
"merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.7", + "qs": "6.11.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.18.0", + "serve-static": "1.15.0", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/ext-list": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", + "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", + "dev": true, + "dependencies": { + "mime-db": "^1.28.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ext-name": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", + "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", + "dev": true, + "dependencies": { + "ext-list": "^2.0.0", + "sort-keys-length": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "paypal", + "url": "https://paypal.me/jimmywarting" + } + ], + "dependencies": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + }, + "engines": { + "node": "^12.20 || >= 14.13" + } + }, + "node_modules/file-type": { + "version": "17.1.6", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-17.1.6.tgz", + "integrity": "sha512-hlDw5Ev+9e883s0pwUsuuYNu4tD7GgpUnOvykjv1Gya0ZIjuKumthDRua90VUn6/nlRKAjcxLUnHNTIUWwWIiw==", + "dev": true, + "dependencies": { + "readable-web-to-node-stream": "^3.0.2", + "strtok3": "^7.0.0-alpha.9", + "token-types": "^5.0.0-alpha.2" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/file-type?sponsor=1" + } + }, + "node_modules/filebrowser": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/filebrowser/-/filebrowser-1.0.0.tgz", + "integrity": "sha512-RRONYpCDzbmWPhBX43T4dE+ptqLznJ7lKfbMaZLChB2i2ZIdFXoqT9qZTi70Dpq6fnJHuvcdeiRqMIPZKhVgTQ==", + "dependencies": { + "commander": "^2.9.0", + "content-disposition": "^0.5.1", + "express": "^4.14.0" + } + }, + "node_modules/filebrowser/node_modules/commander": { + "version": "2.20.3", + "resolved": 
"https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + }, + "node_modules/filename-reserved-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-3.0.0.tgz", + "integrity": "sha512-hn4cQfU6GOT/7cFHXBqeBg2TbrMBgdD0kcjLhvSQYYwm3s4B6cjvBfb7nBALJLAXqmU5xajSa7X2NnUud/VCdw==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/filenamify": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-5.1.1.tgz", + "integrity": "sha512-M45CbrJLGACfrPOkrTp3j2EcO9OBkKUYME0eiqOCa7i2poaklU0jhlIaMlr8ijLorT0uLAzrn3qXOp5684CkfA==", + "dev": true, + "dependencies": { + "filename-reserved-regex": "^3.0.0", + "strip-outer": "^2.0.0", + "trim-repeated": "^2.0.0" + }, + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", + "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/find-versions": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/find-versions/-/find-versions-5.1.0.tgz", + "integrity": "sha512-+iwzCJ7C5v5KgcBuueqVoNiHVoQpwiUK5XFLjf0affFTep+Wcw93tPvmb8tqujDNmzhBDPddnWV/qgWSXgq+Hg==", + "dev": true, + "dependencies": { + "semver-regex": "^4.0.5" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "dependencies": { + "fetch-blob": "^3.1.2" + }, + "engines": { + "node": ">=12.20.0" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": 
"sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/got": { + "version": "11.8.6", + "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", + "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", + "dev": true, + "dependencies": { + "@sindresorhus/is": "^4.0.0", + "@szmarczak/http-timer": "^4.0.5", + "@types/cacheable-request": "^6.0.1", + "@types/responselike": "^1.0.0", + "cacheable-lookup": "^5.0.3", + "cacheable-request": "^7.0.2", + "decompress-response": "^6.0.0", + "http2-wrapper": "^1.0.0-beta.5.2", + "lowercase-keys": "^2.0.0", + "p-cancelable": "^2.0.0", + "responselike": "^2.0.0" + }, + "engines": { + "node": ">=10.19.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/got?sponsor=1" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" 
+ } + }, + "node_modules/hasown": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.1.tgz", + "integrity": "sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http2-wrapper": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", + "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", + "dev": true, + "dependencies": { + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.0.0" + }, + "engines": { + "node": ">=10.19.0" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": 
"https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/isomorphic-fetch": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz", + "integrity": "sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==", + "dependencies": { + "node-fetch": "^2.6.1", + "whatwg-fetch": "^3.4.1" + } + }, + "node_modules/isomorphic-fetch/node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + 
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, 
+ "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/jimmywarting" + }, + { + "type": "github", + "url": "https://paypal.me/jimmywarting" + } + ], + "engines": { + "node": ">=10.5.0" + } + }, + "node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "dependencies": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" + } + }, + "node_modules/normalize-url": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", + "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", + "dev": true, + "dependencies": { + "path-key": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/object-inspect": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": 
"sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/os-filter-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/os-filter-obj/-/os-filter-obj-2.0.0.tgz", + "integrity": "sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg==", + "dev": true, + "dependencies": { + "arch": "^2.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/p-cancelable": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", + "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + }, + "node_modules/peek-readable": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.0.0.tgz", + "integrity": "sha512-YtCKvLUOvwtMGmrniQPdO7MwPjgkFBtFIrmfSbYmYuq3tKDV/mcfAhBth1+C3ru7uXIZasc/pHnb+YDYNkkj4A==", + "dev": true, + "engines": { + "node": ">=14.16" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/prettier": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", + "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + 
"node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", + "dev": true + }, + "node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", + "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readable-web-to-node-stream": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz", + "integrity": 
"sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==", + "dev": true, + "dependencies": { + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", + "dev": true + }, + "node_modules/responselike": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.1.tgz", + "integrity": "sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==", + "dev": true, + "dependencies": { + "lowercase-keys": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver-regex": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/semver-regex/-/semver-regex-4.0.5.tgz", + "integrity": "sha512-hunMQrEy1T6Jr2uEVjrAIqjwWcQTgOAcIM52C8MY1EZSD3DDNft04XzvYKPqjED65bNVVko0YI38nYeEHCX3yw==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semver-truncate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/semver-truncate/-/semver-truncate-3.0.0.tgz", + "integrity": 
"sha512-LJWA9kSvMolR51oDE6PN3kALBNaUdkxzAGcexw8gjMA8xr5zUqK0JiR3CgARSqanYF3Z1YHvsErb1KDgh+v7Rg==", + "dev": true, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semver/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/send": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", + "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/serve-static": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "dependencies": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.18.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/set-function-length": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", + "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", + "dependencies": { + "define-data-property": "^1.1.2", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.3", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dev": true, + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": 
"sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/side-channel": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.5.tgz", + "integrity": "sha512-QcgiIWV4WV7qWExbN5llt6frQB/lBven9pqliLXfGPB+K9ZYXxDozp0wLkHS24kWCm+6YXH/f0HhnObZnZOBnQ==", + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/sort-keys": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", + "integrity": "sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==", + "dev": true, + "dependencies": { + "is-plain-obj": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sort-keys-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", + "integrity": "sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==", + "dev": true, + "dependencies": { + "sort-keys": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-outer": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/strip-outer/-/strip-outer-2.0.0.tgz", + "integrity": "sha512-A21Xsm1XzUkK0qK1ZrytDUvqsQWict2Cykhvi0fBQntGG5JSprESasEyV1EZ/4CiR5WB5KjzLTrP/bO37B0wPg==", + "dev": true, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/strtok3": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-7.0.0.tgz", + "integrity": "sha512-pQ+V+nYQdC5H3Q7qBZAz/MO6lwGhoC2gOAjuouGf/VO0m7vQRh8QNMl2Uf6SwAtzZ9bOw3UIeBukEGNJl5dtXQ==", + "dev": true, + "dependencies": { + "@tokenizer/token": "^0.3.0", + "peek-readable": "^5.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/token-types": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-5.0.1.tgz", + "integrity": "sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg==", + "dev": true, + "dependencies": { + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/trim-repeated": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-2.0.0.tgz", + "integrity": "sha512-QUHBFTJGdOwmp0tbOG505xAgOp/YliZP/6UgafFXYZ26WT1bvQmSMJUvkeVSASuJJHbqsFbynTvkd5W8RBTipg==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^5.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/ts-matches": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-5.4.1.tgz", + "integrity": "sha512-kXrY75F0s0WD15N2bWKDScKlKgwnusN6dTRzGs1N7LlxQRnazrsBISC1HL4sy2adsyk65Zbx3Ui3IGN8leAFOQ==" + }, + "node_modules/tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typescript": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", + "integrity": 
"sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/web-streams-polyfill": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", + "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-fetch": { + "version": "3.6.20", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", + "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": 
"sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + }, + "node_modules/yaml": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", + "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==", + "engines": { + "node": ">= 14" + } + } + }, + "dependencies": { + "@esbuild/aix-ppc64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.0.tgz", + "integrity": "sha512-fGFDEctNh0CcSwsiRPxiaqX0P5rq+AqE0SRhYGZ4PX46Lg1FNR6oCxJghf8YgY0WQEgQuh3lErUFE4KxLeRmmw==", + "dev": true, + "optional": true + }, + "@esbuild/android-arm": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.0.tgz", + "integrity": "sha512-3bMAfInvByLHfJwYPJRlpTeaQA75n8C/QKpEaiS4HrFWFiJlNI0vzq/zCjBrhAYcPyVPG7Eo9dMrcQXuqmNk5g==", + "dev": true, + "optional": true + }, + "@esbuild/android-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.0.tgz", + "integrity": "sha512-aVpnM4lURNkp0D3qPoAzSG92VXStYmoVPOgXveAUoQBWRSuQzt51yvSju29J6AHPmwY1BjH49uR29oyfH1ra8Q==", + "dev": true, + "optional": true + }, + "@esbuild/android-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.0.tgz", + "integrity": "sha512-uK7wAnlRvjkCPzh8jJ+QejFyrP8ObKuR5cBIsQZ+qbMunwR8sbd8krmMbxTLSrDhiPZaJYKQAU5Y3iMDcZPhyQ==", + "dev": true, + "optional": true + }, + "@esbuild/darwin-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.0.tgz", + "integrity": "sha512-AjEcivGAlPs3UAcJedMa9qYg9eSfU6FnGHJjT8s346HSKkrcWlYezGE8VaO2xKfvvlZkgAhyvl06OJOxiMgOYQ==", + "dev": true, + "optional": true + }, + "@esbuild/darwin-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.0.tgz", + "integrity": "sha512-bsgTPoyYDnPv8ER0HqnJggXK6RyFy4PH4rtsId0V7Efa90u2+EifxytE9pZnsDgExgkARy24WUQGv9irVbTvIw==", + "dev": true, + "optional": true + }, + "@esbuild/freebsd-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.0.tgz", + "integrity": "sha512-kQ7jYdlKS335mpGbMW5tEe3IrQFIok9r84EM3PXB8qBFJPSc6dpWfrtsC/y1pyrz82xfUIn5ZrnSHQQsd6jebQ==", + "dev": true, + "optional": true + }, + "@esbuild/freebsd-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.0.tgz", + "integrity": "sha512-uG8B0WSepMRsBNVXAQcHf9+Ko/Tr+XqmK7Ptel9HVmnykupXdS4J7ovSQUIi0tQGIndhbqWLaIL/qO/cWhXKyQ==", + "dev": true, + "optional": true + }, + "@esbuild/linux-arm": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.0.tgz", + "integrity": "sha512-2ezuhdiZw8vuHf1HKSf4TIk80naTbP9At7sOqZmdVwvvMyuoDiZB49YZKLsLOfKIr77+I40dWpHVeY5JHpIEIg==", + "dev": true, + "optional": true + }, + "@esbuild/linux-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.0.tgz", + "integrity": "sha512-uTtyYAP5veqi2z9b6Gr0NUoNv9F/rOzI8tOD5jKcCvRUn7T60Bb+42NDBCWNhMjkQzI0qqwXkQGo1SY41G52nw==", + "dev": true, + "optional": true + }, + "@esbuild/linux-ia32": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.0.tgz", + "integrity": 
"sha512-c88wwtfs8tTffPaoJ+SQn3y+lKtgTzyjkD8NgsyCtCmtoIC8RDL7PrJU05an/e9VuAke6eJqGkoMhJK1RY6z4w==", + "dev": true, + "optional": true + }, + "@esbuild/linux-loong64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.0.tgz", + "integrity": "sha512-lR2rr/128/6svngnVta6JN4gxSXle/yZEZL3o4XZ6esOqhyR4wsKyfu6qXAL04S4S5CgGfG+GYZnjFd4YiG3Aw==", + "dev": true, + "optional": true + }, + "@esbuild/linux-mips64el": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.0.tgz", + "integrity": "sha512-9Sycc+1uUsDnJCelDf6ZNqgZQoK1mJvFtqf2MUz4ujTxGhvCWw+4chYfDLPepMEvVL9PDwn6HrXad5yOrNzIsQ==", + "dev": true, + "optional": true + }, + "@esbuild/linux-ppc64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.0.tgz", + "integrity": "sha512-CoWSaaAXOZd+CjbUTdXIJE/t7Oz+4g90A3VBCHLbfuc5yUQU/nFDLOzQsN0cdxgXd97lYW/psIIBdjzQIwTBGw==", + "dev": true, + "optional": true + }, + "@esbuild/linux-riscv64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.0.tgz", + "integrity": "sha512-mlb1hg/eYRJUpv8h/x+4ShgoNLL8wgZ64SUr26KwglTYnwAWjkhR2GpoKftDbPOCnodA9t4Y/b68H4J9XmmPzA==", + "dev": true, + "optional": true + }, + "@esbuild/linux-s390x": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.0.tgz", + "integrity": "sha512-fgf9ubb53xSnOBqyvWEY6ukBNRl1mVX1srPNu06B6mNsNK20JfH6xV6jECzrQ69/VMiTLvHMicQR/PgTOgqJUQ==", + "dev": true, + "optional": true + }, + "@esbuild/linux-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.0.tgz", + "integrity": "sha512-H9Eu6MGse++204XZcYsse1yFHmRXEWgadk2N58O/xd50P9EvFMLJTQLg+lB4E1cF2xhLZU5luSWtGTb0l9UeSg==", + "dev": true, + "optional": true + }, + "@esbuild/netbsd-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.0.tgz", + "integrity": "sha512-lCT675rTN1v8Fo+RGrE5KjSnfY0x9Og4RN7t7lVrN3vMSjy34/+3na0q7RIfWDAj0e0rCh0OL+P88lu3Rt21MQ==", + "dev": true, + "optional": true + }, + "@esbuild/openbsd-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.0.tgz", + "integrity": "sha512-HKoUGXz/TOVXKQ+67NhxyHv+aDSZf44QpWLa3I1lLvAwGq8x1k0T+e2HHSRvxWhfJrFxaaqre1+YyzQ99KixoA==", + "dev": true, + "optional": true + }, + "@esbuild/sunos-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.0.tgz", + "integrity": "sha512-GDwAqgHQm1mVoPppGsoq4WJwT3vhnz/2N62CzhvApFD1eJyTroob30FPpOZabN+FgCjhG+AgcZyOPIkR8dfD7g==", + "dev": true, + "optional": true + }, + "@esbuild/win32-arm64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.0.tgz", + "integrity": "sha512-0vYsP8aC4TvMlOQYozoksiaxjlvUcQrac+muDqj1Fxy6jh9l9CZJzj7zmh8JGfiV49cYLTorFLxg7593pGldwQ==", + "dev": true, + "optional": true + }, + "@esbuild/win32-ia32": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.0.tgz", + "integrity": "sha512-p98u4rIgfh4gdpV00IqknBD5pC84LCub+4a3MO+zjqvU5MVXOc3hqR2UgT2jI2nh3h8s9EQxmOsVI3tyzv1iFg==", + "dev": true, + "optional": true + }, + "@esbuild/win32-x64": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.0.tgz", + "integrity": 
"sha512-NgJnesu1RtWihtTtXGFMU5YSE6JyyHPMxCwBZK7a6/8d31GuSo9l0Ss7w1Jw5QnKUawG6UEehs883kcXf5fYwg==", + "dev": true, + "optional": true + }, + "@iarna/toml": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz", + "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==" + }, + "@mole-inc/bin-wrapper": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@mole-inc/bin-wrapper/-/bin-wrapper-8.0.1.tgz", + "integrity": "sha512-sTGoeZnjI8N4KS+sW2AN95gDBErhAguvkw/tWdCjeM8bvxpz5lqrnd0vOJABA1A+Ic3zED7PYoLP/RANLgVotA==", + "dev": true, + "requires": { + "bin-check": "^4.1.0", + "bin-version-check": "^5.0.0", + "content-disposition": "^0.5.4", + "ext-name": "^5.0.0", + "file-type": "^17.1.6", + "filenamify": "^5.0.2", + "got": "^11.8.5", + "os-filter-obj": "^2.0.0" + } + }, + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } + }, + "@sindresorhus/is": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", + "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", + "dev": true + }, + "@start9labs/start-sdk": { + "version": "file:../sdk/dist", + "requires": { + "@iarna/toml": "^2.2.5", + "@types/jest": "^29.4.0", + "isomorphic-fetch": "^3.0.0", + "jest": "^29.4.3", + "ts-jest": "^29.0.5", + "ts-matches": "^5.4.1", + "ts-node": "^10.9.1", + "tsx": "^4.7.1", + "typescript": "^5.0.4", + "yaml": "^2.2.2" + } + }, + "@swc/cli": { + "version": "0.1.65", + "resolved": "https://registry.npmjs.org/@swc/cli/-/cli-0.1.65.tgz", + "integrity": "sha512-4NcgsvJVHhA7trDnMmkGLLvWMHu2kSy+qHx6QwRhhJhdiYdNUrhdp+ERxen73sYtaeEOYeLJcWrQ60nzKi6rpg==", + "dev": true, + "requires": { + "@mole-inc/bin-wrapper": "^8.0.1", + "commander": "^7.1.0", + "fast-glob": "^3.2.5", + "minimatch": "^9.0.3", + "semver": "^7.3.8", + "slash": "3.0.0", + "source-map": "^0.7.3" + } + }, + "@swc/core": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.1.tgz", + "integrity": "sha512-3y+Y8js+e7BbM16iND+6Rcs3jdiL28q3iVtYsCviYSSpP2uUVKkp5sJnCY4pg8AaVvyN7CGQHO7gLEZQ5ByozQ==", + "dev": true, + "requires": { + "@swc/core-darwin-arm64": "1.4.1", + "@swc/core-darwin-x64": "1.4.1", + "@swc/core-linux-arm-gnueabihf": "1.4.1", + "@swc/core-linux-arm64-gnu": "1.4.1", + "@swc/core-linux-arm64-musl": "1.4.1", + "@swc/core-linux-x64-gnu": "1.4.1", + "@swc/core-linux-x64-musl": "1.4.1", + "@swc/core-win32-arm64-msvc": "1.4.1", + "@swc/core-win32-ia32-msvc": "1.4.1", + "@swc/core-win32-x64-msvc": "1.4.1", + "@swc/counter": "^0.1.2", + "@swc/types": 
"^0.1.5" + } + }, + "@swc/core-darwin-arm64": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.1.tgz", + "integrity": "sha512-ePyfx0348UbR4DOAW24TedeJbafnzha8liXFGuQ4bdXtEVXhLfPngprrxKrAddCuv42F9aTxydlF6+adD3FBhA==", + "dev": true, + "optional": true + }, + "@swc/core-darwin-x64": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.1.tgz", + "integrity": "sha512-eLf4JSe6VkCMdDowjM8XNC5rO+BrgfbluEzAVtKR8L2HacNYukieumN7EzpYCi0uF1BYwu1ku6tLyG2r0VcGxA==", + "dev": true, + "optional": true + }, + "@swc/core-linux-arm-gnueabihf": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.1.tgz", + "integrity": "sha512-K8VtTLWMw+rkN/jDC9o/Q9SMmzdiHwYo2CfgkwVT29NsGccwmNhCQx6XoYiPKyKGIFKt4tdQnJHKUFzxUqQVtQ==", + "dev": true, + "optional": true + }, + "@swc/core-linux-arm64-gnu": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.1.tgz", + "integrity": "sha512-0e8p4g0Bfkt8lkiWgcdiENH3RzkcqKtpRXIVNGOmVc0OBkvc2tpm2WTx/eoCnes2HpTT4CTtR3Zljj4knQ4Fvw==", + "dev": true, + "optional": true + }, + "@swc/core-linux-arm64-musl": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.1.tgz", + "integrity": "sha512-b/vWGQo2n7lZVUnSQ7NBq3Qrj85GrAPPiRbpqaIGwOytiFSk8VULFihbEUwDe0rXgY4LDm8z8wkgADZcLnmdUA==", + "dev": true, + "optional": true + }, + "@swc/core-linux-x64-gnu": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.1.tgz", + "integrity": "sha512-AFMQlvkKEdNi1Vk2GFTxxJzbICttBsOQaXa98kFTeWTnFFIyiIj2w7Sk8XRTEJ/AjF8ia8JPKb1zddBWr9+bEQ==", + "dev": true, + "optional": true + }, + "@swc/core-linux-x64-musl": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.1.tgz", + "integrity": "sha512-QX2MxIECX1gfvUVZY+jk528/oFkS9MAl76e3ZRvG2KC/aKlCQL0KSzcTSm13mOxkDKS30EaGRDRQWNukGpMeRg==", + "dev": true, + "optional": true + }, + "@swc/core-win32-arm64-msvc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.1.tgz", + "integrity": "sha512-OklkJYXXI/tntD2zaY8i3iZldpyDw5q+NAP3k9OlQ7wXXf37djRsHLV0NW4+ZNHBjE9xp2RsXJ0jlOJhfgGoFA==", + "dev": true, + "optional": true + }, + "@swc/core-win32-ia32-msvc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.1.tgz", + "integrity": "sha512-MBuc3/QfKX9FnLOU7iGN+6yHRTQaPQ9WskiC8s8JFiKQ+7I2p25tay2RplR9dIEEGgVAu6L7auv96LbNTh+FaA==", + "dev": true, + "optional": true + }, + "@swc/core-win32-x64-msvc": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.1.tgz", + "integrity": "sha512-lu4h4wFBb/bOK6N2MuZwg7TrEpwYXgpQf5R7ObNSXL65BwZ9BG8XRzD+dLJmALu8l5N08rP/TrpoKRoGT4WSxw==", + "dev": true, + "optional": true + }, + "@swc/counter": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", + "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", + "dev": true + }, + "@swc/types": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", + "integrity": 
"sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", + "dev": true + }, + "@szmarczak/http-timer": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", + "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", + "dev": true, + "requires": { + "defer-to-connect": "^2.0.0" + } + }, + "@tokenizer/token": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", + "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", + "dev": true + }, + "@types/cacheable-request": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", + "integrity": "sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==", + "dev": true, + "requires": { + "@types/http-cache-semantics": "*", + "@types/keyv": "^3.1.4", + "@types/node": "*", + "@types/responselike": "^1.0.0" + } + }, + "@types/http-cache-semantics": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", + "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", + "dev": true + }, + "@types/keyv": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz", + "integrity": "sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/node": { + "version": "20.11.17", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.17.tgz", + "integrity": "sha512-QmgQZGWu1Yw9TDyAP9ZzpFJKynYNeOvwMJmaxABfieQoVoiVOS6MN1WSpqpRcbeA5+RW82kraAVxCCJg+780Qw==", + "dev": true, + "requires": { + "undici-types": "~5.26.4" + } + }, + "@types/responselike": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.3.tgz", + "integrity": "sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "requires": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + } + }, + "arch": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", + "dev": true + }, + "array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "bin-check": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bin-check/-/bin-check-4.1.0.tgz", + "integrity": 
"sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA==", + "dev": true, + "requires": { + "execa": "^0.7.0", + "executable": "^4.1.0" + } + }, + "bin-version": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/bin-version/-/bin-version-6.0.0.tgz", + "integrity": "sha512-nk5wEsP4RiKjG+vF+uG8lFsEn4d7Y6FVDamzzftSunXOoOcOOkzcWdKVlGgFFwlUQCj63SgnUkLLGF8v7lufhw==", + "dev": true, + "requires": { + "execa": "^5.0.0", + "find-versions": "^5.0.0" + }, + "dependencies": { + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + } + }, + "get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true + }, + "is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true + }, + "npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "requires": { + "path-key": "^3.0.0" + } + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + } + } + }, + "bin-version-check": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/bin-version-check/-/bin-version-check-5.1.0.tgz", + "integrity": "sha512-bYsvMqJ8yNGILLz1KP9zKLzQ6YpljV3ln1gqhuLkUtyfGi3qXKGuK2p+U4NAvjVFzDFiBBtOpCOSFNuYYEGZ5g==", + "dev": true, + 
"requires": { + "bin-version": "^6.0.0", + "semver": "^7.5.3", + "semver-truncate": "^3.0.0" + } + }, + "body-parser": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", + "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "requires": { + "bytes": "3.1.2", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.1", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + } + }, + "brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" + }, + "cacheable-lookup": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", + "integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==", + "dev": true + }, + "cacheable-request": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.4.tgz", + "integrity": "sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==", + "dev": true, + "requires": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^4.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^6.0.1", + "responselike": "^2.0.0" + }, + "dependencies": { + "get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + } + } + }, + "call-bind": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "requires": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + } + }, + "clone-response": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz", + "integrity": "sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==", + "dev": true, + "requires": { + "mimic-response": "^1.0.0" + } + }, + "commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true + }, + "content-disposition": { + "version": "0.5.4", + 
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "requires": { + "safe-buffer": "5.2.1" + } + }, + "content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==" + }, + "cookie": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", + "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==" + }, + "cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + }, + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", + "dev": true, + "requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "data-uri-to-buffer": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", + "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==" + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "requires": { + "mimic-response": "^3.1.0" + }, + "dependencies": { + "mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true + } + } + }, + "defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "dev": true + }, + "define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "requires": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + } + }, + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, + "destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" + }, + "ee-first": 
{ + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" + }, + "end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "requires": { + "once": "^1.4.0" + } + }, + "es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "requires": { + "get-intrinsic": "^1.2.4" + } + }, + "es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==" + }, + "esbuild": { + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.0.tgz", + "integrity": "sha512-6iwE3Y2RVYCME1jLpBqq7LQWK3MW6vjV2bZy6gt/WrqkY+WE74Spyc0ThAOYpMtITvnjX09CrC6ym7A/m9mebA==", + "dev": true, + "requires": { + "@esbuild/aix-ppc64": "0.20.0", + "@esbuild/android-arm": "0.20.0", + "@esbuild/android-arm64": "0.20.0", + "@esbuild/android-x64": "0.20.0", + "@esbuild/darwin-arm64": "0.20.0", + "@esbuild/darwin-x64": "0.20.0", + "@esbuild/freebsd-arm64": "0.20.0", + "@esbuild/freebsd-x64": "0.20.0", + "@esbuild/linux-arm": "0.20.0", + "@esbuild/linux-arm64": "0.20.0", + "@esbuild/linux-ia32": "0.20.0", + "@esbuild/linux-loong64": "0.20.0", + "@esbuild/linux-mips64el": "0.20.0", + "@esbuild/linux-ppc64": "0.20.0", + "@esbuild/linux-riscv64": "0.20.0", + "@esbuild/linux-s390x": "0.20.0", + "@esbuild/linux-x64": "0.20.0", + "@esbuild/netbsd-x64": "0.20.0", + "@esbuild/openbsd-x64": "0.20.0", + "@esbuild/sunos-x64": "0.20.0", + "@esbuild/win32-arm64": "0.20.0", + "@esbuild/win32-ia32": "0.20.0", + "@esbuild/win32-x64": "0.20.0" + } + }, + "esbuild-plugin-resolve": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/esbuild-plugin-resolve/-/esbuild-plugin-resolve-2.0.0.tgz", + "integrity": "sha512-eJy9B8yDW5X/J48eWtR1uVmv+DKfHvYYnrrcqQoe/nUkVHVOTZlJnSevkYyGOz6hI90t036Y5QIPDrGzmppxfg==" + }, + "escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "dev": true + }, + "etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==" + }, + "execa": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", + "integrity": 
"sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==", + "dev": true, + "requires": { + "cross-spawn": "^5.0.1", + "get-stream": "^3.0.0", + "is-stream": "^1.1.0", + "npm-run-path": "^2.0.0", + "p-finally": "^1.0.0", + "signal-exit": "^3.0.0", + "strip-eof": "^1.0.0" + } + }, + "executable": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", + "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", + "dev": true, + "requires": { + "pify": "^2.2.0" + } + }, + "express": { + "version": "4.18.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", + "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "requires": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.1", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.5.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.2.0", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.7", + "qs": "6.11.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.18.0", + "serve-static": "1.15.0", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + } + }, + "ext-list": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", + "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", + "dev": true, + "requires": { + "mime-db": "^1.28.0" + } + }, + "ext-name": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", + "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", + "dev": true, + "requires": { + "ext-list": "^2.0.0", + "sort-keys-length": "^1.0.0" + } + }, + "fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + } + }, + "fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "requires": { + "reusify": "^1.0.4" + } + }, + "fetch-blob": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", + "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", + "requires": { + "node-domexception": "^1.0.0", + "web-streams-polyfill": "^3.0.3" + } + }, + "file-type": { + "version": "17.1.6", + "resolved": "https://registry.npmjs.org/file-type/-/file-type-17.1.6.tgz", + "integrity": "sha512-hlDw5Ev+9e883s0pwUsuuYNu4tD7GgpUnOvykjv1Gya0ZIjuKumthDRua90VUn6/nlRKAjcxLUnHNTIUWwWIiw==", + "dev": true, + "requires": { + 
"readable-web-to-node-stream": "^3.0.2", + "strtok3": "^7.0.0-alpha.9", + "token-types": "^5.0.0-alpha.2" + } + }, + "filebrowser": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/filebrowser/-/filebrowser-1.0.0.tgz", + "integrity": "sha512-RRONYpCDzbmWPhBX43T4dE+ptqLznJ7lKfbMaZLChB2i2ZIdFXoqT9qZTi70Dpq6fnJHuvcdeiRqMIPZKhVgTQ==", + "requires": { + "commander": "^2.9.0", + "content-disposition": "^0.5.1", + "express": "^4.14.0" + }, + "dependencies": { + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + } + } + }, + "filename-reserved-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-3.0.0.tgz", + "integrity": "sha512-hn4cQfU6GOT/7cFHXBqeBg2TbrMBgdD0kcjLhvSQYYwm3s4B6cjvBfb7nBALJLAXqmU5xajSa7X2NnUud/VCdw==", + "dev": true + }, + "filenamify": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-5.1.1.tgz", + "integrity": "sha512-M45CbrJLGACfrPOkrTp3j2EcO9OBkKUYME0eiqOCa7i2poaklU0jhlIaMlr8ijLorT0uLAzrn3qXOp5684CkfA==", + "dev": true, + "requires": { + "filename-reserved-regex": "^3.0.0", + "strip-outer": "^2.0.0", + "trim-repeated": "^2.0.0" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "finalhandler": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", + "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "requires": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + } + }, + "find-versions": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/find-versions/-/find-versions-5.1.0.tgz", + "integrity": "sha512-+iwzCJ7C5v5KgcBuueqVoNiHVoQpwiUK5XFLjf0affFTep+Wcw93tPvmb8tqujDNmzhBDPddnWV/qgWSXgq+Hg==", + "dev": true, + "requires": { + "semver-regex": "^4.0.5" + } + }, + "formdata-polyfill": { + "version": "4.0.10", + "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", + "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "requires": { + "fetch-blob": "^3.1.2" + } + }, + "forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==" + }, + "fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" + }, + "function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==" + }, + "get-intrinsic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + 
"integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "requires": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + } + }, + "get-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", + "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", + "dev": true + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "requires": { + "get-intrinsic": "^1.1.3" + } + }, + "got": { + "version": "11.8.6", + "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", + "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", + "dev": true, + "requires": { + "@sindresorhus/is": "^4.0.0", + "@szmarczak/http-timer": "^4.0.5", + "@types/cacheable-request": "^6.0.1", + "@types/responselike": "^1.0.0", + "cacheable-lookup": "^5.0.3", + "cacheable-request": "^7.0.2", + "decompress-response": "^6.0.0", + "http2-wrapper": "^1.0.0-beta.5.2", + "lowercase-keys": "^2.0.0", + "p-cancelable": "^2.0.0", + "responselike": "^2.0.0" + } + }, + "has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "requires": { + "es-define-property": "^1.0.0" + } + }, + "has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==" + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + }, + "hasown": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.1.tgz", + "integrity": "sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA==", + "requires": { + "function-bind": "^1.1.2" + } + }, + "http-cache-semantics": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", + "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", + "dev": true + }, + "http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "requires": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + } + }, + "http2-wrapper": { + "version": "1.0.3", + "resolved": 
"https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", + "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", + "dev": true, + "requires": { + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.0.0" + } + }, + "human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true + }, + "iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3" + } + }, + "ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", + "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", + "dev": true + }, + "is-stream": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", + "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "isomorphic-fetch": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz", + "integrity": "sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==", + "requires": { + "node-fetch": "^2.6.1", + "whatwg-fetch": "^3.4.1" + }, + "dependencies": { + "node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": 
"sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "requires": { + "whatwg-url": "^5.0.0" + } + } + } + }, + "json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "requires": { + "json-buffer": "3.0.1" + } + }, + "lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "dev": true + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==" + }, + "merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + }, + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true + }, + "methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" + }, + "micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "requires": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + } + }, + "mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" + }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, + "mimic-fn": { + 
"version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, + "mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "dev": true + }, + "minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "requires": { + "brace-expansion": "^2.0.1" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" + }, + "node-domexception": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", + "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==" + }, + "node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "requires": { + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" + } + }, + "normalize-url": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", + "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", + "dev": true + }, + "npm-run-path": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", + "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", + "dev": true, + "requires": { + "path-key": "^2.0.0" + } + }, + "object-inspect": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", + "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==" + }, + "on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "requires": { + "ee-first": "1.1.1" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "os-filter-obj": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/os-filter-obj/-/os-filter-obj-2.0.0.tgz", + "integrity": "sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg==", + "dev": true, + "requires": { + "arch": "^2.1.0" + } + }, + "p-cancelable": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", + "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", + "dev": true + }, + "p-finally": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", + "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", + "dev": true + }, + "parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" + }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "dev": true + }, + "path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + }, + "peek-readable": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.0.0.tgz", + "integrity": "sha512-YtCKvLUOvwtMGmrniQPdO7MwPjgkFBtFIrmfSbYmYuq3tKDV/mcfAhBth1+C3ru7uXIZasc/pHnb+YDYNkkj4A==", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true + }, + "prettier": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", + "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", + "dev": true + }, + "proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "requires": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + } + }, + "pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", + "dev": true + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "requires": { + "side-channel": 
"^1.0.4" + } + }, + "queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true + }, + "quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", + "dev": true + }, + "range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" + }, + "raw-body": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", + "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "requires": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + } + }, + "readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "readable-web-to-node-stream": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz", + "integrity": "sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==", + "dev": true, + "requires": { + "readable-stream": "^3.6.0" + } + }, + "resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", + "dev": true + }, + "responselike": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.1.tgz", + "integrity": "sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==", + "dev": true, + "requires": { + "lowercase-keys": "^2.0.0" + } + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true + }, + "run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "requires": { + "queue-microtask": "^1.2.2" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + 
"integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + }, + "dependencies": { + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + } + } + }, + "semver-regex": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/semver-regex/-/semver-regex-4.0.5.tgz", + "integrity": "sha512-hunMQrEy1T6Jr2uEVjrAIqjwWcQTgOAcIM52C8MY1EZSD3DDNft04XzvYKPqjED65bNVVko0YI38nYeEHCX3yw==", + "dev": true + }, + "semver-truncate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/semver-truncate/-/semver-truncate-3.0.0.tgz", + "integrity": "sha512-LJWA9kSvMolR51oDE6PN3kALBNaUdkxzAGcexw8gjMA8xr5zUqK0JiR3CgARSqanYF3Z1YHvsErb1KDgh+v7Rg==", + "dev": true, + "requires": { + "semver": "^7.3.5" + } + }, + "send": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", + "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "requires": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "dependencies": { + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + } + } + }, + "serve-static": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", + "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "requires": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.18.0" + } + }, + "set-function-length": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", + "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", + "requires": { + "define-data-property": "^1.1.2", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.3", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.1" + } + }, + "setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "dev": true + }, + "side-channel": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.5.tgz", + "integrity": "sha512-QcgiIWV4WV7qWExbN5llt6frQB/lBven9pqliLXfGPB+K9ZYXxDozp0wLkHS24kWCm+6YXH/f0HhnObZnZOBnQ==", + "requires": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + } + }, + "signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "sort-keys": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", + "integrity": "sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==", + "dev": true, + "requires": { + "is-plain-obj": "^1.0.0" + } + }, + "sort-keys-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", + "integrity": "sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==", + "dev": true, + "requires": { + "sort-keys": "^1.0.0" + } + }, + "source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true + }, + "statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" + }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "requires": { + "safe-buffer": "~5.2.0" + } + }, + "strip-eof": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", + "dev": true + }, + "strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true + }, + "strip-outer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-2.0.0.tgz", + "integrity": "sha512-A21Xsm1XzUkK0qK1ZrytDUvqsQWict2Cykhvi0fBQntGG5JSprESasEyV1EZ/4CiR5WB5KjzLTrP/bO37B0wPg==", + "dev": true + }, + "strtok3": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-7.0.0.tgz", + "integrity": "sha512-pQ+V+nYQdC5H3Q7qBZAz/MO6lwGhoC2gOAjuouGf/VO0m7vQRh8QNMl2Uf6SwAtzZ9bOw3UIeBukEGNJl5dtXQ==", + "dev": true, + "requires": { + "@tokenizer/token": "^0.3.0", + "peek-readable": 
"^5.0.0" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, + "toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" + }, + "token-types": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/token-types/-/token-types-5.0.1.tgz", + "integrity": "sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg==", + "dev": true, + "requires": { + "@tokenizer/token": "^0.3.0", + "ieee754": "^1.2.1" + } + }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "trim-repeated": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-2.0.0.tgz", + "integrity": "sha512-QUHBFTJGdOwmp0tbOG505xAgOp/YliZP/6UgafFXYZ26WT1bvQmSMJUvkeVSASuJJHbqsFbynTvkd5W8RBTipg==", + "dev": true, + "requires": { + "escape-string-regexp": "^5.0.0" + } + }, + "ts-matches": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-5.4.1.tgz", + "integrity": "sha512-kXrY75F0s0WD15N2bWKDScKlKgwnusN6dTRzGs1N7LlxQRnazrsBISC1HL4sy2adsyk65Zbx3Ui3IGN8leAFOQ==" + }, + "tslib": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + }, + "type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "requires": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + } + }, + "typescript": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", + "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "dev": true + }, + "undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true + }, + "unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==" + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true + }, + "utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" + }, + "vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": 
"sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==" + }, + "web-streams-polyfill": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", + "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==" + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "whatwg-fetch": { + "version": "3.6.20", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", + "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==" + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + }, + "yaml": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", + "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==" + } + } +} diff --git a/container-runtime/package.json b/container-runtime/package.json new file mode 100644 index 0000000000..2fa4074083 --- /dev/null +++ b/container-runtime/package.json @@ -0,0 +1,37 @@ +{ + "name": "start-init", + "version": "0.0.0", + "description": "We want to be the sdk intermitent for the system", + "module": "./index.js", + "scripts": { + "check": "tsc --noEmit", + "build": "prettier --write '**/*.ts' && rm -rf dist && tsc", + "tsc": "rm -rf dist; tsc" + }, + "author": "", + "prettier": { + "trailingComma": "all", + "tabWidth": 2, + "semi": false, + "singleQuote": false + }, + "dependencies": { + "@iarna/toml": "^2.2.5", + "@start9labs/start-sdk": "file:../sdk/dist", + "esbuild-plugin-resolve": "^2.0.0", + "filebrowser": "^1.0.0", + "isomorphic-fetch": "^3.0.0", + "node-fetch": "^3.1.0", + "ts-matches": "^5.4.1", + "tslib": "^2.5.3", + "typescript": "^5.1.3", + "yaml": "^2.3.1" + }, + "devDependencies": { + "@swc/cli": "^0.1.62", + "@swc/core": "^1.3.65", + "@types/node": "^20.11.13", + "prettier": "^3.2.5", + "typescript": ">5.2" + } +} diff --git a/container-runtime/readme.md b/container-runtime/readme.md new file mode 100644 index 0000000000..023091463c --- /dev/null +++ b/container-runtime/readme.md @@ -0,0 +1,86 @@ +## Testing + +So, we are going to + +1. create a fake server +2. pretend socket server os (while the fake server is running) +3. 
Run a fake effects system (while 1/2 are running)
+
+To simulate this we created a server like start-os and
+a fake service (in this case I am using syncthing-wrapper)
+
+### TODO
+
+Undo the packing that I have done earlier, and hijack embassy.js to use the bundled service + code
+
+Converting embassy.js -> service.js
+
+```sequence {theme="hand"}
+startOs ->> startInit.js: Rpc Call
+startInit.js ->> service.js: Rpc Converted into js code
+```
+
+### Create a fake server
+
+```bash
+run_test () {
+  (
+    set -e
+    libs=/home/jh/Projects/start-os/libs/start_init
+    sockets=/tmp/start9
+    service=/home/jh/Projects/syncthing-wrapper
+
+    docker run \
+      -v $libs:/libs \
+      -v $service:/service \
+      -w /libs \
+      --rm node:18-alpine \
+      sh -c "
+        npm i &&
+        npm run bundle:esbuild &&
+        npm run bundle:service
+      "
+
+    docker run \
+      -v ./libs/start_init/:/libs \
+      -w /libs \
+      --rm node:18-alpine \
+      sh -c "
+        npm i &&
+        npm run bundle:esbuild
+      "
+
+    rm -rf $sockets || true
+    mkdir -p $sockets/sockets
+    cd $service
+    docker run \
+      -v $libs:/start-init \
+      -v $sockets:/start9 \
+      --rm -it $(docker build -q .) sh -c "
+        apk add nodejs &&
+        node /start-init/bundleEs.js
+      "
+  )
+}
+run_test
+```
+
+### Pretend Socket Server OS
+
+First we are going to create our fake server client with bash, then send it the possible JSON data
+
+```bash
+sudo socat - unix-client:/tmp/start9/sockets/rpc.sock
+```
+
+```json
+{"id":"a","method":"run","params":{"methodName":"/dependencyMounts","methodArgs":[]}}
+{"id":"a","method":"run","params":{"methodName":"/actions/test","methodArgs":{"input":{"id": 1}}}}
+{"id":"b","method":"run","params":{"methodName":"/actions/test","methodArgs":{"id": 1}}}
+
+```
diff --git a/container-runtime/rmcontainer.sh b/container-runtime/rmcontainer.sh
new file mode 100644
index 0000000000..69912eebaf
--- /dev/null
+++ b/container-runtime/rmcontainer.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+set -e
+
+rootfs=$1
+if [ -z "$rootfs" ]; then
+  >&2 echo "usage: $0 <rootfs>"
+  exit 1
+fi
+
+umount --recursive $rootfs
+rm -rf $rootfs/..
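For reference, the `socat` exchange shown in the readme above can also be driven from a short Node script. The following is only a minimal sketch, assuming the `/tmp/start9/sockets/rpc.sock` path and the newline-delimited `run` request shape from the readme examples; it is not part of the patch itself.

```ts
import * as net from "net"

// Sketch of a tiny JSON-RPC client for the fake server's unix socket.
// The socket path and the "run" request shape are taken from the readme
// examples above; the response framing is an assumption (we wait for the
// server to close the connection, then parse the first line it sent).
const SOCKET_PATH = "/tmp/start9/sockets/rpc.sock"

function call(method: string, params: unknown): Promise<unknown> {
  return new Promise((resolve, reject) => {
    const client = net.createConnection({ path: SOCKET_PATH }, () => {
      client.write(JSON.stringify({ id: "a", method, params }) + "\n")
    })
    let buf = ""
    client.on("data", (chunk) => {
      buf += chunk.toString()
    })
    client.on("end", () => {
      try {
        resolve(JSON.parse(buf.split("\n")[0]))
      } catch (e) {
        reject(e)
      }
    })
    client.on("error", reject)
  })
}

// Equivalent to piping the first JSON line from the readme through socat.
call("run", { methodName: "/dependencyMounts", methodArgs: [] })
  .then((res) => console.log(res))
  .catch((err) => console.error(err))
```

If the server keeps the connection open instead of closing it after replying, resolving on the first newline seen in `buf` works just as well.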
\ No newline at end of file diff --git a/container-runtime/src/Adapters/HostSystemStartOs.ts b/container-runtime/src/Adapters/HostSystemStartOs.ts new file mode 100644 index 0000000000..5e52224fab --- /dev/null +++ b/container-runtime/src/Adapters/HostSystemStartOs.ts @@ -0,0 +1,290 @@ +import { types as T } from "@start9labs/start-sdk" +import * as net from "net" +import { object, string, number, literals, some, unknown } from "ts-matches" +import { Effects } from "../Models/Effects" + +import { CallbackHolder } from "../Models/CallbackHolder" +const matchRpcError = object({ + error: object( + { + code: number, + message: string, + data: some( + string, + object( + { + details: string, + debug: string, + }, + ["debug"], + ), + ), + }, + ["data"], + ), +}) +const testRpcError = matchRpcError.test +const testRpcResult = object({ + result: unknown, +}).test +type RpcError = typeof matchRpcError._TYPE + +const SOCKET_PATH = "/media/startos/rpc/host.sock" +const MAIN = "/main" as const +export class HostSystemStartOs implements Effects { + static of(callbackHolder: CallbackHolder) { + return new HostSystemStartOs(callbackHolder) + } + + constructor(readonly callbackHolder: CallbackHolder) {} + id = 0 + rpcRound( + method: K, + params: unknown, + ) { + const id = this.id++ + const client = net.createConnection({ path: SOCKET_PATH }, () => { + client.write( + JSON.stringify({ + id, + method, + params, + }) + "\n", + ) + }) + let bufs: Buffer[] = [] + return new Promise((resolve, reject) => { + client.on("data", (data) => { + try { + bufs.push(data) + if (data.reduce((acc, x) => acc || x == 10, false)) { + const res: unknown = JSON.parse( + Buffer.concat(bufs).toString().split("\n")[0], + ) + if (testRpcError(res)) { + let message = res.error.message + console.error({ method, params, hostSystemStartOs: true }) + if (string.test(res.error.data)) { + message += ": " + res.error.data + console.error(res.error.data) + } else { + if (res.error.data?.details) { + message += ": " + res.error.data.details + console.error(res.error.data.details) + } + if (res.error.data?.debug) { + message += "\n" + res.error.data.debug + console.error("Debug: " + res.error.data.debug) + } + } + reject(new Error(`${message}@${method}`)) + } else if (testRpcResult(res)) { + resolve(res.result) + } else { + reject(new Error(`malformed response ${JSON.stringify(res)}`)) + } + } + } catch (error) { + reject(error) + } + client.end() + }) + client.on("error", (error) => { + reject(error) + }) + }) + } + + bind(...[options]: Parameters) { + return this.rpcRound("bind", options) as ReturnType + } + clearBindings(...[]: Parameters) { + return this.rpcRound("clearBindings", null) as ReturnType< + T.Effects["clearBindings"] + > + } + clearServiceInterfaces( + ...[]: Parameters + ) { + return this.rpcRound("clearServiceInterfaces", null) as ReturnType< + T.Effects["clearServiceInterfaces"] + > + } + createOverlayedImage(options: { + imageId: string + }): Promise<[string, string]> { + return this.rpcRound("createOverlayedImage", options) as ReturnType< + T.Effects["createOverlayedImage"] + > + } + destroyOverlayedImage(options: { guid: string }): Promise { + return this.rpcRound("destroyOverlayedImage", options) as ReturnType< + T.Effects["destroyOverlayedImage"] + > + } + executeAction(...[options]: Parameters) { + return this.rpcRound("executeAction", options) as ReturnType< + T.Effects["executeAction"] + > + } + exists(...[packageId]: Parameters) { + return this.rpcRound("exists", packageId) as ReturnType + } + 
exportAction(...[options]: Parameters) { + return this.rpcRound("exportAction", options) as ReturnType< + T.Effects["exportAction"] + > + } + exportServiceInterface: Effects["exportServiceInterface"] = ( + ...[options]: Parameters + ) => { + return this.rpcRound("exportServiceInterface", options) as ReturnType< + T.Effects["exportServiceInterface"] + > + } + exposeForDependents(...[options]: any) { + return this.rpcRound("exposeForDependents", null) as ReturnType< + T.Effects["exposeForDependents"] + > + } + getConfigured(...[]: Parameters) { + return this.rpcRound("getConfigured", null) as ReturnType< + T.Effects["getConfigured"] + > + } + getContainerIp(...[]: Parameters) { + return this.rpcRound("getContainerIp", null) as ReturnType< + T.Effects["getContainerIp"] + > + } + getHostInfo: Effects["getHostInfo"] = (...[allOptions]: any[]) => { + const options = { + ...allOptions, + callback: this.callbackHolder.addCallback(allOptions.callback), + } + return this.rpcRound("getHostInfo", options) as ReturnType< + T.Effects["getHostInfo"] + > as any + } + getServiceInterface( + ...[options]: Parameters + ) { + return this.rpcRound("getServiceInterface", { + ...options, + callback: this.callbackHolder.addCallback(options.callback), + }) as ReturnType + } + + getPrimaryUrl(...[options]: Parameters) { + return this.rpcRound("getPrimaryUrl", { + ...options, + callback: this.callbackHolder.addCallback(options.callback), + }) as ReturnType + } + getServicePortForward( + ...[options]: Parameters + ) { + return this.rpcRound("getServicePortForward", options) as ReturnType< + T.Effects["getServicePortForward"] + > + } + getSslCertificate(options: Parameters[0]) { + return this.rpcRound("getSslCertificate", options) as ReturnType< + T.Effects["getSslCertificate"] + > + } + getSslKey(options: Parameters[0]) { + return this.rpcRound("getSslKey", options) as ReturnType< + T.Effects["getSslKey"] + > + } + getSystemSmtp(...[options]: Parameters) { + return this.rpcRound("getSystemSmtp", { + ...options, + callback: this.callbackHolder.addCallback(options.callback), + }) as ReturnType + } + listServiceInterfaces( + ...[options]: Parameters + ) { + return this.rpcRound("listServiceInterfaces", { + ...options, + callback: this.callbackHolder.addCallback(options.callback), + }) as ReturnType + } + mount(...[options]: Parameters) { + return this.rpcRound("mount", options) as ReturnType + } + removeAction(...[options]: Parameters) { + return this.rpcRound("removeAction", options) as ReturnType< + T.Effects["removeAction"] + > + } + removeAddress(...[options]: Parameters) { + return this.rpcRound("removeAddress", options) as ReturnType< + T.Effects["removeAddress"] + > + } + restart(...[]: Parameters) { + return this.rpcRound("restart", null) + } + reverseProxy(...[options]: Parameters) { + return this.rpcRound("reverseProxy", options) as ReturnType< + T.Effects["reverseProxy"] + > + } + running(...[packageId]: Parameters) { + return this.rpcRound("running", { packageId }) as ReturnType< + T.Effects["running"] + > + } + // runRsync(...[options]: Parameters) { + // + // return this.rpcRound('executeAction', options) as ReturnType + // + // return this.rpcRound('executeAction', options) as ReturnType + // } + setConfigured(...[configured]: Parameters) { + return this.rpcRound("setConfigured", { configured }) as ReturnType< + T.Effects["setConfigured"] + > + } + setDependencies( + ...[dependencies]: Parameters + ): ReturnType { + return this.rpcRound("setDependencies", { dependencies }) as ReturnType< + 
T.Effects["setDependencies"] + > + } + setHealth(...[options]: Parameters) { + return this.rpcRound("setHealth", options) as ReturnType< + T.Effects["setHealth"] + > + } + + setMainStatus(o: { status: "running" | "stopped" }): Promise { + return this.rpcRound("setMainStatus", o) as ReturnType< + T.Effects["setHealth"] + > + } + + shutdown(...[]: Parameters) { + return this.rpcRound("shutdown", null) + } + stopped(...[packageId]: Parameters) { + return this.rpcRound("stopped", { packageId }) as ReturnType< + T.Effects["stopped"] + > + } + store: T.Effects["store"] = { + get: async (options: any) => + this.rpcRound("getStore", { + ...options, + callback: this.callbackHolder.addCallback(options.callback), + }) as any, + set: async (options: any) => + this.rpcRound("setStore", options) as ReturnType< + T.Effects["store"]["set"] + >, + } +} diff --git a/container-runtime/src/Adapters/RpcListener.ts b/container-runtime/src/Adapters/RpcListener.ts new file mode 100644 index 0000000000..202e942b55 --- /dev/null +++ b/container-runtime/src/Adapters/RpcListener.ts @@ -0,0 +1,312 @@ +// @ts-check + +import * as net from "net" +import { + object, + some, + string, + literal, + array, + number, + matches, + any, + shape, + anyOf, +} from "ts-matches" + +import { types as T } from "@start9labs/start-sdk" +import * as CP from "child_process" +import * as Mod from "module" +import * as fs from "fs" + +import { CallbackHolder } from "../Models/CallbackHolder" +import { AllGetDependencies } from "../Interfaces/AllGetDependencies" +import { HostSystem } from "../Interfaces/HostSystem" +import { jsonPath } from "../Models/JsonPath" +import { System } from "../Interfaces/System" +type MaybePromise = T | Promise +export const matchRpcResult = anyOf( + object({ result: any }), + object({ + error: object( + { + code: number, + message: string, + data: object( + { + details: string, + debug: any, + }, + ["details", "debug"], + ), + }, + ["data"], + ), + }), +) +export type RpcResult = typeof matchRpcResult._TYPE +type SocketResponse = { jsonrpc: "2.0"; id: IdType } & RpcResult + +const SOCKET_PARENT = "/media/startos/rpc" +const SOCKET_PATH = "/media/startos/rpc/service.sock" +const jsonrpc = "2.0" as const + +const idType = some(string, number, literal(null)) +type IdType = null | string | number +const runType = object({ + id: idType, + method: literal("execute"), + params: object( + { + procedure: string, + input: any, + timeout: number, + }, + ["timeout"], + ), +}) +const sandboxRunType = object({ + id: idType, + method: literal("sandbox"), + params: object( + { + procedure: string, + input: any, + timeout: number, + }, + ["timeout"], + ), +}) +const callbackType = object({ + id: idType, + method: literal("callback"), + params: object({ + callback: string, + args: array, + }), +}) +const initType = object({ + id: idType, + method: literal("init"), +}) +const exitType = object({ + id: idType, + method: literal("exit"), +}) +const evalType = object({ + id: idType, + method: literal("eval"), + params: object({ + script: string, + }), +}) + +const jsonParse = (x: Buffer) => JSON.parse(x.toString()) +function reduceMethod( + methodArgs: object, + effects: HostSystem, +): (previousValue: any, currentValue: string) => any { + return (x: any, method: string) => + Promise.resolve(x) + .then((x) => x[method]) + .then((x) => + typeof x !== "function" + ? 
x + : x({ + ...methodArgs, + effects, + }), + ) +} + +const hasId = object({ id: idType }).test +export class RpcListener { + unixSocketServer = net.createServer(async (server) => {}) + private _system: System | undefined + private _effects: HostSystem | undefined + + constructor( + readonly getDependencies: AllGetDependencies, + private callbacks = new CallbackHolder(), + ) { + if (!fs.existsSync(SOCKET_PARENT)) { + fs.mkdirSync(SOCKET_PARENT, { recursive: true }) + } + this.unixSocketServer.listen(SOCKET_PATH) + + this.unixSocketServer.on("connection", (s) => { + let id: IdType = null + const captureId = (x: X) => { + if (hasId(x)) id = x.id + return x + } + const logData = + (location: string) => + (x: X) => { + console.log({ + location, + stringified: JSON.stringify(x), + type: typeof x, + id, + }) + return x + } + const mapError = (error: any): SocketResponse => ({ + jsonrpc, + id, + error: { + message: typeof error, + data: { + details: error?.message ?? String(error), + debug: error?.stack, + }, + code: 0, + }, + }) + const writeDataToSocket = (x: SocketResponse) => + new Promise((resolve) => s.write(JSON.stringify(x), resolve)) + s.on("data", (a) => + Promise.resolve(a) + .then(logData("dataIn")) + .then(jsonParse) + .then(captureId) + .then((x) => this.dealWithInput(x)) + .catch(mapError) + .then(logData("response")) + .then(writeDataToSocket) + .finally(() => void s.end()), + ) + }) + } + + private get effects() { + return this.getDependencies.hostSystem()(this.callbacks) + } + + private get system() { + if (!this._system) throw new Error("System not initialized") + return this._system + } + + private dealWithInput(input: unknown): MaybePromise { + return matches(input) + .when(some(runType, sandboxRunType), async ({ id, params }) => { + const system = this.system + const procedure = jsonPath.unsafeCast(params.procedure) + return system + .execute(this.effects, { + procedure, + input: params.input, + timeout: params.timeout, + }) + .then((result) => ({ + jsonrpc, + id, + ...result, + })) + .then((x) => { + if ( + ("result" in x && x.result === undefined) || + !("error" in x || "result" in x) + ) + (x as any).result = null + return x + }) + .catch((error) => ({ + jsonrpc, + id, + error: { + code: 0, + message: typeof error, + data: { details: "" + error, debug: error?.stack }, + }, + })) + }) + .when(callbackType, async ({ id, params: { callback, args } }) => + Promise.resolve(this.callbacks.callCallback(callback, args)) + .then((result) => ({ + jsonrpc, + id, + result, + })) + .catch((error) => ({ + jsonrpc, + id, + + error: { + code: 0, + message: typeof error, + data: { + details: error?.message ?? String(error), + debug: error?.stack, + }, + }, + })), + ) + .when(exitType, async ({ id }) => { + if (this._system) this._system.exit(this.effects) + delete this._system + delete this._effects + + return { + jsonrpc, + id, + result: null, + } + }) + .when(initType, async ({ id }) => { + this._system = await this.getDependencies.system() + + return { + jsonrpc, + id, + result: null, + } + }) + .when(evalType, async ({ id, params }) => { + const result = await new Function( + `return (async () => { return (${params.script}) }).call(this)`, + ).call({ + listener: this, + require: require, + }) + return { + jsonrpc, + id, + result: !["string", "number", "boolean", "null", "object"].includes( + typeof result, + ) + ? 
null + : result, + } + }) + .when(shape({ id: idType, method: string }), ({ id, method }) => ({ + jsonrpc, + id, + error: { + code: -32601, + message: `Method not found`, + data: { + details: method, + }, + }, + })) + + .defaultToLazy(() => { + console.warn( + `Coudln't parse the following input ${JSON.stringify(input)}`, + ) + return { + jsonrpc, + id: (input as any)?.id, + error: { + code: -32602, + message: "invalid params", + data: { + details: JSON.stringify(input), + }, + }, + } + }) + } +} diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts new file mode 100644 index 0000000000..1a8b54e226 --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts @@ -0,0 +1,99 @@ +import * as fs from "fs/promises" +import * as cp from "child_process" +import { Overlay, types as T } from "@start9labs/start-sdk" +import { promisify } from "util" +import { DockerProcedure, VolumeId } from "../../../Models/DockerProcedure" +import { Volume } from "./matchVolume" +export const exec = promisify(cp.exec) +export const execFile = promisify(cp.execFile) + +export class DockerProcedureContainer { + private constructor(readonly overlay: Overlay) {} + // static async readonlyOf(data: DockerProcedure) { + // return DockerProcedureContainer.of(data, ["-o", "ro"]) + // } + static async of( + effects: T.Effects, + data: DockerProcedure, + volumes: { [id: VolumeId]: Volume }, + ) { + const overlay = await Overlay.of(effects, data.image) + + if (data.mounts) { + const mounts = data.mounts + for (const mount in mounts) { + const path = mounts[mount].startsWith("/") + ? `${overlay.rootfs}${mounts[mount]}` + : `${overlay.rootfs}/${mounts[mount]}` + await fs.mkdir(path, { recursive: true }) + const volumeMount = volumes[mount] + if (volumeMount.type === "data") { + await overlay.mount( + { type: "volume", id: mount, subpath: null, readonly: false }, + mounts[mount], + ) + } else if (volumeMount.type === "assets") { + await overlay.mount( + { type: "assets", id: mount, subpath: null }, + mounts[mount], + ) + } else if (volumeMount.type === "certificate") { + volumeMount + const certChain = await effects.getSslCertificate({ + packageId: null, + hostId: volumeMount["interface-id"], + algorithm: null, + }) + const key = await effects.getSslKey({ + packageId: null, + hostId: volumeMount["interface-id"], + algorithm: null, + }) + await fs.writeFile( + `${path}/${volumeMount["interface-id"]}.cert.pem`, + certChain.join("\n"), + ) + await fs.writeFile( + `${path}/${volumeMount["interface-id"]}.key.pem`, + key, + ) + } else if (volumeMount.type === "pointer") { + await effects.mount({ + location: path, + target: { + packageId: volumeMount["package-id"], + subpath: volumeMount.path, + readonly: volumeMount.readonly, + volumeId: volumeMount["volume-id"], + }, + }) + } else if (volumeMount.type === "backup") { + throw new Error("TODO") + } + } + } + + return new DockerProcedureContainer(overlay) + } + + async exec(commands: string[]) { + try { + return await this.overlay.exec(commands) + } finally { + await this.overlay.destroy() + } + } + + async execSpawn(commands: string[]) { + try { + const spawned = await this.overlay.spawn(commands) + return spawned + } finally { + await this.overlay.destroy() + } + } + + async spawn(commands: string[]): Promise { + return await this.overlay.spawn(commands) + } +} diff --git 
a/container-runtime/src/Adapters/Systems/SystemForEmbassy/MainLoop.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/MainLoop.ts new file mode 100644 index 0000000000..17fd13468a --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/MainLoop.ts @@ -0,0 +1,250 @@ +import { PolyfillEffects } from "./polyfillEffects" +import { DockerProcedureContainer } from "./DockerProcedureContainer" +import { SystemForEmbassy } from "." +import { HostSystemStartOs } from "../../HostSystemStartOs" +import { Daemons, T, daemons } from "@start9labs/start-sdk" + +const EMBASSY_HEALTH_INTERVAL = 15 * 1000 +const EMBASSY_PROPERTIES_LOOP = 30 * 1000 +/** + * We wanted something to represent what the main loop is doing, and + * in this case it used to run the properties, health, and the docker/ js main. + * Also, this has an ability to clean itself up too if need be. + */ +export class MainLoop { + private healthLoops: + | { + name: string + interval: NodeJS.Timeout + }[] + | undefined + + private mainEvent: + | Promise<{ + daemon: T.DaemonReturned + wait: Promise + }> + | undefined + constructor( + readonly system: SystemForEmbassy, + readonly effects: HostSystemStartOs, + ) { + this.healthLoops = this.constructHealthLoops() + this.mainEvent = this.constructMainEvent() + } + + private async constructMainEvent() { + const { system, effects } = this + const currentCommand: [string, ...string[]] = [ + system.manifest.main.entrypoint, + ...system.manifest.main.args, + ] + + await effects.setMainStatus({ status: "running" }) + const jsMain = (this.system.moduleCode as any)?.jsMain + const dockerProcedureContainer = await DockerProcedureContainer.of( + effects, + this.system.manifest.main, + this.system.manifest.volumes, + ) + if (jsMain) { + const daemons = Daemons.of({ + effects, + started: async (_) => {}, + healthReceipts: [], + }) + throw new Error("todo") + // return { + // daemon, + // wait: daemon.wait().finally(() => { + // this.clean() + // effects.setMainStatus({ status: "stopped" }) + // }), + // } + } + const daemon = await daemons.runDaemon()( + this.effects, + this.system.manifest.main.image, + currentCommand, + { + overlay: dockerProcedureContainer.overlay, + }, + ) + return { + daemon, + wait: daemon.wait().finally(() => { + this.clean() + effects + .setMainStatus({ status: "stopped" }) + .catch((e) => console.error("Could not set the status to stopped")) + }), + } + } + + public async clean(options?: { timeout?: number }) { + const { mainEvent, healthLoops } = this + const main = await mainEvent + delete this.mainEvent + delete this.healthLoops + if (mainEvent) await main?.daemon.term() + if (healthLoops) healthLoops.forEach((x) => clearInterval(x.interval)) + } + + private constructHealthLoops() { + const { manifest } = this.system + const effects = this.effects + const start = Date.now() + return Object.entries(manifest["health-checks"]).map( + ([healthId, value]) => { + const interval = setInterval(async () => { + const actionProcedure = value + const timeChanged = Date.now() - start + if (actionProcedure.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + actionProcedure, + manifest.volumes, + ) + const executed = await container.execSpawn([ + actionProcedure.entrypoint, + ...actionProcedure.args, + JSON.stringify(timeChanged), + ]) + if (executed.exitCode === 59) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "disabled", + message: + executed.stderr.toString() || executed.stdout.toString(), + }) + 
return + } + if (executed.exitCode === 60) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "starting", + message: + executed.stderr.toString() || executed.stdout.toString(), + }) + return + } + if (executed.exitCode === 61) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "loading", + message: + executed.stderr.toString() || executed.stdout.toString(), + }) + return + } + const errorMessage = executed.stderr.toString() + const message = executed.stdout.toString() + if (!!errorMessage) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "failure", + message: errorMessage, + }) + return + } + await effects.setHealth({ + id: healthId, + name: value.name, + result: "success", + message, + }) + return + } else { + actionProcedure + const moduleCode = await this.system.moduleCode + const method = moduleCode.health?.[healthId] + if (!method) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "failure", + message: `Expecting that the js health check ${healthId} exists`, + }) + return + } + + const result = await method( + new PolyfillEffects(effects, this.system.manifest), + timeChanged, + ) + + if ("result" in result) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "success", + message: null, + }) + return + } + if ("error" in result) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "failure", + message: result.error, + }) + return + } + if (!("error-code" in result)) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "failure", + message: `Unknown error type ${JSON.stringify(result)}`, + }) + return + } + const [code, message] = result["error-code"] + if (code === 59) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "disabled", + message, + }) + return + } + if (code === 60) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "starting", + message, + }) + return + } + if (code === 61) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "loading", + message, + }) + return + } + + await effects.setHealth({ + id: healthId, + name: value.name, + result: "failure", + message: `${result["error-code"][0]}: ${result["error-code"][1]}`, + }) + return + } + }, EMBASSY_HEALTH_INTERVAL) + + return { name: healthId, interval } + }, + ) + } +} diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts new file mode 100644 index 0000000000..ade166eff5 --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts @@ -0,0 +1,806 @@ +import { types as T, utils, EmVer } from "@start9labs/start-sdk" +import * as fs from "fs/promises" + +import { PolyfillEffects } from "./polyfillEffects" +import { Duration, duration } from "../../../Models/Duration" +import { System } from "../../../Interfaces/System" +import { matchManifest, Manifest, Procedure } from "./matchManifest" +import * as childProcess from "node:child_process" +import { DockerProcedureContainer } from "./DockerProcedureContainer" +import { promisify } from "node:util" +import * as U from "./oldEmbassyTypes" +import { MainLoop } from "./MainLoop" +import { + matches, + boolean, + dictionary, + literal, + literals, + object, + string, + unknown, + any, + tuple, + number, + anyOf, + deferred, + Parser, +} from "ts-matches" +import { HostSystemStartOs } from "../../HostSystemStartOs" +import { 
JsonPath, unNestPath } from "../../../Models/JsonPath" +import { RpcResult, matchRpcResult } from "../../RpcListener" +import { InputSpec } from "@start9labs/start-sdk/cjs/sdk/lib/config/configTypes" + +type Optional = A | undefined | null +function todo(): never { + throw new Error("Not implemented") +} +const execFile = promisify(childProcess.execFile) + +const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json" +const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js" +const EMBASSY_POINTER_PATH_PREFIX = "/embassyConfig" + +export type PackagePropertiesV2 = { + [name: string]: PackagePropertyObject | PackagePropertyString +} +export type PackagePropertyString = { + type: "string" + description?: string + value: string + /** Let's the ui make this copyable button */ + copyable?: boolean + /** Let the ui create a qr for this field */ + qr?: boolean + /** Hiding the value unless toggled off for field */ + masked?: boolean +} +export type PackagePropertyObject = { + value: PackagePropertiesV2 + type: "object" + description: string +} + +const asProperty_ = ( + x: PackagePropertyString | PackagePropertyObject, +): T.PropertiesValue => { + if (x.type === "object") { + return { + ...x, + value: Object.fromEntries( + Object.entries(x.value).map(([key, value]) => [ + key, + asProperty_(value), + ]), + ), + } + } + return { + masked: false, + description: null, + qr: null, + copyable: null, + ...x, + } +} +const asProperty = (x: PackagePropertiesV2): T.PropertiesReturn => + Object.fromEntries( + Object.entries(x).map(([key, value]) => [key, asProperty_(value)]), + ) +const [matchPackageProperties, setMatchPackageProperties] = + deferred() +const matchPackagePropertyObject: Parser = + object({ + value: matchPackageProperties, + type: literal("object"), + description: string, + }) + +const matchPackagePropertyString: Parser = + object( + { + type: literal("string"), + description: string, + value: string, + copyable: boolean, + qr: boolean, + masked: boolean, + }, + ["copyable", "description", "qr", "masked"], + ) +setMatchPackageProperties( + dictionary([ + string, + anyOf(matchPackagePropertyObject, matchPackagePropertyString), + ]), +) + +const matchProperties = object({ + version: literal(2), + data: matchPackageProperties, +}) + +export class SystemForEmbassy implements System { + currentRunning: MainLoop | undefined + static async of(manifestLocation: string = MANIFEST_LOCATION) { + const moduleCode = await import(EMBASSY_JS_LOCATION) + .catch((_) => require(EMBASSY_JS_LOCATION)) + .catch(async (_) => { + console.error("Could not load the js") + console.error({ + exists: await fs.stat(EMBASSY_JS_LOCATION), + }) + return {} + }) + const manifestData = await fs.readFile(manifestLocation, "utf-8") + return new SystemForEmbassy( + matchManifest.unsafeCast(JSON.parse(manifestData)), + moduleCode, + ) + } + constructor( + readonly manifest: Manifest, + readonly moduleCode: Partial, + ) {} + async execute( + effects: HostSystemStartOs, + options: { + procedure: JsonPath + input: unknown + timeout?: number | undefined + }, + ): Promise { + return this._execute(effects, options) + .then((x) => + matches(x) + .when( + object({ + result: any, + }), + (x) => x, + ) + .when( + object({ + error: string, + }), + (x) => ({ + error: { + code: 0, + message: x.error, + }, + }), + ) + .when( + object({ + "error-code": tuple(number, string), + }), + ({ "error-code": [code, message] }) => ({ + error: { + code, + message, + }, + }), + ) + .defaultTo({ result: x }), + ) + .catch((error: 
unknown) => { + if (error instanceof Error) + return { + error: { + code: 0, + message: error.name, + data: { + details: error.message, + debug: `${error?.cause ?? "[noCause]"}:${error?.stack ?? "[noStack]"}`, + }, + }, + } + if (matchRpcResult.test(error)) return error + return { + error: { + code: 0, + message: String(error), + }, + } + }) + } + async exit(effects: HostSystemStartOs): Promise { + if (this.currentRunning) await this.currentRunning.clean() + delete this.currentRunning + } + async _execute( + effects: HostSystemStartOs, + options: { + procedure: JsonPath + input: unknown + timeout?: number | undefined + }, + ): Promise { + const input = options.input + switch (options.procedure) { + case "/backup/create": + return this.createBackup(effects) + case "/backup/restore": + return this.restoreBackup(effects) + case "/config/get": + return this.getConfig(effects) + case "/config/set": + return this.setConfig(effects, input) + case "/properties": + return this.properties(effects) + case "/actions/metadata": + return todo() + case "/init": + return this.init(effects, string.optional().unsafeCast(input)) + case "/uninit": + return this.uninit(effects, string.optional().unsafeCast(input)) + case "/main/start": + return this.mainStart(effects) + case "/main/stop": + return this.mainStop(effects) + default: + const procedures = unNestPath(options.procedure) + switch (true) { + case procedures[1] === "actions" && procedures[3] === "get": + return this.action(effects, procedures[2], input) + case procedures[1] === "actions" && procedures[3] === "run": + return this.action(effects, procedures[2], input) + case procedures[1] === "dependencies" && procedures[3] === "query": + return this.dependenciesAutoconfig(effects, procedures[2], input) + + case procedures[1] === "dependencies" && procedures[3] === "update": + return this.dependenciesAutoconfig(effects, procedures[2], input) + } + } + throw new Error(`Could not find the path for ${options.procedure}`) + } + private async init( + effects: HostSystemStartOs, + previousVersion: Optional, + ): Promise { + if (previousVersion) await this.migration(effects, previousVersion) + await effects.setMainStatus({ status: "stopped" }) + await this.exportActions(effects) + } + async exportActions(effects: HostSystemStartOs) { + const manifest = this.manifest + if (!manifest.actions) return + for (const [actionId, action] of Object.entries(manifest.actions)) { + const hasRunning = !!action["allowed-statuses"].find( + (x) => x === "running", + ) + const hasStopped = !!action["allowed-statuses"].find( + (x) => x === "stopped", + ) + // prettier-ignore + const allowedStatuses = hasRunning && hasStopped ? "any": + hasRunning ? 
"onlyRunning" : + "onlyStopped" + await effects.exportAction({ + id: actionId, + metadata: { + name: action.name, + description: action.description, + warning: action.warning || null, + input: action["input-spec"] as InputSpec, + disabled: false, + allowedStatuses, + group: null, + }, + }) + } + } + private async uninit( + effects: HostSystemStartOs, + nextVersion: Optional, + ): Promise { + // TODO Do a migration down if the version exists + await effects.setMainStatus({ status: "stopped" }) + } + private async mainStart(effects: HostSystemStartOs): Promise { + if (!!this.currentRunning) return + + this.currentRunning = new MainLoop(this, effects) + } + private async mainStop( + effects: HostSystemStartOs, + options?: { timeout?: number }, + ): Promise { + const { currentRunning } = this + delete this.currentRunning + if (currentRunning) { + await currentRunning.clean({ + timeout: options?.timeout || this.manifest.main["sigterm-timeout"], + }) + } + return duration(this.manifest.main["sigterm-timeout"], "s") + } + private async createBackup(effects: HostSystemStartOs): Promise { + const backup = this.manifest.backup.create + if (backup.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + backup, + this.manifest.volumes, + ) + await container.exec([backup.entrypoint, ...backup.args]) + } else { + const moduleCode = await this.moduleCode + await moduleCode.createBackup?.( + new PolyfillEffects(effects, this.manifest), + ) + } + } + private async restoreBackup(effects: HostSystemStartOs): Promise { + const restoreBackup = this.manifest.backup.restore + if (restoreBackup.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + restoreBackup, + this.manifest.volumes, + ) + await container.exec([restoreBackup.entrypoint, ...restoreBackup.args]) + } else { + const moduleCode = await this.moduleCode + await moduleCode.restoreBackup?.( + new PolyfillEffects(effects, this.manifest), + ) + } + } + private async getConfig(effects: HostSystemStartOs): Promise { + return this.getConfigUncleaned(effects).then(removePointers) + } + private async getConfigUncleaned( + effects: HostSystemStartOs, + ): Promise { + const config = this.manifest.config?.get + if (!config) return { spec: {} } + if (config.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + config, + this.manifest.volumes, + ) + // TODO: yaml + return JSON.parse( + ( + await container.exec([config.entrypoint, ...config.args]) + ).stdout.toString(), + ) + } else { + const moduleCode = await this.moduleCode + const method = moduleCode.getConfig + if (!method) throw new Error("Expecting that the method getConfig exists") + return (await method(new PolyfillEffects(effects, this.manifest)).then( + (x) => { + if ("result" in x) return x.result + if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + }, + )) as any + } + } + private async setConfig( + effects: HostSystemStartOs, + newConfigWithoutPointers: unknown, + ): Promise { + const newConfig = structuredClone(newConfigWithoutPointers) + await updateConfig( + effects, + await this.getConfigUncleaned(effects).then((x) => x.spec), + newConfig, + ) + const setConfigValue = this.manifest.config?.set + if (!setConfigValue) return { signal: "SIGTERM", "depends-on": {} } + if (setConfigValue.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + setConfigValue, + this.manifest.volumes, + 
) + return JSON.parse( + ( + await container.exec([ + setConfigValue.entrypoint, + ...setConfigValue.args, + JSON.stringify(newConfig), + ]) + ).stdout.toString(), + ) + } else if (setConfigValue.type === "script") { + const moduleCode = await this.moduleCode + const method = moduleCode.setConfig + if (!method) throw new Error("Expecting that the method setConfig exists") + return await method( + new PolyfillEffects(effects, this.manifest), + newConfig as U.Config, + ).then((x): T.SetResult => { + if ("result" in x) + return { + "depends-on": x.result["depends-on"], + signal: x.result.signal === "SIGEMT" ? "SIGTERM" : x.result.signal, + } + if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + }) + } else { + return { + "depends-on": {}, + signal: "SIGTERM", + } + } + } + private async migration( + effects: HostSystemStartOs, + fromVersion: string, + ): Promise { + const fromEmver = EmVer.from(fromVersion) + const currentEmver = EmVer.from(this.manifest.version) + if (!this.manifest.migrations) return { configured: true } + const fromMigration = Object.entries(this.manifest.migrations.from) + .map(([version, procedure]) => [EmVer.from(version), procedure] as const) + .find( + ([versionEmver, procedure]) => + versionEmver.greaterThan(fromEmver) && + versionEmver.lessThanOrEqual(currentEmver), + ) + const toMigration = Object.entries(this.manifest.migrations.to) + .map(([version, procedure]) => [EmVer.from(version), procedure] as const) + .find( + ([versionEmver, procedure]) => + versionEmver.greaterThan(fromEmver) && + versionEmver.lessThanOrEqual(currentEmver), + ) + + // prettier-ignore + const migration = ( + fromEmver.greaterThan(currentEmver) ? [toMigration, fromMigration] : + [fromMigration, toMigration]).filter(Boolean)[0] + + if (migration) { + const [version, procedure] = migration + if (procedure.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + procedure, + this.manifest.volumes, + ) + return JSON.parse( + ( + await container.exec([ + procedure.entrypoint, + ...procedure.args, + JSON.stringify(fromVersion), + ]) + ).stdout.toString(), + ) + } else if (procedure.type === "script") { + const moduleCode = await this.moduleCode + const method = moduleCode.migration + if (!method) + throw new Error("Expecting that the method migration exists") + return (await method( + new PolyfillEffects(effects, this.manifest), + fromVersion as string, + ).then((x) => { + if ("result" in x) return x.result + if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + })) as any + } + } + return { configured: true } + } + private async properties( + effects: HostSystemStartOs, + ): Promise> { + // TODO BLU-J set the properties ever so often + const setConfigValue = this.manifest.properties + if (!setConfigValue) throw new Error("There is no properties") + if (setConfigValue.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + setConfigValue, + this.manifest.volumes, + ) + const properties = matchProperties.unsafeCast( + JSON.parse( + ( + await container.exec([ + setConfigValue.entrypoint, + ...setConfigValue.args, + ]) + ).stdout.toString(), + ), + ) + return asProperty(properties.data) + } else if (setConfigValue.type === "script") { + const moduleCode = this.moduleCode + const method = moduleCode.properties + if (!method) + throw new Error("Expecting that the method 
properties exists") + const properties = matchProperties.unsafeCast( + await method(new PolyfillEffects(effects, this.manifest)).then((x) => { + if ("result" in x) return x.result + if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + }), + ) + return asProperty(properties.data) + } + throw new Error(`Unknown type in the fetch properties: ${setConfigValue}`) + } + private async health( + effects: HostSystemStartOs, + healthId: string, + timeSinceStarted: unknown, + ): Promise { + const healthProcedure = this.manifest["health-checks"][healthId] + if (!healthProcedure) return + if (healthProcedure.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + healthProcedure, + this.manifest.volumes, + ) + return JSON.parse( + ( + await container.exec([ + healthProcedure.entrypoint, + ...healthProcedure.args, + JSON.stringify(timeSinceStarted), + ]) + ).stdout.toString(), + ) + } else if (healthProcedure.type === "script") { + const moduleCode = await this.moduleCode + const method = moduleCode.health?.[healthId] + if (!method) throw new Error("Expecting that the method health exists") + await method( + new PolyfillEffects(effects, this.manifest), + Number(timeSinceStarted), + ).then((x) => { + if ("result" in x) return x.result + if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + }) + } + } + private async action( + effects: HostSystemStartOs, + actionId: string, + formData: unknown, + ): Promise { + const actionProcedure = this.manifest.actions?.[actionId]?.implementation + if (!actionProcedure) return { message: "Action not found", value: null } + if (actionProcedure.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + actionProcedure, + this.manifest.volumes, + ) + return JSON.parse( + ( + await container.exec([ + actionProcedure.entrypoint, + ...actionProcedure.args, + JSON.stringify(formData), + ]) + ).stdout.toString(), + ) + } else { + const moduleCode = await this.moduleCode + const method = moduleCode.action?.[actionId] + if (!method) throw new Error("Expecting that the method action exists") + return (await method( + new PolyfillEffects(effects, this.manifest), + formData as any, + ).then((x) => { + if ("result" in x) return x.result + if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + })) as any + } + } + private async dependenciesCheck( + effects: HostSystemStartOs, + id: string, + oldConfig: unknown, + ): Promise { + const actionProcedure = this.manifest.dependencies?.[id]?.config?.check + if (!actionProcedure) return { message: "Action not found", value: null } + if (actionProcedure.type === "docker") { + const container = await DockerProcedureContainer.of( + effects, + actionProcedure, + this.manifest.volumes, + ) + return JSON.parse( + ( + await container.exec([ + actionProcedure.entrypoint, + ...actionProcedure.args, + JSON.stringify(oldConfig), + ]) + ).stdout.toString(), + ) + } else if (actionProcedure.type === "script") { + const moduleCode = await this.moduleCode + const method = moduleCode.dependencies?.[id]?.check + if (!method) + throw new Error( + `Expecting that the method dependency check ${id} exists`, + ) + return (await method( + new PolyfillEffects(effects, this.manifest), + oldConfig as any, + ).then((x) => { + if ("result" in x) return x.result 
+ if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + })) as any + } else { + return {} + } + } + private async dependenciesAutoconfig( + effects: HostSystemStartOs, + id: string, + oldConfig: unknown, + ): Promise { + const moduleCode = await this.moduleCode + const method = moduleCode.dependencies?.[id]?.autoConfigure + if (!method) + throw new Error( + `Expecting that the method dependency autoConfigure ${id} exists`, + ) + return (await method( + new PolyfillEffects(effects, this.manifest), + oldConfig as any, + ).then((x) => { + if ("result" in x) return x.result + if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + })) as any + } +} +async function removePointers(value: T.ConfigRes): Promise { + const startingSpec = structuredClone(value.spec) + const config = + value.config && cleanConfigFromPointers(value.config, startingSpec) + const spec = cleanSpecOfPointers(startingSpec) + + return { config, spec } +} + +const matchPointer = object({ + type: literal("pointer"), +}) + +const matchPointerPackage = object({ + subtype: literal("package"), + target: literals("tor-key", "tor-address", "lan-address"), + "package-id": string, + interface: string, +}) +const matchPointerConfig = object({ + subtype: literal("package"), + target: literals("config"), + "package-id": string, + selector: string, + multi: boolean, +}) +const matchSpec = object({ + spec: object, +}) +const matchVariants = object({ variants: dictionary([string, unknown]) }) +function cleanSpecOfPointers(mutSpec: T): T { + if (!object.test(mutSpec)) return mutSpec + for (const key in mutSpec) { + const value = mutSpec[key] + if (matchSpec.test(value)) value.spec = cleanSpecOfPointers(value.spec) + if (matchVariants.test(value)) + value.variants = Object.fromEntries( + Object.entries(value.variants).map(([key, value]) => [ + key, + cleanSpecOfPointers(value), + ]), + ) + if (!matchPointer.test(value)) continue + delete mutSpec[key] + // // if (value.target === ) + } + + return mutSpec +} +function isKeyOf( + key: string, + ofObject: O, +): key is keyof O & string { + return key in ofObject +} + +// prettier-ignore +type CleanConfigFromPointers = + [C, S] extends [object, object] ? { + [K in (keyof C & keyof S ) & string]: ( + S[K] extends {type: "pointer"} ? never : + S[K] extends {spec: object & infer B} ? CleanConfigFromPointers : + C[K] + ) + } : + null + +function cleanConfigFromPointers( + config: C, + spec: S, +): CleanConfigFromPointers { + const newConfig = {} as CleanConfigFromPointers + + if (!(object.test(config) && object.test(spec)) || newConfig == null) + return null as CleanConfigFromPointers + + for (const key of Object.keys(spec)) { + if (!isKeyOf(key, spec)) continue + if (!isKeyOf(key, config)) continue + const partSpec = spec[key] + if (matchPointer.test(partSpec)) continue + ;(newConfig as any)[key] = matchSpec.test(partSpec) + ? 
cleanConfigFromPointers(config[key], partSpec.spec) + : config[key] + } + return newConfig as CleanConfigFromPointers +} + +async function updateConfig( + effects: HostSystemStartOs, + spec: unknown, + mutConfigValue: unknown, +) { + if (!dictionary([string, unknown]).test(spec)) return + if (!dictionary([string, unknown]).test(mutConfigValue)) return + for (const key in spec) { + const specValue = spec[key] + + const newConfigValue = mutConfigValue[key] + if (matchSpec.test(specValue)) { + const updateObject = { spec: null } + await updateConfig(effects, { spec: specValue.spec }, updateObject) + mutConfigValue[key] = updateObject.spec + } + if ( + matchVariants.test(specValue) && + object({ tag: object({ id: string }) }).test(newConfigValue) && + newConfigValue.tag.id in specValue.variants + ) { + // Not going to do anything on the variants... + } + if (!matchPointer.test(specValue)) continue + if (matchPointerConfig.test(specValue)) { + const configValue = (await effects.store.get({ + packageId: specValue["package-id"], + callback() {}, + path: `${EMBASSY_POINTER_PATH_PREFIX}${specValue.selector}` as any, + })) as any + mutConfigValue[key] = configValue + } + if (matchPointerPackage.test(specValue)) { + if (specValue.target === "tor-key") + throw new Error("This service uses an unsupported target TorKey") + const filled = await utils + .getServiceInterface(effects, { + packageId: specValue["package-id"], + id: specValue.interface, + }) + .once() + .catch(() => null) + + mutConfigValue[key] = + filled === null + ? "" + : specValue.target === "lan-address" + ? filled.addressInfo.localHostnames[0] + : filled.addressInfo.onionHostnames[0] + } + } +} diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts new file mode 100644 index 0000000000..9b70f884b8 --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts @@ -0,0 +1,119 @@ +import { + object, + literal, + string, + array, + boolean, + dictionary, + literals, + number, + unknown, + some, + every, +} from "ts-matches" +import { matchVolume } from "./matchVolume" +import { matchDockerProcedure } from "../../../Models/DockerProcedure" + +const matchJsProcedure = object( + { + type: literal("script"), + args: array(unknown), + }, + ["args"], + { + args: [], + }, +) + +const matchProcedure = some(matchDockerProcedure, matchJsProcedure) +export type Procedure = typeof matchProcedure._TYPE + +const matchAction = object( + { + name: string, + description: string, + warning: string, + implementation: matchProcedure, + "allowed-statuses": array(literals("running", "stopped")), + "input-spec": unknown, + }, + ["warning", "input-spec", "input-spec"], +) +export const matchManifest = object( + { + id: string, + version: string, + main: matchDockerProcedure, + assets: object( + { + assets: string, + scripts: string, + }, + ["assets", "scripts"], + ), + "health-checks": dictionary([ + string, + every( + matchProcedure, + object({ + name: string, + }), + ), + ]), + config: object({ + get: matchProcedure, + set: matchProcedure, + }), + properties: matchProcedure, + volumes: dictionary([string, matchVolume]), + interfaces: dictionary([ + string, + object({ + name: string, + "tor-config": object({}), + "lan-config": object({}), + ui: boolean, + protocols: array(string), + }), + ]), + backup: object({ + create: matchProcedure, + restore: matchProcedure, + }), + migrations: object({ + to: dictionary([string, 
matchProcedure]), + from: dictionary([string, matchProcedure]), + }), + dependencies: dictionary([ + string, + object( + { + version: string, + requirement: some( + object({ + type: literal("opt-in"), + how: string, + }), + object({ + type: literal("opt-out"), + how: string, + }), + object({ + type: literal("required"), + }), + ), + description: string, + config: object({ + check: matchProcedure, + "auto-configure": matchProcedure, + }), + }, + ["description", "config"], + ), + ]), + + actions: dictionary([string, matchAction]), + }, + ["config", "actions", "properties", "migrations", "dependencies"], +) +export type Manifest = typeof matchManifest._TYPE diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchVolume.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchVolume.ts new file mode 100644 index 0000000000..7aa579ecf3 --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchVolume.ts @@ -0,0 +1,35 @@ +import { object, literal, string, boolean, some } from "ts-matches" + +const matchDataVolume = object( + { + type: literal("data"), + readonly: boolean, + }, + ["readonly"], +) +const matchAssetVolume = object({ + type: literal("assets"), +}) +const matchPointerVolume = object({ + type: literal("pointer"), + "package-id": string, + "volume-id": string, + path: string, + readonly: boolean, +}) +const matchCertificateVolume = object({ + type: literal("certificate"), + "interface-id": string, +}) +const matchBackupVolume = object({ + type: literal("backup"), + readonly: boolean, +}) +export const matchVolume = some( + matchDataVolume, + matchAssetVolume, + matchPointerVolume, + matchCertificateVolume, + matchBackupVolume, +) +export type Volume = typeof matchVolume._TYPE diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/oldEmbassyTypes.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/oldEmbassyTypes.ts new file mode 100644 index 0000000000..072a1171c7 --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/oldEmbassyTypes.ts @@ -0,0 +1,482 @@ +// deno-lint-ignore no-namespace +export type ExpectedExports = { + version: 2 + /** Set configuration is called after we have modified and saved the configuration in the embassy ui. Use this to make a file for the docker to read from for configuration. */ + setConfig: (effects: Effects, input: Config) => Promise> + /** Get configuration returns a shape that describes the format that the embassy ui will generate, and later send to the set config */ + getConfig: (effects: Effects) => Promise> + /** These are how we make sure the our dependency configurations are valid and if not how to fix them. */ + dependencies: Dependencies + /** For backing up service data though the embassyOS UI */ + createBackup: (effects: Effects) => Promise> + /** For restoring service data that was previously backed up using the embassyOS UI create backup flow. Backup restores are also triggered via the embassyOS UI, or doing a system restore flow during setup. 
*/ + restoreBackup: (effects: Effects) => Promise> + /** Properties are used to get values from the docker, like a username + password, what ports we are hosting from */ + properties: (effects: Effects) => Promise> + health: { + /** Should be the health check id */ + [id: string]: ( + effects: Effects, + dateMs: number, + ) => Promise> + } + migration: ( + effects: Effects, + version: string, + ...args: unknown[] + ) => Promise> + action: { + [id: string]: ( + effects: Effects, + config?: Config, + ) => Promise> + } + + /** + * This is the entrypoint for the main container. Used to start up something like the service that the + * package represents, like running a bitcoind in a bitcoind-wrapper. + */ + main: (effects: Effects) => Promise> +} + +/** Used to reach out from the pure js runtime */ +export type Effects = { + /** Usable when not sandboxed */ + writeFile(input: { + path: string + volumeId: string + toWrite: string + }): Promise + readFile(input: { volumeId: string; path: string }): Promise + metadata(input: { volumeId: string; path: string }): Promise + /** Create a directory. Usable when not sandboxed */ + createDir(input: { volumeId: string; path: string }): Promise + + readDir(input: { volumeId: string; path: string }): Promise + /** Remove a directory. Usable when not sandboxed */ + removeDir(input: { volumeId: string; path: string }): Promise + removeFile(input: { volumeId: string; path: string }): Promise + + /** Write a json file into an object. Usable when not sandboxed */ + writeJsonFile(input: { + volumeId: string + path: string + toWrite: Record + }): Promise + + /** Read a json file into an object */ + readJsonFile(input: { + volumeId: string + path: string + }): Promise> + + runCommand(input: { + command: string + args?: string[] + timeoutMillis?: number + }): Promise> + runDaemon(input: { command: string; args?: string[] }): { + wait(): Promise> + term(): Promise + } + + chown(input: { volumeId: string; path: string; uid: string }): Promise + chmod(input: { volumeId: string; path: string; mode: string }): Promise + + sleep(timeMs: number): Promise + + /** Log at the trace level */ + trace(whatToPrint: string): void + /** Log at the warn level */ + warn(whatToPrint: string): void + /** Log at the error level */ + error(whatToPrint: string): void + /** Log at the debug level */ + debug(whatToPrint: string): void + /** Log at the info level */ + info(whatToPrint: string): void + + /** Sandbox mode lets us read but not write */ + is_sandboxed(): boolean + + exists(input: { volumeId: string; path: string }): Promise + bindLocal(options: { + internalPort: number + name: string + externalPort: number + }): Promise + bindTor(options: { + internalPort: number + name: string + externalPort: number + }): Promise + + fetch( + url: string, + options?: { + method?: "GET" | "POST" | "PUT" | "DELETE" | "HEAD" | "PATCH" + headers?: Record + body?: string + }, + ): Promise<{ + method: string + ok: boolean + status: number + headers: Record + body?: string | null + /// Returns the body as a string + text(): Promise + /// Returns the body as a json + json(): Promise + }> + + runRsync(options: { + srcVolume: string + dstVolume: string + srcPath: string + dstPath: string + // rsync options: https://linux.die.net/man/1/rsync + options: BackupOptions + }): { + id: () => Promise + wait: () => Promise + progress: () => Promise + } +} + +// rsync options: https://linux.die.net/man/1/rsync +export type BackupOptions = { + delete: boolean + force: boolean + ignoreExisting: boolean + exclude: 
string[] +} +export type Metadata = { + fileType: string + isDir: boolean + isFile: boolean + isSymlink: boolean + len: number + modified?: Date + accessed?: Date + created?: Date + readonly: boolean + uid: number + gid: number + mode: number +} + +export type MigrationRes = { + configured: boolean +} + +export type ActionResult = { + version: "0" + message: string + value?: string + copyable: boolean + qr: boolean +} + +export type ConfigRes = { + /** This should be the previous config, that way during set config we start with the previous */ + config?: Config + /** Shape that is describing the form in the ui */ + spec: ConfigSpec +} +export type Config = { + [propertyName: string]: unknown +} + +export type ConfigSpec = { + /** Given a config value, define what it should render with the following spec */ + [configValue: string]: ValueSpecAny +} +export type WithDefault = T & { + default: Default +} +export type WithNullableDefault = T & { + default?: Default +} + +export type WithDescription = T & { + description?: string + name: string + warning?: string +} + +export type WithOptionalDescription = T & { + /** @deprecated - optional only for backwards compatibility */ + description?: string + /** @deprecated - optional only for backwards compatibility */ + name?: string + warning?: string +} + +export type ListSpec = { + spec: T + range: string +} + +export type Tag = V & { + type: T +} + +export type Subtype = V & { + subtype: T +} + +export type Target = V & { + target: T +} + +export type UniqueBy = + | { + any: UniqueBy[] + } + | string + | null + +export type WithNullable = T & { + nullable: boolean +} +export type DefaultString = + | string + | { + /** The chars available for the random generation */ + charset?: string + /** Length that we generate to */ + len: number + } + +export type ValueSpecString = // deno-lint-ignore ban-types + ( + | {} + | { + pattern: string + "pattern-description": string + } + ) & { + copyable?: boolean + masked?: boolean + placeholder?: string + } +export type ValueSpecNumber = { + /** Something like [3,6] or [0, *) */ + range?: string + integral?: boolean + /** Used a description of the units */ + units?: string + placeholder?: number +} +export type ValueSpecBoolean = Record +export type ValueSpecAny = + | Tag<"boolean", WithDescription>> + | Tag< + "string", + WithDescription< + WithNullableDefault, DefaultString> + > + > + | Tag< + "number", + WithDescription< + WithNullableDefault, number> + > + > + | Tag< + "enum", + WithDescription< + WithDefault< + { + values: readonly string[] | string[] + "value-names": { + [key: string]: string + } + }, + string + > + > + > + | Tag<"list", ValueSpecList> + | Tag<"object", WithDescription>> + | Tag<"union", WithOptionalDescription>> + | Tag< + "pointer", + WithDescription< + | Subtype< + "package", + | Target< + "tor-key", + { + "package-id": string + interface: string + } + > + | Target< + "tor-address", + { + "package-id": string + interface: string + } + > + | Target< + "lan-address", + { + "package-id": string + interface: string + } + > + | Target< + "config", + { + "package-id": string + selector: string + multi: boolean + } + > + > + | Subtype<"system", Record> + > + > +export type ValueSpecUnion = { + /** What tag for the specification, for tag unions */ + tag: { + id: string + name: string + description?: string + "variant-names": { + [key: string]: string + } + } + /** The possible enum values */ + variants: { + [key: string]: ConfigSpec + } + "display-as"?: string + "unique-by"?: UniqueBy +} 
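Illustrative note (not part of the patch): to make these legacy value-spec shapes concrete, here is a minimal sketch of a ConfigSpec of the kind an old-style package might declare. The field names ("rpc-user", "advanced", "log-level") are hypothetical and chosen only for illustration; the shapes follow the ValueSpecAny, ValueSpecObject, and ConfigSpec types defined in this file.

// Hypothetical example only: shows how ValueSpecAny variants compose into a ConfigSpec.
const exampleSpec: ConfigSpec = {
  "rpc-user": {
    type: "string",
    name: "RPC Username",
    description: "Username for RPC connections",
    nullable: false,
    default: "admin",
    copyable: true,
    masked: false,
  },
  advanced: {
    type: "object",
    name: "Advanced",
    spec: {
      "log-level": {
        type: "enum",
        name: "Log Level",
        values: ["error", "warn", "info"],
        "value-names": { error: "Error", warn: "Warning", info: "Info" },
        default: "info",
      },
    },
  },
}

Note how a string value carries its own nullable/default/copyable flags, while object and enum values nest a child spec and a value-name map respectively.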
+export type ValueSpecObject = { + spec: ConfigSpec + "display-as"?: string + "unique-by"?: UniqueBy +} +export type ValueSpecList = + | Subtype< + "boolean", + WithDescription, boolean[]>> + > + | Subtype< + "string", + WithDescription, string[]>> + > + | Subtype< + "number", + WithDescription, number[]>> + > + | Subtype< + "enum", + WithDescription, string[]>> + > + | Subtype< + "object", + WithDescription< + WithNullableDefault< + ListSpec, + Record[] + > + > + > + | Subtype< + "union", + WithDescription, string[]>> + > +export type ValueSpecEnum = { + values: string[] + "value-names": { [key: string]: string } +} + +export type SetResult = { + /** These are the unix process signals */ + signal: + | "SIGTERM" + | "SIGHUP" + | "SIGINT" + | "SIGQUIT" + | "SIGILL" + | "SIGTRAP" + | "SIGABRT" + | "SIGBUS" + | "SIGFPE" + | "SIGKILL" + | "SIGUSR1" + | "SIGSEGV" + | "SIGUSR2" + | "SIGPIPE" + | "SIGALRM" + | "SIGSTKFLT" + | "SIGCHLD" + | "SIGCONT" + | "SIGSTOP" + | "SIGTSTP" + | "SIGTTIN" + | "SIGTTOU" + | "SIGURG" + | "SIGXCPU" + | "SIGXFSZ" + | "SIGVTALRM" + | "SIGPROF" + | "SIGWINCH" + | "SIGIO" + | "SIGPWR" + | "SIGSYS" + | "SIGEMT" + | "SIGINFO" + "depends-on": DependsOn +} + +export type DependsOn = { + [packageId: string]: string[] +} + +export type KnownError = + | { error: string } + | { + "error-code": [number, string] | readonly [number, string] + } +export type ResultType = KnownError | { result: T } + +export type PackagePropertiesV2 = { + [name: string]: PackagePropertyObject | PackagePropertyString +} +export type PackagePropertyString = { + type: "string" + description?: string + value: string + /** Let's the ui make this copyable button */ + copyable?: boolean + /** Let the ui create a qr for this field */ + qr?: boolean + /** Hiding the value unless toggled off for field */ + masked?: boolean +} +export type PackagePropertyObject = { + value: PackagePropertiesV2 + type: "object" + description: string +} + +export type Properties = { + version: 2 + data: PackagePropertiesV2 +} + +export type Dependencies = { + /** Id is the id of the package, should be the same as the manifest */ + [id: string]: { + /** Checks are called to make sure that our dependency is in the correct shape. 
If a known error is returned we know that the dependency needs modification */ + check(effects: Effects, input: Config): Promise> + /** This is called after we know that the dependency package needs a new configuration, this would be a transform for defaults */ + autoConfigure(effects: Effects, input: Config): Promise> + } +} diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/polyfillEffects.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/polyfillEffects.ts new file mode 100644 index 0000000000..4c176b2f7a --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/polyfillEffects.ts @@ -0,0 +1,215 @@ +import * as fs from "fs/promises" +import * as oet from "./oldEmbassyTypes" +import { Volume } from "../../../Models/Volume" +import * as child_process from "child_process" +import { promisify } from "util" +import { Daemons, startSdk, T } from "@start9labs/start-sdk" +import { HostSystemStartOs } from "../../HostSystemStartOs" +import "isomorphic-fetch" +import { Manifest } from "./matchManifest" + +const execFile = promisify(child_process.execFile) + +export class PolyfillEffects implements oet.Effects { + constructor( + readonly effects: HostSystemStartOs, + private manifest: Manifest, + ) {} + async writeFile(input: { + path: string + volumeId: string + toWrite: string + }): Promise { + await fs.writeFile( + new Volume(input.volumeId, input.path).path, + input.toWrite, + ) + } + async readFile(input: { volumeId: string; path: string }): Promise { + return ( + await fs.readFile(new Volume(input.volumeId, input.path).path) + ).toString() + } + async metadata(input: { + volumeId: string + path: string + }): Promise { + const stats = await fs.stat(new Volume(input.volumeId, input.path).path) + return { + fileType: stats.isFile() ? "file" : "directory", + gid: stats.gid, + uid: stats.uid, + mode: stats.mode, + isDir: stats.isDirectory(), + isFile: stats.isFile(), + isSymlink: stats.isSymbolicLink(), + len: stats.size, + readonly: (stats.mode & 0o200) > 0, + } + } + async createDir(input: { volumeId: string; path: string }): Promise { + const path = new Volume(input.volumeId, input.path).path + await fs.mkdir(path, { recursive: true }) + return path + } + async readDir(input: { volumeId: string; path: string }): Promise { + return fs.readdir(new Volume(input.volumeId, input.path).path) + } + async removeDir(input: { volumeId: string; path: string }): Promise { + const path = new Volume(input.volumeId, input.path).path + await fs.rmdir(new Volume(input.volumeId, input.path).path, { + recursive: true, + }) + return path + } + removeFile(input: { volumeId: string; path: string }): Promise { + return fs.rm(new Volume(input.volumeId, input.path).path) + } + async writeJsonFile(input: { + volumeId: string + path: string + toWrite: Record + }): Promise { + await fs.writeFile( + new Volume(input.volumeId, input.path).path, + JSON.stringify(input.toWrite), + ) + } + async readJsonFile(input: { + volumeId: string + path: string + }): Promise> { + return JSON.parse( + ( + await fs.readFile(new Volume(input.volumeId, input.path).path) + ).toString(), + ) + } + runCommand({ + command, + args, + timeoutMillis, + }: { + command: string + args?: string[] | undefined + timeoutMillis?: number | undefined + }): Promise> { + return startSdk + .runCommand( + this.effects, + this.manifest.main.image, + [command, ...(args || [])], + {}, + ) + .then((x: any) => ({ + stderr: x.stderr.toString(), + stdout: x.stdout.toString(), + })) + .then((x) => (!!x.stderr ? 
{ error: x.stderr } : { result: x.stdout })) + } + runDaemon(input: { command: string; args?: string[] | undefined }): { + wait(): Promise> + term(): Promise + } { + throw new Error("Method not implemented.") + } + chown(input: { volumeId: string; path: string; uid: string }): Promise { + throw new Error("Method not implemented.") + } + chmod(input: { + volumeId: string + path: string + mode: string + }): Promise { + throw new Error("Method not implemented.") + } + sleep(timeMs: number): Promise { + return new Promise((resolve) => setTimeout(resolve, timeMs)) + } + trace(whatToPrint: string): void { + console.trace(whatToPrint) + } + warn(whatToPrint: string): void { + console.warn(whatToPrint) + } + error(whatToPrint: string): void { + console.error(whatToPrint) + } + debug(whatToPrint: string): void { + console.debug(whatToPrint) + } + info(whatToPrint: string): void { + console.log(false) + } + is_sandboxed(): boolean { + return false + } + exists(input: { volumeId: string; path: string }): Promise { + return this.metadata(input) + .then(() => true) + .catch(() => false) + } + bindLocal(options: { + internalPort: number + name: string + externalPort: number + }): Promise { + throw new Error("Method not implemented.") + } + bindTor(options: { + internalPort: number + name: string + externalPort: number + }): Promise { + throw new Error("Method not implemented.") + } + async fetch( + url: string, + options?: + | { + method?: + | "GET" + | "POST" + | "PUT" + | "DELETE" + | "HEAD" + | "PATCH" + | undefined + headers?: Record | undefined + body?: string | undefined + } + | undefined, + ): Promise<{ + method: string + ok: boolean + status: number + headers: Record + body?: string | null | undefined + text(): Promise + json(): Promise + }> { + const fetched = await fetch(url, options) + return { + method: fetched.type, + ok: fetched.ok, + status: fetched.status, + headers: Object.fromEntries(fetched.headers.entries()), + body: await fetched.text(), + text: () => fetched.text(), + json: () => fetched.json(), + } + } + runRsync(options: { + srcVolume: string + dstVolume: string + srcPath: string + dstPath: string + options: oet.BackupOptions + }): { + id: () => Promise + wait: () => Promise + progress: () => Promise + } { + throw new Error("Method not implemented.") + } +} diff --git a/container-runtime/src/Adapters/Systems/SystemForStartOs.ts b/container-runtime/src/Adapters/Systems/SystemForStartOs.ts new file mode 100644 index 0000000000..7549bf0f2d --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForStartOs.ts @@ -0,0 +1,152 @@ +import { ExecuteResult, System } from "../../Interfaces/System" +import { unNestPath } from "../../Models/JsonPath" +import { string } from "ts-matches" +import { HostSystemStartOs } from "../HostSystemStartOs" +import { Effects } from "../../Models/Effects" +import { RpcResult } from "../RpcListener" +import { duration } from "../../Models/Duration" +const LOCATION = "/usr/lib/startos/package/startos" +export class SystemForStartOs implements System { + private onTerm: (() => Promise) | undefined + static of() { + return new SystemForStartOs() + } + constructor() {} + async execute( + effects: HostSystemStartOs, + options: { + procedure: + | "/init" + | "/uninit" + | "/main/start" + | "/main/stop" + | "/config/set" + | "/config/get" + | "/backup/create" + | "/backup/restore" + | "/actions/metadata" + | `/actions/${string}/get` + | `/actions/${string}/run` + | `/dependencies/${string}/query` + | `/dependencies/${string}/update` + input: unknown + 
timeout?: number | undefined + }, + ): Promise { + return { result: await this._execute(effects, options) } + } + async _execute( + effects: Effects, + options: { + procedure: + | "/init" + | "/uninit" + | "/main/start" + | "/main/stop" + | "/config/set" + | "/config/get" + | "/backup/create" + | "/backup/restore" + | "/actions/metadata" + | `/actions/${string}/get` + | `/actions/${string}/run` + | `/dependencies/${string}/query` + | `/dependencies/${string}/update` + input: unknown + timeout?: number | undefined + }, + ): Promise { + switch (options.procedure) { + case "/init": { + const path = `${LOCATION}/procedures/init` + const procedure: any = await import(path).catch(() => require(path)) + const previousVersion = string.optional().unsafeCast(options) + return procedure.init({ effects, previousVersion }) + } + case "/uninit": { + const path = `${LOCATION}/procedures/init` + const procedure: any = await import(path).catch(() => require(path)) + const nextVersion = string.optional().unsafeCast(options) + return procedure.uninit({ effects, nextVersion }) + } + case "/main/start": { + const path = `${LOCATION}/procedures/main` + const procedure: any = await import(path).catch(() => require(path)) + const started = async (onTerm: () => Promise) => { + await effects.setMainStatus({ status: "running" }) + if (this.onTerm) await this.onTerm() + this.onTerm = onTerm + } + return procedure.main({ effects, started }) + } + case "/main/stop": { + await effects.setMainStatus({ status: "stopped" }) + if (this.onTerm) await this.onTerm() + delete this.onTerm + return duration(30, "s") + } + case "/config/set": { + const path = `${LOCATION}/procedures/config` + const procedure: any = await import(path).catch(() => require(path)) + const input = options.input + return procedure.setConfig({ effects, input }) + } + case "/config/get": { + const path = `${LOCATION}/procedures/config` + const procedure: any = await import(path).catch(() => require(path)) + return procedure.getConfig({ effects }) + } + case "/backup/create": + case "/backup/restore": + throw new Error("this should be called with the init/unit") + case "/actions/metadata": { + const path = `${LOCATION}/procedures/actions` + const procedure: any = await import(path).catch(() => require(path)) + return procedure.actionsMetadata({ effects }) + } + default: + const procedures = unNestPath(options.procedure) + const id = procedures[2] + switch (true) { + case procedures[1] === "actions" && procedures[3] === "get": { + const path = `${LOCATION}/procedures/actions` + const action: any = (await import(path).catch(() => require(path))) + .actions[id] + if (!action) throw new Error(`Action ${id} not found`) + return action.get({ effects }) + } + case procedures[1] === "actions" && procedures[3] === "run": { + const path = `${LOCATION}/procedures/actions` + const action: any = (await import(path).catch(() => require(path))) + .actions[id] + if (!action) throw new Error(`Action ${id} not found`) + const input = options.input + return action.run({ effects, input }) + } + case procedures[1] === "dependencies" && procedures[3] === "query": { + const path = `${LOCATION}/procedures/dependencies` + const dependencyConfig: any = ( + await import(path).catch(() => require(path)) + ).dependencyConfig[id] + if (!dependencyConfig) + throw new Error(`dependencyConfig ${id} not found`) + const localConfig = options.input + return dependencyConfig.query({ effects, localConfig }) + } + case procedures[1] === "dependencies" && procedures[3] === "update": { + const 
path = `${LOCATION}/procedures/dependencies` + const dependencyConfig: any = ( + await import(path).catch(() => require(path)) + ).dependencyConfig[id] + if (!dependencyConfig) + throw new Error(`dependencyConfig ${id} not found`) + return dependencyConfig.update(options.input) + } + } + } + throw new Error("Method not implemented.") + } + exit(effects: Effects): Promise { + throw new Error("Method not implemented.") + } +} diff --git a/container-runtime/src/Adapters/Systems/index.ts b/container-runtime/src/Adapters/Systems/index.ts new file mode 100644 index 0000000000..eadc67318f --- /dev/null +++ b/container-runtime/src/Adapters/Systems/index.ts @@ -0,0 +1,6 @@ +import { System } from "../../Interfaces/System" +import { SystemForEmbassy } from "./SystemForEmbassy" +import { SystemForStartOs } from "./SystemForStartOs" +export async function getSystem(): Promise { + return SystemForEmbassy.of() +} diff --git a/container-runtime/src/Interfaces/AllGetDependencies.ts b/container-runtime/src/Interfaces/AllGetDependencies.ts new file mode 100644 index 0000000000..88a200900d --- /dev/null +++ b/container-runtime/src/Interfaces/AllGetDependencies.ts @@ -0,0 +1,6 @@ +import { GetDependency } from "./GetDependency" +import { System } from "./System" +import { GetHostSystem, HostSystem } from "./HostSystem" + +export type AllGetDependencies = GetDependency<"system", Promise> & + GetDependency<"hostSystem", GetHostSystem> diff --git a/container-runtime/src/Interfaces/GetDependency.ts b/container-runtime/src/Interfaces/GetDependency.ts new file mode 100644 index 0000000000..c4bce87330 --- /dev/null +++ b/container-runtime/src/Interfaces/GetDependency.ts @@ -0,0 +1,3 @@ +export type GetDependency = { + [OtherK in K]: () => T +} diff --git a/container-runtime/src/Interfaces/HostSystem.ts b/container-runtime/src/Interfaces/HostSystem.ts new file mode 100644 index 0000000000..4e04bbcc84 --- /dev/null +++ b/container-runtime/src/Interfaces/HostSystem.ts @@ -0,0 +1,7 @@ +import { types as T } from "@start9labs/start-sdk" + +import { CallbackHolder } from "../Models/CallbackHolder" +import { Effects } from "../Models/Effects" + +export type HostSystem = Effects +export type GetHostSystem = (callbackHolder: CallbackHolder) => HostSystem diff --git a/container-runtime/src/Interfaces/System.ts b/container-runtime/src/Interfaces/System.ts new file mode 100644 index 0000000000..86b2aa4920 --- /dev/null +++ b/container-runtime/src/Interfaces/System.ts @@ -0,0 +1,32 @@ +import { types as T } from "@start9labs/start-sdk" +import { JsonPath } from "../Models/JsonPath" +import { HostSystemStartOs } from "../Adapters/HostSystemStartOs" +import { RpcResult } from "../Adapters/RpcListener" +export type ExecuteResult = + | { ok: unknown } + | { err: { code: number; message: string } } +export interface System { + // init(effects: Effects): Promise + // exit(effects: Effects): Promise + // start(effects: Effects): Promise + // stop(effects: Effects, options: { timeout: number, signal?: number }): Promise + + execute( + effects: T.Effects, + options: { + procedure: JsonPath + input: unknown + timeout?: number + }, + ): Promise + // sandbox( + // effects: Effects, + // options: { + // procedure: JsonPath + // input: unknown + // timeout?: number + // }, + // ): Promise + + exit(effects: T.Effects): Promise +} diff --git a/container-runtime/src/Models/CallbackHolder.ts b/container-runtime/src/Models/CallbackHolder.ts new file mode 100644 index 0000000000..b562e8dd08 --- /dev/null +++ 
b/container-runtime/src/Models/CallbackHolder.ts @@ -0,0 +1,20 @@ +export class CallbackHolder { + constructor() {} + private root = (Math.random() + 1).toString(36).substring(7) + private inc = 0 + private callbacks = new Map() + private newId() { + return this.root + (this.inc++).toString(36) + } + addCallback(callback: Function) { + const id = this.newId() + this.callbacks.set(id, callback) + return id + } + callCallback(index: string, args: any[]): Promise { + const callback = this.callbacks.get(index) + if (!callback) throw new Error(`Callback ${index} does not exist`) + this.callbacks.delete(index) + return Promise.resolve().then(() => callback(...args)) + } +} diff --git a/container-runtime/src/Models/DockerProcedure.ts b/container-runtime/src/Models/DockerProcedure.ts new file mode 100644 index 0000000000..91ae73b5fd --- /dev/null +++ b/container-runtime/src/Models/DockerProcedure.ts @@ -0,0 +1,45 @@ +import { + object, + literal, + string, + boolean, + array, + dictionary, + literals, + number, + Parser, +} from "ts-matches" + +const VolumeId = string +const Path = string + +export type VolumeId = string +export type Path = string +export const matchDockerProcedure = object( + { + type: literal("docker"), + image: string, + system: boolean, + entrypoint: string, + args: array(string), + mounts: dictionary([VolumeId, Path]), + "io-format": literals( + "json", + "json-pretty", + "yaml", + "cbor", + "toml", + "toml-pretty", + ), + "sigterm-timeout": number, + inject: boolean, + }, + ["io-format", "sigterm-timeout", "system", "args", "inject", "mounts"], + { + "sigterm-timeout": 30, + inject: false, + args: [], + }, +) + +export type DockerProcedure = typeof matchDockerProcedure._TYPE diff --git a/container-runtime/src/Models/Duration.ts b/container-runtime/src/Models/Duration.ts new file mode 100644 index 0000000000..8c701a7034 --- /dev/null +++ b/container-runtime/src/Models/Duration.ts @@ -0,0 +1,6 @@ +export type TimeUnit = "d" | "h" | "s" | "ms" +export type Duration = `${number}${TimeUnit}` + +export function duration(timeValue: number, timeUnit: TimeUnit = "s") { + return `${timeValue}${timeUnit}` as Duration +} diff --git a/container-runtime/src/Models/Effects.ts b/container-runtime/src/Models/Effects.ts new file mode 100644 index 0000000000..757d512381 --- /dev/null +++ b/container-runtime/src/Models/Effects.ts @@ -0,0 +1,5 @@ +import { types as T } from "@start9labs/start-sdk" + +export type Effects = T.Effects & { + setMainStatus(o: { status: "running" | "stopped" }): Promise +} diff --git a/container-runtime/src/Models/JsonPath.ts b/container-runtime/src/Models/JsonPath.ts new file mode 100644 index 0000000000..3140191542 --- /dev/null +++ b/container-runtime/src/Models/JsonPath.ts @@ -0,0 +1,43 @@ +import { literals, some, string } from "ts-matches" + +type NestedPath = `/${A}/${string}/${B}` +type NestedPaths = + | NestedPath<"actions", "run" | "get"> + | NestedPath<"dependencies", "query" | "update"> +// prettier-ignore +type UnNestPaths = + A extends `${infer A}/${infer B}` ? [...UnNestPaths, ... 
UnNestPaths] : + [A] + +export function unNestPath(a: A): UnNestPaths { + return a.split("/") as UnNestPaths +} +function isNestedPath(path: string): path is NestedPaths { + const paths = path.split("/") + if (paths.length !== 4) return false + if (paths[1] === "actions" && (paths[3] === "run" || paths[3] === "get")) + return true + if ( + paths[1] === "dependencies" && + (paths[3] === "query" || paths[3] === "update") + ) + return true + return false +} +export const jsonPath = some( + literals( + "/init", + "/uninit", + "/main/start", + "/main/stop", + "/config/set", + "/config/get", + "/backup/create", + "/backup/restore", + "/actions/metadata", + "/properties", + ), + string.refine(isNestedPath, "isNestedPath"), +) + +export type JsonPath = typeof jsonPath._TYPE diff --git a/container-runtime/src/Models/Volume.ts b/container-runtime/src/Models/Volume.ts new file mode 100644 index 0000000000..ebf013b684 --- /dev/null +++ b/container-runtime/src/Models/Volume.ts @@ -0,0 +1,19 @@ +import * as fs from "node:fs/promises" + +export class Volume { + readonly path: string + constructor( + readonly volumeId: string, + _path = "", + ) { + const path = (this.path = `/media/startos/volumes/${volumeId}${ + !_path ? "" : `/${_path}` + }`) + } + async exists() { + return fs.stat(this.path).then( + () => true, + () => false, + ) + } +} diff --git a/container-runtime/src/index.ts b/container-runtime/src/index.ts new file mode 100644 index 0000000000..d86111ecbb --- /dev/null +++ b/container-runtime/src/index.ts @@ -0,0 +1,44 @@ +import { RpcListener } from "./Adapters/RpcListener" +import { SystemForEmbassy } from "./Adapters/Systems/SystemForEmbassy" +import { HostSystemStartOs } from "./Adapters/HostSystemStartOs" +import { AllGetDependencies } from "./Interfaces/AllGetDependencies" +import { getSystem } from "./Adapters/Systems" + +const getDependencies: AllGetDependencies = { + system: getSystem, + hostSystem: () => HostSystemStartOs.of, +} + +new RpcListener(getDependencies) + +/** + +So, this is going to be sent into a running container along with any of the other node modules that it needs. + +Once the container is started, we go into a loading/await state. +This is the init system; it is always running, waiting for a command to be sent to it. + +Each command will be a stoppable promise. An example would be something like an action, main, or just a query into the types. + +A command will be sent an object containing the effects, and the effects will be things like the file system, the network, the process, and the OS. + + + */ +// So OS Adapter +// ============== + +/** +* Why: So when we call from the OS, do we enter or leave here? + + */ + +/** +Command: This is a command that the + +There are + */ + +/** +TODO: +Should I separate those adapters in/out? 
+ */ diff --git a/container-runtime/tsconfig.json b/container-runtime/tsconfig.json new file mode 100644 index 0000000000..fd93d51542 --- /dev/null +++ b/container-runtime/tsconfig.json @@ -0,0 +1,31 @@ +{ + "include": [ + "./**/*.mjs", + "./**/*.js", + "src/Adapters/RpcListener.ts", + "src/index.ts", + "effects.ts" + ], + "exclude": ["dist"], + "inputs": ["./src/index.ts"], + "compilerOptions": { + "module": "Node16", + "strict": true, + "outDir": "dist", + "preserveConstEnums": true, + "sourceMap": true, + "target": "ES2022", + "pretty": true, + "declaration": true, + "noImplicitAny": true, + "esModuleInterop": true, + "types": ["node"], + "moduleResolution": "Node16", + "skipLibCheck": true + }, + "ts-node": { + "compilerOptions": { + "module": "commonjs" + } + } +} diff --git a/container-runtime/update-image.sh b/container-runtime/update-image.sh new file mode 100755 index 0000000000..e0d5dc0c3e --- /dev/null +++ b/container-runtime/update-image.sh @@ -0,0 +1,41 @@ +#!/bin/bash + +cd "$(dirname "${BASH_SOURCE[0]}")" + +set -e + + + +if mountpoint tmp/combined; then sudo umount tmp/combined; fi +if mountpoint tmp/lower; then sudo umount tmp/lower; fi +mkdir -p tmp/lower tmp/upper tmp/work tmp/combined +sudo mount alpine.${ARCH}.squashfs tmp/lower +sudo mount -t overlay -olowerdir=tmp/lower,upperdir=tmp/upper,workdir=tmp/work overlay tmp/combined + +QEMU= +if [ "$ARCH" != "$(uname -m)" ]; then + QEMU=/usr/bin/qemu-${ARCH}-static + sudo cp $(which qemu-$ARCH-static) tmp/combined${QEMU} +fi + +echo "nameserver 8.8.8.8" | sudo tee tmp/combined/etc/resolv.conf # TODO - delegate to host resolver? +sudo chroot tmp/combined $QEMU /sbin/apk add nodejs +sudo mkdir -p tmp/combined/usr/lib/startos/ +sudo rsync -a --copy-unsafe-links dist/ tmp/combined/usr/lib/startos/init/ +sudo cp containerRuntime.rc tmp/combined/etc/init.d/containerRuntime +sudo cp ../core/target/$ARCH-unknown-linux-musl/release/containerbox tmp/combined/usr/bin/start-cli +sudo chmod +x tmp/combined/etc/init.d/containerRuntime +sudo chroot tmp/combined $QEMU /sbin/rc-update add containerRuntime default + +if [ -n "$QEMU" ]; then + sudo rm tmp/combined${QEMU} +fi + +sudo truncate -s 0 tmp/combined/etc/resolv.conf +sudo chown -R 0:0 tmp/combined +rm -f rootfs.${ARCH}.squashfs +mkdir -p ../build/lib/container-runtime +sudo mksquashfs tmp/combined rootfs.${ARCH}.squashfs +sudo umount tmp/combined +sudo umount tmp/lower +sudo rm -rf tmp \ No newline at end of file diff --git a/core/Cargo.lock b/core/Cargo.lock index 684cc8eb39..602c16ae34 100644 --- a/core/Cargo.lock +++ b/core/Cargo.lock @@ -7,10 +7,6 @@ name = "Inflector" version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" -dependencies = [ - "lazy_static", - "regex", -] [[package]] name = "addr2line" @@ -33,7 +29,7 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher 0.3.0", "cpufeatures", "ctr", @@ -42,23 +38,23 @@ dependencies = [ [[package]] name = "ahash" -version = "0.7.7" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.12", "once_cell", "version_check", ] 
[[package]] name = "ahash" -version = "0.8.6" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ - "cfg-if 1.0.0", - "getrandom 0.2.11", + "cfg-if", + "getrandom 0.2.12", "once_cell", "version_check", "zerocopy", @@ -66,9 +62,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -110,19 +106,58 @@ dependencies = [ ] [[package]] -name = "ansi_term" -version = "0.12.1" +name = "anstream" +version = "0.6.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" dependencies = [ - "winapi", + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" + +[[package]] +name = "anstyle-parse" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", ] [[package]] name = "anyhow" -version = "1.0.75" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" [[package]] name = "arrayref" @@ -145,19 +180,6 @@ dependencies = [ "term", ] -[[package]] -name = "ast_node" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c09c69dffe06d222d072c878c3afe86eee2179806f20503faec97250268b4c24" -dependencies = [ - "pmutil", - "proc-macro2", - "quote", - "swc_macros_common", - "syn 2.0.39", -] - [[package]] name = "async-channel" version = "1.9.0" @@ -171,9 +193,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f658e2baef915ba0f26f1f7c42bfb8e12f532a01f449a090ded75ae7a07e9ba2" +checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c" dependencies = [ "brotli", "flate2", @@ -202,18 +224,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] name = "async-trait" -version = "0.1.74" +version = 
"0.1.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" +checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] @@ -238,33 +260,54 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" [[package]] -name = "avahi-sys" -version = "0.10.0" -source = "git+https://github.com/Start9Labs/avahi-sys?branch=feature/dynamic-linking#12bef9e435cfb0d36cb229b9d08e2114c176ea7a" +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ - "bindgen", - "libc", + "async-trait", + "axum-core 0.3.4", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper 0.1.2", + "tower", + "tower-layer", + "tower-service", ] [[package]] name = "axum" -version = "0.6.20" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", - "axum-core", - "bitflags 1.3.2", + "axum-core 0.4.3", + "base64", "bytes", "futures-util", - "http", - "http-body", - "hyper", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.2.0", + "hyper-util", "itoa", "matchit", "memchr", @@ -273,10 +316,17 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "sync_wrapper", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper 1.0.0", + "tokio", + "tokio-tungstenite", "tower", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -288,23 +338,63 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http", - "http-body", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", "mime", + "pin-project-lite", "rustversion", + "sync_wrapper 0.1.2", "tower-layer", "tower-service", + "tracing", +] + +[[package]] +name = "axum-server" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1ad46c3ec4e12f4a4b6835e173ba21c25e484c9d02b49770bf006ce5367c036" +dependencies = [ + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.2.0", + "hyper-util", + "pin-project-lite", + "tokio", + "tower", + "tower-service", ] [[package]] name = "backtrace" -version = "0.3.69" +version = "0.3.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = 
"26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide", "object", @@ -325,15 +415,9 @@ checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" [[package]] name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "base64" -version = "0.21.5" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64ct" @@ -343,24 +427,15 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "basic-cookies" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb53b6b315f924c7f113b162e53b3901c05fc9966baf84d201dfcc7432a4bb38" +checksum = "67bd8fd42c16bdb08688243dc5f0cc117a3ca9efeeaba3a345a18a6159ad96f7" dependencies = [ "lalrpop", "lalrpop-util", "regex", ] -[[package]] -name = "better_scoped_tls" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "794edcc9b3fb07bb4aecaa11f093fd45663b4feadb782d68303a2268bc2701de" -dependencies = [ - "scoped-tls", -] - [[package]] name = "bincode" version = "1.3.3" @@ -370,30 +445,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bindgen" -version = "0.55.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75b13ce559e6433d360c26305643803cb52cfbabbc2b9c47ce04a58493dfb443" -dependencies = [ - "bitflags 1.3.2", - "cexpr", - "cfg-if 0.1.10", - "clang-sys", - "clap 2.34.0", - "env_logger 0.7.1", - "lazy_static", - "lazycell", - "log", - "peeking_take_while", - "proc-macro2", - "quote", - "regex", - "rustc-hash", - "shlex", - "which 3.1.1", -] - [[package]] name = "bit-set" version = "0.5.3" @@ -417,18 +468,18 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" dependencies = [ "serde", ] [[package]] name = "bitmaps" -version = "3.2.0" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703642b98a00b3b90513279a8ede3fcfa479c126c5fb46e78f3051522f021403" +checksum = "a1d084b0137aaa901caf9f1e8b21daa6aa24d41cd806e111335541eff9683bd6" [[package]] name = "bitvec" @@ -455,14 +506,14 @@ dependencies = [ [[package]] name = "blake3" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" +checksum = "30cca6d3674597c30ddf2c587bf8d9d65c9a84d2326d941cc79c9842dfe0ef52" dependencies = [ "arrayref", "arrayvec", "cc", - "cfg-if 1.0.0", + "cfg-if", "constant_time_eq", ] @@ -472,7 +523,6 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "block-padding", "generic-array", ] @@ -485,17 +535,11 @@ dependencies = [ "generic-array", ] 
-[[package]] -name = "block-padding" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" - [[package]] name = "brotli" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" +checksum = "d640d25bc63c50fb1f0b545ffd80207d2e10a4c965530809b40ba3386825c391" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -514,9 +558,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.14.0" +version = "3.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" +checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" [[package]] name = "byteorder" @@ -526,33 +570,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" [[package]] name = "cc" -version = "1.0.84" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f8e7c90afad890484a21653d08b6e209ae34770fb5ee298f9c699fcc1e5c856" -dependencies = [ - "libc", -] - -[[package]] -name = "cexpr" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27" -dependencies = [ - "nom 5.1.3", -] - -[[package]] -name = "cfg-if" -version = "0.1.10" +version = "1.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" +checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" [[package]] name = "cfg-if" @@ -562,9 +588,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" dependencies = [ "android-tzdata", "iana-time-zone", @@ -572,7 +598,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.48.5", + "windows-targets 0.52.4", ] [[package]] @@ -581,14 +607,14 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.3", ] [[package]] name = "ciborium" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" dependencies = [ "ciborium-io", "ciborium-ll", @@ -597,18 +623,18 @@ dependencies = [ [[package]] name = "ciborium-io" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" +checksum = 
"05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" [[package]] name = "ciborium-ll" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" dependencies = [ "ciborium-io", - "half", + "half 2.4.0", ] [[package]] @@ -631,44 +657,52 @@ dependencies = [ ] [[package]] -name = "clang-sys" -version = "1.6.1" +name = "clap" +version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f" +checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ - "glob", - "libc", - "libloading", + "atty", + "bitflags 1.3.2", + "clap_lex 0.2.4", + "indexmap 1.9.3", + "strsim 0.10.0", + "termcolor", + "textwrap", ] [[package]] name = "clap" -version = "2.34.0" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" dependencies = [ - "ansi_term", - "atty", - "bitflags 1.3.2", - "strsim 0.8.0", - "textwrap 0.11.0", - "unicode-width", - "vec_map", + "clap_builder", + "clap_derive", ] [[package]] -name = "clap" -version = "3.2.25" +name = "clap_builder" +version = "4.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" dependencies = [ - "atty", - "bitflags 1.3.2", - "clap_lex", - "indexmap 1.9.3", - "strsim 0.10.0", - "termcolor", - "textwrap 0.16.0", + "anstream", + "anstyle", + "clap_lex 0.7.0", + "strsim 0.11.0", +] + +[[package]] +name = "clap_derive" +version = "4.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.55", ] [[package]] @@ -680,11 +714,17 @@ dependencies = [ "os_str_bytes", ] +[[package]] +name = "clap_lex" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" + [[package]] name = "color-eyre" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a667583cca8c4f8436db8de46ea8233c42a7d9ae424a82d338f2e4675229204" +checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5" dependencies = [ "backtrace", "color-spantrace", @@ -697,9 +737,9 @@ dependencies = [ [[package]] name = "color-spantrace" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" +checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" dependencies = [ "once_cell", "owo-colors", @@ -707,26 +747,32 @@ dependencies = [ "tracing-error", ] +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + [[package]] name = "concurrent-queue" -version = "2.3.0" +version = "2.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" dependencies = [ "crossbeam-utils", ] [[package]] name = "console" -version = "0.15.7" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ "encode_unicode 0.3.6", "lazy_static", "libc", "unicode-width", - "windows-sys 0.45.0", + "windows-sys 0.52.0", ] [[package]] @@ -753,7 +799,7 @@ dependencies = [ "crossbeam-utils", "futures-task", "hdrhistogram", - "humantime 2.1.0", + "humantime", "prost-types", "serde", "serde_json", @@ -768,9 +814,9 @@ dependencies = [ [[package]] name = "const-oid" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const_format" @@ -798,28 +844,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" -[[package]] -name = "container-init" -version = "0.1.0" -dependencies = [ - "async-stream", - "color-eyre", - "futures", - "helpers", - "imbl", - "nix 0.27.1", - "procfs", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tracing", - "tracing-error", - "tracing-futures", - "tracing-subscriber", - "yajrc 0.1.0 (git+https://github.com/dr-bonez/yajrc.git?branch=develop)", -] - [[package]] name = "convert_case" version = "0.4.0" @@ -835,17 +859,6 @@ dependencies = [ "unicode-segmentation", ] -[[package]] -name = "cookie" -version = "0.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" -dependencies = [ - "percent-encoding", - "time", - "version_check", -] - [[package]] name = "cookie" version = "0.17.0" @@ -859,31 +872,14 @@ dependencies = [ [[package]] name = "cookie" -version = "0.18.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cd91cf61412820176e137621345ee43b3f4423e589e7ae4e50d601d93e35ef8" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" dependencies = [ "time", "version_check", ] -[[package]] -name = "cookie_store" -version = "0.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d606d0fba62e13cf04db20536c05cb7f13673c161cb47a47a82b9b9e7d3f1daa" -dependencies = [ - "cookie 0.16.2", - "idna 0.2.3", - "log", - "publicsuffix", - "serde", - "serde_derive", - "serde_json", - "time", - "url", -] - [[package]] name = "cookie_store" version = "0.20.0" @@ -903,9 +899,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -913,15 +909,15 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpufeatures" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] @@ -943,53 +939,74 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] name = "crossbeam-channel" -version = "0.5.8" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" dependencies = [ - "cfg-if 1.0.0", "crossbeam-utils", ] [[package]] name = "crossbeam-queue" -version = "0.3.8" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "cfg-if 1.0.0", "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" -dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "crunchy" -version = "0.2.2" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" [[package]] -name = "crypto-bigint" -version = "0.5.4" +name = "crossterm" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28f85c3514d2a6e64160359b45a3918c3b4178bcbf4ae5d03ab2d02e521c479a" +checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" +dependencies = [ + "bitflags 2.5.0", + "crossterm_winapi", + "futures-core", + "libc", + "mio", + "parking_lot", + "signal-hook", + "signal-hook-mio", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -1007,16 +1024,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "crypto-mac" -version = "0.11.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" -dependencies = [ - "generic-array", - "subtle", -] - [[package]] name = "csv" version = "1.3.0" @@ -1068,17 +1075,17 @@ dependencies = [ [[package]] name = "curve25519-dalek" -version = "4.1.1" +version = "4.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" +checksum = "0a677b8922c94e01bdbb12126b0bc852f00447528dee1782229af9c720c3f348" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "curve25519-dalek-derive", "digest 0.10.7", "fiat-crypto", "platforms", - "rustc_version 0.4.0", + "rustc_version", "subtle", "zeroize", ] @@ -1091,14 +1098,14 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] name = "darling" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" +checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" dependencies = [ "darling_core", "darling_macro", @@ -1106,189 +1113,34 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" +checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] name = "darling_macro" -version = "0.20.3" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" +checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core", "quote", - "syn 2.0.39", -] - -[[package]] -name = "dashmap" -version = "5.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" -dependencies = [ - "cfg-if 1.0.0", - "hashbrown 0.14.2", - "lock_api", - "once_cell", - "parking_lot_core", + "syn 2.0.55", ] [[package]] name = "data-encoding" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" - -[[package]] -name = "data-url" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41b319d1b62ffbd002e057f36bebd1f42b9f97927c9577461d855f3513c4289f" - -[[package]] -name = "debugid" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" -dependencies = [ - "serde", - "uuid", -] - -[[package]] -name = "deno-proc-macro-rules" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c65c2ffdafc1564565200967edc4851c7b55422d3913466688907efd05ea26f" -dependencies = [ - "deno-proc-macro-rules-macros", - "proc-macro2", - "syn 2.0.39", -] - -[[package]] -name = "deno-proc-macro-rules-macros" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3047b312b7451e3190865713a4dd6e1f821aed614ada219766ebc3024a690435" -dependencies = [ - "once_cell", - "proc-macro2", - "quote", - "syn 2.0.39", -] - -[[package]] -name = "deno_ast" -version = "0.29.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a8adb6aeb787db71d015d8e9f63f6e004eeb09c86babb4ded00878be18619b1" -dependencies = [ - "anyhow", - "base64 0.13.1", - "deno_media_type", - "dprint-swc-ext", - "serde", - "swc_atoms", - "swc_common", - "swc_config", - "swc_config_macro", - "swc_ecma_ast", - "swc_ecma_codegen", - "swc_ecma_codegen_macros", - "swc_ecma_loader", - "swc_ecma_parser", - "swc_ecma_transforms_base", - "swc_ecma_transforms_classes", - "swc_ecma_transforms_macros", - "swc_ecma_transforms_proposal", - "swc_ecma_transforms_react", - "swc_ecma_transforms_typescript", - "swc_ecma_utils", - "swc_ecma_visit", - "swc_eq_ignore_macros", - "swc_macros_common", - "swc_visit", - "swc_visit_macros", - "text_lines", - "url", -] - -[[package]] -name = "deno_core" -version = "0.222.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b13c81b9ea8462680e7b77088a44fc36390bab3dbfa5a205a285e11b64e0919c" -dependencies = [ - "anyhow", - "bytes", - "deno_ops", - "deno_unsync", - "futures", - "indexmap 2.1.0", - "libc", - "log", - "once_cell", - "parking_lot", - "pin-project", - "serde", - "serde_json", - "serde_v8", - "smallvec", - "sourcemap 7.0.1", - "tokio", - "url", - "v8", -] - -[[package]] -name = "deno_media_type" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a798670c20308e5770cc0775de821424ff9e85665b602928509c8c70430b3ee0" -dependencies = [ - "data-url", - "serde", - "url", -] - -[[package]] -name = "deno_ops" -version = "0.98.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf89da1a3e50ff7c89956495b53d9bcad29e1f1b3f3d2bc54cad7155f55419c4" -dependencies = [ - "deno-proc-macro-rules", - "lazy-regex", - "once_cell", - "pmutil", - "proc-macro-crate 1.3.1", - "proc-macro2", - "quote", - "regex", - "strum", - "strum_macros", - "syn 2.0.39", - "thiserror", -] - -[[package]] -name = "deno_unsync" -version = "0.3.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8a8f3722afd50e566ecfc783cc8a3a046bc4dd5eb45007431dfb2776aeb8993" -dependencies = [ - "tokio", -] +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" [[package]] name = "der" @@ -1303,9 +1155,9 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", "serde", @@ -1320,16 +1172,10 @@ dependencies = [ "convert_case 0.4.0", "proc-macro2", "quote", - "rustc_version 0.4.0", + "rustc_version", "syn 1.0.109", ] -[[package]] -name = "diff" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" - [[package]] name = "digest" version = "0.9.0" @@ -1357,7 +1203,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "dirs-sys-next", ] @@ -1384,22 +1230,6 @@ version = "0.15.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" -[[package]] -name = "dprint-swc-ext" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a0a2492465344a58a37ae119de59e81fe5a2885f2711c7b5048ef0dfa14ce42" -dependencies = [ - "bumpalo", - "num-bigint", - "rustc-hash", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_parser", - "text_lines", -] - [[package]] name = "drain" version = "0.1.1" @@ -1411,21 +1241,21 @@ dependencies = [ [[package]] name = "dyn-clone" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" [[package]] name = "ecdsa" -version = "0.16.8" +version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der", "digest 0.10.7", "elliptic-curve", "rfc6979", - "signature 2.0.0", + "signature 2.2.0", "spki", ] @@ -1446,7 +1276,7 @@ checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ "pkcs8", "serde", - "signature 2.0.0", + "signature 2.2.0", ] [[package]] @@ -1465,33 +1295,34 @@ dependencies = [ [[package]] name = "ed25519-dalek" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" +checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" dependencies = [ - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.2", "ed25519 2.2.3", "rand_core 0.6.4", "serde", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", + "subtle", "zeroize", ] [[package]] name = "either" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" dependencies = [ "serde", ] [[package]] name = "elliptic-curve" -version = "0.13.6" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97ca172ae9dc9f9b779a6e3a65d308f2af74e5b8c921299075bdb4a0370e914" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", @@ -1514,7 +1345,7 @@ source = "git+https://github.com/Start9Labs/emver-rs.git#61cf0bc96711b4d6f3f30df dependencies = [ "either", "fp-core", - "nom 7.1.3", + "nom", "serde", ] @@ -1545,7 +1376,7 @@ version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1554,36 +1385,10 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.39", -] - -[[package]] -name = "env_logger" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" -dependencies = [ - "atty", - "humantime 1.3.0", - "log", - "regex", - "termcolor", -] - -[[package]] -name = "env_logger" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95b3f3e67048839cb0d0781f445682a35113da7121f7c949db0e2be96a4fbece" -dependencies = [ - "humantime 2.1.0", - "is-terminal", - "log", - "regex", - "termcolor", + "syn 2.0.55", ] [[package]] @@ -1594,12 +1399,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.6" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c18ee0ed65a5f1f81cac6b1d213b69c35fa47d4252ad41f1486dbd8226fe36e" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1608,7 +1413,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "home", "windows-sys 0.48.0", ] @@ -1621,9 +1426,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "eyre" -version = "0.6.8" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" dependencies = [ "indenter", "once_cell", @@ -1631,9 +1436,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" [[package]] name = "fd-lock-rs" @@ -1656,20 +1461,20 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.3" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f69037fe1b785e84986b4f2cbcf647381876a00671d25ceef715d7812dd7e1dd" +checksum = "c007b1ae3abe1cb6f85a16305acd418b7ca6343b953633fee2b76d8f108b830f" [[package]] name = "filetime" -version = "0.2.22" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", - "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", ] [[package]] @@ -1728,9 +1533,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -1744,28 +1549,6 @@ dependencies = [ "itertools 0.8.2", ] -[[package]] -name = "from_variant" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03ec5dc38ee19078d84a692b1c41181ff9f94331c76cee66ff0208c770b5e54f" -dependencies = [ - "pmutil", - "proc-macro2", - 
"swc_macros_common", - "syn 2.0.39", -] - -[[package]] -name = "fslock" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57eafdd0c16f57161105ae1b98a1238f97645f2f588438b2949c99a2af9616bf" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "funty" version = "2.0.0" @@ -1774,9 +1557,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -1789,9 +1572,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -1799,15 +1582,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -1827,38 +1610,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-macro" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] name = "futures-sink" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-util" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures-channel", "futures-core", @@ -1889,33 +1672,27 @@ version = "0.1.16" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.9.0+wasi-snapshot-preview1", ] [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] name = "gimli" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" - -[[package]] -name = "glob" -version = "0.3.1" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "gpt" @@ -1923,7 +1700,7 @@ version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8283e7331b8c93b9756e0cfdbcfb90312852f953c6faf9bf741e684cc3b6ad69" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.5.0", "crc", "log", "uuid", @@ -1942,17 +1719,36 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.21" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +checksum = "4fbd2820c5e49886948654ab546d0688ff24530286bdcf8fca3cefb16d4618eb" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", - "http", - "indexmap 1.9.3", + "http 0.2.12", + "indexmap 2.2.6", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51ee2dd2e4f378392eeff5d51618cd9a63166a2513846bbc55f21cfacd9199d4" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 1.1.0", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -1961,9 +1757,19 @@ dependencies = [ [[package]] name = "half" -version = "1.8.2" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" + +[[package]] +name = "half" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +checksum = "b5eceaaeec696539ddaf7b333340f1af35a5aa87ae3e4f3ead0532f72affab2e" +dependencies = [ + "cfg-if", + "crunchy", +] [[package]] name = "hashbrown" @@ -1977,16 +1783,16 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.11", ] [[package]] name = "hashbrown" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.11", "allocator-api2", ] @@ -1996,19 +1802,19 @@ version = "0.8.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.3", ] [[package]] name = "hdrhistogram" -version = "7.5.3" +version = "7.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5b38e5c02b7c7be48c8dc5217c4f1634af2ea221caae2e024bffc7a7651c691" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" dependencies = [ - "base64 0.13.1", + "base64", "byteorder", "flate2", - "nom 7.1.3", + "nom", "num-traits", ] @@ -2021,6 +1827,12 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "helpers" version = "0.1.0" @@ -2030,12 +1842,12 @@ dependencies = [ "lazy_async_pool", "models", "pin-project", + "rpc-toolkit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", "tokio", "tokio-stream", "tracing", - "yajrc 0.1.0 (git+https://github.com/dr-bonez/yajrc.git?branch=develop)", ] [[package]] @@ -2049,9 +1861,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.3" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -2061,52 +1873,53 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hifijson" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85ef6b41c333e6dd2a4aaa59125a19b633cd17e7aaf372b2260809777bcdef4a" +checksum = "18ae468bcb4dfecf0e4949ee28abbc99076b6a0077f51ddbc94dbfff8e6a870c" [[package]] name = "hkdf" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ - "hmac 0.12.1", + "hmac", ] [[package]] name = "hmac" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "crypto-mac", - "digest 0.9.0", + "digest 0.10.7", ] [[package]] -name = "hmac" -version = "0.12.1" +name = "home" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "digest 0.10.7", + "windows-sys 0.52.0", ] [[package]] -name = "home" -version = "0.5.5" +name = "http" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ - "windows-sys 0.48.0", + "bytes", + "fnv", + "itoa", ] [[package]] name = "http" -version = "0.2.11" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", @@ -2115,36 +1928,50 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", + "http 0.2.12", "pin-project-lite", ] [[package]] -name = "httparse" -version = "1.8.0" +name = "http-body" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" - -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.1.0", +] [[package]] -name = "humantime" -version = "1.3.0" +name = "http-body-util" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" +checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" dependencies = [ - "quick-error", + "bytes", + "futures-core", + "http 1.1.0", + "http-body 1.0.0", + "pin-project-lite", ] +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + [[package]] name = "humantime" version = "2.1.0" @@ -2153,35 +1980,55 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.27" +version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2", - "http", - "http-body", + "h2 0.3.25", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2", "tokio", "tower-service", "tracing", "want", ] +[[package]] +name = "hyper" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.3", + "http 1.1.0", + "http-body 1.0.0", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", +] + [[package]] name = "hyper-timeout" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper", + "hyper 0.14.28", "pin-project-lite", "tokio", "tokio-io-timeout", @@ -2194,34 +2041,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper", + "hyper 0.14.28", "native-tls", "tokio", "tokio-native-tls", ] [[package]] -name = "hyper-ws-listener" -version = "0.3.0" +name = "hyper-util" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcbfe4981e45b0a7403a55d4af12f8d30e173e722409658c3857243990e72180" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" dependencies = [ - "anyhow", - "base64 0.21.5", - "env_logger 0.10.1", - "futures", - "hyper", - "log", - "sha-1", + "bytes", + "futures-util", + "http 1.1.0", + "http-body 1.0.0", + "hyper 1.2.0", + "pin-project-lite", + "socket2", "tokio", - "tokio-tungstenite", ] [[package]] name = "iana-time-zone" -version = "0.1.58" +version = "0.1.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2240,6 +2086,15 @@ dependencies = [ "cc", ] +[[package]] +name = "id-pool" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d0df4d8a768821ee4aa2e0353f67125c4586f0e13adbf95b8ebbf8d8fdb344" +dependencies = [ + "serde", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -2248,20 +2103,19 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.2.3" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" dependencies = [ - "matches", "unicode-bidi", "unicode-normalization", ] [[package]] name = "idna" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -2269,20 +2123,14 @@ dependencies = [ [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", ] -[[package]] -name = "if_chain" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb56e1aa765b4b4f3aadfab769793b7087bb03a4ea4920644a6d238e2df5b9ed" - [[package]] name = "imbl" version = "2.0.3" @@ -2299,9 +2147,9 @@ dependencies = [ [[package]] name = "imbl-sized-chunks" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6957ea0b2541c5ca561d3ef4538044af79f8a05a1eb3a3b148936aaceaa1076" +checksum = "144006fb58ed787dcae3f54575ff4349755b00ccc99f4b4873860b654be1ed63" dependencies = [ "bitmaps", ] @@ -2309,7 +2157,7 @@ dependencies = [ [[package]] name = "imbl-value" version = "0.1.0" -source = "git+https://github.com/Start9Labs/imbl-value.git#929395141c3a882ac366c12ac9402d0ebaa2201b" +source = "git+https://github.com/Start9Labs/imbl-value.git#48dc39a762a3b4f9300d3b9f850cbd394e777ae0" 
dependencies = [ "imbl", "serde", @@ -2356,20 +2204,20 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.1.0" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "serde", ] [[package]] name = "indicatif" -version = "0.17.7" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" dependencies = [ "console", "instant", @@ -2394,7 +2242,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -2407,17 +2255,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.3", - "libc", - "windows-sys 0.48.0", -] - [[package]] name = "ipnet" version = "2.9.0" @@ -2437,28 +2274,15 @@ dependencies = [ "serde", ] -[[package]] -name = "is-macro" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4467ed1321b310c2625c5aa6c1b1ffc5de4d9e42668cf697a08fb033ee8265e" -dependencies = [ - "Inflector", - "pmutil", - "proc-macro2", - "quote", - "syn 2.0.39", -] - [[package]] name = "is-terminal" -version = "0.4.9" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" dependencies = [ - "hermit-abi 0.3.3", - "rustix 0.38.21", - "windows-sys 0.48.0", + "hermit-abi 0.3.9", + "libc", + "windows-sys 0.52.0", ] [[package]] @@ -2498,11 +2322,20 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + [[package]] name = "itoa" -version = "1.0.9" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jaq-core" @@ -2510,7 +2343,7 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb52eeac20f256459e909bd4a03bb8c4fab6a1fdbb8ed52d00f644152df48ece" dependencies = [ - "ahash 0.7.7", + "ahash 0.7.8", "dyn-clone", "hifijson", "indexmap 1.9.3", @@ -2544,12 +2377,12 @@ dependencies = [ [[package]] name = "josekit" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5754487a088f527b1407df470db8e654e4064dccbbe1fe850e0773721e9962b7" +checksum = "0953340cf63354cec4a385f1fbcb3f409a5823778cae236078892f6030ed4565" dependencies = [ "anyhow", - "base64 0.21.5", + "base64", "flate2", "once_cell", "openssl", @@ -2560,32 +2393,11 @@ dependencies = [ "time", ] 
-[[package]] -name = "js-engine" -version = "0.1.0" -dependencies = [ - "async-trait", - "container-init", - "dashmap", - "deno_ast", - "deno_core", - "helpers", - "itertools 0.11.0", - "lazy_static", - "models", - "reqwest", - "serde", - "serde_json", - "sha2 0.10.8", - "tokio", - "tracing", -] - [[package]] name = "js-sys" -version = "0.3.65" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" dependencies = [ "wasm-bindgen", ] @@ -2623,65 +2435,42 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" dependencies = [ "cpufeatures", ] [[package]] name = "lalrpop" -version = "0.19.12" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" +checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" dependencies = [ "ascii-canvas", "bit-set", - "diff", "ena", - "is-terminal", - "itertools 0.10.5", + "itertools 0.11.0", "lalrpop-util", "petgraph", + "pico-args", "regex", - "regex-syntax 0.6.29", + "regex-syntax 0.8.3", "string_cache", "term", "tiny-keccak", "unicode-xid", + "walkdir", ] [[package]] name = "lalrpop-util" -version = "0.19.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" -dependencies = [ - "regex", -] - -[[package]] -name = "lazy-regex" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d12be4595afdf58bd19e4a9f4e24187da2a66700786ff660a418e9059937a4c" -dependencies = [ - "lazy-regex-proc_macros", - "once_cell", - "regex", -] - -[[package]] -name = "lazy-regex-proc_macros" -version = "3.1.0" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bcd58e6c97a7fcbaffcdc95728b393b8d98933bfadad49ed4097845b57ef0b" +checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "proc-macro2", - "quote", - "regex", - "syn 2.0.39", + "regex-automata 0.4.6", ] [[package]] @@ -2694,6 +2483,12 @@ dependencies = [ "futures", ] +[[package]] +name = "lazy_format" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e479e99b287d578ed5f6cd4c92cdf48db219088adb9c5b14f7c155b71dfba792" + [[package]] name = "lazy_static" version = "1.4.0" @@ -2703,27 +2498,11 @@ dependencies = [ "spin 0.5.2", ] -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "libc" -version = "0.2.150" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" - -[[package]] -name = "libloading" -version = "0.7.4" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" -dependencies = [ - "cfg-if 1.0.0", - "winapi", -] +checksum = 
"9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libm" @@ -2737,16 +2516,16 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.5.0", "libc", "redox_syscall 0.4.1", ] [[package]] name = "libsqlite3-sys" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" +checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" dependencies = [ "cc", "pkg-config", @@ -2755,15 +2534,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" - -[[package]] -name = "linux-raw-sys" -version = "0.4.11" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lock_api" @@ -2777,9 +2550,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "matchers" @@ -2790,12 +2563,6 @@ dependencies = [ "regex-automata 0.1.10", ] -[[package]] -name = "matches" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" - [[package]] name = "matchit" version = "0.7.3" @@ -2821,15 +2588,15 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "digest 0.10.7", ] [[package]] name = "memchr" -version = "2.6.4" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" [[package]] name = "memoffset" @@ -2863,20 +2630,21 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" dependencies = [ "adler", ] [[package]] name = "mio" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", + "log", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] @@ -2885,19 +2653,20 @@ dependencies = [ name = "models" version = "0.1.0" dependencies = [ - "base64 0.21.5", + "base64", "color-eyre", - "ed25519-dalek 2.0.0", + "ed25519-dalek 2.1.1", "emver", "ipnet", "lazy_static", "mbrman", + 
"num_enum", "openssl", "patch-db", "rand 0.8.5", "regex", "reqwest", - "rpc-toolkit", + "rpc-toolkit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", "sqlx", @@ -2906,6 +2675,7 @@ dependencies = [ "tokio", "torut", "tracing", + "ts-rs", "yasi", ] @@ -2929,9 +2699,9 @@ dependencies = [ [[package]] name = "new_debug_unreachable" -version = "1.0.4" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "new_mime_guess" @@ -2950,7 +2720,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" dependencies = [ "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "libc", "memoffset 0.6.5", ] @@ -2962,7 +2732,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" dependencies = [ "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "libc", "memoffset 0.7.1", "pin-utils", @@ -2974,21 +2744,11 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" dependencies = [ - "bitflags 2.4.1", - "cfg-if 1.0.0", + "bitflags 2.5.0", + "cfg-if", "libc", ] -[[package]] -name = "nom" -version = "5.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08959a387a676302eebf4ddbcbc611da04285579f76f88ee0506c63b1a61dd4b" -dependencies = [ - "memchr", - "version_check", -] - [[package]] name = "nom" version = "7.1.3" @@ -3032,8 +2792,6 @@ dependencies = [ "autocfg", "num-integer", "num-traits", - "rand 0.8.5", - "serde", ] [[package]] @@ -3055,28 +2813,33 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba157ca0885411de85d6ca030ba7e2a83a28636056c7c699b07c8b6f7383214" +checksum = "23c6602fda94a57c990fe0df199a035d83576b496aa29f4e634a8ac6004e68a6" dependencies = [ "num-traits", ] +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "num-integer" -version = "0.1.45" +version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", "num-traits", ] [[package]] name = "num-iter" -version = "0.1.43" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +checksum = "d869c01cc0c455284163fd0092f1f93835385ccab5a98a0dcc497b2f8bf055a9" dependencies = [ "autocfg", "num-integer", @@ -3097,9 +2860,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" dependencies = [ "autocfg", "libm", @@ -3111,29 +2874,29 @@ version = "1.16.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.3", + "hermit-abi 0.3.9", "libc", ] [[package]] name = "num_enum" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683751d591e6d81200c39fb0d1032608b77724f34114db54f571ff1317b337c0" +checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c11e44798ad209ccdd91fc192f0526a369a01234f7373e1b141c96d7cee4f0e" +checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" dependencies = [ - "proc-macro-crate 2.0.0", + "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] @@ -3144,24 +2907,24 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "opaque-debug" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssh-keys" @@ -3169,7 +2932,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c75a0ec2d1b302412fb503224289325fcc0e44600176864804c7211b055cfd58" dependencies = [ - "base64 0.21.5", + "base64", "byteorder", "md-5", "sha2 0.10.8", @@ -3178,12 +2941,12 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.59" +version = "0.10.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a257ad03cd8fb16ad4172fedf8094451e1af1c4b70097636ef2eac9a5f0cc33" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ - "bitflags 2.4.1", - "cfg-if 1.0.0", + "bitflags 2.5.0", + "cfg-if", "foreign-types", "libc", "once_cell", @@ -3199,7 +2962,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] @@ -3210,18 +2973,18 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "300.1.6+3.1.4" +version = "300.2.3+3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439fac53e092cd7442a3660c85dde4643ab3b5bd39040912388dcdabf6b88085" +checksum = "5cff92b6f71555b61bb9315f7c64da3ca43d87531622120fea0195fc761b4843" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.95" +version = "0.9.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9" +checksum = "dda2b0f344e78efc2facf7d195d098df0dd72151b26ab98da807afc26c198dff" dependencies = [ "cc", "libc", @@ -3272,6 +3035,20 @@ dependencies = [ "sha2 0.10.8", ] +[[package]] +name = "p521" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc9e2161f1f215afdfce23677034ae137bbd45016a880c2eb3ba8eb95f085b2" +dependencies = [ + "base16ct", + "ecdsa", + "elliptic-curve", + "primeorder", + "rand_core 0.6.4", + "sha2 0.10.8", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -3288,7 +3065,7 @@ version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.4.1", "smallvec", @@ -3336,18 +3113,12 @@ dependencies = [ name = "patch-db-macro-internals" version = "0.1.0" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", "syn 1.0.109", ] -[[package]] -name = "pathdiff" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" - [[package]] name = "pbkdf2" version = "0.12.2" @@ -3355,15 +3126,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" dependencies = [ "digest 0.10.7", - "hmac 0.12.1", + "hmac", ] -[[package]] -name = "peeking_take_while" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" - [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -3375,9 +3140,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" @@ -3386,42 +3151,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.1.0", -] - -[[package]] -name = "phf" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" -dependencies = [ - "phf_macros", - "phf_shared", - "proc-macro-hack", -] - -[[package]] -name = "phf_generator" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" -dependencies = [ - "phf_shared", - "rand 0.8.5", -] - -[[package]] -name = "phf_macros" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0" -dependencies = [ - "phf_generator", - "phf_shared", - "proc-macro-hack", - "proc-macro2", - "quote", - "syn 1.0.109", + "indexmap 2.2.6", ] [[package]] @@ -3433,24 +3163,30 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + [[package]] name = "pin-project" -version = 
"1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] @@ -3488,32 +3224,21 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.27" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "platforms" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" - -[[package]] -name = "pmutil" -version = "0.6.1" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52a40bc70c2c58040d2d8b167ba9a5ff59fc9dab7ad44771cfde3dcfde7a09c6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.39", -] +checksum = "db23d408679286588f4d4644f965003d056e3dd5abcaaa938116871d7ce2fee7" [[package]] name = "portable-atomic" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bccab0e7fd7cc19f820a1c8c91720af652d0c88dc9664dd72aef2614f04af3b" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" [[package]] name = "powerfmt" @@ -3549,62 +3274,31 @@ dependencies = [ [[package]] name = "primeorder" -version = "0.13.3" +version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7dbe9ed3b56368bd99483eb32fe9c17fdd3730aebadc906918ce78d54c7eeb4" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro-crate" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" -dependencies = [ - "once_cell", - "toml_edit 0.19.15", -] - -[[package]] -name = "proc-macro-crate" -version = "2.0.0" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" dependencies = [ - "toml_edit 0.20.7", + "toml_edit 0.21.1", ] -[[package]] -name = "proc-macro-hack" -version = "0.5.20+deprecated" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" - [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" dependencies = [ "unicode-ident", ] -[[package]] -name = "procfs" -version = "0.15.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "943ca7f9f29bab5844ecd8fdb3992c5969b6622bb9609b9502fef9b4310e3f1f" -dependencies = [ - "bitflags 1.3.2", - "byteorder", - "chrono", - "flate2", - "hex", - "lazy_static", - "rustix 0.36.17", -] - [[package]] name = "proptest" version = "1.4.0" @@ -3613,13 +3307,13 @@ checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.4.1", + "bitflags 2.5.0", "lazy_static", "num-traits", "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.8.2", + "regex-syntax 0.8.3", "rusty-fork", "tempfile", "unarray", @@ -3638,9 +3332,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fdd22f3b9c31b53c060df4a0613a1c7f062d4115a2b984dd15b1858f7e340d" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ "bytes", "prost-derive", @@ -3648,22 +3342,22 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "265baba7fabd416cf5078179f7d2cbeca4ce7a9041111900675ea7c4cb8a4c32" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" dependencies = [ "anyhow", "itertools 0.11.0", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] name = "prost-types" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e081b29f63d83a4bc75cfc9f3fe424f9156cf92d8a4f0c9407cce9a1b67327cf" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" dependencies = [ "prost", ] @@ -3674,15 +3368,6 @@ version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac" -[[package]] -name = "psm" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" -dependencies = [ - "cc", -] - [[package]] name = "publicsuffix" version = "2.2.3" @@ -3701,9 +3386,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.33" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -3773,7 +3458,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.12", ] [[package]] @@ -3818,15 +3503,6 @@ dependencies = [ "bitflags 1.3.2", ] -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.4.1" @@ -3842,21 +3518,21 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.12", "libredox", 
"thiserror", ] [[package]] name = "regex" -version = "1.10.2" +version = "1.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex-automata 0.4.6", + "regex-syntax 0.8.3", ] [[package]] @@ -3870,13 +3546,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.8.3", ] [[package]] @@ -3887,27 +3563,27 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" [[package]] name = "reqwest" -version = "0.11.22" +version = "0.11.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ - "base64 0.21.5", + "base64", "bytes", - "cookie 0.16.2", - "cookie_store 0.16.2", + "cookie 0.17.0", + "cookie_store", "encoding_rs", "futures-core", "futures-util", - "h2", - "http", - "http-body", - "hyper", + "h2 0.3.25", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", "hyper-tls", "ipnet", "js-sys", @@ -3917,9 +3593,11 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", + "sync_wrapper 0.1.2", "system-configuration", "tokio", "tokio-native-tls", @@ -3941,7 +3619,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba529055ea150e42e4eb9c11dcd380a41025ad4d594b0cb4904ef28b037e1061" dependencies = [ "bytes", - "cookie_store 0.20.0", + "cookie_store", "reqwest", "url", ] @@ -3952,22 +3630,23 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ - "hmac 0.12.1", + "hmac", "subtle", ] [[package]] name = "ring" -version = "0.17.5" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb0205304757e5d899b9c2e448b867ffd03ae7f988002e47cd24954391394d0b" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", - "getrandom 0.2.11", + "cfg-if", + "getrandom 0.2.12", "libc", "spin 0.9.8", "untrusted", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -3983,24 +3662,53 @@ dependencies = [ [[package]] name = "rpc-toolkit" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5353673ffd8265292281141560d2b851e4da49e83e2f5e255fd473736d45ee10" +checksum = "c48252a30abb9426a3239fa8dfd2c8dd2647bb24db0b6145db2df04ae53fe647" dependencies = [ "clap 3.2.25", "futures", - "hyper", + "hyper 0.14.28", "lazy_static", 
"openssl", "reqwest", - "rpc-toolkit-macro", + "rpc-toolkit-macro 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_cbor 0.11.2", "serde_json", "thiserror", "tokio", "url", - "yajrc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "yajrc", +] + +[[package]] +name = "rpc-toolkit" +version = "0.2.3" +source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#c89e0abdb15dd3bed9adb5339cf0b61a96f32b50" +dependencies = [ + "async-stream", + "async-trait", + "axum 0.7.5", + "clap 4.5.4", + "futures", + "http 1.1.0", + "http-body-util", + "imbl-value", + "itertools 0.12.1", + "lazy_format", + "lazy_static", + "openssl", + "pin-project", + "reqwest", + "rpc-toolkit-macro 0.2.2 (git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", + "serde", + "serde_json", + "thiserror", + "tokio", + "tokio-stream", + "url", + "yajrc", ] [[package]] @@ -4010,7 +3718,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8e4b9cb00baf2d61bcd35e98d67dcb760382a3b4540df7e63b38d053c8a7b8b" dependencies = [ "proc-macro2", - "rpc-toolkit-macro-internals", + "rpc-toolkit-macro-internals 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.109", +] + +[[package]] +name = "rpc-toolkit-macro" +version = "0.2.2" +source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#c89e0abdb15dd3bed9adb5339cf0b61a96f32b50" +dependencies = [ + "proc-macro2", + "rpc-toolkit-macro-internals 0.2.2 (git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", "syn 1.0.109", ] @@ -4025,11 +3743,22 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "rpc-toolkit-macro-internals" +version = "0.2.2" +source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#c89e0abdb15dd3bed9adb5339cf0b61a96f32b50" +dependencies = [ + "itertools 0.12.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "rsa" -version = "0.9.3" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ef35bf3e7fe15a53c4ab08a998e42271eab13eb0db224126bc7bc4c4bad96d" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" dependencies = [ "const-oid", "digest 0.10.7", @@ -4040,7 +3769,7 @@ dependencies = [ "pkcs8", "rand_core 0.6.4", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", "spki", "subtle", "zeroize", @@ -4058,11 +3787,11 @@ dependencies = [ [[package]] name = "rust-argon2" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e71971821b3ae0e769e4a4328dbcb517607b434db7697e9aba17203ec14e46a" +checksum = "9d9848531d60c9cbbcf9d166c885316c24bc0e2a9d3eba0956bb6cbbd79bc6e8" dependencies = [ - "base64 0.21.5", + "base64", "blake2b_simd", "constant_time_eq", ] @@ -4073,67 +3802,51 @@ version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustc_version" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -dependencies = [ - "semver 0.9.0", -] - [[package]] name = "rustc_version" version = 
"0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.20", + "semver", ] [[package]] name = "rustix" -version = "0.36.17" +version = "0.38.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "305efbd14fde4139eb501df5f136994bb520b033fa9fbdce287507dc23b8c7ed" +checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.5.0", "errno", - "io-lifetimes", "libc", - "linux-raw-sys 0.1.4", - "windows-sys 0.45.0", + "linux-raw-sys", + "windows-sys 0.52.0", ] [[package]] -name = "rustix" -version = "0.38.21" +name = "rustls" +version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ - "bitflags 2.4.1", - "errno", - "libc", - "linux-raw-sys 0.4.11", - "windows-sys 0.48.0", + "ring", + "rustls-webpki 0.101.7", + "sct", ] [[package]] name = "rustls" -version = "0.21.8" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "446e14c5cda4f3f30fe71863c34ec70f5ac79d6087097ad0bb433e1be5edf04c" +checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c" dependencies = [ "log", "ring", - "rustls-webpki", - "sct", + "rustls-pki-types", + "rustls-webpki 0.102.2", + "subtle", + "zeroize", ] [[package]] @@ -4142,9 +3855,15 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64 0.21.5", + "base64", ] +[[package]] +name = "rustls-pki-types" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -4155,6 +3874,17 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustls-webpki" +version = "0.102.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.14" @@ -4173,26 +3903,45 @@ dependencies = [ "wait-timeout", ] +[[package]] +name = "rustyline-async" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6eb06391513b2184f0a5405c11a4a0a5302e8be442f4c5c35267187c2b37d5" +dependencies = [ + "crossterm", + "futures-channel", + "futures-util", + "pin-project", + "thingbuf", + "thiserror", + "unicode-segmentation", + "unicode-width", +] + [[package]] name = "ryu" -version = "1.0.15" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" [[package]] -name = "schannel" -version = "0.1.22" +name = "same-file" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ - "windows-sys 0.48.0", + "winapi-util", ] 
[[package]] -name = "scoped-tls" -version = "1.0.1" +name = "schannel" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] [[package]] name = "scopeguard" @@ -4249,33 +3998,18 @@ dependencies = [ [[package]] name = "semver" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver" -version = "1.0.20" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" dependencies = [ "serde", ] -[[package]] -name = "semver-parser" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" - [[package]] name = "serde" -version = "1.0.192" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] @@ -4289,20 +4023,11 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_bytes" -version = "0.11.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab33ec92f677585af6d88c65593ae2375adde54efdbf16d597f2cbc7a6d368ff" -dependencies = [ - "serde", -] - [[package]] name = "serde_cbor" version = "0.11.1" dependencies = [ - "half", + "half 1.8.3", "serde", ] @@ -4312,38 +4037,48 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" dependencies = [ - "half", + "half 1.8.3", "serde", ] [[package]] name = "serde_derive" -version = "1.0.192" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "itoa", "ryu", "serde", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +dependencies = [ + "itoa", + "serde", +] + [[package]] name = "serde_spanned" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" +checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" dependencies = [ "serde", ] @@ -4360,34 +4095,19 @@ dependencies = [ "serde", ] -[[package]] -name = 
"serde_v8" -version = "0.131.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38cafa16d0a4288d75925351bb54d06d2e830118ad3fad393947bb11f91b18f3" -dependencies = [ - "bytes", - "derive_more", - "num-bigint", - "serde", - "serde_bytes", - "smallvec", - "thiserror", - "v8", -] - [[package]] name = "serde_with" -version = "3.4.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +checksum = "ee80b0e361bbf88fd2f6e242ccd19cfda072cb0faa6ae694ecee08199938569a" dependencies = [ - "base64 0.21.5", + "base64", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.1.0", + "indexmap 2.2.6", "serde", + "serde_derive", "serde_json", "serde_with_macros", "time", @@ -4395,47 +4115,36 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.4.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +checksum = "6561dc161a9224638a31d876ccdfefbc1df91d3f3a8342eddb35f055d48c7655" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] name = "serde_yaml" -version = "0.9.27" +version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "itoa", "ryu", "serde", "unsafe-libyaml", ] -[[package]] -name = "sha-1" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" -dependencies = [ - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.10.7", -] - [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -4447,7 +4156,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -4459,21 +4168,19 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] [[package]] name = "sha3" -version = "0.9.1" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", + "digest 0.10.7", "keccak", - "opaque-debug", ] [[package]] @@ -4486,10 +4193,31 @@ dependencies = [ ] [[package]] -name = "shlex" -version = "0.1.1" +name = "shell-words" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" + +[[package]] +name = "signal-hook" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-mio" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" +checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" +dependencies = [ + "libc", + "mio", + "signal-hook", +] [[package]] name = "signal-hook-registry" @@ -4508,9 +4236,9 @@ checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" [[package]] name = "signature" -version = "2.0.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe458c98333f9c8152221191a77e2a44e8325d0193484af2e9421a53019e57d" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -4544,102 +4272,40 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.2" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] -name = "smartstring" -version = "1.0.1" +name = "socket2" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29" +checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" dependencies = [ - "autocfg", - "static_assertions", - "version_check", + "libc", + "windows-sys 0.52.0", ] [[package]] -name = "snapshot_creator" -version = "0.1.0" -dependencies = [ - "dashmap", - "deno_ast", - "deno_core", -] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] -name = "socket2" -version = "0.4.10" +name = "spin" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" dependencies = [ - "libc", - "winapi", + "lock_api", ] [[package]] -name = "socket2" -version = "0.5.5" +name = "spki" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" -dependencies = [ - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "sourcemap" -version = "6.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4cbf65ca7dc576cf50e21f8d0712d96d4fcfd797389744b7b222a85cdf5bd90" -dependencies = [ - "data-encoding", - "debugid", - "if_chain", - "rustc_version 0.2.3", - "serde", - "serde_json", - "unicode-id", - "url", -] - -[[package]] -name = "sourcemap" -version = "7.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10da010a590ed2fa9ca8467b00ce7e9c5a8017742c0c09c45450efc172208c4b" -dependencies = [ - "data-encoding", - "debugid", - "if_chain", - "rustc_version 0.2.3", - "serde", - "serde_json", - "unicode-id", - "url", -] - -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - -[[package]] -name = 
"spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -dependencies = [ - "lock_api", -] - -[[package]] -name = "spki" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", @@ -4647,20 +4313,20 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85" +checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" dependencies = [ - "itertools 0.11.0", - "nom 7.1.3", + "itertools 0.12.1", + "nom", "unicode_categories", ] [[package]] name = "sqlx" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33" +checksum = "c9a2ccff1a000a5a59cd33da541d9f2fdcd9e6e8229cc200565942bff36d0aaa" dependencies = [ "sqlx-core", "sqlx-macros", @@ -4671,18 +4337,17 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d" +checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.11", "atoi", "byteorder", "bytes", "chrono", "crc", "crossbeam-queue", - "dotenvy", "either", "event-listener", "futures-channel", @@ -4692,13 +4357,13 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.1.0", + "indexmap 2.2.6", "log", "memchr", "once_cell", "paste", "percent-encoding", - "rustls", + "rustls 0.21.10", "rustls-pemfile", "serde", "serde_json", @@ -4715,9 +4380,9 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec" +checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" dependencies = [ "proc-macro2", "quote", @@ -4728,13 +4393,13 @@ dependencies = [ [[package]] name = "sqlx-macros-core" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc" +checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8" dependencies = [ "dotenvy", "either", - "heck", + "heck 0.4.1", "hex", "once_cell", "proc-macro2", @@ -4754,13 +4419,13 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db" +checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" dependencies = [ "atoi", - "base64 0.21.5", - "bitflags 2.4.1", + "base64", + "bitflags 2.5.0", "byteorder", "bytes", "chrono", @@ -4775,7 +4440,7 @@ dependencies = [ "generic-array", "hex", "hkdf", - "hmac 0.12.1", + "hmac", "itoa", "log", "md-5", @@ -4797,13 +4462,13 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.2" +version = 
"0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624" +checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" dependencies = [ "atoi", - "base64 0.21.5", - "bitflags 2.4.1", + "base64", + "bitflags 2.5.0", "byteorder", "chrono", "crc", @@ -4815,7 +4480,7 @@ dependencies = [ "futures-util", "hex", "hkdf", - "hmac 0.12.1", + "hmac", "home", "itoa", "log", @@ -4825,7 +4490,6 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "sha1", "sha2 0.10.8", "smallvec", "sqlx-core", @@ -4837,9 +4501,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f" +checksum = "b244ef0a8414da0bed4bb1910426e890b19e5e9bccc27ada6b797d05c55ae0aa" dependencies = [ "atoi", "chrono", @@ -4856,6 +4520,7 @@ dependencies = [ "sqlx-core", "tracing", "url", + "urlencoding", ] [[package]] @@ -4881,7 +4546,7 @@ dependencies = [ "quote", "regex-syntax 0.6.29", "strsim 0.10.0", - "syn 2.0.39", + "syn 2.0.55", "unicode-width", ] @@ -4908,43 +4573,25 @@ dependencies = [ [[package]] name = "ssh-key" -version = "0.6.2" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2180b3bc4955efd5661a97658d3cf4c8107e0d132f619195afe9486c13cca313" +checksum = "3b71299a724c8d84956caaf8fc3b3ea57c3587fe2d0b800cd0dc1f3599905d7e" dependencies = [ - "ed25519-dalek 2.0.0", + "ed25519-dalek 2.1.1", "p256", "p384", + "p521", "rand_core 0.6.4", "rsa", "sec1", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", "ssh-cipher", "ssh-encoding", "subtle", "zeroize", ] -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - -[[package]] -name = "stacker" -version = "0.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce" -dependencies = [ - "cc", - "cfg-if 1.0.0", - "libc", - "psm", - "winapi", -] - [[package]] name = "start-os" version = "0.3.5-rev.1" @@ -4953,53 +4600,53 @@ dependencies = [ "async-compression", "async-stream", "async-trait", - "avahi-sys", + "axum 0.7.5", + "axum-server", "base32", - "base64 0.21.5", + "base64", "base64ct", "basic-cookies", "blake3", "bytes", "chrono", "ciborium", - "clap 3.2.25", + "clap 4.5.4", "color-eyre", "console", "console-subscriber", - "container-init", - "cookie 0.18.0", - "cookie_store 0.20.0", + "cookie 0.18.1", + "cookie_store", "current_platform", "digest 0.10.7", "divrem", "ed25519 2.2.3", "ed25519-dalek 1.0.1", - "ed25519-dalek 2.0.0", + "ed25519-dalek 2.1.1", "emver", "fd-lock-rs", "futures", "gpt", "helpers", "hex", - "hmac 0.12.1", - "http", - "hyper", - "hyper-ws-listener", + "hmac", + "http 1.1.0", + "id-pool", "imbl", "imbl-value", "include_dir", - "indexmap 2.1.0", + "indexmap 2.2.6", "indicatif", "integer-encoding", "ipnet", "iprange", "isocountry", - "itertools 0.11.0", + "itertools 0.12.1", "jaq-core", "jaq-std", "josekit", - "js-engine", "jsonpath_lib", + "lazy_async_pool", + "lazy_format", "lazy_static", "libc", "log", @@ -5007,9 +4654,10 @@ dependencies = [ "models", "new_mime_guess", "nix 0.27.1", - "nom 7.1.3", + "nom", "num", "num_enum", + "once_cell", "openssh-keys", "openssl", "p256", @@ -5019,496 
+4667,106 @@ dependencies = [ "pkcs8", "prettytable-rs", "proptest", - "proptest-derive", - "rand 0.8.5", - "regex", - "reqwest", - "reqwest_cookie_store", - "rpassword", - "rpc-toolkit", - "rust-argon2", - "scopeguard", - "semver 1.0.20", - "serde", - "serde_json", - "serde_with", - "serde_yaml", - "sha2 0.10.8", - "simple-logging", - "sqlx", - "sscanf", - "ssh-key", - "stderrlog", - "tar", - "thiserror", - "tokio", - "tokio-rustls", - "tokio-socks", - "tokio-stream", - "tokio-tar", - "tokio-tungstenite", - "tokio-util", - "toml 0.8.8", - "torut", - "tracing", - "tracing-error", - "tracing-futures", - "tracing-journald", - "tracing-subscriber", - "trust-dns-server", - "typed-builder", - "url", - "urlencoding", - "uuid", - "zeroize", -] - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "stderrlog" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69a26bbf6de627d389164afa9783739b56746c6c72c4ed16539f4ff54170327b" -dependencies = [ - "atty", - "chrono", - "log", - "termcolor", - "thread_local", -] - -[[package]] -name = "string_cache" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" -dependencies = [ - "new_debug_unreachable", - "once_cell", - "parking_lot", - "phf_shared", - "precomputed-hash", - "serde", -] - -[[package]] -name = "string_cache_codegen" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" -dependencies = [ - "phf_generator", - "phf_shared", - "proc-macro2", - "quote", -] - -[[package]] -name = "string_enum" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fa4d4f81d7c05b9161f8de839975d3326328b8ba2831164b465524cc2f55252" -dependencies = [ - "pmutil", - "proc-macro2", - "quote", - "swc_macros_common", - "syn 2.0.39", -] - -[[package]] -name = "stringprep" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" -dependencies = [ - "finl_unicode", - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "strsim" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "strum" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.25.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.39", -] - -[[package]] -name = "subtle" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" - -[[package]] -name = "swc_atoms" -version = "0.5.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f54563d7dcba626d4acfe14ed12def7ecc28e004debe3ecd2c3ee07cc47e449" -dependencies = [ - "once_cell", - "rustc-hash", - "serde", - "string_cache", - "string_cache_codegen", - "triomphe", -] - -[[package]] -name = "swc_common" -version = "0.32.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cb7fcd56655c8ae7dcf2344f0be6cbff4d9c7cb401fe3ec8e56e1de8dfe582" -dependencies = [ - "ast_node", - "better_scoped_tls", - "cfg-if 1.0.0", - "either", - "from_variant", - "new_debug_unreachable", - "num-bigint", - "once_cell", - "rustc-hash", - "serde", - "siphasher", - "sourcemap 6.4.1", - "string_cache", - "swc_atoms", - "swc_eq_ignore_macros", - "swc_visit", - "tracing", - "unicode-width", - "url", -] - -[[package]] -name = "swc_config" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ba1c7a40d38f9dd4e9a046975d3faf95af42937b34b2b963be4d8f01239584b" -dependencies = [ - "indexmap 1.9.3", - "serde", - "serde_json", - "swc_config_macro", -] - -[[package]] -name = "swc_config_macro" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5b5aaca9a0082be4515f0fbbecc191bf5829cd25b5b9c0a2810f6a2bb0d6829" -dependencies = [ - "pmutil", - "proc-macro2", - "quote", - "swc_macros_common", - "syn 2.0.39", -] - -[[package]] -name = "swc_ecma_ast" -version = "0.109.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bc2286cedd688a68f214faa1c19bb5cceab7c9c54d0cbe3273e4c1704e38f69" -dependencies = [ - "bitflags 2.4.1", - "is-macro", - "num-bigint", - "scoped-tls", - "serde", - "string_enum", - "swc_atoms", - "swc_common", - "unicode-id", -] - -[[package]] -name = "swc_ecma_codegen" -version = "0.144.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e62ba2c0ed1f119fc1a76542d007f1b2c12854d54dea15f5491363227debe11" -dependencies = [ - "memchr", - "num-bigint", - "once_cell", - "rustc-hash", - "serde", - "sourcemap 6.4.1", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_codegen_macros", - "tracing", -] - -[[package]] -name = "swc_ecma_codegen_macros" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcdff076dccca6cc6a0e0b2a2c8acfb066014382bc6df98ec99e755484814384" -dependencies = [ - "pmutil", - "proc-macro2", - "quote", - "swc_macros_common", - "syn 2.0.39", -] - -[[package]] -name = "swc_ecma_loader" -version = "0.44.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d7c322462657ae27ac090a2c89f7e456c94416284a2f5ecf66c43a6a3c19d1" -dependencies = [ - "anyhow", - "pathdiff", - "serde", - "swc_common", - "tracing", -] - -[[package]] -name = "swc_ecma_parser" -version = "0.139.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3eab46cb863bc5cd61535464e07e5b74d5f792fa26a27b9f6fd4c8daca9903b7" -dependencies = [ - "either", - "num-bigint", - "num-traits", - "serde", - "smallvec", - "smartstring", - "stacker", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "tracing", - "typed-arena", -] - -[[package]] -name = "swc_ecma_transforms_base" -version = "0.132.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ffd4a8149052bfc1ec1832fcbe04f317846ce635a49ec438df33b06db27d26" -dependencies = [ - 
"better_scoped_tls", - "bitflags 2.4.1", - "indexmap 1.9.3", - "once_cell", - "phf", - "rustc-hash", - "serde", - "smallvec", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_parser", - "swc_ecma_utils", - "swc_ecma_visit", - "tracing", -] - -[[package]] -name = "swc_ecma_transforms_classes" -version = "0.121.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4b7fee0e2c6f12456d2aefb2418f2f26529b995945d493e1dce35a5a22584fc" -dependencies = [ - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_transforms_base", - "swc_ecma_utils", - "swc_ecma_visit", -] - -[[package]] -name = "swc_ecma_transforms_macros" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8188eab297da773836ef5cf2af03ee5cca7a563e1be4b146f8141452c28cc690" -dependencies = [ - "pmutil", - "proc-macro2", - "quote", - "swc_macros_common", - "syn 2.0.39", -] - -[[package]] -name = "swc_ecma_transforms_proposal" -version = "0.166.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122fd9a69f464694edefbf9c59106b3c15e5cc8cb8575a97836e4fb79018e98f" -dependencies = [ - "either", - "rustc-hash", - "serde", - "smallvec", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_transforms_base", - "swc_ecma_transforms_classes", - "swc_ecma_transforms_macros", - "swc_ecma_utils", - "swc_ecma_visit", -] - -[[package]] -name = "swc_ecma_transforms_react" -version = "0.178.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "675b5c755b0448268830e85e59429095d3423c0ce4a850b209c6f0eeab069f63" -dependencies = [ - "base64 0.13.1", - "dashmap", - "indexmap 1.9.3", - "once_cell", + "proptest-derive", + "rand 0.8.5", + "regex", + "reqwest", + "reqwest_cookie_store", + "rpassword", + "rpc-toolkit 0.2.3 (git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", + "rust-argon2", + "rustyline-async", + "semver", "serde", - "sha-1", - "string_enum", - "swc_atoms", - "swc_common", - "swc_config", - "swc_ecma_ast", - "swc_ecma_parser", - "swc_ecma_transforms_base", - "swc_ecma_transforms_macros", - "swc_ecma_utils", - "swc_ecma_visit", + "serde_json", + "serde_with", + "serde_yaml", + "sha2 0.10.8", + "shell-words", + "simple-logging", + "sqlx", + "sscanf", + "ssh-key", + "stderrlog", + "tar", + "thiserror", + "tokio", + "tokio-rustls", + "tokio-socks", + "tokio-stream", + "tokio-tar", + "tokio-tungstenite", + "tokio-util", + "toml 0.8.12", + "torut", + "tracing", + "tracing-error", + "tracing-futures", + "tracing-journald", + "tracing-subscriber", + "trust-dns-server", + "ts-rs", + "typed-builder", + "url", + "urlencoding", + "uuid", + "zeroize", ] [[package]] -name = "swc_ecma_transforms_typescript" -version = "0.182.3" +name = "stderrlog" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eba97b1ea71739fcf278aedad4677a3cacb52288a3f3566191b70d16a889de6" +checksum = "69a26bbf6de627d389164afa9783739b56746c6c72c4ed16539f4ff54170327b" dependencies = [ - "serde", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_transforms_base", - "swc_ecma_transforms_react", - "swc_ecma_utils", - "swc_ecma_visit", + "atty", + "chrono", + "log", + "termcolor", + "thread_local", ] [[package]] -name = "swc_ecma_utils" -version = "0.122.0" +name = "string_cache" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11006a3398ffd4693c4d3b0a1b1a5030edbdc04228159f5301120a6178144708" +checksum = 
"f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" dependencies = [ - "indexmap 1.9.3", - "num_cpus", + "new_debug_unreachable", "once_cell", - "rustc-hash", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_ecma_visit", - "tracing", - "unicode-id", -] - -[[package]] -name = "swc_ecma_visit" -version = "0.95.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f628ec196e76e67892441e14eef2e423a738543d32bffdabfeec20c29582117" -dependencies = [ - "num-bigint", - "swc_atoms", - "swc_common", - "swc_ecma_ast", - "swc_visit", - "tracing", + "parking_lot", + "phf_shared", + "precomputed-hash", ] [[package]] -name = "swc_eq_ignore_macros" -version = "0.1.2" +name = "stringprep" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05a95d367e228d52484c53336991fdcf47b6b553ef835d9159db4ba40efb0ee8" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" dependencies = [ - "pmutil", - "proc-macro2", - "quote", - "syn 2.0.39", + "finl_unicode", + "unicode-bidi", + "unicode-normalization", ] [[package]] -name = "swc_macros_common" -version = "0.3.8" +name = "strsim" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a273205ccb09b51fabe88c49f3b34c5a4631c4c00a16ae20e03111d6a42e832" -dependencies = [ - "pmutil", - "proc-macro2", - "quote", - "syn 2.0.39", -] +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] -name = "swc_visit" -version = "0.5.7" +name = "strsim" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e87c337fbb2d191bf371173dea6a957f01899adb8f189c6c31b122a6cfc98fc3" -dependencies = [ - "either", - "swc_visit_macros", -] +checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" [[package]] -name = "swc_visit_macros" -version = "0.5.8" +name = "subtle" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f322730fb82f3930a450ac24de8c98523af7d34ab8cb2f46bcb405839891a99" -dependencies = [ - "Inflector", - "pmutil", - "proc-macro2", - "quote", - "swc_macros_common", - "syn 2.0.39", -] +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" @@ -5523,9 +4781,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.39" +version = "2.0.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" +checksum = "002a1b3dbf967edfafc32655d0f377ab0bb7b994aa1d32c8cc7e9b8bf3ebb8f0" dependencies = [ "proc-macro2", "quote", @@ -5538,6 +4796,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384595c11a4e2969895cad5a8c4029115f5ab956a9e5ef4de79d11a426e5f20c" + [[package]] name = "system-configuration" version = "0.5.1" @@ -5573,20 +4837,19 @@ checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" dependencies = [ "filetime", "libc", - "xattr 1.0.1", + "xattr 1.3.1", ] [[package]] name = "tempfile" -version = "3.8.1" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +checksum = 
"85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fastrand", - "redox_syscall 0.4.1", - "rustix 0.38.21", - "windows-sys 0.48.0", + "rustix", + "windows-sys 0.52.0", ] [[package]] @@ -5610,47 +4873,39 @@ dependencies = [ ] [[package]] -name = "text_lines" -version = "0.6.0" +name = "textwrap" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fd5828de7deaa782e1dd713006ae96b3bee32d3279b79eb67ecf8072c059bcf" -dependencies = [ - "serde", -] +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" [[package]] -name = "textwrap" -version = "0.11.0" +name = "thingbuf" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +checksum = "4706f1bfb859af03f099ada2de3cea3e515843c2d3e93b7893f16d94a37f9415" dependencies = [ - "unicode-width", + "parking_lot", + "pin-project", ] -[[package]] -name = "textwrap" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" - [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] @@ -5666,22 +4921,23 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", ] [[package]] name = "time" -version = "0.3.30" +version = "0.3.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" dependencies = [ "deranged", "itoa", + "num-conv", "powerfmt", "serde", "time-core", @@ -5696,10 +4952,11 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.15" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" +checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" dependencies = [ + "num-conv", "time-core", ] @@ -5729,9 +4986,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.34.0" +version = "1.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c014766411e834f7af5b8f4cf46257aab4036ca95e9d2c144a10f59ad6f5b9" +checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" dependencies = 
[ "backtrace", "bytes", @@ -5741,7 +4998,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2", "tokio-macros", "tracing", "windows-sys 0.48.0", @@ -5765,7 +5022,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] @@ -5780,11 +5037,12 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.24.1" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ - "rustls", + "rustls 0.22.3", + "rustls-pki-types", "tokio", ] @@ -5802,9 +5060,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -5828,9 +5086,9 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.20.1" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +checksum = "c83b561d025642014097b66e6c1bb422783339e0909e4429cde4749d1990bc38" dependencies = [ "futures-util", "log", @@ -5868,14 +5126,14 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.8" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35" +checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.21.0", + "toml_edit 0.22.9", ] [[package]] @@ -5893,35 +5151,35 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.20.7" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.21.0" +version = "0.22.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" +checksum = "8e40bb779c5187258fd7aad0eb68cb8706a0a81fa712fbea808ab43c4b8374c4" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.6.5", ] [[package]] @@ -5932,13 +5190,13 @@ checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" dependencies = [ "async-stream", "async-trait", - "axum", - "base64 0.21.5", + "axum 0.6.20", + "base64", "bytes", - "h2", - "http", - "http-body", - "hyper", + "h2 0.3.25", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.28", "hyper-timeout", "percent-encoding", 
"pin-project", @@ -5954,19 +5212,18 @@ dependencies = [ [[package]] name = "torut" version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99febc413f26cf855b3a309c5872edff5c31e0ffe9c2fce5681868761df36f69" +source = "git+https://github.com/Start9Labs/torut.git?branch=update/dependencies#cc7a1425a01214465e106975e6690794d8551bdb" dependencies = [ "base32", - "base64 0.13.1", + "base64", "derive_more", "ed25519-dalek 1.0.1", "hex", - "hmac 0.11.0", + "hmac", "rand 0.7.3", "serde", "serde_derive", - "sha2 0.9.9", + "sha2 0.10.8", "sha3", "tokio", ] @@ -6023,7 +5280,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] @@ -6098,23 +5355,13 @@ dependencies = [ [[package]] name = "treediff" -version = "4.0.2" +version = "4.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52984d277bdf2a751072b5df30ec0377febdb02f7696d64c2d7d54630bac4303" +checksum = "4d127780145176e2b5d16611cc25a900150e86e9fd79d3bde6ff3a37359c9cb5" dependencies = [ "serde_json", ] -[[package]] -name = "triomphe" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee8098afad3fb0c54a9007aab6804558410503ad676d4633f9c2559a00ac0f" -dependencies = [ - "serde", - "stable_deref_trait", -] - [[package]] name = "trust-dns-proto" version = "0.23.2" @@ -6122,7 +5369,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374" dependencies = [ "async-trait", - "cfg-if 1.0.0", + "cfg-if", "data-encoding", "enum-as-inner", "futures-channel", @@ -6148,7 +5395,7 @@ checksum = "c540f73c2b2ec2f6c54eabd0900e7aafb747a820224b742f556e8faabb461bc7" dependencies = [ "async-trait", "bytes", - "cfg-if 1.0.0", + "cfg-if", "drain", "enum-as-inner", "futures-executor", @@ -6164,20 +5411,41 @@ dependencies = [ [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "ts-rs" +version = "8.1.0" +source = "git+https://github.com/dr-bonez/ts-rs.git?branch=feature/top-level-type-override#f37e51abbf3b4afc6dc2fdea72100ed0ee463a06" +dependencies = [ + "thiserror", + "ts-rs-macros", +] + +[[package]] +name = "ts-rs-macros" +version = "8.1.0" +source = "git+https://github.com/dr-bonez/ts-rs.git?branch=feature/top-level-type-override#f37e51abbf3b4afc6dc2fdea72100ed0ee463a06" +dependencies = [ + "Inflector", + "proc-macro2", + "quote", + "syn 2.0.55", + "termcolor", +] [[package]] name = "tungstenite" -version = "0.20.1" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" dependencies = [ "byteorder", "bytes", "data-encoding", - "http", + "http 1.1.0", "httparse", "log", "native-tls", @@ -6188,30 +5456,24 @@ dependencies = [ "utf-8", ] -[[package]] -name = "typed-arena" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a" - [[package]] name = "typed-builder" -version = "0.17.0" +version = "0.18.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c6a006a6d3d6a6f143fda41cf4d1ad35110080687628c9f2117bd3cc7924f3" +checksum = "444d8748011b93cb168770e8092458cb0f8854f931ff82fdf6ddfbd72a9c933e" dependencies = [ "typed-builder-macro", ] [[package]] name = "typed-builder-macro" -version = "0.17.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fa054ee5e2346187d631d2f1d1fd3b33676772d6d03a2d84e1c5213b31674ee" +checksum = "563b3b88238ec95680aef36bdece66896eaa7ce3c0f1b4f39d38fb2435261352" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] @@ -6237,15 +5499,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" - -[[package]] -name = "unicode-id" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1b6def86329695390197b82c1e244a54a131ceb66c996f2088a3876e2ae083f" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" @@ -6255,18 +5511,18 @@ checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode-width" @@ -6288,9 +5544,9 @@ checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" [[package]] name = "unsafe-libyaml" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "untrusted" @@ -6300,12 +5556,12 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", - "idna 0.4.0", + "idna 0.5.0", "percent-encoding", "serde", ] @@ -6323,24 +5579,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] -name = "uuid" -version = "1.5.0" +name = "utf8parse" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc" -dependencies = [ - "getrandom 0.2.11", -] +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] -name = "v8" -version = "0.79.2" +name = "uuid" +version = "1.8.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15561535230812a1db89a696f1f16a12ae6c2c370c6b2241c68d4cb33963faf" +checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" dependencies = [ - "bitflags 1.3.2", - "fslock", - "once_cell", - "which 4.4.2", + "getrandom 0.2.12", ] [[package]] @@ -6355,12 +5605,6 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" -[[package]] -name = "vec_map" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" - [[package]] name = "version_check" version = "0.9.4" @@ -6376,6 +5620,16 @@ dependencies = [ "libc", ] +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "want" version = "0.3.1" @@ -6397,38 +5651,44 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + [[package]] name = "wasm-bindgen" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.38" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -6436,9 +5696,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6446,28 +5706,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", "wasm-bindgen-backend", 
"wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "wasm-streams" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4609d447824375f43e1ffbc051b50ad8f4b3ae8219680c94452ea05eb240ac7" +checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" dependencies = [ "futures-util", "js-sys", @@ -6478,9 +5738,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.65" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5db499c5f66323272151db0e666cd34f78617522fb0c1604d31a27c50c206a85" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" dependencies = [ "js-sys", "wasm-bindgen", @@ -6488,40 +5748,20 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" -dependencies = [ - "rustls-webpki", -] - -[[package]] -name = "which" -version = "3.1.1" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d011071ae14a2f6671d0b74080ae0cd8ebf3a6f8c9589a2cd45f23126fe29724" -dependencies = [ - "libc", -] +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] -name = "which" -version = "4.4.2" +name = "whoami" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" dependencies = [ - "either", - "home", - "once_cell", - "rustix 0.38.21", + "redox_syscall 0.4.1", + "wasite", ] -[[package]] -name = "whoami" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" - [[package]] name = "winapi" version = "0.3.9" @@ -6555,20 +5795,11 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" -version = "0.51.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.42.2", + "windows-targets 0.52.4", ] [[package]] @@ -6581,18 +5812,12 @@ dependencies = [ ] [[package]] -name = "windows-targets" -version = "0.42.2" +name = "windows-sys" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", 
- "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows-targets 0.52.4", ] [[package]] @@ -6611,10 +5836,19 @@ dependencies = [ ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" +name = "windows-targets" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] [[package]] name = "windows_aarch64_gnullvm" @@ -6623,10 +5857,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" +name = "windows_aarch64_gnullvm" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" [[package]] name = "windows_aarch64_msvc" @@ -6635,10 +5869,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] -name = "windows_i686_gnu" -version = "0.42.2" +name = "windows_aarch64_msvc" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" [[package]] name = "windows_i686_gnu" @@ -6647,10 +5881,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] -name = "windows_i686_msvc" -version = "0.42.2" +name = "windows_i686_gnu" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" [[package]] name = "windows_i686_msvc" @@ -6659,10 +5893,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" +name = "windows_i686_msvc" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" [[package]] name = "windows_x86_64_gnu" @@ -6671,10 +5905,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" +name = "windows_x86_64_gnu" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" [[package]] name = "windows_x86_64_gnullvm" @@ -6683,10 +5917,10 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" +name = "windows_x86_64_gnullvm" +version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" [[package]] name = "windows_x86_64_msvc" @@ -6694,11 +5928,26 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + [[package]] name = "winnow" -version = "0.5.19" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b" +checksum = "dffa400e67ed5a4dd237983829e66475f0a4a26938c4b04c21baede6262215b8" dependencies = [ "memchr", ] @@ -6709,7 +5958,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "windows-sys 0.48.0", ] @@ -6733,29 +5982,20 @@ dependencies = [ [[package]] name = "xattr" -version = "1.0.1" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" +checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" dependencies = [ "libc", + "linux-raw-sys", + "rustix", ] [[package]] name = "yajrc" -version = "0.1.0" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b40687b4c165cb760e35730055c8840f36897e7c98099b2d3d66ba8cb624c79a" -dependencies = [ - "anyhow", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "yajrc" -version = "0.1.0" -source = "git+https://github.com/dr-bonez/yajrc.git?branch=develop#72a22f7ac2197d7a5cdce4be601cf20e5280eec5" +checksum = "ce7af47ad983c2f8357333ef87d859e66deb7eef4bf6f9e1ae7b5e99044a48bf" dependencies = [ "anyhow", "serde", @@ -6769,7 +6009,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f355ab62ebe30b758c1f4ab096a306722c4b7dbfb9d8c07d18c70d71a945588" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.11", "hashbrown 0.13.2", "lazy_static", "serde", @@ -6777,29 +6017,29 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.7.25" +version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cd369a67c0edfef15010f980c3cbe45d7f651deac2cd67ce097cd801de16557" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.25" +version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2f140bda219a26ccc0cdb03dba58af72590c53b22642577d88a927bc5c87d6b" +checksum = 
"9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] [[package]] name = "zeroize" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" dependencies = [ "zeroize_derive", ] @@ -6812,5 +6052,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.55", ] diff --git a/core/Cargo.toml b/core/Cargo.toml index 8943625224..5b6823df26 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -1,10 +1,3 @@ [workspace] -members = [ - "container-init", - "helpers", - "js-engine", - "models", - "snapshot-creator", - "startos", -] +members = ["helpers", "models", "startos"] diff --git a/core/README.md b/core/README.md index 7a4be62a13..76f4d4c864 100644 --- a/core/README.md +++ b/core/README.md @@ -8,9 +8,6 @@ ## Structure - `startos`: This contains the core library for StartOS that supports building `startbox`. -- `container-init` (ignore: deprecated) -- `js-engine`: This contains the library required to build `deno` to support running `.js` maintainer scripts for v0.3 -- `snapshot-creator`: This contains a binary used to build `v8` runtime snapshots, required for initializing `start-deno` - `helpers`: This contains utility functions used across both `startos` and `js-engine` - `models`: This contains types that are shared across `startos`, `js-engine`, and `helpers` @@ -24,8 +21,6 @@ several different names for different behaviour: `startd` and control it similarly to the UI - `start-sdk`: This is a CLI tool that aids in building and packaging services you wish to deploy to StartOS -- `start-deno`: This is a CLI tool invoked by startd to run `.js` maintainer scripts for v0.3 -- `avahi-alias`: This is a CLI tool invoked by startd to create aliases in `avahi` for mDNS ## Questions diff --git a/core/build-prod.sh b/core/build-prod.sh index 2144297274..8b61849426 100755 --- a/core/build-prod.sh +++ b/core/build-prod.sh @@ -18,22 +18,22 @@ cd .. FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')" RUSTFLAGS="" -alias 'rust-gnu-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/usr/local/cargo/registry -v "$(pwd)":/home/rust/src -w /home/rust/src -P start9/rust-arm-cross:aarch64' -alias 'rust-musl-builder'='docker run $USE_TTY --rm -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$(pwd)":/home/rust/src -P messense/rust-musl-cross:$ARCH-musl' +if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then + RUSTFLAGS="--cfg tokio_unstable" +fi + +alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl' set +e fail= echo "FEATURES=\"$FEATURES\"" echo "RUSTFLAGS=\"$RUSTFLAGS\"" -if ! rust-gnu-builder sh -c "(cd core && cargo build --release --features avahi-alias,$FEATURES --locked --bin startbox --target=$ARCH-unknown-linux-gnu)"; then +if ! rust-musl-builder sh -c "(cd core && cargo build --release $(if [ -n "$FEATURES" ]; then echo "--features $FEATURES"; fi) --locked --bin startbox --target=$ARCH-unknown-linux-musl)"; then + fail=true +fi +if ! 
rust-musl-builder sh -c "(cd core && cargo build --release --no-default-features --features container-runtime,$FEATURES --locked --bin containerbox --target=$ARCH-unknown-linux-musl)"; then fail=true fi -for ARCH in x86_64 aarch64 -do - if ! rust-musl-builder sh -c "(cd core && cargo build --release --locked --bin container-init)"; then - fail=true - fi -done set -e cd core diff --git a/core/build-v8-snapshot.sh b/core/build-v8-snapshot.sh deleted file mode 100755 index 58ff27c794..0000000000 --- a/core/build-v8-snapshot.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash -# Reason for this being is that we need to create a snapshot for the deno runtime. It wants to pull 3 files from build, and during the creation it gets embedded, but for some -# reason during the actual runtime it is looking for them. So this will create a docker in arm that creates the snaphot needed for the arm - -cd "$(dirname "${BASH_SOURCE[0]}")" - -set -e -shopt -s expand_aliases - -if [ -z "$ARCH" ]; then - ARCH=$(uname -m) -fi - -USE_TTY= -if tty -s; then - USE_TTY="-it" -fi - -alias 'rust-gnu-builder'='docker run $USE_TTY --rm -v "$HOME/.cargo/registry":/usr/local/cargo/registry -v "$(pwd)":/home/rust/src -w /home/rust/src -P start9/rust-arm-cross:aarch64' - -echo "Building " -cd .. -rust-gnu-builder sh -c "(cd core/ && cargo build -p snapshot_creator --release --target=${ARCH}-unknown-linux-gnu)" -cd - - -if [ "$ARCH" = "aarch64" ]; then - DOCKER_ARCH='arm64/v8' -elif [ "$ARCH" = "x86_64" ]; then - DOCKER_ARCH='amd64' -fi - -echo "Creating Arm v8 Snapshot" -docker run $USE_TTY --platform "linux/${DOCKER_ARCH}" --mount type=bind,src=$(pwd),dst=/mnt ubuntu:22.04 /bin/sh -c "cd /mnt && /mnt/target/${ARCH}-unknown-linux-gnu/release/snapshot_creator" -sudo chown -R $USER target -sudo chown -R $USER ~/.cargo -sudo chown $USER JS_SNAPSHOT.bin -sudo chmod 0644 JS_SNAPSHOT.bin - -sudo mv -f JS_SNAPSHOT.bin ./js-engine/src/artifacts/JS_SNAPSHOT.${ARCH}.bin \ No newline at end of file diff --git a/core/container-init/Cargo.toml b/core/container-init/Cargo.toml deleted file mode 100644 index 8229973d78..0000000000 --- a/core/container-init/Cargo.toml +++ /dev/null @@ -1,39 +0,0 @@ -[package] -name = "container-init" -version = "0.1.0" -edition = "2021" -rust = "1.66" - -[features] -dev = [] -metal = [] -sound = [] -unstable = [] - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[dependencies] -async-stream = "0.3" -# cgroups-rs = "0.2" -color-eyre = "0.6" -futures = "0.3" -serde = { version = "1", features = ["derive", "rc"] } -serde_json = "1" -helpers = { path = "../helpers" } -imbl = "2" -nix = { version = "0.27", features = ["process", "signal"] } -tokio = { version = "1", features = ["full"] } -tokio-stream = { version = "0.1", features = ["io-util", "sync", "net"] } -tracing = "0.1" -tracing-error = "0.2" -tracing-futures = "0.2" -tracing-subscriber = { version = "0.3", features = ["env-filter"] } -yajrc = { version = "*", git = "https://github.com/dr-bonez/yajrc.git", branch = "develop" } - -[target.'cfg(target_os = "linux")'.dependencies] -procfs = "0.15" - -[profile.test] -opt-level = 3 - -[profile.dev.package.backtrace] -opt-level = 3 diff --git a/core/container-init/src/lib.rs b/core/container-init/src/lib.rs deleted file mode 100644 index 63d3380a73..0000000000 --- a/core/container-init/src/lib.rs +++ /dev/null @@ -1,214 +0,0 @@ -use nix::unistd::Pid; -use serde::{Deserialize, Serialize, Serializer}; -use yajrc::RpcMethod; - -/// Know what the process is called 
-#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct ProcessId(pub u32); -impl From for Pid { - fn from(pid: ProcessId) -> Self { - Pid::from_raw(pid.0 as i32) - } -} -impl From for ProcessId { - fn from(pid: Pid) -> Self { - ProcessId(pid.as_raw() as u32) - } -} -impl From for ProcessId { - fn from(pid: i32) -> Self { - ProcessId(pid as u32) - } -} - -#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct ProcessGroupId(pub u32); - -#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[serde(rename_all = "kebab-case")] -pub enum OutputStrategy { - Inherit, - Collect, -} - -#[derive(Debug, Clone, Copy)] -pub struct RunCommand; -impl Serialize for RunCommand { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - Serialize::serialize(Self.as_str(), serializer) - } -} -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct RunCommandParams { - pub gid: Option, - pub command: String, - pub args: Vec, - pub output: OutputStrategy, -} -impl RpcMethod for RunCommand { - type Params = RunCommandParams; - type Response = ProcessId; - fn as_str<'a>(&'a self) -> &'a str { - "command" - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub enum LogLevel { - Trace(String), - Warn(String), - Error(String), - Info(String), - Debug(String), -} -impl LogLevel { - pub fn trace(&self) { - match self { - LogLevel::Trace(x) => tracing::trace!("{}", x), - LogLevel::Warn(x) => tracing::warn!("{}", x), - LogLevel::Error(x) => tracing::error!("{}", x), - LogLevel::Info(x) => tracing::info!("{}", x), - LogLevel::Debug(x) => tracing::debug!("{}", x), - } - } -} - -#[derive(Debug, Clone, Copy)] -pub struct Log; -impl Serialize for Log { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - Serialize::serialize(Self.as_str(), serializer) - } -} -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct LogParams { - pub gid: Option, - pub level: LogLevel, -} -impl RpcMethod for Log { - type Params = LogParams; - type Response = (); - fn as_str<'a>(&'a self) -> &'a str { - "log" - } -} - -#[derive(Debug, Clone, Copy)] -pub struct ReadLineStdout; -impl Serialize for ReadLineStdout { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - Serialize::serialize(Self.as_str(), serializer) - } -} -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ReadLineStdoutParams { - pub pid: ProcessId, -} -impl RpcMethod for ReadLineStdout { - type Params = ReadLineStdoutParams; - type Response = String; - fn as_str<'a>(&'a self) -> &'a str { - "read-line-stdout" - } -} - -#[derive(Debug, Clone, Copy)] -pub struct ReadLineStderr; -impl Serialize for ReadLineStderr { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - Serialize::serialize(Self.as_str(), serializer) - } -} -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ReadLineStderrParams { - pub pid: ProcessId, -} -impl RpcMethod for ReadLineStderr { - type Params = ReadLineStderrParams; - type Response = String; - fn as_str<'a>(&'a self) -> &'a str { - "read-line-stderr" - } -} - -#[derive(Debug, Clone, Copy)] -pub struct Output; -impl Serialize for Output { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - Serialize::serialize(Self.as_str(), serializer) - } -} -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct OutputParams { - pub pid: ProcessId, -} -impl RpcMethod for Output 
{ - type Params = OutputParams; - type Response = String; - fn as_str<'a>(&'a self) -> &'a str { - "output" - } -} - -#[derive(Debug, Clone, Copy)] -pub struct SendSignal; -impl Serialize for SendSignal { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - Serialize::serialize(Self.as_str(), serializer) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SendSignalParams { - pub pid: ProcessId, - pub signal: u32, -} -impl RpcMethod for SendSignal { - type Params = SendSignalParams; - type Response = (); - fn as_str<'a>(&'a self) -> &'a str { - "signal" - } -} - -#[derive(Debug, Clone, Copy)] -pub struct SignalGroup; -impl Serialize for SignalGroup { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - Serialize::serialize(Self.as_str(), serializer) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct SignalGroupParams { - pub gid: ProcessGroupId, - pub signal: u32, -} -impl RpcMethod for SignalGroup { - type Params = SignalGroupParams; - type Response = (); - fn as_str<'a>(&'a self) -> &'a str { - "signal-group" - } -} diff --git a/core/container-init/src/main.rs b/core/container-init/src/main.rs deleted file mode 100644 index 9975378083..0000000000 --- a/core/container-init/src/main.rs +++ /dev/null @@ -1,428 +0,0 @@ -use std::collections::BTreeMap; -use std::ops::DerefMut; -use std::os::unix::process::ExitStatusExt; -use std::process::Stdio; -use std::sync::Arc; - -use container_init::{ - LogParams, OutputParams, OutputStrategy, ProcessGroupId, ProcessId, RunCommandParams, - SendSignalParams, SignalGroupParams, -}; -use futures::StreamExt; -use helpers::NonDetachingJoinHandle; -use nix::errno::Errno; -use nix::sys::signal::Signal; -use serde::{Deserialize, Serialize}; -use serde_json::json; -use tokio::io::{AsyncBufReadExt, AsyncWriteExt, BufReader}; -use tokio::process::{Child, Command}; -use tokio::select; -use tokio::sync::{watch, Mutex}; -use yajrc::{Id, RpcError}; - -/// Outputs embedded in the JSONRpc output of the executable. 
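Editor's note: the removed `container-init` crate above defines its wire protocol as `yajrc::RpcMethod` implementations, but the generic parameters were stripped in this rendering of the patch (e.g. `pub gid: Option,` and `pub args: Vec,`), which makes the deleted code hard to read. A minimal reconstruction sketch of one method, mirroring the deleted code rather than any particular yajrc release, with the missing type parameters inferred from the surrounding definitions:

use serde::{Deserialize, Serialize};
use yajrc::RpcMethod;

#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ProcessId(pub u32);

#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ProcessGroupId(pub u32);

#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
#[serde(rename_all = "kebab-case")]
pub enum OutputStrategy {
    Inherit,
    Collect,
}

// The original also hand-implemented `Serialize` for the marker type so that
// it serializes as the method name; omitted here for brevity.
#[derive(Debug, Clone, Copy)]
pub struct RunCommand;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RunCommandParams {
    pub gid: Option<ProcessGroupId>, // inferred: rendered as `Option,` above
    pub command: String,
    pub args: Vec<String>,           // inferred: rendered as `Vec,` above
    pub output: OutputStrategy,
}

impl RpcMethod for RunCommand {
    type Params = RunCommandParams;
    type Response = ProcessId;
    fn as_str<'a>(&'a self) -> &'a str {
        "command"
    }
}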
-#[derive(Debug, Clone, Serialize)] -#[serde(untagged)] -enum Output { - Command(ProcessId), - ReadLineStdout(String), - ReadLineStderr(String), - Output(String), - Log, - Signal, - SignalGroup, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(tag = "method", content = "params", rename_all = "kebab-case")] -enum Input { - /// Run a new command, with the args - Command(RunCommandParams), - /// Want to log locall on the service rather than the eos - Log(LogParams), - // /// Get a line of stdout from the command - // ReadLineStdout(ReadLineStdoutParams), - // /// Get a line of stderr from the command - // ReadLineStderr(ReadLineStderrParams), - /// Get output of command - Output(OutputParams), - /// Send the sigterm to the process - Signal(SendSignalParams), - /// Signal a group of processes - SignalGroup(SignalGroupParams), -} - -#[derive(Deserialize)] -struct IncomingRpc { - id: Id, - #[serde(flatten)] - input: Input, -} - -struct ChildInfo { - gid: Option, - child: Arc>>, - output: Option, -} - -struct InheritOutput { - _thread: NonDetachingJoinHandle<()>, - stdout: watch::Receiver, - stderr: watch::Receiver, -} - -struct HandlerMut { - processes: BTreeMap, - // groups: BTreeMap, -} - -#[derive(Clone)] -struct Handler { - children: Arc>, -} -impl Handler { - fn new() -> Self { - Handler { - children: Arc::new(Mutex::new(HandlerMut { - processes: BTreeMap::new(), - // groups: BTreeMap::new(), - })), - } - } - async fn handle(&self, req: Input) -> Result { - Ok(match req { - Input::Command(RunCommandParams { - gid, - command, - args, - output, - }) => Output::Command(self.command(gid, command, args, output).await?), - // Input::ReadLineStdout(ReadLineStdoutParams { pid }) => { - // Output::ReadLineStdout(self.read_line_stdout(pid).await?) - // } - // Input::ReadLineStderr(ReadLineStderrParams { pid }) => { - // Output::ReadLineStderr(self.read_line_stderr(pid).await?) - // } - Input::Log(LogParams { gid: _, level }) => { - level.trace(); - Output::Log - } - Input::Output(OutputParams { pid }) => Output::Output(self.output(pid).await?), - Input::Signal(SendSignalParams { pid, signal }) => { - self.signal(pid, signal).await?; - Output::Signal - } - Input::SignalGroup(SignalGroupParams { gid, signal }) => { - self.signal_group(gid, signal).await?; - Output::SignalGroup - } - }) - } - - async fn command( - &self, - gid: Option, - command: String, - args: Vec, - output: OutputStrategy, - ) -> Result { - let mut cmd = Command::new(command); - cmd.args(args); - cmd.kill_on_drop(true); - cmd.stdout(Stdio::piped()); - cmd.stderr(Stdio::piped()); - let mut child = cmd.spawn().map_err(|e| { - let mut err = yajrc::INTERNAL_ERROR.clone(); - err.data = Some(json!(e.to_string())); - err - })?; - let pid = ProcessId(child.id().ok_or_else(|| { - let mut err = yajrc::INTERNAL_ERROR.clone(); - err.data = Some(json!("Child has no pid")); - err - })?); - let output = match output { - OutputStrategy::Inherit => { - let (stdout_send, stdout) = watch::channel(String::new()); - let (stderr_send, stderr) = watch::channel(String::new()); - if let (Some(child_stdout), Some(child_stderr)) = - (child.stdout.take(), child.stderr.take()) - { - Some(InheritOutput { - _thread: tokio::spawn(async move { - tokio::join!( - async { - if let Err(e) = async { - let mut lines = BufReader::new(child_stdout).lines(); - while let Some(line) = lines.next_line().await? 
{ - tracing::info!("({}): {}", pid.0, line); - let _ = stdout_send.send(line); - } - Ok::<_, std::io::Error>(()) - } - .await - { - tracing::error!( - "Error reading stdout of pid {}: {}", - pid.0, - e - ); - } - }, - async { - if let Err(e) = async { - let mut lines = BufReader::new(child_stderr).lines(); - while let Some(line) = lines.next_line().await? { - tracing::warn!("({}): {}", pid.0, line); - let _ = stderr_send.send(line); - } - Ok::<_, std::io::Error>(()) - } - .await - { - tracing::error!( - "Error reading stdout of pid {}: {}", - pid.0, - e - ); - } - } - ); - }) - .into(), - stdout, - stderr, - }) - } else { - None - } - } - OutputStrategy::Collect => None, - }; - self.children.lock().await.processes.insert( - pid, - ChildInfo { - gid, - child: Arc::new(Mutex::new(Some(child))), - output, - }, - ); - Ok(pid) - } - - async fn output(&self, pid: ProcessId) -> Result { - let not_found = || { - let mut err = yajrc::INTERNAL_ERROR.clone(); - err.data = Some(json!(format!("Child with pid {} not found", pid.0))); - err - }; - let mut child = { - self.children - .lock() - .await - .processes - .get(&pid) - .ok_or_else(not_found)? - .child - .clone() - } - .lock_owned() - .await; - if let Some(child) = child.take() { - let output = child.wait_with_output().await?; - if output.status.success() { - Ok(String::from_utf8(output.stdout).map_err(|_| yajrc::PARSE_ERROR)?) - } else { - Err(RpcError { - code: output - .status - .code() - .or_else(|| output.status.signal().map(|s| 128 + s)) - .unwrap_or(0), - message: "Command failed".into(), - data: Some(json!(String::from_utf8(if output.stderr.is_empty() { - output.stdout - } else { - output.stderr - }) - .map_err(|_| yajrc::PARSE_ERROR)?)), - }) - } - } else { - Err(not_found()) - } - } - - async fn signal(&self, pid: ProcessId, signal: u32) -> Result<(), RpcError> { - let not_found = || { - let mut err = yajrc::INTERNAL_ERROR.clone(); - err.data = Some(json!(format!("Child with pid {} not found", pid.0))); - err - }; - - Self::killall(pid, Signal::try_from(signal as i32)?)?; - - if signal == 9 { - self.children - .lock() - .await - .processes - .remove(&pid) - .ok_or_else(not_found)?; - } - Ok(()) - } - - async fn signal_group(&self, gid: ProcessGroupId, signal: u32) -> Result<(), RpcError> { - let mut to_kill = Vec::new(); - { - let mut children_ref = self.children.lock().await; - let children = std::mem::take(&mut children_ref.deref_mut().processes); - for (pid, child_info) in children { - if child_info.gid == Some(gid) { - to_kill.push(pid); - } else { - children_ref.processes.insert(pid, child_info); - } - } - } - for pid in to_kill { - tracing::info!("Killing pid {}", pid.0); - Self::killall(pid, Signal::try_from(signal as i32)?)?; - } - - Ok(()) - } - - fn killall(pid: ProcessId, signal: Signal) -> Result<(), RpcError> { - for proc in procfs::process::all_processes()? 
{ - let stat = proc?.stat()?; - if ProcessId::from(stat.ppid) == pid { - Self::killall(stat.pid.into(), signal)?; - } - } - if let Err(e) = nix::sys::signal::kill(pid.into(), Some(signal)) { - if e != Errno::ESRCH { - tracing::error!("Failed to kill pid {}: {}", pid.0, e); - } - } - Ok(()) - } - - async fn graceful_exit(self) { - let kill_all = futures::stream::iter( - std::mem::take(&mut self.children.lock().await.deref_mut().processes).into_iter(), - ) - .for_each_concurrent(None, |(pid, child)| async move { - let _ = Self::killall(pid, Signal::SIGTERM); - if let Some(child) = child.child.lock().await.take() { - let _ = child.wait_with_output().await; - } - }); - kill_all.await - } -} - -#[tokio::main] -async fn main() { - use tokio::signal::unix::{signal, SignalKind}; - let mut sigint = signal(SignalKind::interrupt()).unwrap(); - let mut sigterm = signal(SignalKind::terminate()).unwrap(); - let mut sigquit = signal(SignalKind::quit()).unwrap(); - let mut sighangup = signal(SignalKind::hangup()).unwrap(); - - use tracing_error::ErrorLayer; - use tracing_subscriber::prelude::*; - use tracing_subscriber::{fmt, EnvFilter}; - - let filter_layer = EnvFilter::new("container_init=debug"); - let fmt_layer = fmt::layer().with_target(true); - - tracing_subscriber::registry() - .with(filter_layer) - .with(fmt_layer) - .with(ErrorLayer::default()) - .init(); - color_eyre::install().unwrap(); - - let handler = Handler::new(); - let handler_thread = async { - let listener = tokio::net::UnixListener::bind("/start9/sockets/rpc.sock")?; - loop { - let (stream, _) = listener.accept().await?; - let (r, w) = stream.into_split(); - let mut lines = BufReader::new(r).lines(); - let handler = handler.clone(); - tokio::spawn(async move { - let w = Arc::new(Mutex::new(w)); - while let Some(line) = lines.next_line().await.transpose() { - let handler = handler.clone(); - let w = w.clone(); - tokio::spawn(async move { - if let Err(e) = async { - let req = serde_json::from_str::(&line?)?; - match handler.handle(req.input).await { - Ok(output) => { - if w.lock().await.write_all( - format!("{}\n", json!({ "id": req.id, "jsonrpc": "2.0", "result": output })) - .as_bytes(), - ) - .await.is_err() { - tracing::error!("Error sending to {id:?}", id = req.id); - } - } - Err(e) => - if w - .lock() - .await - .write_all( - format!("{}\n", json!({ "id": req.id, "jsonrpc": "2.0", "error": e })) - .as_bytes(), - ) - .await.is_err() { - - tracing::error!("Handle + Error sending to {id:?}", id = req.id); - }, - } - Ok::<_, color_eyre::Report>(()) - } - .await - { - tracing::error!("Error parsing RPC request: {}", e); - tracing::debug!("{:?}", e); - } - }); - } - Ok::<_, std::io::Error>(()) - }); - } - #[allow(unreachable_code)] - Ok::<_, std::io::Error>(()) - }; - - select! 
{ - res = handler_thread => { - match res { - Ok(()) => tracing::debug!("Done with inputs/outputs"), - Err(e) => { - tracing::error!("Error reading RPC input: {}", e); - tracing::debug!("{:?}", e); - } - } - }, - _ = sigint.recv() => { - tracing::debug!("SIGINT"); - }, - _ = sigterm.recv() => { - tracing::debug!("SIGTERM"); - }, - _ = sigquit.recv() => { - tracing::debug!("SIGQUIT"); - }, - _ = sighangup.recv() => { - tracing::debug!("SIGHUP"); - } - } - handler.graceful_exit().await; - ::std::process::exit(0) -} diff --git a/core/helpers/Cargo.toml b/core/helpers/Cargo.toml index 83e1fd7888..228f3ef54d 100644 --- a/core/helpers/Cargo.toml +++ b/core/helpers/Cargo.toml @@ -11,9 +11,9 @@ futures = "0.3.28" lazy_async_pool = "0.3.3" models = { path = "../models" } pin-project = "1.1.3" +rpc-toolkit = "0.2.3" serde = { version = "1.0", features = ["derive", "rc"] } serde_json = "1.0" tokio = { version = "1", features = ["full"] } tokio-stream = { version = "0.1.14", features = ["io-util", "sync"] } tracing = "0.1.39" -yajrc = { version = "*", git = "https://github.com/dr-bonez/yajrc.git", branch = "develop" } diff --git a/core/helpers/src/lib.rs b/core/helpers/src/lib.rs index 226787590c..d913aefee9 100644 --- a/core/helpers/src/lib.rs +++ b/core/helpers/src/lib.rs @@ -11,11 +11,9 @@ use tokio::sync::oneshot; use tokio::task::{JoinError, JoinHandle, LocalSet}; mod byte_replacement_reader; -mod rpc_client; mod rsync; mod script_dir; pub use byte_replacement_reader::*; -pub use rpc_client::{RpcClient, UnixRpcClient}; pub use rsync::*; pub use script_dir::*; diff --git a/core/install-sdk.sh b/core/install-cli.sh similarity index 61% rename from core/install-sdk.sh rename to core/install-cli.sh index 3eec40012e..f4fe712ee6 100755 --- a/core/install-sdk.sh +++ b/core/install-cli.sh @@ -12,7 +12,4 @@ if [ -z "$PLATFORM" ]; then export PLATFORM=$(uname -m) fi -cargo install --path=./startos --no-default-features --features=js_engine,sdk,cli --locked -startbox_loc=$(which startbox) -ln -sf $startbox_loc $(dirname $startbox_loc)/start-cli -ln -sf $startbox_loc $(dirname $startbox_loc)/start-sdk +cargo install --path=./startos --no-default-features --features=cli,docker --bin start-cli --locked diff --git a/core/js-engine/Cargo.toml b/core/js-engine/Cargo.toml deleted file mode 100644 index 14205109b4..0000000000 --- a/core/js-engine/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "js-engine" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -async-trait = "0.1.74" -dashmap = "5.5.3" -deno_core = "=0.222.0" -deno_ast = { version = "=0.29.5", features = ["transpiling"] } -container-init = { path = "../container-init" } -reqwest = { version = "0.11.22" } -sha2 = "0.10.8" -itertools = "0.11.0" -lazy_static = "1.4.0" -models = { path = "../models" } -helpers = { path = "../helpers" } -serde = { version = "1.0", features = ["derive", "rc"] } -serde_json = "1.0" -tokio = { version = "1", features = ["full"] } -tracing = "0.1" diff --git a/core/js-engine/src/artifacts/JS_SNAPSHOT.aarch64.bin b/core/js-engine/src/artifacts/JS_SNAPSHOT.aarch64.bin deleted file mode 100644 index 305aa2d4cb..0000000000 Binary files a/core/js-engine/src/artifacts/JS_SNAPSHOT.aarch64.bin and /dev/null differ diff --git a/core/js-engine/src/artifacts/JS_SNAPSHOT.x86_64.bin b/core/js-engine/src/artifacts/JS_SNAPSHOT.x86_64.bin deleted file mode 100644 index 7f7d106897..0000000000 Binary files 
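Editor's note: the removed container-init main.rs above amounts to a small newline-delimited JSON-RPC 2.0 server listening on /start9/sockets/rpc.sock, presumably superseded by the rpc-toolkit dependency this patch adds to `helpers`. A condensed sketch of that framing for readers of the patch; types are inferred where the rendering stripped generics, and it is simplified (the real code dispatched a tagged `Input` enum and the `respond` helper below is illustrative):

use serde::Deserialize;
use serde_json::{json, Value};

#[derive(Deserialize)]
struct IncomingRpc {
    id: yajrc::Id,
    #[serde(flatten)]
    input: Value, // the deleted code flattened a tagged `Input` enum here
}

// One JSON object per line, as the deleted handler wrote it back on the socket.
fn respond(req_id: yajrc::Id, result: Value) -> String {
    format!("{}\n", json!({ "id": req_id, "jsonrpc": "2.0", "result": result }))
}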
a/core/js-engine/src/artifacts/JS_SNAPSHOT.x86_64.bin and /dev/null differ diff --git a/core/js-engine/src/artifacts/loadModule.js b/core/js-engine/src/artifacts/loadModule.js deleted file mode 100644 index de30eac891..0000000000 --- a/core/js-engine/src/artifacts/loadModule.js +++ /dev/null @@ -1,242 +0,0 @@ -import Deno from "/deno_global.js"; -import * as mainModule from "/embassy.js"; - -function requireParam(param) { - throw new Error(`Missing required parameter ${param}`); -} - -/** - * This is using the simplified json pointer spec, using no escapes and arrays - * @param {object} obj - * @param {string} pointer - * @returns - */ -function jsonPointerValue(obj, pointer) { - const paths = pointer.substring(1).split("/"); - for (const path of paths) { - if (obj == null) { - return null; - } - obj = (obj || {})[path]; - } - return obj; -} - -function maybeDate(value) { - if (!value) return value; - return new Date(value); -} -const writeFile = ( - { - path = requireParam("path"), - volumeId = requireParam("volumeId"), - toWrite = requireParam("toWrite"), - } = requireParam("options"), -) => Deno.core.opAsync("write_file", volumeId, path, toWrite); - -const readFile = ( - { volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), -) => Deno.core.opAsync("read_file", volumeId, path); - - - -const runDaemon = ( - { command = requireParam("command"), args = [] } = requireParam("options"), -) => { - let id = Deno.core.opAsync("start_command", command, args, "inherit", null); - let processId = id.then(x => x.processId) - let waitPromise = null; - return { - processId, - async wait() { - waitPromise = waitPromise || Deno.core.opAsync("wait_command", await processId) - return waitPromise - }, - async term(signal = 15) { - return Deno.core.opAsync("send_signal", await processId, 15) - } - } -}; -const runCommand = async ( - { command = requireParam("command"), args = [], timeoutMillis = 30000 } = requireParam("options"), -) => { - let id = Deno.core.opAsync("start_command", command, args, "collect", timeoutMillis); - let pid = id.then(x => x.processId) - return Deno.core.opAsync("wait_command", await pid) -}; -const signalGroup = async ( - { gid = requireParam("gid"), signal = requireParam("signal") } = requireParam("gid and signal") -) => { - return Deno.core.opAsync("signal_group", gid, signal); -}; -const sleep = (timeMs = requireParam("timeMs"), -) => Deno.core.opAsync("sleep", timeMs); - -const rename = ( - { - srcVolume = requireParam("srcVolume"), - dstVolume = requirePapram("dstVolume"), - srcPath = requireParam("srcPath"), - dstPath = requireParam("dstPath"), - } = requireParam("options"), -) => Deno.core.opAsync("rename", srcVolume, srcPath, dstVolume, dstPath); -const metadata = async ( - { volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), -) => { - const data = await Deno.core.opAsync("metadata", volumeId, path); - return { - ...data, - modified: maybeDate(data.modified), - created: maybeDate(data.created), - accessed: maybeDate(data.accessed), - }; -}; -const removeFile = ( - { volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), -) => Deno.core.opAsync("remove_file", volumeId, path); -const isSandboxed = () => Deno.core.ops["is_sandboxed"](); - -const writeJsonFile = ( - { - volumeId = requireParam("volumeId"), - path = requireParam("path"), - toWrite = requireParam("toWrite"), - } = requireParam("options"), -) => - writeFile({ - volumeId, - path, - toWrite: 
JSON.stringify(toWrite), - }); - -const chown = async ( - { - volumeId = requireParam("volumeId"), - path = requireParam("path"), - uid = requireParam("uid"), - } = requireParam("options"), -) => { - return await Deno.core.opAsync("chown", volumeId, path, uid); -}; - -const chmod = async ( - { - volumeId = requireParam("volumeId"), - path = requireParam("path"), - mode = requireParam("mode"), - } = requireParam("options"), -) => { - return await Deno.core.opAsync("chmod", volumeId, path, mode); -}; -const readJsonFile = async ( - { volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), -) => JSON.parse(await readFile({ volumeId, path })); -const createDir = ( - { volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), -) => Deno.core.opAsync("create_dir", volumeId, path); - -const readDir = ( - { volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), -) => Deno.core.opAsync("read_dir", volumeId, path); -const removeDir = ( - { volumeId = requireParam("volumeId"), path = requireParam("path") } = requireParam("options"), -) => Deno.core.opAsync("remove_dir", volumeId, path); -const trace = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_trace", whatToTrace); -const warn = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_warn", whatToTrace); -const error = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_error", whatToTrace); -const debug = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_debug", whatToTrace); -const info = (whatToTrace = requireParam('whatToTrace')) => Deno.core.opAsync("log_info", whatToTrace); -const fetch = async (url = requireParam ('url'), options = null) => { - const { body, ...response } = await Deno.core.opAsync("fetch", url, options); - const textValue = Promise.resolve(body); - return { - ...response, - text() { - return textValue; - }, - json() { - return textValue.then((x) => JSON.parse(x)); - }, - }; -}; - -const runRsync = ( - { - srcVolume = requireParam("srcVolume"), - dstVolume = requireParam("dstVolume"), - srcPath = requireParam("srcPath"), - dstPath = requireParam("dstPath"), - options = requireParam("options"), - } = requireParam("options"), -) => { - let id = Deno.core.opAsync("rsync", srcVolume, srcPath, dstVolume, dstPath, options); - let waitPromise = null; - return { - async id() { - return id - }, - async wait() { - waitPromise = waitPromise || Deno.core.opAsync("rsync_wait", await id) - return waitPromise - }, - async progress() { - return Deno.core.opAsync("rsync_progress", await id) - } - } -}; - -const diskUsage = async ({ - volumeId = requireParam("volumeId"), - path = requireParam("path"), -} = { volumeId: null, path: null }) => { - const [used, total] = await Deno.core.opAsync("disk_usage", volumeId, path); - return { used, total } -} - -const currentFunction = Deno.core.ops.current_function(); -const input = Deno.core.ops.get_input(); -const variable_args = Deno.core.ops.get_variable_args(); -const setState = (x) => Deno.core.ops.set_value(x); -const effects = { - chmod, - chown, - writeFile, - readFile, - writeJsonFile, - readJsonFile, - error, - warn, - debug, - trace, - info, - isSandboxed, - fetch, - removeFile, - createDir, - removeDir, - metadata, - rename, - runCommand, - sleep, - runDaemon, - signalGroup, - runRsync, - readDir, - diskUsage, -}; - -const defaults = { - "handleSignal": (effects, { gid, signal }) => { - return 
effects.signalGroup({ gid, signal }) - } -} - -const runFunction = jsonPointerValue(mainModule, currentFunction) || jsonPointerValue(defaults, currentFunction); -(async () => { - if (typeof runFunction !== "function") { - error(`Expecting ${currentFunction} to be a function`); - throw new Error(`Expecting ${currentFunction} to be a function`); - } - const answer = await runFunction(effects, input, ...variable_args); - setState(answer); -})(); diff --git a/core/js-engine/src/lib.rs b/core/js-engine/src/lib.rs deleted file mode 100644 index b0b9bea37c..0000000000 --- a/core/js-engine/src/lib.rs +++ /dev/null @@ -1,1219 +0,0 @@ -use std::collections::BTreeMap; -use std::path::{Path, PathBuf}; -use std::pin::Pin; -use std::sync::Arc; -use std::time::SystemTime; - -use deno_core::anyhow::{anyhow, bail}; -use deno_core::error::AnyError; -use deno_core::{ - resolve_import, Extension, FastString, JsRuntime, ModuleLoader, ModuleSource, - ModuleSourceFuture, ModuleSpecifier, ModuleType, OpDecl, ResolutionKind, RuntimeOptions, - Snapshot, -}; -use helpers::{script_dir, spawn_local, Rsync}; -use models::{PackageId, ProcedureName, Version, VolumeId}; -use serde::{Deserialize, Serialize}; -use serde_json::Value; -use tokio::io::AsyncReadExt; -use tokio::sync::Mutex; - -lazy_static::lazy_static! { - static ref DENO_GLOBAL_JS: ModuleSpecifier = "file:///deno_global.js".parse().unwrap(); - static ref LOAD_MODULE_JS: ModuleSpecifier = "file:///loadModule.js".parse().unwrap(); - static ref EMBASSY_JS: ModuleSpecifier = "file:///embassy.js".parse().unwrap(); -} - -pub trait PathForVolumeId: Send + Sync { - fn path_for( - &self, - data_dir: &Path, - package_id: &PackageId, - version: &Version, - volume_id: &VolumeId, - ) -> Option; - fn readonly(&self, volume_id: &VolumeId) -> bool; -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct JsCode(Arc); - -#[derive(Debug, Clone, Copy)] -pub enum JsError { - Unknown, - Javascript, - Engine, - BoundryLayerSerDe, - Tokio, - FileSystem, - Code(i32), - Timeout, - NotValidProcedureName, -} - -impl JsError { - pub fn as_code_num(&self) -> i32 { - match self { - JsError::Unknown => 1, - JsError::Javascript => 2, - JsError::Engine => 3, - JsError::BoundryLayerSerDe => 4, - JsError::Tokio => 5, - JsError::FileSystem => 6, - JsError::NotValidProcedureName => 7, - JsError::Code(code) => *code, - JsError::Timeout => 143, - } - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct MetadataJs { - file_type: String, - is_dir: bool, - is_file: bool, - is_symlink: bool, - len: u64, - modified: Option, - accessed: Option, - created: Option, - readonly: bool, - gid: u32, - mode: u32, - uid: u32, -} - -#[cfg(target_arch = "x86_64")] -const SNAPSHOT_BYTES: &[u8] = include_bytes!("./artifacts/JS_SNAPSHOT.x86_64.bin"); - -#[cfg(target_arch = "aarch64")] -const SNAPSHOT_BYTES: &[u8] = include_bytes!("./artifacts/JS_SNAPSHOT.aarch64.bin"); - -#[derive(Clone)] -struct JsContext { - sandboxed: bool, - datadir: PathBuf, - run_function: String, - version: Version, - package_id: PackageId, - volumes: Arc, - input: Value, - variable_args: Vec, - rsyncs: Arc)>>, -} -#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)] -#[serde(rename_all = "kebab-case")] -enum ResultType { - Error(String), - ErrorCode(i32, String), - Result(serde_json::Value), -} -#[derive(Clone, Default)] -struct AnswerState(std::sync::Arc>); - -#[derive(Clone, Debug)] -struct ModsLoader { - code: JsCode, -} - -impl ModuleLoader for ModsLoader { - fn resolve( 
- &self, - specifier: &str, - referrer: &str, - _is_main: ResolutionKind, - ) -> Result { - if referrer.contains("embassy") { - bail!("Embassy.js cannot import anything else"); - } - let s = resolve_import(specifier, referrer).unwrap(); - Ok(s) - } - - fn load( - &self, - module_specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - is_dyn_import: bool, - ) -> Pin> { - let module_specifier = module_specifier.as_str().to_owned(); - let module = match &*module_specifier { - "file:///deno_global.js" => Ok(ModuleSource::new( - ModuleType::JavaScript, - FastString::Static("const old_deno = Deno; Deno = null; export default old_deno"), - &DENO_GLOBAL_JS, - )), - "file:///loadModule.js" => Ok(ModuleSource::new( - ModuleType::JavaScript, - FastString::Static(include_str!("./artifacts/loadModule.js")), - &LOAD_MODULE_JS, - )), - "file:///embassy.js" => Ok(ModuleSource::new( - ModuleType::JavaScript, - self.code.0.clone().into(), - &EMBASSY_JS, - )), - - x => Err(anyhow!("Not allowed to import: {}", x)), - }; - let module = module.and_then(|m| { - if is_dyn_import { - bail!("Will not import dynamic"); - } - match &maybe_referrer { - Some(x) if x.as_str() == "file:///embassy.js" => { - bail!("StartJS is not allowed to import") - } - _ => (), - } - Ok(m) - }); - Box::pin(async move { module }) - } -} - -pub struct JsExecutionEnvironment { - sandboxed: bool, - base_directory: PathBuf, - module_loader: ModsLoader, - package_id: PackageId, - version: Version, - volumes: Arc, -} - -impl JsExecutionEnvironment { - pub async fn load_from_package( - data_directory: impl AsRef, - package_id: &PackageId, - version: &Version, - volumes: Box, - ) -> Result { - let data_dir = data_directory.as_ref(); - let base_directory = data_dir; - let js_code = JsCode({ - let file_path = script_dir(data_dir, package_id, version).join("embassy.js"); - let mut file = match tokio::fs::File::open(file_path.clone()).await { - Ok(x) => x, - Err(e) => { - tracing::debug!("path: {:?}", file_path); - tracing::debug!("{:?}", e); - return Err(( - JsError::FileSystem, - format!("The file opening '{:?}' created error: {}", file_path, e), - )); - } - }; - let mut buffer = Default::default(); - if let Err(err) = file.read_to_string(&mut buffer).await { - tracing::debug!("{:?}", err); - return Err(( - JsError::FileSystem, - format!("The file reading created error: {}", err), - )); - }; - buffer.into() - }); - Ok(JsExecutionEnvironment { - base_directory: base_directory.to_owned(), - module_loader: ModsLoader { code: js_code }, - package_id: package_id.clone(), - version: version.clone(), - volumes: volumes.into(), - sandboxed: false, - }) - } - pub fn read_only_effects(mut self) -> Self { - self.sandboxed = true; - self - } - - pub async fn run_action Deserialize<'de>>( - self, - procedure_name: ProcedureName, - input: Option, - variable_args: Vec, - ) -> Result { - let input = match serde_json::to_value(input) { - Ok(a) => a, - Err(err) => { - tracing::error!("{}", err); - tracing::debug!("{:?}", err); - return Err(( - JsError::BoundryLayerSerDe, - "Couldn't convert input".to_string(), - )); - } - }; - let safer_handle = spawn_local(|| self.execute(procedure_name, input, variable_args)).await; - let output = safer_handle.await.unwrap()?; - match serde_json::from_value(output.clone()) { - Ok(x) => Ok(x), - Err(err) => { - tracing::error!("{}", err); - tracing::debug!("{:?}", err); - Err(( - JsError::BoundryLayerSerDe, - format!( - "Couldn't convert output = {:#?} to the correct type", - 
serde_json::to_string_pretty(&output).unwrap_or_default() - ), - )) - } - } - } - fn declarations() -> Vec { - vec![ - fns::chown::decl(), - fns::chmod::decl(), - fns::fetch::decl(), - fns::read_file::decl(), - fns::metadata::decl(), - fns::write_file::decl(), - fns::rename::decl(), - fns::remove_file::decl(), - fns::create_dir::decl(), - fns::remove_dir::decl(), - fns::read_dir::decl(), - fns::disk_usage::decl(), - fns::current_function::decl(), - fns::log_trace::decl(), - fns::log_warn::decl(), - fns::log_error::decl(), - fns::log_debug::decl(), - fns::log_info::decl(), - fns::get_input::decl(), - fns::get_variable_args::decl(), - fns::set_value::decl(), - fns::is_sandboxed::decl(), - fns::sleep::decl(), - fns::rsync::decl(), - fns::rsync_wait::decl(), - fns::rsync_progress::decl(), - ] - } - - async fn execute( - self, - procedure_name: ProcedureName, - input: Value, - variable_args: Vec, - ) -> Result { - let base_directory = self.base_directory.clone(); - let answer_state = AnswerState::default(); - let ext_answer_state = answer_state.clone(); - let js_ctx = JsContext { - datadir: base_directory, - run_function: procedure_name - .js_function_name() - .map(Ok) - .unwrap_or_else(|| { - Err(( - JsError::NotValidProcedureName, - format!("procedure is not value: {:?}", procedure_name), - )) - })?, - package_id: self.package_id.clone(), - volumes: self.volumes.clone(), - version: self.version.clone(), - sandboxed: self.sandboxed, - input, - variable_args, - rsyncs: Default::default(), - }; - let ext = Extension::builder("embassy") - .ops(Self::declarations()) - .state(move |state| { - state.put(ext_answer_state.clone()); - state.put(js_ctx); - }) - .build(); - - let loader = std::rc::Rc::new(self.module_loader.clone()); - let runtime_options = RuntimeOptions { - module_loader: Some(loader), - extensions: vec![ext], - startup_snapshot: Some(Snapshot::Static(SNAPSHOT_BYTES)), - ..Default::default() - }; - let mut runtime = JsRuntime::new(runtime_options); - - let future = async move { - let mod_id = runtime - .load_main_module(&"file:///loadModule.js".parse().unwrap(), None) - .await?; - let evaluated = runtime.mod_evaluate(mod_id); - let res = runtime.run_event_loop(false).await; - res?; - evaluated.await??; - Ok::<_, AnyError>(()) - }; - - future.await.map_err(|e| { - tracing::debug!("{:?}", e); - (JsError::Javascript, format!("{}", e)) - })?; - - let answer = answer_state.0.lock().clone(); - Ok(answer) - } -} - -/// Note: Make sure that we have the assumption that all these methods are callable at any time, and all call restrictions should be in rust -mod fns { - use std::cell::RefCell; - use std::collections::BTreeMap; - use std::convert::TryFrom; - use std::fs::Permissions; - use std::os::unix::fs::MetadataExt; - use std::os::unix::prelude::PermissionsExt; - use std::path::{Path, PathBuf}; - use std::rc::Rc; - use std::time::Duration; - - use container_init::ProcessId; - use deno_core::anyhow::{anyhow, bail}; - use deno_core::error::AnyError; - use deno_core::*; - use helpers::{to_tmp_path, AtomicFile, Rsync, RsyncOptions}; - use itertools::Itertools; - use models::VolumeId; - use serde::{Deserialize, Serialize}; - use serde_json::Value; - use tokio::io::AsyncWriteExt; - use tokio::process::Command; - - use super::{AnswerState, JsContext}; - use crate::{system_time_as_unix_ms, MetadataJs}; - - #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, Default)] - struct FetchOptions { - method: Option, - headers: Option>, - body: Option, - } - #[derive(serde::Serialize, 
serde::Deserialize, Debug, Clone, Default)] - struct FetchResponse { - method: String, - ok: bool, - status: u32, - headers: BTreeMap, - body: Option, - } - #[op] - async fn fetch( - state: Rc>, - url: url::Url, - options: Option, - ) -> Result { - let sandboxed = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - ctx.sandboxed - }; - - if sandboxed { - bail!("Will not run fetch in sandboxed mode"); - } - - let client = reqwest::Client::new(); - let options = options.unwrap_or_default(); - let method = options - .method - .unwrap_or_else(|| "GET".to_string()) - .to_uppercase(); - let mut request_builder = match &*method { - "GET" => client.get(url), - "POST" => client.post(url), - "PUT" => client.put(url), - "DELETE" => client.delete(url), - "HEAD" => client.head(url), - "PATCH" => client.patch(url), - x => bail!("Unsupported method: {}", x), - }; - if let Some(headers) = options.headers { - for (key, value) in headers { - request_builder = request_builder.header(key, value); - } - } - if let Some(body) = options.body { - request_builder = request_builder.body(body); - } - let response = request_builder.send().await?; - - let fetch_response = FetchResponse { - method, - ok: response.status().is_success(), - status: response.status().as_u16() as u32, - headers: response - .headers() - .iter() - .filter_map(|(head, value)| { - Some((format!("{}", head), value.to_str().ok()?.to_string())) - }) - .collect(), - body: response.text().await.ok(), - }; - - Ok(fetch_response) - } - - #[op] - async fn read_file( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - ) -> Result { - let volume_path = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - ctx.volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))? - }; - //get_path_for in volume.rs - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - if !is_subset(&volume_path, &new_file).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - let answer = tokio::fs::read_to_string(new_file).await?; - Ok(answer) - } - #[op] - async fn metadata( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - ) -> Result { - let volume_path = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - ctx.volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))? - }; - //get_path_for in volume.rs - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - if !is_subset(&volume_path, &new_file).await? 
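The deleted fetch op picks a reqwest builder by method name, layers on headers and an optional body, and flattens the response into a plain struct. A hedged, standalone sketch of the same request construction (reqwest only, response reduced to a tuple, error handling simplified):

```rust
use std::collections::BTreeMap;

/// Build and send a request the way the deleted fetch op does:
/// choose the builder by method name, then add headers and body.
async fn simple_fetch(
    url: url::Url,
    method: &str,
    headers: BTreeMap<String, String>,
    body: Option<String>,
) -> Result<(bool, u16, Option<String>), reqwest::Error> {
    let client = reqwest::Client::new();
    let mut req = match method {
        "POST" => client.post(url),
        "PUT" => client.put(url),
        "DELETE" => client.delete(url),
        "HEAD" => client.head(url),
        "PATCH" => client.patch(url),
        _ => client.get(url),
    };
    for (key, value) in headers {
        req = req.header(key, value);
    }
    if let Some(body) = body {
        req = req.body(body);
    }
    let resp = req.send().await?;
    let ok = resp.status().is_success();
    let status = resp.status().as_u16();
    let body = resp.text().await.ok();
    Ok((ok, status, body))
}
```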
{ - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - let answer = tokio::fs::metadata(new_file).await?; - let metadata_js = MetadataJs { - file_type: format!("{:?}", answer.file_type()), - is_dir: answer.is_dir(), - is_file: answer.is_file(), - is_symlink: answer.is_symlink(), - len: answer.len(), - modified: answer - .modified() - .ok() - .as_ref() - .and_then(system_time_as_unix_ms), - accessed: answer - .accessed() - .ok() - .as_ref() - .and_then(system_time_as_unix_ms), - created: answer - .created() - .ok() - .as_ref() - .and_then(system_time_as_unix_ms), - readonly: answer.permissions().readonly(), - gid: answer.gid(), - mode: answer.mode(), - uid: answer.uid(), - }; - - Ok(metadata_js) - } - #[op] - async fn write_file( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - write: String, - ) -> Result<(), AnyError> { - let (volumes, volume_path) = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))?; - (ctx.volumes.clone(), volume_path) - }; - if volumes.readonly(&volume_id) { - bail!("Volume {} is readonly", volume_id); - } - - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - let parent_new_file = new_file - .parent() - .ok_or_else(|| anyhow!("Expecting that file is not root"))?; - // With the volume check - if !is_subset(&volume_path, &parent_new_file).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - let new_volume_tmp = to_tmp_path(&volume_path).map_err(|e| anyhow!("{}", e))?; - let hashed_name = { - use std::os::unix::ffi::OsStrExt; - - use sha2::{Digest, Sha256}; - let mut hasher = Sha256::new(); - - hasher.update(path_in.as_os_str().as_bytes()); - let result = hasher.finalize(); - format!("{:X}", result) - }; - let temp_file = new_volume_tmp.join(&hashed_name); - let mut file = AtomicFile::new(&new_file, Some(&temp_file)) - .await - .map_err(|e| anyhow!("{}", e))?; - file.write_all(write.as_bytes()).await?; - file.save().await.map_err(|e| anyhow!("{}", e))?; - Ok(()) - } - #[op] - async fn rename( - state: Rc>, - src_volume: VolumeId, - src_path: PathBuf, - dst_volume: VolumeId, - dst_path: PathBuf, - ) -> Result<(), AnyError> { - let (volumes, volume_path, volume_path_out) = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &src_volume) - .ok_or_else(|| anyhow!("There is no {} in volumes", src_volume))?; - let volume_path_out = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &dst_volume) - .ok_or_else(|| anyhow!("There is no {} in volumes", dst_volume))?; - (ctx.volumes.clone(), volume_path, volume_path_out) - }; - if volumes.readonly(&dst_volume) { - bail!("Volume {} is readonly", dst_volume); - } - - let src_path = src_path.strip_prefix("/").unwrap_or(&src_path); - let old_file = volume_path.join(src_path); - let parent_old_file = old_file - .parent() - .ok_or_else(|| anyhow!("Expecting that file is not root"))?; - // With the volume check - if !is_subset(&volume_path, &parent_old_file).await? 
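write_file above derives a stable temp-file name by hashing the in-volume path, writes there, then promotes the file atomically via the repo's AtomicFile helper. A generic sketch of the same pattern using only sha2 and tokio::fs (AtomicFile itself is internal to this codebase, so a plain rename stands in for it):

```rust
use std::os::unix::ffi::OsStrExt;
use std::path::Path;

use sha2::{Digest, Sha256};

/// Write `contents` to a hash-derived temp name in `tmp_dir`, then rename it
/// over `dest` so readers never observe a partially written file.
/// `tmp_dir` must live on the same filesystem as `dest` for the rename to be atomic.
async fn write_atomically(dest: &Path, tmp_dir: &Path, contents: &[u8]) -> std::io::Result<()> {
    let mut hasher = Sha256::new();
    hasher.update(dest.as_os_str().as_bytes());
    let tmp = tmp_dir.join(format!("{:X}", hasher.finalize()));
    tokio::fs::write(&tmp, contents).await?;
    tokio::fs::rename(&tmp, dest).await
}
```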
{ - bail!( - "Path '{}' has broken away from parent '{}'", - old_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - - let dst_path = dst_path.strip_prefix("/").unwrap_or(&dst_path); - let new_file = volume_path_out.join(dst_path); - let parent_new_file = new_file - .parent() - .ok_or_else(|| anyhow!("Expecting that file is not root"))?; - // With the volume check - if !is_subset(&volume_path_out, &parent_new_file).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path_out.to_string_lossy(), - ); - } - tokio::fs::rename(old_file, new_file).await?; - Ok(()) - } - - #[op] - async fn rsync( - state: Rc>, - src_volume: VolumeId, - src_path: PathBuf, - dst_volume: VolumeId, - dst_path: PathBuf, - options: RsyncOptions, - ) -> Result { - let (volumes, volume_path, volume_path_out, rsyncs) = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &src_volume) - .ok_or_else(|| anyhow!("There is no {} in volumes", src_volume))?; - let volume_path_out = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &dst_volume) - .ok_or_else(|| anyhow!("There is no {} in volumes", dst_volume))?; - ( - ctx.volumes.clone(), - volume_path, - volume_path_out, - ctx.rsyncs.clone(), - ) - }; - if volumes.readonly(&dst_volume) { - bail!("Volume {} is readonly", dst_volume); - } - - let src_path = src_path.strip_prefix("/").unwrap_or(&src_path); - let src = volume_path.join(src_path); - // With the volume check - if !is_subset(&volume_path, &src).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - src.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - if tokio::fs::metadata(&src).await.is_err() { - bail!("Source at {} does not exists", src.to_string_lossy()); - } - - let dst_path = src_path.strip_prefix("/").unwrap_or(&dst_path); - let dst = volume_path_out.join(dst_path); - // With the volume check - if !is_subset(&volume_path_out, &dst).await? 
{ - bail!( - "Path '{}' has broken away from parent '{}'", - dst.to_string_lossy(), - volume_path_out.to_string_lossy(), - ); - } - - let running_rsync = Rsync::new(src, dst, options) - .await - .map_err(|e| anyhow::anyhow!("{:?}", e.source))?; - let insert_id = { - let mut rsyncs = rsyncs.lock().await; - let next = rsyncs.0 + 1; - rsyncs.0 = next; - rsyncs.1.insert(next, running_rsync); - next - }; - Ok(insert_id) - } - - #[op] - async fn rsync_wait(state: Rc>, id: usize) -> Result<(), AnyError> { - let rsyncs = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - ctx.rsyncs.clone() - }; - let running_rsync = match rsyncs.lock().await.1.remove(&id) { - Some(a) => a, - None => bail!("Couldn't find rsync at id {id}"), - }; - running_rsync - .wait() - .await - .map_err(|x| anyhow::anyhow!("{}", x.source))?; - Ok(()) - } - #[op] - async fn rsync_progress(state: Rc>, id: usize) -> Result { - use futures::StreamExt; - let rsyncs = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - ctx.rsyncs.clone() - }; - let mut running_rsync = match rsyncs.lock().await.1.remove(&id) { - Some(a) => a, - None => bail!("Couldn't find rsync at id {id}"), - }; - let progress = running_rsync.progress.next().await.unwrap_or_default(); - rsyncs.lock().await.1.insert(id, running_rsync); - Ok(progress) - } - #[op] - async fn remove_file( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - ) -> Result<(), AnyError> { - let (volumes, volume_path) = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))?; - (ctx.volumes.clone(), volume_path) - }; - if volumes.readonly(&volume_id) { - bail!("Volume {} is readonly", volume_id); - } - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - // With the volume check - if !is_subset(&volume_path, &new_file).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - tokio::fs::remove_file(new_file).await?; - Ok(()) - } - #[op] - async fn remove_dir( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - ) -> Result<(), AnyError> { - let (volumes, volume_path) = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))?; - (ctx.volumes.clone(), volume_path) - }; - if volumes.readonly(&volume_id) { - bail!("Volume {} is readonly", volume_id); - } - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - // With the volume check - if !is_subset(&volume_path, &new_file).await? 
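rsync, rsync_wait, and rsync_progress above share a monotonically increasing id and a map of running jobs behind a single lock (the `rsyncs` field on JsContext). A self-contained sketch of that registry shape; the Rsync type comes from the repo's helpers crate, so a placeholder job type is used here:

```rust
use std::collections::BTreeMap;
use std::sync::Arc;

use tokio::sync::Mutex;

/// Placeholder for helpers::Rsync; any long-running job handle works the same way.
struct Job;

/// (next_id, running jobs) guarded together, as in the deleted JsContext.rsyncs.
type Registry = Arc<Mutex<(usize, BTreeMap<usize, Job>)>>;

/// Allocate the next id and register the job under it.
async fn insert(reg: &Registry, job: Job) -> usize {
    let mut guard = reg.lock().await;
    let id = guard.0 + 1;
    guard.0 = id;
    guard.1.insert(id, job);
    id
}

/// Remove and return the job, as rsync_wait does before awaiting it.
async fn take(reg: &Registry, id: usize) -> Option<Job> {
    reg.lock().await.1.remove(&id)
}
```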
{ - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - tokio::fs::remove_dir_all(new_file).await?; - Ok(()) - } - #[op] - async fn create_dir( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - ) -> Result<(), AnyError> { - let (volumes, volume_path) = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))?; - (ctx.volumes.clone(), volume_path) - }; - if volumes.readonly(&volume_id) { - bail!("Volume {} is readonly", volume_id); - } - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - - // With the volume check - if !is_subset(&volume_path, &new_file).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - tokio::fs::create_dir_all(new_file).await?; - Ok(()) - } - #[op] - async fn read_dir( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - ) -> Result, AnyError> { - let volume_path = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - ctx.volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))? - }; - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - - // With the volume check - if !is_subset(&volume_path, &new_file).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - let mut reader = tokio::fs::read_dir(&new_file).await?; - let mut paths: Vec = Vec::new(); - let origin_path = format!("{}/", new_file.to_str().unwrap_or_default()); - let remove_new_file = |other_path: String| other_path.replacen(&origin_path, "", 1); - let has_origin_path = |other_path: &String| other_path.starts_with(&origin_path); - while let Some(entry) = reader.next_entry().await? { - entry - .path() - .to_str() - .into_iter() - .map(ToString::to_string) - .filter(&has_origin_path) - .map(&remove_new_file) - .for_each(|x| paths.push(x)); - } - paths.sort(); - Ok(paths) - } - - #[op] - async fn disk_usage( - state: Rc>, - volume_id: Option, - path_in: Option, - ) -> Result<(u64, u64), AnyError> { - let (base_path, volume_path) = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = if let Some(volume_id) = volume_id { - Some( - ctx.volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))?, - ) - } else { - None - }; - (ctx.datadir.join("package-data"), volume_path) - }; - let path = if let (Some(volume_path), Some(path_in)) = (volume_path, path_in) { - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - Some(volume_path.join(path_in)) - } else { - None - }; - - if let Some(path) = path { - let size = String::from_utf8( - Command::new("df") - .arg("--output=size") - .arg("--block-size=1") - .arg(&base_path) - .stdout(std::process::Stdio::piped()) - .output() - .await? - .stdout, - )? - .lines() - .nth(1) - .unwrap_or_default() - .parse()?; - let used = String::from_utf8( - Command::new("du") - .arg("-s") - .arg("--block-size=1") - .arg(path) - .stdout(std::process::Stdio::piped()) - .output() - .await? - .stdout, - )? 
- .split_ascii_whitespace() - .next() - .unwrap_or_default() - .parse()?; - Ok((used, size)) - } else { - String::from_utf8( - Command::new("df") - .arg("--output=used,size") - .arg("--block-size=1") - .arg(&base_path) - .stdout(std::process::Stdio::piped()) - .output() - .await? - .stdout, - )? - .lines() - .nth(1) - .unwrap_or_default() - .split_ascii_whitespace() - .next_tuple() - .and_then(|(used, size)| Some((used.parse().ok()?, size.parse().ok()?))) - .ok_or_else(|| anyhow!("invalid output from df")) - } - } - - #[op] - fn current_function(state: &mut OpState) -> Result { - let ctx = state.borrow::(); - Ok(ctx.run_function.clone()) - } - - #[op] - async fn log_trace(state: Rc>, input: String) -> Result<(), AnyError> { - let ctx = { - let state = state.borrow(); - state.borrow::().clone() - }; - tracing::trace!( - package_id = tracing::field::display(&ctx.package_id), - run_function = tracing::field::display(&ctx.run_function), - "{}", - input - ); - Ok(()) - } - #[op] - async fn log_warn(state: Rc>, input: String) -> Result<(), AnyError> { - let ctx = { - let state = state.borrow(); - state.borrow::().clone() - }; - tracing::warn!( - package_id = tracing::field::display(&ctx.package_id), - run_function = tracing::field::display(&ctx.run_function), - "{}", - input - ); - Ok(()) - } - #[op] - async fn log_error(state: Rc>, input: String) -> Result<(), AnyError> { - let ctx = { - let state = state.borrow(); - state.borrow::().clone() - }; - tracing::error!( - package_id = tracing::field::display(&ctx.package_id), - run_function = tracing::field::display(&ctx.run_function), - "{}", - input - ); - Ok(()) - } - #[op] - async fn log_debug(state: Rc>, input: String) -> Result<(), AnyError> { - let ctx = { - let state = state.borrow(); - state.borrow::().clone() - }; - tracing::debug!( - package_id = tracing::field::display(&ctx.package_id), - run_function = tracing::field::display(&ctx.run_function), - "{}", - input - ); - Ok(()) - } - #[op] - async fn log_info(state: Rc>, input: String) -> Result<(), AnyError> { - let (package_id, run_function) = { - let state = state.borrow(); - let ctx: JsContext = state.borrow::().clone(); - (ctx.package_id, ctx.run_function) - }; - tracing::info!( - package_id = tracing::field::display(&package_id), - run_function = tracing::field::display(&run_function), - "{}", - input - ); - Ok(()) - } - - #[op] - fn get_input(state: &mut OpState) -> Result { - let ctx = state.borrow::(); - Ok(ctx.input.clone()) - } - #[op] - fn get_variable_args(state: &mut OpState) -> Result, AnyError> { - let ctx = state.borrow::(); - Ok(ctx.variable_args.clone()) - } - #[op] - fn set_value(state: &mut OpState, value: Value) -> Result<(), AnyError> { - let mut answer = state.borrow::().0.lock(); - *answer = value; - Ok(()) - } - #[op] - fn is_sandboxed(state: &mut OpState) -> Result { - let ctx = state.borrow::(); - Ok(ctx.sandboxed) - } - - #[derive(Debug, Clone, Serialize, Deserialize)] - #[serde(rename_all = "camelCase")] - pub struct StartCommand { - process_id: ProcessId, - } - - #[op] - async fn sleep(time_ms: u64) -> Result<(), AnyError> { - tokio::time::sleep(Duration::from_millis(time_ms)).await; - - Ok(()) - } - - #[op] - async fn chown( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - ownership: u32, - ) -> Result<(), AnyError> { - let sandboxed = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - ctx.sandboxed - }; - - if sandboxed { - bail!("Will not run chown in sandboxed mode"); - } - - let (volumes, volume_path) = { - let state = 
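disk_usage above shells out to df and du with --block-size=1 and parses the first numeric token of the output. A standalone sketch of the du half of that parsing (std and tokio only):

```rust
use std::path::Path;

use tokio::process::Command;

/// Bytes used under `path`, parsed from `du -s --block-size=1 <path>`,
/// whose output looks like "12345\t/some/path".
async fn bytes_used(path: &Path) -> Result<u64, Box<dyn std::error::Error>> {
    let out = Command::new("du")
        .arg("-s")
        .arg("--block-size=1")
        .arg(path)
        .output()
        .await?;
    let stdout = String::from_utf8(out.stdout)?;
    let used = stdout
        .split_ascii_whitespace()
        .next()
        .unwrap_or_default()
        .parse()?;
    Ok(used)
}
```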
state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))?; - (ctx.volumes.clone(), volume_path) - }; - if volumes.readonly(&volume_id) { - bail!("Volume {} is readonly", volume_id); - } - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - // With the volume check - if !is_subset(&volume_path, &new_file).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - let output = tokio::process::Command::new("chown") - .arg("--recursive") - .arg(format!("{ownership}")) - .arg(new_file.as_os_str()) - .output() - .await?; - if !output.status.success() { - return Err(anyhow!("Chown Error")); - } - Ok(()) - } - #[op] - async fn chmod( - state: Rc>, - volume_id: VolumeId, - path_in: PathBuf, - mode: u32, - ) -> Result<(), AnyError> { - let sandboxed = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - ctx.sandboxed - }; - - if sandboxed { - bail!("Will not run chmod in sandboxed mode"); - } - - let (volumes, volume_path) = { - let state = state.borrow(); - let ctx: &JsContext = state.borrow(); - let volume_path = ctx - .volumes - .path_for(&ctx.datadir, &ctx.package_id, &ctx.version, &volume_id) - .ok_or_else(|| anyhow!("There is no {} in volumes", volume_id))?; - (ctx.volumes.clone(), volume_path) - }; - if volumes.readonly(&volume_id) { - bail!("Volume {} is readonly", volume_id); - } - let path_in = path_in.strip_prefix("/").unwrap_or(&path_in); - let new_file = volume_path.join(path_in); - // With the volume check - if !is_subset(&volume_path, &new_file).await? { - bail!( - "Path '{}' has broken away from parent '{}'", - new_file.to_string_lossy(), - volume_path.to_string_lossy(), - ); - } - tokio::fs::set_permissions(new_file, Permissions::from_mode(mode)).await?; - Ok(()) - } - /// We need to make sure that during the file accessing, we don't reach beyond our scope of control - async fn is_subset( - parent: impl AsRef, - child: impl AsRef, - ) -> Result { - let child = { - let mut child_count = 0; - let mut child = child.as_ref(); - loop { - if child.ends_with("..") { - child_count += 1; - } else if child_count > 0 { - child_count -= 1; - } else { - let meta = tokio::fs::metadata(child).await; - if meta.is_ok() { - break; - } - } - child = match child.parent() { - Some(child) => child, - None => { - return Ok(false); - } - }; - } - tokio::fs::canonicalize(child).await? - }; - let parent = tokio::fs::canonicalize(parent).await?; - Ok(child.starts_with(parent)) - } - - #[tokio::test] - async fn test_is_subset() { - let home = std::env::var("HOME").unwrap(); - let home = Path::new(&home); - assert!(!is_subset(home, &home.join("code/fakedir/../../..")) - .await - .unwrap()) - } -} - -fn system_time_as_unix_ms(system_time: &SystemTime) -> Option { - system_time - .duration_since(SystemTime::UNIX_EPOCH) - .ok()? 
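is_subset above is the safety net for every volume-relative path: it walks up past non-existent components, canonicalizes, and checks containment so symlinks and `..` cannot escape the volume. A compact sketch of the core check for paths that already exist (canonicalize fails on missing paths, which the deleted version handles by first walking to the nearest existing ancestor):

```rust
use std::path::Path;

/// True if `child` stays inside `parent` after resolving symlinks and `..`.
/// Both paths must exist on disk for canonicalize to succeed.
async fn stays_inside(parent: &Path, child: &Path) -> std::io::Result<bool> {
    let parent = tokio::fs::canonicalize(parent).await?;
    let child = tokio::fs::canonicalize(child).await?;
    Ok(child.starts_with(&parent))
}
```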
- .as_millis() - .try_into() - .ok() -} diff --git a/core/models/Cargo.toml b/core/models/Cargo.toml index 9d75f92c4e..b6a5067744 100644 --- a/core/models/Cargo.toml +++ b/core/models/Cargo.toml @@ -15,6 +15,7 @@ emver = { version = "0.1", git = "https://github.com/Start9Labs/emver-rs.git", f "serde", ] } ipnet = "2.8.0" +num_enum = "0.7.1" openssl = { version = "0.10.57", features = ["vendored"] } patch-db = { version = "*", path = "../../patch-db/patch-db", features = [ "trace", @@ -31,8 +32,9 @@ sqlx = { version = "0.7.2", features = [ "postgres", ] } ssh-key = "0.6.2" +ts-rs = { git = "https://github.com/dr-bonez/ts-rs.git", branch = "feature/top-level-type-override" } # "8" thiserror = "1.0" tokio = { version = "1", features = ["full"] } -torut = "0.2.1" +torut = { git = "https://github.com/Start9Labs/torut.git", branch = "update/dependencies" } tracing = "0.1.39" yasi = "0.1.5" diff --git a/core/models/bindings/ServiceInterfaceId.ts b/core/models/bindings/ServiceInterfaceId.ts new file mode 100644 index 0000000000..87edd8694f --- /dev/null +++ b/core/models/bindings/ServiceInterfaceId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ServiceInterfaceId = string; \ No newline at end of file diff --git a/core/models/src/data_url.rs b/core/models/src/data_url.rs index e2141b15a3..9757d7f6a1 100644 --- a/core/models/src/data_url.rs +++ b/core/models/src/data_url.rs @@ -6,11 +6,13 @@ use color_eyre::eyre::eyre; use reqwest::header::CONTENT_TYPE; use serde::{Deserialize, Serialize}; use tokio::io::{AsyncRead, AsyncReadExt}; +use ts_rs::TS; use yasi::InternedString; use crate::{mime, Error, ErrorKind, ResultExt}; -#[derive(Clone)] +#[derive(Clone, TS)] +#[ts(type = "string")] pub struct DataUrl<'a> { mime: InternedString, data: Cow<'a, [u8]>, diff --git a/core/models/src/errors.rs b/core/models/src/errors.rs index f22624d367..2362b6dbad 100644 --- a/core/models/src/errors.rs +++ b/core/models/src/errors.rs @@ -1,14 +1,19 @@ -use std::fmt::Display; +use std::fmt::{Debug, Display}; use color_eyre::eyre::eyre; +use num_enum::TryFromPrimitive; use patch_db::Revision; use rpc_toolkit::hyper::http::uri::InvalidUri; use rpc_toolkit::reqwest; -use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::yajrc::{ + RpcError, INVALID_PARAMS_ERROR, INVALID_REQUEST_ERROR, METHOD_NOT_FOUND_ERROR, PARSE_ERROR, +}; +use serde::{Deserialize, Serialize}; use crate::InvalidId; -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)] +#[repr(i32)] pub enum ErrorKind { Unknown = 1, Filesystem = 2, @@ -81,6 +86,8 @@ pub enum ErrorKind { CpuSettings = 69, Firmware = 70, Timeout = 71, + Lxc = 72, + Cancelled = 73, } impl ErrorKind { pub fn as_str(&self) -> &'static str { @@ -157,6 +164,8 @@ impl ErrorKind { CpuSettings => "CPU Settings Error", Firmware => "Firmware Error", Timeout => "Timeout Error", + Lxc => "LXC Error", + Cancelled => "Cancelled", } } } @@ -186,6 +195,22 @@ impl Error { revision: None, } } + pub fn clone_output(&self) -> Self { + Error { + source: ErrorData { + details: format!("{}", self.source), + debug: format!("{:?}", self.source), + } + .into(), + kind: self.kind, + revision: self.revision.clone(), + } + } +} +impl From for Error { + fn from(value: std::convert::Infallible) -> Self { + match value {} + } } impl From for Error { fn from(err: InvalidId) -> Self { @@ -300,6 +325,53 @@ impl From for Error { } } +#[derive(Clone, Deserialize, Serialize)] +pub 
struct ErrorData { + pub details: String, + pub debug: String, +} +impl Display for ErrorData { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt(&self.details, f) + } +} +impl Debug for ErrorData { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Display::fmt(&self.debug, f) + } +} +impl std::error::Error for ErrorData {} +impl From<&RpcError> for ErrorData { + fn from(value: &RpcError) -> Self { + Self { + details: value + .data + .as_ref() + .and_then(|d| { + d.as_object() + .and_then(|d| { + d.get("details") + .and_then(|d| d.as_str().map(|s| s.to_owned())) + }) + .or_else(|| d.as_str().map(|s| s.to_owned())) + }) + .unwrap_or_else(|| value.message.clone().into_owned()), + debug: value + .data + .as_ref() + .and_then(|d| { + d.as_object() + .and_then(|d| { + d.get("debug") + .and_then(|d| d.as_str().map(|s| s.to_owned())) + }) + .or_else(|| d.as_str().map(|s| s.to_owned())) + }) + .unwrap_or_else(|| value.message.clone().into_owned()), + } + } +} + impl From for RpcError { fn from(e: Error) -> Self { let mut data_object = serde_json::Map::with_capacity(3); @@ -318,10 +390,40 @@ impl From for RpcError { RpcError { code: e.kind as i32, message: e.kind.as_str().into(), - data: Some(data_object.into()), + data: Some( + match serde_json::to_value(&ErrorData { + details: format!("{}", e.source), + debug: format!("{:?}", e.source), + }) { + Ok(a) => a, + Err(e) => { + tracing::warn!("Error serializing revision for Error object: {}", e); + serde_json::Value::Null + } + }, + ), } } } +impl From for Error { + fn from(e: RpcError) -> Self { + Error::new( + ErrorData::from(&e), + if let Ok(kind) = e.code.try_into() { + kind + } else if e.code == METHOD_NOT_FOUND_ERROR.code { + ErrorKind::NotFound + } else if e.code == PARSE_ERROR.code + || e.code == INVALID_PARAMS_ERROR.code + || e.code == INVALID_REQUEST_ERROR.code + { + ErrorKind::Deserialization + } else { + ErrorKind::Unknown + }, + ) + } +} #[derive(Debug, Default)] pub struct ErrorCollection(Vec); @@ -377,10 +479,7 @@ where Self: Sized, { fn with_kind(self, kind: ErrorKind) -> Result; - fn with_ctx (ErrorKind, D), D: Display + Send + Sync + 'static>( - self, - f: F, - ) -> Result; + fn with_ctx (ErrorKind, D), D: Display>(self, f: F) -> Result; } impl ResultExt for Result where @@ -394,10 +493,7 @@ where }) } - fn with_ctx (ErrorKind, D), D: Display + Send + Sync + 'static>( - self, - f: F, - ) -> Result { + fn with_ctx (ErrorKind, D), D: Display>(self, f: F) -> Result { self.map_err(|e| { let (kind, ctx) = f(&e); let source = color_eyre::eyre::Error::from(e); @@ -411,6 +507,29 @@ where }) } } +impl ResultExt for Result { + fn with_kind(self, kind: ErrorKind) -> Result { + self.map_err(|e| Error { + source: e.source, + kind, + revision: e.revision, + }) + } + + fn with_ctx (ErrorKind, D), D: Display>(self, f: F) -> Result { + self.map_err(|e| { + let (kind, ctx) = f(&e); + let source = e.source; + let ctx = format!("{}: {}", ctx, source); + let source = source.wrap_err(ctx); + Error { + kind, + source, + revision: e.revision, + } + }) + } +} pub trait OptionExt where diff --git a/core/models/src/id/action.rs b/core/models/src/id/action.rs index 9b814a98af..3f17048e29 100644 --- a/core/models/src/id/action.rs +++ b/core/models/src/id/action.rs @@ -2,10 +2,12 @@ use std::path::Path; use std::str::FromStr; use serde::{Deserialize, Serialize}; +use ts_rs::TS; use crate::{Id, InvalidId}; -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] +#[derive(Clone, Debug, 
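The new `From<RpcError> for Error` relies on ErrorKind's numeric discriminants round-tripping through num_enum's TryFromPrimitive, with a fallback for the reserved JSON-RPC codes. A toy sketch of that code-to-kind mapping, with the enum reduced to a few variants for brevity:

```rust
use num_enum::TryFromPrimitive;

#[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)]
#[repr(i32)]
enum Kind {
    Unknown = 1,
    Filesystem = 2,
    Lxc = 72,
    Cancelled = 73,
}

/// Map an error code back to a Kind, defaulting to Unknown the way the new
/// From<RpcError> impl does for codes it does not recognize.
fn kind_for_code(code: i32) -> Kind {
    Kind::try_from(code).unwrap_or(Kind::Unknown)
}

fn main() {
    assert_eq!(kind_for_code(72), Kind::Lxc);
    // The real impl maps -32601 (method not found) to ErrorKind::NotFound instead.
    assert_eq!(kind_for_code(-32601), Kind::Unknown);
}
```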
PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] +#[ts(type = "string")] pub struct ActionId(Id); impl FromStr for ActionId { type Err = InvalidId; diff --git a/core/models/src/id/health_check.rs b/core/models/src/id/health_check.rs index dc643c9127..937f31aa30 100644 --- a/core/models/src/id/health_check.rs +++ b/core/models/src/id/health_check.rs @@ -1,16 +1,25 @@ use std::path::Path; +use std::str::FromStr; use serde::{Deserialize, Deserializer, Serialize}; +use ts_rs::TS; -use crate::Id; +use crate::{Id, InvalidId}; -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] +#[ts(type = "string")] pub struct HealthCheckId(Id); impl std::fmt::Display for HealthCheckId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", &self.0) } } +impl FromStr for HealthCheckId { + type Err = InvalidId; + fn from_str(s: &str) -> Result { + Id::from_str(s).map(HealthCheckId) + } +} impl AsRef for HealthCheckId { fn as_ref(&self) -> &str { self.0.as_ref() diff --git a/core/models/src/id/interface.rs b/core/models/src/id/host.rs similarity index 65% rename from core/models/src/id/interface.rs rename to core/models/src/id/host.rs index b9b32dd4a5..2a1595bd81 100644 --- a/core/models/src/id/interface.rs +++ b/core/models/src/id/host.rs @@ -2,52 +2,65 @@ use std::path::Path; use std::str::FromStr; use serde::{Deserialize, Deserializer, Serialize}; +use ts_rs::TS; +use yasi::InternedString; use crate::{Id, InvalidId}; -#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] -pub struct InterfaceId(Id); -impl FromStr for InterfaceId { +#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] +#[ts(type = "string")] +pub struct HostId(Id); +impl FromStr for HostId { type Err = InvalidId; fn from_str(s: &str) -> Result { Ok(Self(Id::try_from(s.to_owned())?)) } } -impl From for InterfaceId { +impl From for HostId { fn from(id: Id) -> Self { Self(id) } } -impl std::fmt::Display for InterfaceId { +impl From for Id { + fn from(value: HostId) -> Self { + value.0 + } +} +impl From for InternedString { + fn from(value: HostId) -> Self { + value.0.into() + } +} +impl std::fmt::Display for HostId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", &self.0) } } -impl std::ops::Deref for InterfaceId { +impl std::ops::Deref for HostId { type Target = str; fn deref(&self) -> &Self::Target { &*self.0 } } -impl AsRef for InterfaceId { +impl AsRef for HostId { fn as_ref(&self) -> &str { self.0.as_ref() } } -impl<'de> Deserialize<'de> for InterfaceId { +impl<'de> Deserialize<'de> for HostId { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - Ok(InterfaceId(Deserialize::deserialize(deserializer)?)) + Ok(HostId(Deserialize::deserialize(deserializer)?)) } } -impl AsRef for InterfaceId { +impl AsRef for HostId { fn as_ref(&self) -> &Path { self.0.as_ref().as_ref() } } -impl<'q> sqlx::Encode<'q, sqlx::Postgres> for InterfaceId { +impl<'q> sqlx::Encode<'q, sqlx::Postgres> for HostId { fn encode_by_ref( &self, buf: &mut >::ArgumentBuffer, @@ -55,7 +68,7 @@ impl<'q> sqlx::Encode<'q, sqlx::Postgres> for InterfaceId { <&str as sqlx::Encode<'q, sqlx::Postgres>>::encode_by_ref(&&**self, buf) } } -impl sqlx::Type for InterfaceId { +impl sqlx::Type for HostId { fn type_info() -> sqlx::postgres::PgTypeInfo { <&str as sqlx::Type>::type_info() } diff --git a/core/models/src/id/image.rs 
b/core/models/src/id/image.rs index 10ef0451d7..bbb0a601e7 100644 --- a/core/models/src/id/image.rs +++ b/core/models/src/id/image.rs @@ -1,12 +1,20 @@ use std::fmt::Debug; +use std::path::Path; use std::str::FromStr; use serde::{Deserialize, Deserializer, Serialize}; +use ts_rs::TS; use crate::{Id, InvalidId, PackageId, Version}; -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] +#[ts(type = "string")] pub struct ImageId(Id); +impl AsRef for ImageId { + fn as_ref(&self) -> &Path { + self.0.as_ref().as_ref() + } +} impl std::fmt::Display for ImageId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", &self.0) diff --git a/core/models/src/id/mod.rs b/core/models/src/id/mod.rs index ac32ceb22f..11644c71d1 100644 --- a/core/models/src/id/mod.rs +++ b/core/models/src/id/mod.rs @@ -1,25 +1,26 @@ use std::borrow::Borrow; +use std::str::FromStr; use regex::Regex; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use yasi::InternedString; mod action; -mod address; mod health_check; +mod host; mod image; -mod interface; mod invalid_id; mod package; +mod service_interface; mod volume; pub use action::ActionId; -pub use address::AddressId; pub use health_check::HealthCheckId; +pub use host::HostId; pub use image::ImageId; -pub use interface::InterfaceId; pub use invalid_id::InvalidId; pub use package::{PackageId, SYSTEM_PACKAGE_ID}; +pub use service_interface::ServiceInterfaceId; pub use volume::VolumeId; lazy_static::lazy_static! { @@ -32,7 +33,7 @@ pub struct Id(InternedString); impl TryFrom for Id { type Error = InvalidId; fn try_from(value: InternedString) -> Result { - if ID_REGEX.is_match(&*value) { + if ID_REGEX.is_match(&value) { Ok(Id(value)) } else { Err(InvalidId) @@ -52,17 +53,28 @@ impl TryFrom for Id { impl TryFrom<&str> for Id { type Error = InvalidId; fn try_from(value: &str) -> Result { - if ID_REGEX.is_match(&value) { + if ID_REGEX.is_match(value) { Ok(Id(InternedString::intern(value))) } else { Err(InvalidId) } } } +impl FromStr for Id { + type Err = InvalidId; + fn from_str(s: &str) -> Result { + Self::try_from(s) + } +} +impl From for InternedString { + fn from(value: Id) -> Self { + value.0 + } +} impl std::ops::Deref for Id { type Target = str; fn deref(&self) -> &Self::Target { - &*self.0 + &self.0 } } impl std::fmt::Display for Id { @@ -72,7 +84,7 @@ impl std::fmt::Display for Id { } impl AsRef for Id { fn as_ref(&self) -> &str { - &*self.0 + &self.0 } } impl Borrow for Id { @@ -94,7 +106,7 @@ impl Serialize for Id { where Ser: Serializer, { - serializer.serialize_str(&*self) + serializer.serialize_str(self) } } impl<'q> sqlx::Encode<'q, sqlx::Postgres> for Id { diff --git a/core/models/src/id/package.rs b/core/models/src/id/package.rs index 14c29d88b9..d2665e59a6 100644 --- a/core/models/src/id/package.rs +++ b/core/models/src/id/package.rs @@ -3,13 +3,16 @@ use std::path::Path; use std::str::FromStr; use serde::{Deserialize, Serialize, Serializer}; +use ts_rs::TS; +use yasi::InternedString; use crate::{Id, InvalidId, SYSTEM_ID}; lazy_static::lazy_static! 
{ pub static ref SYSTEM_PACKAGE_ID: PackageId = PackageId(SYSTEM_ID.clone()); } -#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, TS)] +#[ts(type = "string")] pub struct PackageId(Id); impl FromStr for PackageId { type Err = InvalidId; @@ -22,10 +25,20 @@ impl From for PackageId { PackageId(id) } } +impl From for Id { + fn from(value: PackageId) -> Self { + value.0 + } +} +impl From for InternedString { + fn from(value: PackageId) -> Self { + value.0.into() + } +} impl std::ops::Deref for PackageId { type Target = str; fn deref(&self) -> &Self::Target { - &*self.0 + &self.0 } } impl AsRef for PackageId { diff --git a/core/models/src/id/address.rs b/core/models/src/id/service_interface.rs similarity index 67% rename from core/models/src/id/address.rs rename to core/models/src/id/service_interface.rs index 1bd6705255..25aec0aba8 100644 --- a/core/models/src/id/address.rs +++ b/core/models/src/id/service_interface.rs @@ -1,46 +1,48 @@ use std::path::Path; use serde::{Deserialize, Deserializer, Serialize}; +use ts_rs::TS; use crate::Id; -#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] -pub struct AddressId(Id); -impl From for AddressId { +#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] +#[ts(export, type = "string")] +pub struct ServiceInterfaceId(Id); +impl From for ServiceInterfaceId { fn from(id: Id) -> Self { Self(id) } } -impl std::fmt::Display for AddressId { +impl std::fmt::Display for ServiceInterfaceId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", &self.0) } } -impl std::ops::Deref for AddressId { +impl std::ops::Deref for ServiceInterfaceId { type Target = str; fn deref(&self) -> &Self::Target { &*self.0 } } -impl AsRef for AddressId { +impl AsRef for ServiceInterfaceId { fn as_ref(&self) -> &str { self.0.as_ref() } } -impl<'de> Deserialize<'de> for AddressId { +impl<'de> Deserialize<'de> for ServiceInterfaceId { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { - Ok(AddressId(Deserialize::deserialize(deserializer)?)) + Ok(ServiceInterfaceId(Deserialize::deserialize(deserializer)?)) } } -impl AsRef for AddressId { +impl AsRef for ServiceInterfaceId { fn as_ref(&self) -> &Path { self.0.as_ref().as_ref() } } -impl<'q> sqlx::Encode<'q, sqlx::Postgres> for AddressId { +impl<'q> sqlx::Encode<'q, sqlx::Postgres> for ServiceInterfaceId { fn encode_by_ref( &self, buf: &mut >::ArgumentBuffer, @@ -48,7 +50,7 @@ impl<'q> sqlx::Encode<'q, sqlx::Postgres> for AddressId { <&str as sqlx::Encode<'q, sqlx::Postgres>>::encode_by_ref(&&**self, buf) } } -impl sqlx::Type for AddressId { +impl sqlx::Type for ServiceInterfaceId { fn type_info() -> sqlx::postgres::PgTypeInfo { <&str as sqlx::Type>::type_info() } diff --git a/core/models/src/id/volume.rs b/core/models/src/id/volume.rs index 16821a3cfd..7425c79c6f 100644 --- a/core/models/src/id/volume.rs +++ b/core/models/src/id/volume.rs @@ -2,10 +2,12 @@ use std::borrow::Borrow; use std::path::Path; use serde::{Deserialize, Deserializer, Serialize}; +use ts_rs::TS; use crate::Id; -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, TS)] +#[ts(type = "string")] pub enum VolumeId { Backup, Custom(Id), diff --git a/core/models/src/procedure_name.rs b/core/models/src/procedure_name.rs index 6a092955ad..c42068be3e 100644 --- a/core/models/src/procedure_name.rs +++ 
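The id newtypes in this patch (ServiceInterfaceId, HostId, PackageId, ActionId, and friends) all gain `#[derive(TS)]` with a `#[ts(type = "string")]` override, which is why their generated bindings are plain `export type X = string;`. A minimal sketch of that pattern; note that the struct-level `#[ts(type = …)]` override comes from the forked ts-rs branch pinned in Cargo.toml and may not exist in upstream ts-rs:

```rust
use serde::Serialize;
use ts_rs::TS;

/// Serialized and typed as a bare string on the TypeScript side,
/// mirroring the ServiceInterfaceId/HostId newtypes in this diff.
#[derive(Clone, Debug, Serialize, TS)]
#[ts(export, type = "string")]
pub struct WidgetId(String);
```

With `#[ts(export)]`, ts-rs writes the corresponding `WidgetId.ts` file when its export machinery runs (conventionally as part of `cargo test`).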
b/core/models/src/procedure_name.rs @@ -1,57 +1,42 @@ use serde::{Deserialize, Serialize}; -use crate::{ActionId, HealthCheckId, PackageId}; +use crate::ActionId; #[derive(Debug, Clone, Serialize, Deserialize)] pub enum ProcedureName { - Main, // Usually just run container - CreateBackup, - RestoreBackup, + StartMain, + StopMain, GetConfig, SetConfig, - Migration, + CreateBackup, Properties, - LongRunning, - Check(PackageId), - AutoConfig(PackageId), - Health(HealthCheckId), - Action(ActionId), - Signal, + RestoreBackup, + ActionMetadata, + RunAction(ActionId), + GetAction(ActionId), + QueryDependency(ActionId), + UpdateDependency(ActionId), + Init, + Uninit, } impl ProcedureName { - pub fn docker_name(&self) -> Option { - match self { - ProcedureName::Main => None, - ProcedureName::LongRunning => None, - ProcedureName::CreateBackup => Some("CreateBackup".to_string()), - ProcedureName::RestoreBackup => Some("RestoreBackup".to_string()), - ProcedureName::GetConfig => Some("GetConfig".to_string()), - ProcedureName::SetConfig => Some("SetConfig".to_string()), - ProcedureName::Migration => Some("Migration".to_string()), - ProcedureName::Properties => Some(format!("Properties-{}", rand::random::())), - ProcedureName::Health(id) => Some(format!("{}Health", id)), - ProcedureName::Action(id) => Some(format!("{}Action", id)), - ProcedureName::Check(_) => None, - ProcedureName::AutoConfig(_) => None, - ProcedureName::Signal => None, - } - } - pub fn js_function_name(&self) -> Option { + pub fn js_function_name(&self) -> String { match self { - ProcedureName::Main => Some("/main".to_string()), - ProcedureName::LongRunning => None, - ProcedureName::CreateBackup => Some("/createBackup".to_string()), - ProcedureName::RestoreBackup => Some("/restoreBackup".to_string()), - ProcedureName::GetConfig => Some("/getConfig".to_string()), - ProcedureName::SetConfig => Some("/setConfig".to_string()), - ProcedureName::Migration => Some("/migration".to_string()), - ProcedureName::Properties => Some("/properties".to_string()), - ProcedureName::Health(id) => Some(format!("/health/{}", id)), - ProcedureName::Action(id) => Some(format!("/action/{}", id)), - ProcedureName::Check(id) => Some(format!("/dependencies/{}/check", id)), - ProcedureName::AutoConfig(id) => Some(format!("/dependencies/{}/autoConfigure", id)), - ProcedureName::Signal => Some("/handleSignal".to_string()), + ProcedureName::Init => "/init".to_string(), + ProcedureName::Uninit => "/uninit".to_string(), + ProcedureName::StartMain => "/main/start".to_string(), + ProcedureName::StopMain => "/main/stop".to_string(), + ProcedureName::SetConfig => "/config/set".to_string(), + ProcedureName::GetConfig => "/config/get".to_string(), + ProcedureName::CreateBackup => "/backup/create".to_string(), + ProcedureName::Properties => "/properties".to_string(), + ProcedureName::RestoreBackup => "/backup/restore".to_string(), + ProcedureName::ActionMetadata => "/actions/metadata".to_string(), + ProcedureName::RunAction(id) => format!("/actions/{}/run", id), + ProcedureName::GetAction(id) => format!("/actions/{}/get", id), + ProcedureName::QueryDependency(id) => format!("/dependencies/{}/query", id), + ProcedureName::UpdateDependency(id) => format!("/dependencies/{}/update", id), } } } diff --git a/core/snapshot-creator/Cargo.toml b/core/snapshot-creator/Cargo.toml deleted file mode 100644 index 628cd31618..0000000000 --- a/core/snapshot-creator/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "snapshot_creator" -version = "0.1.0" -edition = "2021" - -# See 
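The reworked js_function_name now returns a String directly (the Option return and the separate docker_name mapping are gone) and encodes the action or dependency id into the path. For reference, the mapping behaves like this; the snippet assumes a hypothetical test inside the models crate:

```rust
use models::{ActionId, ProcedureName};

let id: ActionId = "config".parse().unwrap();
assert_eq!(
    ProcedureName::RunAction(id).js_function_name(),
    "/actions/config/run"
);
assert_eq!(ProcedureName::StartMain.js_function_name(), "/main/start");
assert_eq!(
    ProcedureName::UpdateDependency("config".parse().unwrap()).js_function_name(),
    "/dependencies/config/update"
);
```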
more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -dashmap = "5.3.4" -deno_core = "=0.222.0" -deno_ast = { version = "=0.29.5", features = ["transpiling"] } diff --git a/core/snapshot-creator/src/main.rs b/core/snapshot-creator/src/main.rs deleted file mode 100644 index ad73304840..0000000000 --- a/core/snapshot-creator/src/main.rs +++ /dev/null @@ -1,11 +0,0 @@ -use deno_core::JsRuntimeForSnapshot; - -fn main() { - let runtime = JsRuntimeForSnapshot::new(Default::default()); - let snapshot = runtime.snapshot(); - - let snapshot_slice: &[u8] = &*snapshot; - println!("Snapshot size: {}", snapshot_slice.len()); - - std::fs::write("JS_SNAPSHOT.bin", snapshot_slice).unwrap(); -} diff --git a/core/startos/Cargo.toml b/core/startos/Cargo.toml index 52e619f278..d1a3334472 100644 --- a/core/startos/Cargo.toml +++ b/core/startos/Cargo.toml @@ -21,20 +21,27 @@ license = "MIT" name = "startos" path = "src/lib.rs" +[[bin]] +name = "containerbox" +path = "src/main.rs" + +[[bin]] +name = "start-cli" +path = "src/main.rs" + [[bin]] name = "startbox" path = "src/main.rs" [features] -avahi = ["avahi-sys"] -avahi-alias = ["avahi"] cli = [] +container-runtime = [] daemon = [] -default = ["cli", "sdk", "daemon", "js-engine"] +default = ["cli", "daemon"] dev = [] -docker = [] -sdk = [] unstable = ["console-subscriber", "tokio/tracing"] +docker = [] +test = [] [dependencies] aes = { version = "0.7.5", features = ["ctr"] } @@ -45,9 +52,8 @@ async-compression = { version = "0.4.4", features = [ ] } async-stream = "0.3.5" async-trait = "0.1.74" -avahi-sys = { git = "https://github.com/Start9Labs/avahi-sys", version = "0.10.0", branch = "feature/dynamic-linking", features = [ - "dynamic", -], optional = true } +axum = { version = "0.7.3", features = ["ws"] } +axum-server = "0.6.0" base32 = "0.4.0" base64 = "0.21.4" base64ct = "1.6.0" @@ -55,7 +61,7 @@ basic-cookies = "0.1.4" blake3 = "1.5.0" bytes = "1" chrono = { version = "0.4.31", features = ["serde"] } -clap = "3.2.25" +clap = "4.4.12" color-eyre = "0.6.2" console = "0.15.7" console-subscriber = { version = "0.2", optional = true } @@ -65,14 +71,14 @@ current_platform = "0.2.0" digest = "0.10.7" divrem = "1.0.0" ed25519 = { version = "2.2.3", features = ["pkcs8", "pem", "alloc"] } -ed25519-dalek = { version = "2.0.0", features = [ +ed25519-dalek = { version = "2.1.1", features = [ "serde", "zeroize", "rand_core", "digest", + "pkcs8", ] } ed25519-dalek-v1 = { package = "ed25519-dalek", version = "1" } -container-init = { path = "../container-init" } emver = { version = "0.1.7", git = "https://github.com/Start9Labs/emver-rs.git", features = [ "serde", ] } @@ -82,9 +88,11 @@ gpt = "3.1.0" helpers = { path = "../helpers" } hex = "0.4.3" hmac = "0.12.1" -http = "0.2.9" -hyper = { version = "0.14.27", features = ["full"] } -hyper-ws-listener = "0.3.0" +http = "1.0.0" +id-pool = { version = "0.2.2", default-features = false, features = [ + "serde", + "u16", +] } imbl = "2.0.2" imbl-value = { git = "https://github.com/Start9Labs/imbl-value.git" } include_dir = "0.7.3" @@ -94,12 +102,13 @@ integer-encoding = { version = "4.0.0", features = ["tokio_async"] } ipnet = { version = "2.8.0", features = ["serde"] } iprange = { version = "0.6.7", features = ["serde"] } isocountry = "0.3.2" -itertools = "0.11.0" +itertools = "0.12.0" jaq-core = "0.10.1" jaq-std = "0.10.0" josekit = "0.8.4" -js-engine = { path = '../js-engine', optional = true } jsonpath_lib = { git = "https://github.com/Start9Labs/jsonpath.git" } 
+lazy_async_pool = "0.3.3" +lazy_format = "2.0" lazy_static = "1.4.0" libc = "0.2.149" log = "0.4.20" @@ -110,6 +119,7 @@ nix = { version = "0.27.1", features = ["user", "process", "signal", "fs"] } nom = "7.1.3" num = "0.4.1" num_enum = "0.7.0" +once_cell = "1.19.0" openssh-keys = "0.6.2" openssl = { version = "0.10.57", features = ["vendored"] } p256 = { version = "0.13.2", features = ["pem"] } @@ -124,12 +134,12 @@ proptest = "1.3.1" proptest-derive = "0.4.0" rand = { version = "0.8.5", features = ["std"] } regex = "1.10.2" -reqwest = { version = "0.11.22", features = ["stream", "json", "socks"] } +reqwest = { version = "0.11.23", features = ["stream", "json", "socks"] } reqwest_cookie_store = "0.6.0" rpassword = "7.2.0" -rpc-toolkit = "0.2.2" +rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git", branch = "refactor/traits" } rust-argon2 = "2.0.0" -scopeguard = "1.1" # because avahi-sys fucks your shit up +rustyline-async = "0.4.1" semver = { version = "1.0.20", features = ["serde"] } serde = { version = "1.0", features = ["derive", "rc"] } serde_cbor = { package = "ciborium", version = "0.2.1" } @@ -138,6 +148,7 @@ serde_toml = { package = "toml", version = "0.8.2" } serde_with = { version = "3.4.0", features = ["macros", "json"] } serde_yaml = "0.9.25" sha2 = "0.10.2" +shell-words = "1" simple-logging = "2.0.2" sqlx = { version = "0.7.2", features = [ "chrono", @@ -150,20 +161,23 @@ stderrlog = "0.5.4" tar = "0.4.40" thiserror = "1.0.49" tokio = { version = "1", features = ["full"] } -tokio-rustls = "0.24.1" +tokio-rustls = "0.25.0" tokio-socks = "0.5.1" tokio-stream = { version = "0.1.14", features = ["io-util", "sync", "net"] } tokio-tar = { git = "https://github.com/dr-bonez/tokio-tar.git" } -tokio-tungstenite = { version = "0.20.1", features = ["native-tls"] } +tokio-tungstenite = { version = "0.21.0", features = ["native-tls"] } tokio-util = { version = "0.7.9", features = ["io"] } -torut = "0.2.1" +torut = { git = "https://github.com/Start9Labs/torut.git", branch = "update/dependencies", features = [ + "serialize", +] } tracing = "0.1.39" tracing-error = "0.2.0" tracing-futures = "0.2.5" tracing-journald = "0.3.0" tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } trust-dns-server = "0.23.1" -typed-builder = "0.17.0" +ts-rs = { git = "https://github.com/dr-bonez/ts-rs.git", branch = "feature/top-level-type-override" } # "8.1.0" +typed-builder = "0.18.0" url = { version = "2.4.1", features = ["serde"] } urlencoding = "2.1.3" uuid = { version = "1.4.1", features = ["v4"] } diff --git a/core/startos/Effects.ts b/core/startos/Effects.ts new file mode 100644 index 0000000000..9be56724e5 --- /dev/null +++ b/core/startos/Effects.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export interface SetStoreParams { value: any, path: string, } \ No newline at end of file diff --git a/core/startos/bindings/ActionId.ts b/core/startos/bindings/ActionId.ts new file mode 100644 index 0000000000..9bb1f99903 --- /dev/null +++ b/core/startos/bindings/ActionId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type ActionId = string; diff --git a/core/startos/bindings/ActionMetadata.ts b/core/startos/bindings/ActionMetadata.ts new file mode 100644 index 0000000000..c9373a5b88 --- /dev/null +++ b/core/startos/bindings/ActionMetadata.ts @@ -0,0 +1,12 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { AllowedStatuses } from "./AllowedStatuses"; + +export type ActionMetadata = { + name: string; + description: string; + warning: string | null; + input: any; + disabled: boolean; + allowedStatuses: AllowedStatuses; + group: string | null; +}; diff --git a/core/startos/bindings/AddSslOptions.ts b/core/startos/bindings/AddSslOptions.ts new file mode 100644 index 0000000000..984a1a7c6b --- /dev/null +++ b/core/startos/bindings/AddSslOptions.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { AlpnInfo } from "./AlpnInfo"; + +export type AddSslOptions = { + scheme: string | null; + preferredExternalPort: number; + alpn: AlpnInfo; +}; diff --git a/core/startos/bindings/AddressInfo.ts b/core/startos/bindings/AddressInfo.ts new file mode 100644 index 0000000000..1355f72a2a --- /dev/null +++ b/core/startos/bindings/AddressInfo.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { BindOptions } from "./BindOptions"; +import type { HostId } from "./HostId"; + +export type AddressInfo = { + username: string | null; + hostId: HostId; + bindOptions: BindOptions; + suffix: string; +}; diff --git a/core/startos/bindings/Alerts.ts b/core/startos/bindings/Alerts.ts new file mode 100644 index 0000000000..c6a671e833 --- /dev/null +++ b/core/startos/bindings/Alerts.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type Alerts = { + install: string | null; + uninstall: string | null; + restore: string | null; + start: string | null; + stop: string | null; +}; diff --git a/core/startos/bindings/Algorithm.ts b/core/startos/bindings/Algorithm.ts new file mode 100644 index 0000000000..877325fdf0 --- /dev/null +++ b/core/startos/bindings/Algorithm.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type Algorithm = "ecdsa" | "ed25519"; diff --git a/core/startos/bindings/AllPackageData.ts b/core/startos/bindings/AllPackageData.ts new file mode 100644 index 0000000000..c7698d10df --- /dev/null +++ b/core/startos/bindings/AllPackageData.ts @@ -0,0 +1,5 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { PackageDataEntry } from "./PackageDataEntry"; +import type { PackageId } from "./PackageId"; + +export type AllPackageData = { [key: PackageId]: PackageDataEntry }; diff --git a/core/startos/bindings/AllowedStatuses.ts b/core/startos/bindings/AllowedStatuses.ts new file mode 100644 index 0000000000..a04ee2bac9 --- /dev/null +++ b/core/startos/bindings/AllowedStatuses.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type AllowedStatuses = "onlyRunning" | "onlyStopped" | "any"; diff --git a/core/startos/bindings/AlpnInfo.ts b/core/startos/bindings/AlpnInfo.ts new file mode 100644 index 0000000000..eac072fc0c --- /dev/null +++ b/core/startos/bindings/AlpnInfo.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { MaybeUtf8String } from "./MaybeUtf8String"; + +export type AlpnInfo = "reflect" | { specified: Array }; diff --git a/core/startos/bindings/BackupProgress.ts b/core/startos/bindings/BackupProgress.ts new file mode 100644 index 0000000000..42812a7b71 --- /dev/null +++ b/core/startos/bindings/BackupProgress.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type BackupProgress = { complete: boolean }; diff --git a/core/startos/bindings/BindInfo.ts b/core/startos/bindings/BindInfo.ts new file mode 100644 index 0000000000..8bda6d37e1 --- /dev/null +++ b/core/startos/bindings/BindInfo.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { BindOptions } from "./BindOptions"; + +export type BindInfo = { options: BindOptions; assignedLanPort: number | null }; diff --git a/core/startos/bindings/BindOptions.ts b/core/startos/bindings/BindOptions.ts new file mode 100644 index 0000000000..6b12139a72 --- /dev/null +++ b/core/startos/bindings/BindOptions.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { AddSslOptions } from "./AddSslOptions"; +import type { Security } from "./Security"; + +export type BindOptions = { + scheme: string | null; + preferredExternalPort: number; + addSsl: AddSslOptions | null; + secure: Security | null; +}; diff --git a/core/startos/bindings/BindParams.ts b/core/startos/bindings/BindParams.ts new file mode 100644 index 0000000000..4aa78e5224 --- /dev/null +++ b/core/startos/bindings/BindParams.ts @@ -0,0 +1,15 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { AddSslOptions } from "./AddSslOptions"; +import type { HostId } from "./HostId"; +import type { HostKind } from "./HostKind"; +import type { Security } from "./Security"; + +export type BindParams = { + kind: HostKind; + id: HostId; + internalPort: number; + scheme: string | null; + preferredExternalPort: number; + addSsl: AddSslOptions | null; + secure: Security | null; +}; diff --git a/core/startos/bindings/Callback.ts b/core/startos/bindings/Callback.ts new file mode 100644 index 0000000000..e6c5b004ff --- /dev/null +++ b/core/startos/bindings/Callback.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type Callback = () => void; diff --git a/core/startos/bindings/ChrootParams.ts b/core/startos/bindings/ChrootParams.ts new file mode 100644 index 0000000000..9ee6e89596 --- /dev/null +++ b/core/startos/bindings/ChrootParams.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type ChrootParams = { + env: string | null; + workdir: string | null; + user: string | null; + path: string; + command: string; + args: string[]; +}; diff --git a/core/startos/bindings/CreateOverlayedImageParams.ts b/core/startos/bindings/CreateOverlayedImageParams.ts new file mode 100644 index 0000000000..d17f2e8c10 --- /dev/null +++ b/core/startos/bindings/CreateOverlayedImageParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type CreateOverlayedImageParams = { imageId: string }; diff --git a/core/startos/bindings/CurrentDependencies.ts b/core/startos/bindings/CurrentDependencies.ts new file mode 100644 index 0000000000..75645e200c --- /dev/null +++ b/core/startos/bindings/CurrentDependencies.ts @@ -0,0 +1,5 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { CurrentDependencyInfo } from "./CurrentDependencyInfo"; +import type { PackageId } from "./PackageId"; + +export type CurrentDependencies = { [key: PackageId]: CurrentDependencyInfo }; diff --git a/core/startos/bindings/CurrentDependencyInfo.ts b/core/startos/bindings/CurrentDependencyInfo.ts new file mode 100644 index 0000000000..10112e3124 --- /dev/null +++ b/core/startos/bindings/CurrentDependencyInfo.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { DataUrl } from "./DataUrl"; + +export type CurrentDependencyInfo = { + title: string; + icon: DataUrl; + registryUrl: string; + versionSpec: string; +} & ({ kind: "exists" } | { kind: "running"; healthChecks: string[] }); diff --git a/core/startos/bindings/DataUrl.ts b/core/startos/bindings/DataUrl.ts new file mode 100644 index 0000000000..65fa15059f --- /dev/null +++ b/core/startos/bindings/DataUrl.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type DataUrl = string; diff --git a/core/startos/bindings/DepInfo.ts b/core/startos/bindings/DepInfo.ts new file mode 100644 index 0000000000..3c01e09391 --- /dev/null +++ b/core/startos/bindings/DepInfo.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type DepInfo = { description: string | null; optional: boolean }; diff --git a/core/startos/bindings/Dependencies.ts b/core/startos/bindings/Dependencies.ts new file mode 100644 index 0000000000..974495b7b3 --- /dev/null +++ b/core/startos/bindings/Dependencies.ts @@ -0,0 +1,5 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { DepInfo } from "./DepInfo"; +import type { PackageId } from "./PackageId"; + +export type Dependencies = { [key: PackageId]: DepInfo }; diff --git a/core/startos/bindings/DependencyConfigErrors.ts b/core/startos/bindings/DependencyConfigErrors.ts new file mode 100644 index 0000000000..5f2246fa9f --- /dev/null +++ b/core/startos/bindings/DependencyConfigErrors.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { PackageId } from "./PackageId"; + +export type DependencyConfigErrors = { [key: PackageId]: string }; diff --git a/core/startos/bindings/DependencyKind.ts b/core/startos/bindings/DependencyKind.ts new file mode 100644 index 0000000000..71fd49762c --- /dev/null +++ b/core/startos/bindings/DependencyKind.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type DependencyKind = "exists" | "running"; diff --git a/core/startos/bindings/DependencyRequirement.ts b/core/startos/bindings/DependencyRequirement.ts new file mode 100644 index 0000000000..e6224ce486 --- /dev/null +++ b/core/startos/bindings/DependencyRequirement.ts @@ -0,0 +1,11 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type DependencyRequirement = + | { + kind: "running"; + id: string; + healthChecks: string[]; + versionSpec: string; + registryUrl: string; + } + | { kind: "exists"; id: string; versionSpec: string; registryUrl: string }; diff --git a/core/startos/bindings/Description.ts b/core/startos/bindings/Description.ts new file mode 100644 index 0000000000..918bd09c52 --- /dev/null +++ b/core/startos/bindings/Description.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type Description = { short: string; long: string }; diff --git a/core/startos/bindings/DestroyOverlayedImageParams.ts b/core/startos/bindings/DestroyOverlayedImageParams.ts new file mode 100644 index 0000000000..47671604af --- /dev/null +++ b/core/startos/bindings/DestroyOverlayedImageParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type DestroyOverlayedImageParams = { guid: string }; diff --git a/core/startos/bindings/Duration.ts b/core/startos/bindings/Duration.ts new file mode 100644 index 0000000000..889c729b85 --- /dev/null +++ b/core/startos/bindings/Duration.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type Duration = string; diff --git a/core/startos/bindings/ExecuteAction.ts b/core/startos/bindings/ExecuteAction.ts new file mode 100644 index 0000000000..aaa3407471 --- /dev/null +++ b/core/startos/bindings/ExecuteAction.ts @@ -0,0 +1,7 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ExecuteAction = { + serviceId: string | null; + actionId: string; + input: any; +}; diff --git a/core/startos/bindings/ExportActionParams.ts b/core/startos/bindings/ExportActionParams.ts new file mode 100644 index 0000000000..4961c4a110 --- /dev/null +++ b/core/startos/bindings/ExportActionParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { ActionMetadata } from "./ActionMetadata"; + +export type ExportActionParams = { id: string; metadata: ActionMetadata }; diff --git a/core/startos/bindings/ExportServiceInterfaceParams.ts b/core/startos/bindings/ExportServiceInterfaceParams.ts new file mode 100644 index 0000000000..93deb0ce58 --- /dev/null +++ b/core/startos/bindings/ExportServiceInterfaceParams.ts @@ -0,0 +1,14 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
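The DependencyRequirement union above is discriminated by kind, so a consumer can narrow it with a plain switch; a sketch (import path illustrative):

import type { DependencyRequirement } from "./core/startos/bindings/DependencyRequirement";

function describeRequirement(req: DependencyRequirement): string {
  switch (req.kind) {
    case "running":
      // Only the "running" variant carries the health checks to wait on.
      return `${req.id} must be running (${req.healthChecks.length} health checks)`;
    case "exists":
      return `${req.id} only needs to be installed`;
  }
}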
+import type { AddressInfo } from "./AddressInfo"; +import type { ServiceInterfaceType } from "./ServiceInterfaceType"; + +export type ExportServiceInterfaceParams = { + id: string; + name: string; + description: string; + hasPrimary: boolean; + disabled: boolean; + masked: boolean; + addressInfo: AddressInfo; + type: ServiceInterfaceType; +}; diff --git a/core/startos/bindings/ExportedHostInfo.ts b/core/startos/bindings/ExportedHostInfo.ts new file mode 100644 index 0000000000..d8339a0740 --- /dev/null +++ b/core/startos/bindings/ExportedHostInfo.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { ExportedHostnameInfo } from "./ExportedHostnameInfo"; +import type { HostId } from "./HostId"; +import type { HostKind } from "./HostKind"; + +export type ExportedHostInfo = { + id: HostId; + kind: HostKind; + hostnames: Array<ExportedHostnameInfo>; +}; diff --git a/core/startos/bindings/ExportedHostnameInfo.ts b/core/startos/bindings/ExportedHostnameInfo.ts new file mode 100644 index 0000000000..23cbdd4873 --- /dev/null +++ b/core/startos/bindings/ExportedHostnameInfo.ts @@ -0,0 +1,12 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { ExportedIpHostname } from "./ExportedIpHostname"; +import type { ExportedOnionHostname } from "./ExportedOnionHostname"; + +export type ExportedHostnameInfo = + | { + kind: "ip"; + networkInterfaceId: string; + public: boolean; + hostname: ExportedIpHostname; + } + | { kind: "onion"; hostname: ExportedOnionHostname }; diff --git a/core/startos/bindings/ExportedIpHostname.ts b/core/startos/bindings/ExportedIpHostname.ts new file mode 100644 index 0000000000..1e02ab2be6 --- /dev/null +++ b/core/startos/bindings/ExportedIpHostname.ts @@ -0,0 +1,18 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ExportedIpHostname = + | { kind: "ipv4"; value: string; port: number | null; sslPort: number | null } + | { kind: "ipv6"; value: string; port: number | null; sslPort: number | null } + | { + kind: "local"; + value: string; + port: number | null; + sslPort: number | null; + } + | { + kind: "domain"; + domain: string; + subdomain: string | null; + port: number | null; + sslPort: number | null; + }; diff --git a/core/startos/bindings/ExportedOnionHostname.ts b/core/startos/bindings/ExportedOnionHostname.ts new file mode 100644 index 0000000000..af072289fa --- /dev/null +++ b/core/startos/bindings/ExportedOnionHostname.ts @@ -0,0 +1,7 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ExportedOnionHostname = { + value: string; + port: number | null; + sslPort: number | null; +}; diff --git a/core/startos/bindings/ExposeForDependentsParams.ts b/core/startos/bindings/ExposeForDependentsParams.ts new file mode 100644 index 0000000000..714771c1e3 --- /dev/null +++ b/core/startos/bindings/ExposeForDependentsParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+ +export type ExposeForDependentsParams = { paths: string[] }; diff --git a/core/startos/bindings/FullProgress.ts b/core/startos/bindings/FullProgress.ts new file mode 100644 index 0000000000..56f6a1a15f --- /dev/null +++ b/core/startos/bindings/FullProgress.ts @@ -0,0 +1,5 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { NamedProgress } from "./NamedProgress"; +import type { Progress } from "./Progress"; + +export type FullProgress = { overall: Progress; phases: Array<NamedProgress> }; diff --git a/core/startos/bindings/GetHostInfoParams.ts b/core/startos/bindings/GetHostInfoParams.ts new file mode 100644 index 0000000000..7c2b1b6b6c --- /dev/null +++ b/core/startos/bindings/GetHostInfoParams.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Callback } from "./Callback"; +import type { GetHostInfoParamsKind } from "./GetHostInfoParamsKind"; + +export type GetHostInfoParams = { + kind: GetHostInfoParamsKind | null; + serviceInterfaceId: string; + packageId: string | null; + callback: Callback; +}; diff --git a/core/startos/bindings/GetHostInfoParamsKind.ts b/core/startos/bindings/GetHostInfoParamsKind.ts new file mode 100644 index 0000000000..6353044ce8 --- /dev/null +++ b/core/startos/bindings/GetHostInfoParamsKind.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type GetHostInfoParamsKind = "multi"; diff --git a/core/startos/bindings/GetPrimaryUrlParams.ts b/core/startos/bindings/GetPrimaryUrlParams.ts new file mode 100644 index 0000000000..c5aaa0ebe0 --- /dev/null +++ b/core/startos/bindings/GetPrimaryUrlParams.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Callback } from "./Callback"; + +export type GetPrimaryUrlParams = { + packageId: string | null; + serviceInterfaceId: string; + callback: Callback; +}; diff --git a/core/startos/bindings/GetServiceInterfaceParams.ts b/core/startos/bindings/GetServiceInterfaceParams.ts new file mode 100644 index 0000000000..03990c10b6 --- /dev/null +++ b/core/startos/bindings/GetServiceInterfaceParams.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Callback } from "./Callback"; + +export type GetServiceInterfaceParams = { + packageId: string | null; + serviceInterfaceId: string; + callback: Callback; +}; diff --git a/core/startos/bindings/GetServicePortForwardParams.ts b/core/startos/bindings/GetServicePortForwardParams.ts new file mode 100644 index 0000000000..3de7e02195 --- /dev/null +++ b/core/startos/bindings/GetServicePortForwardParams.ts @@ -0,0 +1,6 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type GetServicePortForwardParams = { + packageId: string | null; + internalPort: number; +}; diff --git a/core/startos/bindings/GetSslCertificateParams.ts b/core/startos/bindings/GetSslCertificateParams.ts new file mode 100644 index 0000000000..a1fd17bdde --- /dev/null +++ b/core/startos/bindings/GetSslCertificateParams.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
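A small sketch of interpreting the Progress/FullProgress types above as a percentage. The reading of the bare boolean as "finished / not finished" is an assumption; the generated type only says it is a boolean.

import type { FullProgress } from "./core/startos/bindings/FullProgress";
import type { Progress } from "./core/startos/bindings/Progress";

function percent(p: Progress): number | null {
  // Assumption: `true` means the phase is complete, `false` that it has not finished.
  if (typeof p === "boolean") return p ? 100 : 0;
  // A null total means the size is not yet known, so no percentage can be computed.
  return p.total ? Math.round((100 * p.done) / p.total) : null;
}

function phasePercents(fp: FullProgress): Array<number | null> {
  return fp.phases.map((phase) => percent(phase.progress));
}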
+import type { Algorithm } from "./Algorithm"; + +export type GetSslCertificateParams = { + packageId: string | null; + hostId: string; + algorithm: Algorithm | null; +}; diff --git a/core/startos/bindings/GetSslKeyParams.ts b/core/startos/bindings/GetSslKeyParams.ts new file mode 100644 index 0000000000..8c5170c9c9 --- /dev/null +++ b/core/startos/bindings/GetSslKeyParams.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Algorithm } from "./Algorithm"; + +export type GetSslKeyParams = { + packageId: string | null; + hostId: string; + algorithm: Algorithm | null; +}; diff --git a/core/startos/bindings/GetStoreParams.ts b/core/startos/bindings/GetStoreParams.ts new file mode 100644 index 0000000000..bfd97377b0 --- /dev/null +++ b/core/startos/bindings/GetStoreParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type GetStoreParams = { packageId: string | null; path: string }; diff --git a/core/startos/bindings/GetSystemSmtpParams.ts b/core/startos/bindings/GetSystemSmtpParams.ts new file mode 100644 index 0000000000..b96b9f5952 --- /dev/null +++ b/core/startos/bindings/GetSystemSmtpParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Callback } from "./Callback"; + +export type GetSystemSmtpParams = { callback: Callback }; diff --git a/core/startos/bindings/Governor.ts b/core/startos/bindings/Governor.ts new file mode 100644 index 0000000000..82c9e39f15 --- /dev/null +++ b/core/startos/bindings/Governor.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type Governor = string; diff --git a/core/startos/bindings/HardwareRequirements.ts b/core/startos/bindings/HardwareRequirements.ts new file mode 100644 index 0000000000..079483289c --- /dev/null +++ b/core/startos/bindings/HardwareRequirements.ts @@ -0,0 +1,7 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type HardwareRequirements = { + device: { [key: string]: string }; + ram: bigint | null; + arch: Array<string> | null; +}; diff --git a/core/startos/bindings/HealthCheckId.ts b/core/startos/bindings/HealthCheckId.ts new file mode 100644 index 0000000000..eeefafbb1f --- /dev/null +++ b/core/startos/bindings/HealthCheckId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type HealthCheckId = string; diff --git a/core/startos/bindings/HealthCheckResult.ts b/core/startos/bindings/HealthCheckResult.ts new file mode 100644 index 0000000000..ae4d3c3c2c --- /dev/null +++ b/core/startos/bindings/HealthCheckResult.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+ +export type HealthCheckResult = { name: string } & ( + | { result: "success"; message: string | null } + | { result: "disabled"; message: string | null } + | { result: "starting"; message: string | null } + | { result: "loading"; message: string } + | { result: "failure"; message: string } +); diff --git a/core/startos/bindings/Host.ts b/core/startos/bindings/Host.ts new file mode 100644 index 0000000000..4188cb404d --- /dev/null +++ b/core/startos/bindings/Host.ts @@ -0,0 +1,11 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { BindInfo } from "./BindInfo"; +import type { HostAddress } from "./HostAddress"; +import type { HostKind } from "./HostKind"; + +export type Host = { + kind: HostKind; + bindings: { [key: number]: BindInfo }; + addresses: Array<HostAddress>; + primary: HostAddress | null; +}; diff --git a/core/startos/bindings/HostAddress.ts b/core/startos/bindings/HostAddress.ts new file mode 100644 index 0000000000..9cddf778bc --- /dev/null +++ b/core/startos/bindings/HostAddress.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type HostAddress = { kind: "onion"; address: string }; diff --git a/core/startos/bindings/HostId.ts b/core/startos/bindings/HostId.ts new file mode 100644 index 0000000000..23049a51d9 --- /dev/null +++ b/core/startos/bindings/HostId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type HostId = string; diff --git a/core/startos/bindings/HostInfo.ts b/core/startos/bindings/HostInfo.ts new file mode 100644 index 0000000000..b69dbf6b46 --- /dev/null +++ b/core/startos/bindings/HostInfo.ts @@ -0,0 +1,5 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Host } from "./Host"; +import type { HostId } from "./HostId"; + +export type HostInfo = { [key: HostId]: Host }; diff --git a/core/startos/bindings/HostKind.ts b/core/startos/bindings/HostKind.ts new file mode 100644 index 0000000000..09e61b91b1 --- /dev/null +++ b/core/startos/bindings/HostKind.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type HostKind = "multi"; diff --git a/core/startos/bindings/ImageId.ts b/core/startos/bindings/ImageId.ts new file mode 100644 index 0000000000..408331f728 --- /dev/null +++ b/core/startos/bindings/ImageId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ImageId = string; diff --git a/core/startos/bindings/InstalledState.ts b/core/startos/bindings/InstalledState.ts new file mode 100644 index 0000000000..053c3ae667 --- /dev/null +++ b/core/startos/bindings/InstalledState.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Manifest } from "./Manifest"; + +export type InstalledState = { manifest: Manifest }; diff --git a/core/startos/bindings/InstallingInfo.ts b/core/startos/bindings/InstallingInfo.ts new file mode 100644 index 0000000000..7627f45029 --- /dev/null +++ b/core/startos/bindings/InstallingInfo.ts @@ -0,0 +1,5 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { FullProgress } from "./FullProgress"; +import type { Manifest } from "./Manifest"; + +export type InstallingInfo = { newManifest: Manifest; progress: FullProgress }; diff --git a/core/startos/bindings/InstallingState.ts b/core/startos/bindings/InstallingState.ts new file mode 100644 index 0000000000..3203cfd6b3 --- /dev/null +++ b/core/startos/bindings/InstallingState.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { InstallingInfo } from "./InstallingInfo"; + +export type InstallingState = { installingInfo: InstallingInfo }; diff --git a/core/startos/bindings/IpInfo.ts b/core/startos/bindings/IpInfo.ts new file mode 100644 index 0000000000..1208fafb94 --- /dev/null +++ b/core/startos/bindings/IpInfo.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type IpInfo = { + ipv4Range: string | null; + ipv4: string | null; + ipv6Range: string | null; + ipv6: string | null; +}; diff --git a/core/startos/bindings/ListServiceInterfacesParams.ts b/core/startos/bindings/ListServiceInterfacesParams.ts new file mode 100644 index 0000000000..03d1cdfc8a --- /dev/null +++ b/core/startos/bindings/ListServiceInterfacesParams.ts @@ -0,0 +1,7 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Callback } from "./Callback"; + +export type ListServiceInterfacesParams = { + packageId: string | null; + callback: Callback; +}; diff --git a/core/startos/bindings/MainStatus.ts b/core/startos/bindings/MainStatus.ts new file mode 100644 index 0000000000..8782130872 --- /dev/null +++ b/core/startos/bindings/MainStatus.ts @@ -0,0 +1,20 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Duration } from "./Duration"; +import type { HealthCheckId } from "./HealthCheckId"; +import type { HealthCheckResult } from "./HealthCheckResult"; + +export type MainStatus = + | { status: "stopped" } + | { status: "restarting" } + | { status: "stopping"; timeout: Duration } + | { status: "starting" } + | { + status: "running"; + started: string; + health: { [key: HealthCheckId]: HealthCheckResult }; + } + | { + status: "backingUp"; + started: string | null; + health: { [key: HealthCheckId]: HealthCheckResult }; + }; diff --git a/core/startos/bindings/Manifest.ts b/core/startos/bindings/Manifest.ts new file mode 100644 index 0000000000..688486a7d2 --- /dev/null +++ b/core/startos/bindings/Manifest.ts @@ -0,0 +1,32 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
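Since only the running and backingUp variants of MainStatus above carry health information, a consumer can collect the failing checks with a simple narrowing, as in this sketch (import paths illustrative):

import type { HealthCheckResult } from "./core/startos/bindings/HealthCheckResult";
import type { MainStatus } from "./core/startos/bindings/MainStatus";

function failingChecks(status: MainStatus): HealthCheckResult[] {
  if (status.status !== "running" && status.status !== "backingUp") return [];
  // `health` maps HealthCheckId -> HealthCheckResult; keep only the failures.
  return Object.values(status.health).filter((h) => h.result === "failure");
}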
+import type { Alerts } from "./Alerts"; +import type { Dependencies } from "./Dependencies"; +import type { Description } from "./Description"; +import type { HardwareRequirements } from "./HardwareRequirements"; +import type { ImageId } from "./ImageId"; +import type { PackageId } from "./PackageId"; +import type { VolumeId } from "./VolumeId"; + +export type Manifest = { + id: PackageId; + title: string; + version: string; + releaseNotes: string; + license: string; + replaces: Array<string>; + wrapperRepo: string; + upstreamRepo: string; + supportSite: string; + marketingSite: string; + donationUrl: string | null; + description: Description; + images: Array<ImageId>; + assets: Array<VolumeId>; + volumes: Array<VolumeId>; + alerts: Alerts; + dependencies: Dependencies; + hardwareRequirements: HardwareRequirements; + gitHash: string | null; + osVersion: string; + hasConfig: boolean; +}; diff --git a/core/startos/bindings/MaybeUtf8String.ts b/core/startos/bindings/MaybeUtf8String.ts new file mode 100644 index 0000000000..a77f8ce4e3 --- /dev/null +++ b/core/startos/bindings/MaybeUtf8String.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type MaybeUtf8String = string | number[]; diff --git a/core/startos/bindings/MountParams.ts b/core/startos/bindings/MountParams.ts new file mode 100644 index 0000000000..daa4ddf325 --- /dev/null +++ b/core/startos/bindings/MountParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { MountTarget } from "./MountTarget"; + +export type MountParams = { location: string; target: MountTarget }; diff --git a/core/startos/bindings/MountTarget.ts b/core/startos/bindings/MountTarget.ts new file mode 100644 index 0000000000..3009861fb0 --- /dev/null +++ b/core/startos/bindings/MountTarget.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type MountTarget = { + packageId: string; + volumeId: string; + subpath: string | null; + readonly: boolean; +}; diff --git a/core/startos/bindings/NamedProgress.ts b/core/startos/bindings/NamedProgress.ts new file mode 100644 index 0000000000..42104c48b7 --- /dev/null +++ b/core/startos/bindings/NamedProgress.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Progress } from "./Progress"; + +export type NamedProgress = { name: string; progress: Progress }; diff --git a/core/startos/bindings/PackageDataEntry.ts b/core/startos/bindings/PackageDataEntry.ts new file mode 100644 index 0000000000..8b906bb90f --- /dev/null +++ b/core/startos/bindings/PackageDataEntry.ts @@ -0,0 +1,26 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { ActionId } from "./ActionId"; +import type { ActionMetadata } from "./ActionMetadata"; +import type { CurrentDependencies } from "./CurrentDependencies"; +import type { DataUrl } from "./DataUrl"; +import type { HostInfo } from "./HostInfo"; +import type { PackageState } from "./PackageState"; +import type { ServiceInterfaceId } from "./ServiceInterfaceId"; +import type { ServiceInterfaceWithHostInfo } from "./ServiceInterfaceWithHostInfo"; +import type { Status } from "./Status"; + +export type PackageDataEntry = { + stateInfo: PackageState; + status: Status; + marketplaceUrl: string | null; + developerKey: string; + icon: DataUrl; + lastBackup: string | null; + currentDependencies: CurrentDependencies; + actions: { [key: ActionId]: ActionMetadata }; + serviceInterfaces: { + [key: ServiceInterfaceId]: ServiceInterfaceWithHostInfo; + }; + hosts: HostInfo; + storeExposedDependents: string[]; +}; diff --git a/core/startos/bindings/PackageId.ts b/core/startos/bindings/PackageId.ts new file mode 100644 index 0000000000..349bf44d7b --- /dev/null +++ b/core/startos/bindings/PackageId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type PackageId = string; diff --git a/core/startos/bindings/PackageState.ts b/core/startos/bindings/PackageState.ts new file mode 100644 index 0000000000..6c90bff09a --- /dev/null +++ b/core/startos/bindings/PackageState.ts @@ -0,0 +1,11 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { InstalledState } from "./InstalledState"; +import type { InstallingState } from "./InstallingState"; +import type { UpdatingState } from "./UpdatingState"; + +export type PackageState = + | ({ state: "installing" } & InstallingState) + | ({ state: "restoring" } & InstallingState) + | ({ state: "updating" } & UpdatingState) + | ({ state: "installed" } & InstalledState) + | ({ state: "removing" } & InstalledState); diff --git a/core/startos/bindings/ParamsMaybePackageId.ts b/core/startos/bindings/ParamsMaybePackageId.ts new file mode 100644 index 0000000000..e9f0f170ca --- /dev/null +++ b/core/startos/bindings/ParamsMaybePackageId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ParamsMaybePackageId = { packageId: string | null }; diff --git a/core/startos/bindings/ParamsPackageId.ts b/core/startos/bindings/ParamsPackageId.ts new file mode 100644 index 0000000000..7bc9198433 --- /dev/null +++ b/core/startos/bindings/ParamsPackageId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ParamsPackageId = { packageId: string }; diff --git a/core/startos/bindings/Progress.ts b/core/startos/bindings/Progress.ts new file mode 100644 index 0000000000..c8e5e44310 --- /dev/null +++ b/core/startos/bindings/Progress.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type Progress = boolean | { done: number; total: number | null }; diff --git a/core/startos/bindings/Public.ts b/core/startos/bindings/Public.ts new file mode 100644 index 0000000000..4421763034 --- /dev/null +++ b/core/startos/bindings/Public.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
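PackageState above is a tagged union over the install lifecycle; a sketch of pulling the relevant manifest out of each variant (for the in-flight states this is the target manifest carried by InstallingInfo):

import type { Manifest } from "./core/startos/bindings/Manifest";
import type { PackageState } from "./core/startos/bindings/PackageState";

function manifestOf(state: PackageState): Manifest {
  switch (state.state) {
    case "installed":
    case "removing":
      // Fully installed (or being removed): the current manifest is present directly.
      return state.manifest;
    case "installing":
    case "restoring":
    case "updating":
      // In-flight states expose the manifest of the version being installed.
      return state.installingInfo.newManifest;
  }
}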
+import type { AllPackageData } from "./AllPackageData"; +import type { ServerInfo } from "./ServerInfo"; + +export type Public = { + serverInfo: ServerInfo; + packageData: AllPackageData; + ui: any; +}; diff --git a/core/startos/bindings/RemoveActionParams.ts b/core/startos/bindings/RemoveActionParams.ts new file mode 100644 index 0000000000..fcd567c3f3 --- /dev/null +++ b/core/startos/bindings/RemoveActionParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type RemoveActionParams = { id: string }; diff --git a/core/startos/bindings/RemoveAddressParams.ts b/core/startos/bindings/RemoveAddressParams.ts new file mode 100644 index 0000000000..578631d396 --- /dev/null +++ b/core/startos/bindings/RemoveAddressParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type RemoveAddressParams = { id: string }; diff --git a/core/startos/bindings/ReverseProxyBind.ts b/core/startos/bindings/ReverseProxyBind.ts new file mode 100644 index 0000000000..c9d67b1270 --- /dev/null +++ b/core/startos/bindings/ReverseProxyBind.ts @@ -0,0 +1,7 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ReverseProxyBind = { + ip: string | null; + port: number; + ssl: boolean; +}; diff --git a/core/startos/bindings/ReverseProxyDestination.ts b/core/startos/bindings/ReverseProxyDestination.ts new file mode 100644 index 0000000000..216d1310f8 --- /dev/null +++ b/core/startos/bindings/ReverseProxyDestination.ts @@ -0,0 +1,7 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ReverseProxyDestination = { + ip: string | null; + port: number; + ssl: boolean; +}; diff --git a/core/startos/bindings/ReverseProxyHttp.ts b/core/startos/bindings/ReverseProxyHttp.ts new file mode 100644 index 0000000000..07cf41862b --- /dev/null +++ b/core/startos/bindings/ReverseProxyHttp.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ReverseProxyHttp = { headers: null | { [key: string]: string } }; diff --git a/core/startos/bindings/ReverseProxyParams.ts b/core/startos/bindings/ReverseProxyParams.ts new file mode 100644 index 0000000000..6f684b7801 --- /dev/null +++ b/core/startos/bindings/ReverseProxyParams.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { ReverseProxyBind } from "./ReverseProxyBind"; +import type { ReverseProxyDestination } from "./ReverseProxyDestination"; +import type { ReverseProxyHttp } from "./ReverseProxyHttp"; + +export type ReverseProxyParams = { + bind: ReverseProxyBind; + dst: ReverseProxyDestination; + http: ReverseProxyHttp; +}; diff --git a/core/startos/bindings/Security.ts b/core/startos/bindings/Security.ts new file mode 100644 index 0000000000..e722707df2 --- /dev/null +++ b/core/startos/bindings/Security.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type Security = { ssl: boolean }; diff --git a/core/startos/bindings/ServerInfo.ts b/core/startos/bindings/ServerInfo.ts new file mode 100644 index 0000000000..e505c84e73 --- /dev/null +++ b/core/startos/bindings/ServerInfo.ts @@ -0,0 +1,35 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Governor } from "./Governor"; +import type { IpInfo } from "./IpInfo"; +import type { ServerStatus } from "./ServerStatus"; +import type { WifiInfo } from "./WifiInfo"; + +export type ServerInfo = { + arch: string; + platform: string; + id: string; + hostname: string; + version: string; + lastBackup: string | null; + /** + * Used in the wifi to determine the region to set the system to + */ + lastWifiRegion: string | null; + eosVersionCompat: string; + lanAddress: string; + onionAddress: string; + /** + * for backwards compatibility + */ + torAddress: string; + ipInfo: { [key: string]: IpInfo }; + statusInfo: ServerStatus; + wifi: WifiInfo; + unreadNotificationCount: number; + passwordHash: string; + pubkey: string; + caFingerprint: string; + ntpSynced: boolean; + zram: boolean; + governor: Governor | null; +}; diff --git a/core/startos/bindings/ServerSpecs.ts b/core/startos/bindings/ServerSpecs.ts new file mode 100644 index 0000000000..b2278873c4 --- /dev/null +++ b/core/startos/bindings/ServerSpecs.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ServerSpecs = { cpu: string; disk: string; memory: string }; diff --git a/core/startos/bindings/ServerStatus.ts b/core/startos/bindings/ServerStatus.ts new file mode 100644 index 0000000000..e72d5d6deb --- /dev/null +++ b/core/startos/bindings/ServerStatus.ts @@ -0,0 +1,12 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { BackupProgress } from "./BackupProgress"; +import type { PackageId } from "./PackageId"; +import type { UpdateProgress } from "./UpdateProgress"; + +export type ServerStatus = { + backupProgress: { [key: PackageId]: BackupProgress } | null; + updated: boolean; + updateProgress: UpdateProgress | null; + shuttingDown: boolean; + restarting: boolean; +}; diff --git a/core/startos/bindings/ServiceInterface.ts b/core/startos/bindings/ServiceInterface.ts new file mode 100644 index 0000000000..4167257bb7 --- /dev/null +++ b/core/startos/bindings/ServiceInterface.ts @@ -0,0 +1,15 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { AddressInfo } from "./AddressInfo"; +import type { ServiceInterfaceId } from "./ServiceInterfaceId"; +import type { ServiceInterfaceType } from "./ServiceInterfaceType"; + +export type ServiceInterface = { + id: ServiceInterfaceId; + name: string; + description: string; + hasPrimary: boolean; + disabled: boolean; + masked: boolean; + addressInfo: AddressInfo; + type: ServiceInterfaceType; +}; diff --git a/core/startos/bindings/ServiceInterfaceId.ts b/core/startos/bindings/ServiceInterfaceId.ts new file mode 100644 index 0000000000..55315ab60c --- /dev/null +++ b/core/startos/bindings/ServiceInterfaceId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
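A small sketch of how a client might interpret the ServerStatus type above; treating the server as "busy" in these cases is an assumption of this example, not something the diff itself defines.

import type { ServerStatus } from "./core/startos/bindings/ServerStatus";

function isBusy(status: ServerStatus): boolean {
  return (
    status.backupProgress !== null ||
    status.updateProgress !== null ||
    status.shuttingDown ||
    status.restarting
  );
}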
+ +export type ServiceInterfaceId = string; diff --git a/core/startos/bindings/ServiceInterfaceType.ts b/core/startos/bindings/ServiceInterfaceType.ts new file mode 100644 index 0000000000..fadd11f9d2 --- /dev/null +++ b/core/startos/bindings/ServiceInterfaceType.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ServiceInterfaceType = "ui" | "p2p" | "api"; diff --git a/core/startos/bindings/ServiceInterfaceWithHostInfo.ts b/core/startos/bindings/ServiceInterfaceWithHostInfo.ts new file mode 100644 index 0000000000..bef83abe21 --- /dev/null +++ b/core/startos/bindings/ServiceInterfaceWithHostInfo.ts @@ -0,0 +1,17 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { AddressInfo } from "./AddressInfo"; +import type { ExportedHostInfo } from "./ExportedHostInfo"; +import type { ServiceInterfaceId } from "./ServiceInterfaceId"; +import type { ServiceInterfaceType } from "./ServiceInterfaceType"; + +export type ServiceInterfaceWithHostInfo = { + hostInfo: ExportedHostInfo; + id: ServiceInterfaceId; + name: string; + description: string; + hasPrimary: boolean; + disabled: boolean; + masked: boolean; + addressInfo: AddressInfo; + type: ServiceInterfaceType; +}; diff --git a/core/startos/bindings/SetConfigured.ts b/core/startos/bindings/SetConfigured.ts new file mode 100644 index 0000000000..ff9eaf11d9 --- /dev/null +++ b/core/startos/bindings/SetConfigured.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SetConfigured = { configured: boolean }; diff --git a/core/startos/bindings/SetDependenciesParams.ts b/core/startos/bindings/SetDependenciesParams.ts new file mode 100644 index 0000000000..d4d2d45cba --- /dev/null +++ b/core/startos/bindings/SetDependenciesParams.ts @@ -0,0 +1,6 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { DependencyRequirement } from "./DependencyRequirement"; + +export type SetDependenciesParams = { + dependencies: Array<DependencyRequirement>; +}; diff --git a/core/startos/bindings/SetHealth.ts b/core/startos/bindings/SetHealth.ts new file mode 100644 index 0000000000..8a4a1bcff8 --- /dev/null +++ b/core/startos/bindings/SetHealth.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { HealthCheckId } from "./HealthCheckId"; + +export type SetHealth = { id: HealthCheckId; name: string } & ( + | { result: "success"; message: string | null } + | { result: "disabled"; message: string | null } + | { result: "starting"; message: string | null } + | { result: "loading"; message: string } + | { result: "failure"; message: string } +); diff --git a/core/startos/bindings/SetMainStatus.ts b/core/startos/bindings/SetMainStatus.ts new file mode 100644 index 0000000000..653342e5f9 --- /dev/null +++ b/core/startos/bindings/SetMainStatus.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { Status } from "./Status"; + +export type SetMainStatus = { status: Status }; diff --git a/core/startos/bindings/SetStoreParams.ts b/core/startos/bindings/SetStoreParams.ts new file mode 100644 index 0000000000..8737295bdf --- /dev/null +++ b/core/startos/bindings/SetStoreParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SetStoreParams = { value: any; path: string }; diff --git a/core/startos/bindings/Status.ts b/core/startos/bindings/Status.ts new file mode 100644 index 0000000000..e23c5b4be5 --- /dev/null +++ b/core/startos/bindings/Status.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { DependencyConfigErrors } from "./DependencyConfigErrors"; +import type { MainStatus } from "./MainStatus"; + +export type Status = { + configured: boolean; + main: MainStatus; + dependencyConfigErrors: DependencyConfigErrors; +}; diff --git a/core/startos/bindings/UpdateProgress.ts b/core/startos/bindings/UpdateProgress.ts new file mode 100644 index 0000000000..3d07c56b4c --- /dev/null +++ b/core/startos/bindings/UpdateProgress.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type UpdateProgress = { size: number | null; downloaded: number }; diff --git a/core/startos/bindings/UpdatingState.ts b/core/startos/bindings/UpdatingState.ts new file mode 100644 index 0000000000..37a83f0dfb --- /dev/null +++ b/core/startos/bindings/UpdatingState.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { InstallingInfo } from "./InstallingInfo"; +import type { Manifest } from "./Manifest"; + +export type UpdatingState = { + manifest: Manifest; + installingInfo: InstallingInfo; +}; diff --git a/core/startos/bindings/VolumeId.ts b/core/startos/bindings/VolumeId.ts new file mode 100644 index 0000000000..e5c63994e3 --- /dev/null +++ b/core/startos/bindings/VolumeId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type VolumeId = string; diff --git a/core/startos/bindings/WifiInfo.ts b/core/startos/bindings/WifiInfo.ts new file mode 100644 index 0000000000..c949b1e766 --- /dev/null +++ b/core/startos/bindings/WifiInfo.ts @@ -0,0 +1,7 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
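Combining the pieces above, one plausible "healthy" predicate over a package Status; what counts as healthy here is an assumption of the example, not something this diff defines.

import type { Status } from "./core/startos/bindings/Status";

function isHealthy(status: Status): boolean {
  return (
    status.configured &&
    status.main.status === "running" &&
    Object.keys(status.dependencyConfigErrors).length === 0
  );
}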
+ +export type WifiInfo = { + ssids: Array<string>; + selected: string | null; + connected: string | null; +}; diff --git a/core/startos/deny.toml b/core/startos/deny.toml index 7b4924cdc1..5a42f73787 100644 --- a/core/startos/deny.toml +++ b/core/startos/deny.toml @@ -14,9 +14,15 @@ allow = [ "BSD-3-Clause", "LGPL-3.0", "OpenSSL", + "Unicode-DFS-2016", + "Zlib", ] clarify = [ - { name = "webpki", expression = "ISC", license-files = [ { path = "LICENSE", hash = 0x001c7e6c } ] }, - { name = "ring", expression = "OpenSSL", license-files = [ { path = "LICENSE", hash = 0xbd0eed23 } ] }, + { name = "webpki", expression = "ISC", license-files = [ + { path = "LICENSE", hash = 0x001c7e6c }, + ] }, + { name = "ring", expression = "OpenSSL", license-files = [ + { path = "LICENSE", hash = 0xbd0eed23 }, + ] }, ] diff --git a/core/startos/src/account.rs b/core/startos/src/account.rs index cb08a0d533..e074d301d6 100644 --- a/core/startos/src/account.rs +++ b/core/startos/src/account.rs @@ -1,15 +1,14 @@ use std::time::SystemTime; -use ed25519_dalek::SecretKey; use openssl::pkey::{PKey, Private}; use openssl::x509::X509; -use sqlx::PgExecutor; +use torut::onion::TorSecretKeyV3; +use crate::db::model::DatabaseModel; use crate::hostname::{generate_hostname, generate_id, Hostname}; -use crate::net::keys::Key; use crate::net::ssl::{generate_key, make_root_cert}; use crate::prelude::*; -use crate::util::crypto::ed25519_expand_key; +use crate::util::serde::Pem; fn hash_password(password: &str) -> Result<String, Error> { argon2::hash_encoded( @@ -25,103 +24,83 @@ pub struct AccountInfo { pub server_id: String, pub hostname: Hostname, pub password: String, - pub key: Key, + pub tor_key: TorSecretKeyV3, pub root_ca_key: PKey<Private>, pub root_ca_cert: X509, + pub ssh_key: ssh_key::PrivateKey, } impl AccountInfo { pub fn new(password: &str, start_time: SystemTime) -> Result<Self, Error> { let server_id = generate_id(); let hostname = generate_hostname(); + let tor_key = TorSecretKeyV3::generate(); let root_ca_key = generate_key()?; let root_ca_cert = make_root_cert(&root_ca_key, &hostname, start_time)?; + let ssh_key = ssh_key::PrivateKey::from(ssh_key::private::Ed25519Keypair::random( + &mut rand::thread_rng(), + )); Ok(Self { server_id, hostname, password: hash_password(password)?, - key: Key::new(None), + tor_key, root_ca_key, root_ca_cert, + ssh_key, }) } - pub async fn load(secrets: impl PgExecutor<'_>) -> Result<Self, Error> { - let r = sqlx::query!("SELECT * FROM account WHERE id = 0") - .fetch_one(secrets) - .await?; - - let server_id = r.server_id.unwrap_or_else(generate_id); - let hostname = r.hostname.map(Hostname).unwrap_or_else(generate_hostname); - let password = r.password; - let network_key = SecretKey::try_from(r.network_key).map_err(|e| { - Error::new( - eyre!("expected vec of len 32, got len {}", e.len()), - ErrorKind::ParseDbField, - ) - })?; - let tor_key = if let Some(k) = &r.tor_key { - <[u8; 64]>::try_from(&k[..]).map_err(|_| { - Error::new( - eyre!("expected vec of len 64, got len {}", k.len()), - ErrorKind::ParseDbField, - ) - })?
- } else { - ed25519_expand_key(&network_key) - }; - let key = Key::from_pair(None, network_key, tor_key); - let root_ca_key = PKey::private_key_from_pem(r.root_ca_key_pem.as_bytes())?; - let root_ca_cert = X509::from_pem(r.root_ca_cert_pem.as_bytes())?; + pub fn load(db: &DatabaseModel) -> Result { + let server_id = db.as_public().as_server_info().as_id().de()?; + let hostname = Hostname(db.as_public().as_server_info().as_hostname().de()?); + let password = db.as_private().as_password().de()?; + let key_store = db.as_private().as_key_store(); + let tor_addr = db.as_public().as_server_info().as_onion_address().de()?; + let tor_key = key_store.as_onion().get_key(&tor_addr)?; + let cert_store = key_store.as_local_certs(); + let root_ca_key = cert_store.as_root_key().de()?.0; + let root_ca_cert = cert_store.as_root_cert().de()?.0; + let ssh_key = db.as_private().as_ssh_privkey().de()?.0; Ok(Self { server_id, hostname, password, - key, + tor_key, root_ca_key, root_ca_cert, + ssh_key, }) } - pub async fn save(&self, secrets: impl PgExecutor<'_>) -> Result<(), Error> { - let server_id = self.server_id.as_str(); - let hostname = self.hostname.0.as_str(); - let password = self.password.as_str(); - let network_key = self.key.as_bytes(); - let network_key = network_key.as_slice(); - let root_ca_key = String::from_utf8(self.root_ca_key.private_key_to_pem_pkcs8()?)?; - let root_ca_cert = String::from_utf8(self.root_ca_cert.to_pem()?)?; - - sqlx::query!( - r#" - INSERT INTO account ( - id, - server_id, - hostname, - password, - network_key, - root_ca_key_pem, - root_ca_cert_pem - ) VALUES ( - 0, $1, $2, $3, $4, $5, $6 - ) ON CONFLICT (id) DO UPDATE SET - server_id = EXCLUDED.server_id, - hostname = EXCLUDED.hostname, - password = EXCLUDED.password, - network_key = EXCLUDED.network_key, - root_ca_key_pem = EXCLUDED.root_ca_key_pem, - root_ca_cert_pem = EXCLUDED.root_ca_cert_pem - "#, - server_id, - hostname, - password, - network_key, - root_ca_key, - root_ca_cert, - ) - .execute(secrets) - .await?; - + pub fn save(&self, db: &mut DatabaseModel) -> Result<(), Error> { + let server_info = db.as_public_mut().as_server_info_mut(); + server_info.as_id_mut().ser(&self.server_id)?; + server_info.as_hostname_mut().ser(&self.hostname.0)?; + server_info + .as_lan_address_mut() + .ser(&self.hostname.lan_address().parse()?)?; + server_info + .as_pubkey_mut() + .ser(&self.ssh_key.public_key().to_openssh()?)?; + let onion_address = self.tor_key.public().get_onion_address(); + server_info.as_onion_address_mut().ser(&onion_address)?; + server_info + .as_tor_address_mut() + .ser(&format!("https://{onion_address}").parse()?)?; + db.as_private_mut().as_password_mut().ser(&self.password)?; + db.as_private_mut() + .as_ssh_privkey_mut() + .ser(Pem::new_ref(&self.ssh_key))?; + let key_store = db.as_private_mut().as_key_store_mut(); + key_store.as_onion_mut().insert_key(&self.tor_key)?; + let cert_store = key_store.as_local_certs_mut(); + cert_store + .as_root_key_mut() + .ser(Pem::new_ref(&self.root_ca_key))?; + cert_store + .as_root_cert_mut() + .ser(Pem::new_ref(&self.root_ca_cert))?; Ok(()) } diff --git a/core/startos/src/action.rs b/core/startos/src/action.rs index 3223aaa86d..ff3998c3c4 100644 --- a/core/startos/src/action.rs +++ b/core/startos/src/action.rs @@ -1,26 +1,14 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use clap::ArgMatches; -use color_eyre::eyre::eyre; -use indexmap::IndexSet; +use clap::Parser; pub use models::ActionId; -use models::ImageId; +use models::PackageId; use rpc_toolkit::command; use 
serde::{Deserialize, Serialize}; use tracing::instrument; -use crate::config::{Config, ConfigSpec}; +use crate::config::Config; use crate::context::RpcContext; use crate::prelude::*; -use crate::procedure::docker::DockerContainers; -use crate::procedure::{PackageProcedure, ProcedureName}; -use crate::s9pk::manifest::PackageId; -use crate::util::serde::{display_serializable, parse_stdin_deserializable, IoFormat}; -use crate::util::Version; -use crate::volume::Volumes; -use crate::{Error, ResultExt}; -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct Actions(pub BTreeMap); +use crate::util::serde::{display_serializable, StdinDeserializable, WithIoFormat}; #[derive(Debug, Serialize, Deserialize)] #[serde(tag = "version")] @@ -38,78 +26,17 @@ pub struct ActionResultV0 { } #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum DockerStatus { Running, Stopped, } -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct Action { - pub name: String, - pub description: String, - #[serde(default)] - pub warning: Option, - pub implementation: PackageProcedure, - pub allowed_statuses: IndexSet, - #[serde(default)] - pub input_spec: ConfigSpec, -} -impl Action { - #[instrument(skip_all)] - pub fn validate( - &self, - _container: &Option, - eos_version: &Version, - volumes: &Volumes, - image_ids: &BTreeSet, - ) -> Result<(), Error> { - self.implementation - .validate(eos_version, volumes, image_ids, true) - .with_ctx(|_| { - ( - crate::ErrorKind::ValidateS9pk, - format!("Action {}", self.name), - ) - }) +pub fn display_action_result(params: WithIoFormat, result: ActionResult) { + if let Some(format) = params.format { + return display_serializable(format, result); } - - #[instrument(skip_all)] - pub async fn execute( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - action_id: &ActionId, - volumes: &Volumes, - input: Option, - ) -> Result { - if let Some(ref input) = input { - self.input_spec - .matches(&input) - .with_kind(crate::ErrorKind::ConfigSpecViolation)?; - } - self.implementation - .execute( - ctx, - pkg_id, - pkg_version, - ProcedureName::Action(action_id.clone()), - volumes, - input, - None, - ) - .await? 
- .map_err(|e| Error::new(eyre!("{}", e.1), crate::ErrorKind::Action)) - } -} - -fn display_action_result(action_result: ActionResult, matches: &ArgMatches) { - if matches.is_present("format") { - return display_serializable(action_result, matches); - } - match action_result { + match result { ActionResult::V0(ar) => { println!( "{}: {}", @@ -120,44 +47,39 @@ fn display_action_result(action_result: ActionResult, matches: &ArgMatches) { } } -#[command(about = "Executes an action", display(display_action_result))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ActionParams { + #[arg(id = "id")] + #[serde(rename = "id")] + pub package_id: PackageId, + #[arg(id = "action-id")] + #[serde(rename = "action-id")] + pub action_id: ActionId, + #[command(flatten)] + pub input: StdinDeserializable>, +} +// impl C + +// #[command(about = "Executes an action", display(display_action_result))] #[instrument(skip_all)] pub async fn action( - #[context] ctx: RpcContext, - #[arg(rename = "id")] pkg_id: PackageId, - #[arg(rename = "action-id")] action_id: ActionId, - #[arg(stdin, parse(parse_stdin_deserializable))] input: Option, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, + ctx: RpcContext, + ActionParams { + package_id, + action_id, + input: StdinDeserializable(input), + }: ActionParams, ) -> Result { - let manifest = ctx - .db - .peek() + ctx.services + .get(&package_id) + .await + .as_ref() + .or_not_found(lazy_format!("Manager for {}", package_id))? + .action( + action_id, + input.map(|c| to_value(&c)).transpose()?.unwrap_or_default(), + ) .await - .as_package_data() - .as_idx(&pkg_id) - .or_not_found(&pkg_id)? - .as_installed() - .or_not_found(&pkg_id)? 
- .as_manifest() - .de()?; - - if let Some(action) = manifest.actions.0.get(&action_id) { - action - .execute( - &ctx, - &manifest.id, - &manifest.version, - &action_id, - &manifest.volumes, - input, - ) - .await - } else { - Err(Error::new( - eyre!("Action not found in manifest"), - crate::ErrorKind::NotFound, - )) - } } diff --git a/core/startos/src/auth.rs b/core/startos/src/auth.rs index a6ae2fff00..7adce72bc2 100644 --- a/core/startos/src/auth.rs +++ b/core/startos/src/auth.rs @@ -1,25 +1,43 @@ use std::collections::BTreeMap; -use std::marker::PhantomData; use chrono::{DateTime, Utc}; -use clap::ArgMatches; +use clap::Parser; use color_eyre::eyre::eyre; +use imbl_value::{json, InternedString}; use josekit::jwk::Jwk; -use rpc_toolkit::command; -use rpc_toolkit::command_helpers::prelude::{RequestParts, ResponseParts}; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{command, from_fn_async, AnyContext, CallRemote, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; -use serde_json::Value; -use sqlx::{Executor, Postgres}; use tracing::instrument; use crate::context::{CliContext, RpcContext}; -use crate::middleware::auth::{AsLogoutSessionId, HasLoggedOutSessions, HashSessionToken}; -use crate::middleware::encrypt::EncryptedWire; +use crate::db::model::DatabaseModel; +use crate::middleware::auth::{ + AsLogoutSessionId, HasLoggedOutSessions, HashSessionToken, LoginRes, +}; use crate::prelude::*; -use crate::util::display_none; -use crate::util::serde::{display_serializable, IoFormat}; +use crate::util::crypto::EncryptedWire; +use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; use crate::{ensure_code, Error, ResultExt}; + +#[derive(Debug, Clone, Default, Deserialize, Serialize)] +pub struct Sessions(pub BTreeMap); +impl Sessions { + pub fn new() -> Self { + Self(BTreeMap::new()) + } +} +impl Map for Sessions { + type Key = InternedString; + type Value = Session; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(key.clone()) + } +} + #[derive(Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum PasswordType { @@ -61,20 +79,38 @@ impl std::str::FromStr for PasswordType { }) } } - -#[command(subcommands(login, logout, session, reset_password, get_pubkey))] -pub fn auth() -> Result<(), Error> { - Ok(()) -} - -pub fn cli_metadata() -> Value { - serde_json::json!({ - "platforms": ["cli"], - }) -} - -pub fn parse_metadata(_: &str, _: &ArgMatches) -> Result { - Ok(cli_metadata()) +pub fn auth() -> ParentHandler { + ParentHandler::new() + .subcommand( + "login", + from_fn_async(login_impl) + .with_metadata("login", Value::Bool(true)) + .no_cli(), + ) + .subcommand("login", from_fn_async(cli_login).no_display()) + .subcommand( + "logout", + from_fn_async(logout) + .with_metadata("get-session", Value::Bool(true)) + .with_remote_cli::() + .no_display(), + ) + .subcommand("session", session()) + .subcommand( + "reset-password", + from_fn_async(reset_password_impl).no_cli(), + ) + .subcommand( + "reset-password", + from_fn_async(cli_reset_password).no_display(), + ) + .subcommand( + "get-pubkey", + from_fn_async(get_pubkey) + .with_metadata("authenticated", Value::Bool(false)) + .no_display() + .with_remote_cli::(), + ) } #[test] @@ -89,12 +125,17 @@ fn gen_pwd() { .unwrap() ) } +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct CliLoginParams { + password: Option, +} #[instrument(skip_all)] async fn cli_login( 
ctx: CliContext, - password: Option, - metadata: Value, + CliLoginParams { password }: CliLoginParams, ) -> Result<(), RpcError> { let password = if let Some(password) = password { password.decrypt(&ctx)? @@ -102,14 +143,16 @@ async fn cli_login( rpassword::prompt_password("Password: ")? }; - rpc_toolkit::command_helpers::call_remote( - ctx, + ctx.call_remote( "auth.login", - serde_json::json!({ "password": password, "metadata": metadata }), - PhantomData::<()>, + json!({ + "password": password, + "metadata": { + "platforms": ["cli"], + }, + }), ) - .await? - .result?; + .await?; Ok(()) } @@ -128,99 +171,110 @@ pub fn check_password(hash: &str, password: &str) -> Result<(), Error> { Ok(()) } -pub async fn check_password_against_db(secrets: &mut Ex, password: &str) -> Result<(), Error> -where - for<'a> &'a mut Ex: Executor<'a, Database = Postgres>, -{ - let pw_hash = sqlx::query!("SELECT password FROM account") - .fetch_one(secrets) - .await? - .password; +pub fn check_password_against_db(db: &DatabaseModel, password: &str) -> Result<(), Error> { + let pw_hash = db.as_private().as_password().de()?; check_password(&pw_hash, password)?; Ok(()) } -#[command( - custom_cli(cli_login(async, context(CliContext))), - display(display_none), - metadata(authenticated = false) -)] -#[instrument(skip_all)] -pub async fn login( - #[context] ctx: RpcContext, - #[request] req: &RequestParts, - #[response] res: &mut ResponseParts, - #[arg] password: Option, - #[arg( - parse(parse_metadata), - default = "cli_metadata", - help = "RPC Only: This value cannot be overidden from the cli" - )] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct LoginParams { + password: Option, + #[serde(default)] + user_agent: Option, + #[serde(default)] metadata: Value, -) -> Result<(), Error> { - let password = password.unwrap_or_default().decrypt(&ctx)?; - let mut handle = ctx.secret_store.acquire().await?; - check_password_against_db(handle.as_mut(), &password).await?; - - let hash_token = HashSessionToken::new(); - let user_agent = req.headers.get("user-agent").and_then(|h| h.to_str().ok()); - let metadata = serde_json::to_string(&metadata).with_kind(crate::ErrorKind::Database)?; - let hash_token_hashed = hash_token.hashed(); - sqlx::query!( - "INSERT INTO session (id, user_agent, metadata) VALUES ($1, $2, $3)", - hash_token_hashed, +} + +#[instrument(skip_all)] +pub async fn login_impl( + ctx: RpcContext, + LoginParams { + password, user_agent, metadata, - ) - .execute(handle.as_mut()) - .await?; - res.headers.insert( - "set-cookie", - hash_token.header_value()?, // Should be impossible, but don't want to panic - ); + }: LoginParams, +) -> Result { + let password = password.unwrap_or_default().decrypt(&ctx)?; - Ok(()) + ctx.db + .mutate(|db| { + check_password_against_db(db, &password)?; + let hash_token = HashSessionToken::new(); + db.as_private_mut().as_sessions_mut().insert( + hash_token.hashed(), + &Session { + logged_in: Utc::now(), + last_active: Utc::now(), + user_agent, + metadata, + }, + )?; + + Ok(hash_token.to_login_res()) + }) + .await +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct LogoutParams { + session: InternedString, } -#[command(display(display_none), metadata(authenticated = false))] -#[instrument(skip_all)] pub async fn logout( - #[context] ctx: RpcContext, - #[request] req: &RequestParts, + ctx: RpcContext, + LogoutParams { session }: 
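For reference, a hypothetical TypeScript mirror of the camelCase wire shape of LoginParams introduced above. The field names follow the serde attributes in the Rust struct; this type is not part of the generated bindings in this diff, and the values are illustrative only.

type LoginParams = {
  // PasswordType is an untagged Rust enum; only the plain-string form is shown here.
  password: string | null;
  userAgent: string | null;
  metadata: unknown;
};

const loginParams: LoginParams = {
  password: null, // e.g. prompt the user instead of embedding a password
  userAgent: "startos-cli",
  metadata: { platforms: ["cli"] },
};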
LogoutParams, ) -> Result, Error> { - let auth = match HashSessionToken::from_request_parts(req) { - Err(_) => return Ok(None), - Ok(a) => a, - }; - Ok(Some(HasLoggedOutSessions::new(vec![auth], &ctx).await?)) + Ok(Some( + HasLoggedOutSessions::new(vec![HashSessionToken::from_token(session)], &ctx).await?, + )) } -#[derive(Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] pub struct Session { - logged_in: DateTime, - last_active: DateTime, - user_agent: Option, - metadata: Value, + pub logged_in: DateTime, + pub last_active: DateTime, + pub user_agent: Option, + pub metadata: Value, } #[derive(Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct SessionList { - current: String, - sessions: BTreeMap, + current: InternedString, + sessions: Sessions, } -#[command(subcommands(list, kill))] -pub async fn session() -> Result<(), Error> { - Ok(()) +pub fn session() -> ParentHandler { + ParentHandler::new() + .subcommand( + "list", + from_fn_async(list) + .with_metadata("get-session", Value::Bool(true)) + .with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(display_sessions(handle.params, result)) + }) + .with_remote_cli::(), + ) + .subcommand( + "kill", + from_fn_async(kill) + .no_display() + .with_remote_cli::(), + ) } -fn display_sessions(arg: SessionList, matches: &ArgMatches) { +fn display_sessions(params: WithIoFormat, arg: SessionList) { use prettytable::*; - if matches.is_present("format") { - return display_serializable(arg, matches); + if let Some(format) = params.format { + return display_serializable(format, arg); } let mut table = Table::new(); @@ -231,7 +285,7 @@ fn display_sessions(arg: SessionList, matches: &ArgMatches) { "USER AGENT", "METADATA", ]); - for (id, session) in arg.sessions { + for (id, session) in arg.sessions.0 { let mut row = row![ &id, &format!("{}", session.logged_in), @@ -249,67 +303,69 @@ fn display_sessions(arg: SessionList, matches: &ArgMatches) { table.print_tty(false).unwrap(); } -#[command(display(display_sessions))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ListParams { + #[arg(skip)] + session: InternedString, +} + +// #[command(display(display_sessions))] #[instrument(skip_all)] pub async fn list( - #[context] ctx: RpcContext, - #[request] req: &RequestParts, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, + ctx: RpcContext, + ListParams { session, .. }: ListParams, ) -> Result { Ok(SessionList { - current: HashSessionToken::from_request_parts(req)?.as_hash(), - sessions: sqlx::query!( - "SELECT * FROM session WHERE logged_out IS NULL OR logged_out > CURRENT_TIMESTAMP" - ) - .fetch_all(ctx.secret_store.acquire().await?.as_mut()) - .await? 
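Likewise, a hypothetical TypeScript mirror of the camelCase Session/SessionList payload returned by the session list handler above; these types are not generated by ts-rs in this diff, and the timestamp encoding as RFC 3339 strings is an assumption based on the DateTime<Utc> fields.

type Session = {
  loggedIn: string; // assumed RFC 3339 timestamp
  lastActive: string; // assumed RFC 3339 timestamp
  userAgent: string | null;
  metadata: unknown;
};

type SessionList = {
  current: string;
  sessions: { [id: string]: Session };
};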
- .into_iter() - .map(|row| { - Ok(( - row.id, - Session { - logged_in: DateTime::from_utc(row.logged_in, Utc), - last_active: DateTime::from_utc(row.last_active, Utc), - user_agent: row.user_agent, - metadata: serde_json::from_str(&row.metadata) - .with_kind(crate::ErrorKind::Database)?, - }, - )) - }) - .collect::>()?, + current: HashSessionToken::from_token(session).hashed().clone(), + sessions: ctx.db.peek().await.into_private().into_sessions().de()?, }) } -fn parse_comma_separated(arg: &str, _: &ArgMatches) -> Result, RpcError> { - Ok(arg.split(",").map(|s| s.trim().to_owned()).collect()) -} - #[derive(Debug, Clone, Serialize, Deserialize)] -struct KillSessionId(String); +struct KillSessionId(InternedString); + +impl KillSessionId { + fn new(id: String) -> Self { + Self(InternedString::from(id)) + } +} impl AsLogoutSessionId for KillSessionId { - fn as_logout_session_id(self) -> String { + fn as_logout_session_id(self) -> InternedString { self.0 } } -#[command(display(display_none))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct KillParams { + ids: Vec, +} + #[instrument(skip_all)] -pub async fn kill( - #[context] ctx: RpcContext, - #[arg(parse(parse_comma_separated))] ids: Vec, -) -> Result<(), Error> { - HasLoggedOutSessions::new(ids.into_iter().map(KillSessionId), &ctx).await?; +pub async fn kill(ctx: RpcContext, KillParams { ids }: KillParams) -> Result<(), Error> { + HasLoggedOutSessions::new(ids.into_iter().map(KillSessionId::new), &ctx).await?; Ok(()) } +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ResetPasswordParams { + old_password: Option, + new_password: Option, +} + #[instrument(skip_all)] async fn cli_reset_password( ctx: CliContext, - old_password: Option, - new_password: Option, + ResetPasswordParams { + old_password, + new_password, + }: ResetPasswordParams, ) -> Result<(), RpcError> { let old_password = if let Some(old_password) = old_password { old_password.decrypt(&ctx)? @@ -331,28 +387,22 @@ async fn cli_reset_password( new_password }; - rpc_toolkit::command_helpers::call_remote( - ctx, + ctx.call_remote( "auth.reset-password", - serde_json::json!({ "old-password": old_password, "new-password": new_password }), - PhantomData::<()>, + imbl_value::json!({ "old-password": old_password, "new-password": new_password }), ) - .await? 
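// `list` above no longer queries Postgres: `ctx.db.peek().await` takes an
// immutable snapshot of patch-db, the generated accessors walk the typed model
// (`into_private()`, `into_sessions()`), and `de()` deserializes the leaf.
// A sketch of that read path, using only accessors that appear in these hunks
// (assuming `Sessions` is the deserialized leaf type, as used in `SessionList`):

async fn read_sessions(ctx: &RpcContext) -> Result<Sessions, Error> {
    let snapshot = ctx.db.peek().await; // point-in-time view; nothing held open
    snapshot.into_private().into_sessions().de() // typed walk, then deserialize
}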
- .result?; + .await?; Ok(()) } -#[command( - rename = "reset-password", - custom_cli(cli_reset_password(async, context(CliContext))), - display(display_none) -)] #[instrument(skip_all)] -pub async fn reset_password( - #[context] ctx: RpcContext, - #[arg(rename = "old-password")] old_password: Option, - #[arg(rename = "new-password")] new_password: Option, +pub async fn reset_password_impl( + ctx: RpcContext, + ResetPasswordParams { + old_password, + new_password, + }: ResetPasswordParams, ) -> Result<(), Error> { let old_password = old_password.unwrap_or_default().decrypt(&ctx)?; let new_password = new_password.unwrap_or_default().decrypt(&ctx)?; @@ -367,24 +417,23 @@ pub async fn reset_password( )); } account.set_password(&new_password)?; - account.save(&ctx.secret_store).await?; let account_password = &account.password; + let account = account.clone(); ctx.db .mutate(|d| { - d.as_server_info_mut() + d.as_public_mut() + .as_server_info_mut() .as_password_hash_mut() - .ser(account_password) + .ser(account_password)?; + account.save(d)?; + + Ok(()) }) .await } -#[command( - rename = "get-pubkey", - display(display_none), - metadata(authenticated = false) -)] #[instrument(skip_all)] -pub async fn get_pubkey(#[context] ctx: RpcContext) -> Result { +pub async fn get_pubkey(ctx: RpcContext) -> Result { let secret = ctx.as_ref().clone(); let pub_key = secret.to_public_key()?; Ok(pub_key) diff --git a/core/startos/src/backup/backup_bulk.rs b/core/startos/src/backup/backup_bulk.rs index 21eedbaf24..b9f1b877e9 100644 --- a/core/startos/src/backup/backup_bulk.rs +++ b/core/startos/src/backup/backup_bulk.rs @@ -1,17 +1,15 @@ use std::collections::BTreeMap; -use std::panic::UnwindSafe; use std::path::{Path, PathBuf}; use std::sync::Arc; use chrono::Utc; -use clap::ArgMatches; +use clap::Parser; use color_eyre::eyre::eyre; use helpers::AtomicFile; use imbl::OrdSet; -use models::Version; -use rpc_toolkit::command; +use models::PackageId; +use serde::{Deserialize, Serialize}; use tokio::io::AsyncWriteExt; -use tokio::sync::Mutex; use tracing::instrument; use super::target::BackupTargetId; @@ -20,259 +18,264 @@ use crate::auth::check_password_against_db; use crate::backup::os::OsBackup; use crate::backup::{BackupReport, ServerBackupReport}; use crate::context::RpcContext; -use crate::db::model::BackupProgress; -use crate::db::package::get_packages; +use crate::db::model::public::BackupProgress; +use crate::db::model::DatabaseModel; use crate::disk::mount::backup::BackupMountGuard; use crate::disk::mount::filesystem::ReadWrite; -use crate::disk::mount::guard::TmpMountGuard; -use crate::manager::BackupReturn; -use crate::notifications::NotificationLevel; +use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; +use crate::notifications::{notify, NotificationLevel}; use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::util::display_none; use crate::util::io::dir_copy; use crate::util::serde::IoFormat; use crate::version::VersionT; -fn parse_comma_separated(arg: &str, _: &ArgMatches) -> Result, Error> { - arg.split(',') - .map(|s| s.trim().parse::().map_err(Error::from)) - .collect() +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct BackupParams { + target_id: BackupTargetId, + #[arg(long = "old-password")] + old_password: Option, + #[arg(long = "package-ids")] + package_ids: Option>, + password: crate::auth::PasswordType, +} + +struct BackupStatusGuard(Option); +impl BackupStatusGuard { + fn 
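// `BackupParams` above drops the hand-rolled `parse_comma_separated` helper:
// with the derive API, `--package-ids` can simply be declared as an
// `Option<Vec<...>>`. A stand-alone illustration of the same idea with plain
// `String`s (the real struct uses `PackageId` and `crate::auth::PasswordType`;
// with default clap settings the flag is repeated rather than comma-separated):

#[derive(clap::Parser)]
struct BackupArgsSketch {
    /// May be passed more than once: `--package-ids a --package-ids b`
    #[arg(long = "package-ids")]
    package_ids: Option<Vec<String>>,
    #[arg(long = "old-password")]
    old_password: Option<String>,
}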
new(db: PatchDb) -> Self { + Self(Some(db)) + } + async fn handle_result( + mut self, + result: Result, Error>, + ) -> Result<(), Error> { + if let Some(db) = self.0.as_ref() { + db.mutate(|v| { + v.as_public_mut() + .as_server_info_mut() + .as_status_info_mut() + .as_backup_progress_mut() + .ser(&None) + }) + .await?; + } + if let Some(db) = self.0.take() { + match result { + Ok(report) if report.iter().all(|(_, rep)| rep.error.is_none()) => { + db.mutate(|db| { + notify( + db, + None, + NotificationLevel::Success, + "Backup Complete".to_owned(), + "Your backup has completed".to_owned(), + BackupReport { + server: ServerBackupReport { + attempted: true, + error: None, + }, + packages: report, + }, + ) + }) + .await + } + Ok(report) => { + db.mutate(|db| { + notify( + db, + None, + NotificationLevel::Warning, + "Backup Complete".to_owned(), + "Your backup has completed, but some package(s) failed to backup" + .to_owned(), + BackupReport { + server: ServerBackupReport { + attempted: true, + error: None, + }, + packages: report, + }, + ) + }) + .await + } + Err(e) => { + tracing::error!("Backup Failed: {}", e); + tracing::debug!("{:?}", e); + let err_string = e.to_string(); + db.mutate(|db| { + notify( + db, + None, + NotificationLevel::Error, + "Backup Failed".to_owned(), + "Your backup failed to complete.".to_owned(), + BackupReport { + server: ServerBackupReport { + attempted: true, + error: Some(err_string), + }, + packages: BTreeMap::new(), + }, + ) + }) + .await + } + }?; + } + Ok(()) + } +} +impl Drop for BackupStatusGuard { + fn drop(&mut self) { + if let Some(db) = self.0.take() { + tokio::spawn(async move { + db.mutate(|v| { + v.as_public_mut() + .as_server_info_mut() + .as_status_info_mut() + .as_backup_progress_mut() + .ser(&None) + }) + .await + .unwrap() + }); + } + } } -#[command(rename = "create", display(display_none))] #[instrument(skip(ctx, old_password, password))] pub async fn backup_all( - #[context] ctx: RpcContext, - #[arg(rename = "target-id")] target_id: BackupTargetId, - #[arg(rename = "old-password", long = "old-password")] old_password: Option< - crate::auth::PasswordType, - >, - #[arg( - rename = "package-ids", - long = "package-ids", - parse(parse_comma_separated) - )] - package_ids: Option>, - #[arg] password: crate::auth::PasswordType, + ctx: RpcContext, + BackupParams { + target_id, + old_password, + package_ids, + password, + }: BackupParams, ) -> Result<(), Error> { - let db = ctx.db.peek().await; let old_password_decrypted = old_password .as_ref() .unwrap_or(&password) .clone() .decrypt(&ctx)?; let password = password.decrypt(&ctx)?; - check_password_against_db(ctx.secret_store.acquire().await?.as_mut(), &password).await?; - let fs = target_id - .load(ctx.secret_store.acquire().await?.as_mut()) - .await?; + + let ((fs, package_ids), status_guard) = ( + ctx.db + .mutate(|db| { + check_password_against_db(db, &password)?; + let fs = target_id.load(db)?; + let package_ids = if let Some(ids) = package_ids { + ids.into_iter().collect() + } else { + db.as_public() + .as_package_data() + .as_entries()? 
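// `BackupStatusGuard` above is an RAII guard around the in-db backup-progress
// flag: `handle_result` clears it and files the success/warning/error
// notification on the normal path, and `Drop` still spawns a cleanup task if
// the backup future is dropped or panics first. A self-contained sketch of that
// drop-spawns-cleanup idiom (placeholder state instead of `PatchDb`; assumes it
// is dropped inside a tokio runtime, as the real guard is):

struct ProgressGuard(Option<std::sync::Arc<tokio::sync::Mutex<Option<u8>>>>);

impl Drop for ProgressGuard {
    fn drop(&mut self) {
        if let Some(progress) = self.0.take() {
            tokio::spawn(async move {
                *progress.lock().await = None; // mirrors `ser(&None)` on backup_progress above
            });
        }
    }
}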
+ .into_iter() + .filter(|(_, m)| m.as_state_info().expect_installed().is_ok()) + .map(|(id, _)| id) + .collect() + }; + assure_backing_up(db, &package_ids)?; + Ok((fs, package_ids)) + }) + .await?, + BackupStatusGuard::new(ctx.db.clone()), + ); + let mut backup_guard = BackupMountGuard::mount( TmpMountGuard::mount(&fs, ReadWrite).await?, &old_password_decrypted, ) .await?; - let package_ids = if let Some(ids) = package_ids { - ids.into_iter() - .flat_map(|package_id| { - let version = db - .as_package_data() - .as_idx(&package_id)? - .as_manifest() - .as_version() - .de() - .ok()?; - Some((package_id, version)) - }) - .collect() - } else { - get_packages(db.clone())?.into_iter().collect() - }; if old_password.is_some() { backup_guard.change_password(&password)?; } - assure_backing_up(&ctx.db, &package_ids).await?; tokio::task::spawn(async move { - let backup_res = perform_backup(&ctx, backup_guard, &package_ids).await; - match backup_res { - Ok(report) if report.iter().all(|(_, rep)| rep.error.is_none()) => ctx - .notification_manager - .notify( - ctx.db.clone(), - None, - NotificationLevel::Success, - "Backup Complete".to_owned(), - "Your backup has completed".to_owned(), - BackupReport { - server: ServerBackupReport { - attempted: true, - error: None, - }, - packages: report - .into_iter() - .map(|((package_id, _), value)| (package_id, value)) - .collect(), - }, - None, - ) - .await - .expect("failed to send notification"), - Ok(report) => ctx - .notification_manager - .notify( - ctx.db.clone(), - None, - NotificationLevel::Warning, - "Backup Complete".to_owned(), - "Your backup has completed, but some package(s) failed to backup".to_owned(), - BackupReport { - server: ServerBackupReport { - attempted: true, - error: None, - }, - packages: report - .into_iter() - .map(|((package_id, _), value)| (package_id, value)) - .collect(), - }, - None, - ) - .await - .expect("failed to send notification"), - Err(e) => { - tracing::error!("Backup Failed: {}", e); - tracing::debug!("{:?}", e); - ctx.notification_manager - .notify( - ctx.db.clone(), - None, - NotificationLevel::Error, - "Backup Failed".to_owned(), - "Your backup failed to complete.".to_owned(), - BackupReport { - server: ServerBackupReport { - attempted: true, - error: Some(e.to_string()), - }, - packages: BTreeMap::new(), - }, - None, - ) - .await - .expect("failed to send notification"); - } - } - ctx.db - .mutate(|v| { - v.as_server_info_mut() - .as_status_info_mut() - .as_backup_progress_mut() - .ser(&None) - }) - .await?; - Ok::<(), Error>(()) + status_guard + .handle_result(perform_backup(&ctx, backup_guard, &package_ids).await) + .await + .unwrap(); }); Ok(()) } #[instrument(skip(db, packages))] -async fn assure_backing_up( - db: &PatchDb, - packages: impl IntoIterator + UnwindSafe + Send, +fn assure_backing_up<'a>( + db: &mut DatabaseModel, + packages: impl IntoIterator, ) -> Result<(), Error> { - db.mutate(|v| { - let backing_up = v - .as_server_info_mut() - .as_status_info_mut() - .as_backup_progress_mut(); - if backing_up - .clone() - .de()? 
- .iter() - .flat_map(|x| x.values()) - .fold(false, |acc, x| { - if !x.complete { - return true; - } - acc - }) - { - return Err(Error::new( - eyre!("Server is already backing up!"), - ErrorKind::InvalidRequest, - )); - } - backing_up.ser(&Some( - packages - .into_iter() - .map(|(x, _)| (x.clone(), BackupProgress { complete: false })) - .collect(), - ))?; - Ok(()) - }) - .await + let backing_up = db + .as_public_mut() + .as_server_info_mut() + .as_status_info_mut() + .as_backup_progress_mut(); + if backing_up + .clone() + .de()? + .iter() + .flat_map(|x| x.values()) + .fold(false, |acc, x| { + if !x.complete { + return true; + } + acc + }) + { + return Err(Error::new( + eyre!("Server is already backing up!"), + ErrorKind::InvalidRequest, + )); + } + backing_up.ser(&Some( + packages + .into_iter() + .map(|x| (x.clone(), BackupProgress { complete: false })) + .collect(), + ))?; + Ok(()) } #[instrument(skip(ctx, backup_guard))] async fn perform_backup( ctx: &RpcContext, backup_guard: BackupMountGuard, - package_ids: &OrdSet<(PackageId, Version)>, -) -> Result, Error> { + package_ids: &OrdSet, +) -> Result, Error> { let mut backup_report = BTreeMap::new(); - let backup_guard = Arc::new(Mutex::new(backup_guard)); + let backup_guard = Arc::new(backup_guard); - for package_id in package_ids { - let (response, _report) = match ctx - .managers - .get(package_id) - .await - .ok_or_else(|| Error::new(eyre!("Manager not found"), ErrorKind::InvalidRequest))? - .backup(backup_guard.clone()) - .await - { - BackupReturn::Ran { report, res } => (res, report), - BackupReturn::AlreadyRunning(report) => { - backup_report.insert(package_id.clone(), report); - continue; - } - BackupReturn::Error(error) => { - tracing::warn!("Backup thread error"); - tracing::debug!("{error:?}"); - backup_report.insert( - package_id.clone(), - PackageBackupReport { - error: Some("Backup thread error".to_owned()), - }, - ); - continue; - } - }; - backup_report.insert( - package_id.clone(), - PackageBackupReport { - error: response.as_ref().err().map(|e| e.to_string()), - }, - ); - - if let Ok(pkg_meta) = response { - backup_guard - .lock() - .await - .metadata - .package_backups - .insert(package_id.0.clone(), pkg_meta); + for id in package_ids { + if let Some(service) = &*ctx.services.get(id).await { + backup_report.insert( + id.clone(), + PackageBackupReport { + error: service + .backup(backup_guard.package_backup(id)) + .await + .err() + .map(|e| e.to_string()), + }, + ); } } - let ui = ctx.db.peek().await.into_ui().de()?; + let mut backup_guard = Arc::try_unwrap(backup_guard).map_err(|_| { + Error::new( + eyre!("leaked reference to BackupMountGuard"), + ErrorKind::Incoherent, + ) + })?; - let mut os_backup_file = AtomicFile::new( - backup_guard.lock().await.as_ref().join("os-backup.cbor"), - None::, - ) - .await - .with_kind(ErrorKind::Filesystem)?; + let ui = ctx.db.peek().await.into_public().into_ui().de()?; + + let mut os_backup_file = + AtomicFile::new(backup_guard.path().join("os-backup.cbor"), None::) + .await + .with_kind(ErrorKind::Filesystem)?; os_backup_file .write_all(&IoFormat::Cbor.to_vec(&OsBackup { account: ctx.account.read().await.clone(), @@ -284,11 +287,11 @@ async fn perform_backup( .await .with_kind(ErrorKind::Filesystem)?; - let luks_folder_old = backup_guard.lock().await.as_ref().join("luks.old"); + let luks_folder_old = backup_guard.path().join("luks.old"); if tokio::fs::metadata(&luks_folder_old).await.is_ok() { tokio::fs::remove_dir_all(&luks_folder_old).await?; } - let luks_folder_bak = 
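// Two small notes on the hunks above. First, the `.fold(false, |acc, x| ...)`
// test in `assure_backing_up` is just "is any package still incomplete", i.e.
// it is equivalent to `.any(|x| !x.complete)`. Second, `perform_backup` now
// shares the mount guard across per-package backups via `Arc` and reclaims
// exclusive ownership with `Arc::try_unwrap` before `save_and_unmount()`, so a
// leaked clone surfaces as an explicit error. Self-contained sketch of that
// reclaim step:

use std::sync::Arc;

fn reclaim<T>(shared: Arc<T>) -> Result<T, &'static str> {
    // Fails (handing the Arc back) if any other clone is still alive.
    Arc::try_unwrap(shared).map_err(|_| "leaked reference to guard")
}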
backup_guard.lock().await.as_ref().join("luks"); + let luks_folder_bak = backup_guard.path().join("luks"); if tokio::fs::metadata(&luks_folder_bak).await.is_ok() { tokio::fs::rename(&luks_folder_bak, &luks_folder_old).await?; } @@ -298,14 +301,6 @@ async fn perform_backup( } let timestamp = Some(Utc::now()); - let mut backup_guard = Arc::try_unwrap(backup_guard) - .map_err(|_err| { - Error::new( - eyre!("Backup guard could not ensure that the others where dropped"), - ErrorKind::Unknown, - ) - })? - .into_inner(); backup_guard.unencrypted_metadata.version = crate::version::Current::new().semver().into(); backup_guard.unencrypted_metadata.full = true; @@ -315,7 +310,12 @@ async fn perform_backup( backup_guard.save_and_unmount().await?; ctx.db - .mutate(|v| v.as_server_info_mut().as_last_backup_mut().ser(×tamp)) + .mutate(|v| { + v.as_public_mut() + .as_server_info_mut() + .as_last_backup_mut() + .ser(×tamp) + }) .await?; Ok(backup_report) diff --git a/core/startos/src/backup/mod.rs b/core/startos/src/backup/mod.rs index 2f3f9bd8f1..c963118c42 100644 --- a/core/startos/src/backup/mod.rs +++ b/core/startos/src/backup/mod.rs @@ -1,33 +1,15 @@ -use std::collections::{BTreeMap, BTreeSet}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; +use std::collections::BTreeMap; use chrono::{DateTime, Utc}; -use color_eyre::eyre::eyre; -use helpers::AtomicFile; -use models::{ImageId, OptionExt}; +use models::{HostId, PackageId}; use reqwest::Url; -use rpc_toolkit::command; +use rpc_toolkit::{from_fn_async, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; -use tokio::fs::File; -use tokio::io::AsyncWriteExt; -use tracing::instrument; -use self::target::PackageBackupInfo; -use crate::context::RpcContext; -use crate::install::PKG_ARCHIVE_DIR; -use crate::manager::manager_seed::ManagerSeed; -use crate::net::interface::InterfaceId; -use crate::net::keys::Key; +use crate::context::CliContext; +#[allow(unused_imports)] use crate::prelude::*; -use crate::procedure::docker::DockerContainers; -use crate::procedure::{NoOutput, PackageProcedure, ProcedureName}; -use crate::s9pk::manifest::PackageId; -use crate::util::serde::{Base32, Base64, IoFormat}; -use crate::util::Version; -use crate::version::{Current, VersionT}; -use crate::volume::{backup_dir, Volume, VolumeId, Volumes, BACKUP_DIR}; -use crate::{Error, ErrorKind, ResultExt}; +use crate::util::serde::{Base32, Base64}; pub mod backup_bulk; pub mod os; @@ -51,176 +33,33 @@ pub struct PackageBackupReport { pub error: Option, } -#[command(subcommands(backup_bulk::backup_all, target::target))] -pub fn backup() -> Result<(), Error> { - Ok(()) +// #[command(subcommands(backup_bulk::backup_all, target::target))] +pub fn backup() -> ParentHandler { + ParentHandler::new() + .subcommand( + "create", + from_fn_async(backup_bulk::backup_all) + .no_display() + .with_remote_cli::(), + ) + .subcommand("target", target::target()) } -#[command(rename = "backup", subcommands(restore::restore_packages_rpc))] -pub fn package_backup() -> Result<(), Error> { - Ok(()) +pub fn package_backup() -> ParentHandler { + ParentHandler::new().subcommand( + "restore", + from_fn_async(restore::restore_packages_rpc) + .no_display() + .with_remote_cli::(), + ) } #[derive(Deserialize, Serialize)] struct BackupMetadata { pub timestamp: DateTime, #[serde(default)] - pub network_keys: BTreeMap>, + pub network_keys: BTreeMap>, #[serde(default)] - pub tor_keys: BTreeMap>, // DEPRECATED + pub tor_keys: BTreeMap>, // DEPRECATED pub marketplace_url: Option, } - -#[derive(Clone, Debug, 
Deserialize, Serialize, HasModel)] -#[model = "Model"] -pub struct BackupActions { - pub create: PackageProcedure, - pub restore: PackageProcedure, -} -impl BackupActions { - pub fn validate( - &self, - _container: &Option, - eos_version: &Version, - volumes: &Volumes, - image_ids: &BTreeSet, - ) -> Result<(), Error> { - self.create - .validate(eos_version, volumes, image_ids, false) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, "Backup Create"))?; - self.restore - .validate(eos_version, volumes, image_ids, false) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, "Backup Restore"))?; - Ok(()) - } - - #[instrument(skip_all)] - pub async fn create(&self, seed: Arc) -> Result { - let manifest = &seed.manifest; - let mut volumes = seed.manifest.volumes.to_readonly(); - let ctx = &seed.ctx; - let pkg_id = &manifest.id; - let pkg_version = &manifest.version; - volumes.insert(VolumeId::Backup, Volume::Backup { readonly: false }); - let backup_dir = backup_dir(&manifest.id); - if tokio::fs::metadata(&backup_dir).await.is_err() { - tokio::fs::create_dir_all(&backup_dir).await? - } - self.create - .execute::<(), NoOutput>( - ctx, - pkg_id, - pkg_version, - ProcedureName::CreateBackup, - &volumes, - None, - None, - ) - .await? - .map_err(|e| eyre!("{}", e.1)) - .with_kind(crate::ErrorKind::Backup)?; - let (network_keys, tor_keys): (Vec<_>, Vec<_>) = - Key::for_package(&ctx.secret_store, pkg_id) - .await? - .into_iter() - .filter_map(|k| { - let interface = k.interface().map(|(_, i)| i)?; - Some(( - (interface.clone(), Base64(k.as_bytes())), - (interface, Base32(k.tor_key().as_bytes())), - )) - }) - .unzip(); - let marketplace_url = ctx - .db - .peek() - .await - .as_package_data() - .as_idx(&pkg_id) - .or_not_found(pkg_id)? - .expect_as_installed()? - .as_installed() - .as_marketplace_url() - .de()?; - let tmp_path = Path::new(BACKUP_DIR) - .join(pkg_id) - .join(format!("{}.s9pk", pkg_id)); - let s9pk_path = ctx - .datadir - .join(PKG_ARCHIVE_DIR) - .join(pkg_id) - .join(pkg_version.as_str()) - .join(format!("{}.s9pk", pkg_id)); - let mut infile = File::open(&s9pk_path).await?; - let mut outfile = AtomicFile::new(&tmp_path, None::) - .await - .with_kind(ErrorKind::Filesystem)?; - tokio::io::copy(&mut infile, &mut *outfile) - .await - .with_ctx(|_| { - ( - crate::ErrorKind::Filesystem, - format!("cp {} -> {}", s9pk_path.display(), tmp_path.display()), - ) - })?; - outfile.save().await.with_kind(ErrorKind::Filesystem)?; - let timestamp = Utc::now(); - let metadata_path = Path::new(BACKUP_DIR).join(pkg_id).join("metadata.cbor"); - let mut outfile = AtomicFile::new(&metadata_path, None::) - .await - .with_kind(ErrorKind::Filesystem)?; - let network_keys = network_keys.into_iter().collect(); - let tor_keys = tor_keys.into_iter().collect(); - outfile - .write_all(&IoFormat::Cbor.to_vec(&BackupMetadata { - timestamp, - network_keys, - tor_keys, - marketplace_url, - })?) 
- .await?; - outfile.save().await.with_kind(ErrorKind::Filesystem)?; - Ok(PackageBackupInfo { - os_version: Current::new().semver().into(), - title: manifest.title.clone(), - version: pkg_version.clone(), - timestamp, - }) - } - - #[instrument(skip_all)] - pub async fn restore( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - ) -> Result, Error> { - let mut volumes = volumes.clone(); - volumes.insert(VolumeId::Backup, Volume::Backup { readonly: true }); - self.restore - .execute::<(), NoOutput>( - ctx, - pkg_id, - pkg_version, - ProcedureName::RestoreBackup, - &volumes, - None, - None, - ) - .await? - .map_err(|e| eyre!("{}", e.1)) - .with_kind(crate::ErrorKind::Restore)?; - let metadata_path = Path::new(BACKUP_DIR).join(pkg_id).join("metadata.cbor"); - let metadata: BackupMetadata = IoFormat::Cbor.from_slice( - &tokio::fs::read(&metadata_path).await.with_ctx(|_| { - ( - crate::ErrorKind::Filesystem, - metadata_path.display().to_string(), - ) - })?, - )?; - - Ok(metadata.marketplace_url) - } -} diff --git a/core/startos/src/backup/os.rs b/core/startos/src/backup/os.rs index 5ab8bd12e3..6848473a7f 100644 --- a/core/startos/src/backup/os.rs +++ b/core/startos/src/backup/os.rs @@ -1,13 +1,15 @@ -use openssl::pkey::PKey; +use openssl::pkey::{PKey, Private}; use openssl::x509::X509; use patch_db::Value; use serde::{Deserialize, Serialize}; +use ssh_key::private::Ed25519Keypair; +use torut::onion::TorSecretKeyV3; use crate::account::AccountInfo; use crate::hostname::{generate_hostname, generate_id, Hostname}; -use crate::net::keys::Key; use crate::prelude::*; -use crate::util::serde::Base64; +use crate::util::crypto::ed25519_expand_key; +use crate::util::serde::{Base32, Base64, Pem}; pub struct OsBackup { pub account: AccountInfo, @@ -19,19 +21,23 @@ impl<'de> Deserialize<'de> for OsBackup { D: serde::Deserializer<'de>, { let tagged = OsBackupSerDe::deserialize(deserializer)?; - match tagged.version { + Ok(match tagged.version { 0 => patch_db::value::from_value::(tagged.rest) .map_err(serde::de::Error::custom)? .project() - .map_err(serde::de::Error::custom), + .map_err(serde::de::Error::custom)?, 1 => patch_db::value::from_value::(tagged.rest) .map_err(serde::de::Error::custom)? - .project() - .map_err(serde::de::Error::custom), - v => Err(serde::de::Error::custom(&format!( - "Unknown backup version {v}" - ))), - } + .project(), + 2 => patch_db::value::from_value::(tagged.rest) + .map_err(serde::de::Error::custom)? 
+ .project(), + v => { + return Err(serde::de::Error::custom(&format!( + "Unknown backup version {v}" + ))) + } + }) } } impl Serialize for OsBackup { @@ -40,11 +46,9 @@ impl Serialize for OsBackup { S: serde::Serializer, { OsBackupSerDe { - version: 1, - rest: patch_db::value::to_value( - &OsBackupV1::unproject(self).map_err(serde::ser::Error::custom)?, - ) - .map_err(serde::ser::Error::custom)?, + version: 2, + rest: patch_db::value::to_value(&OsBackupV2::unproject(self)) + .map_err(serde::ser::Error::custom)?, } .serialize(serializer) } @@ -62,10 +66,10 @@ struct OsBackupSerDe { #[derive(Deserialize)] #[serde(rename = "kebab-case")] struct OsBackupV0 { - // tor_key: Base32<[u8; 64]>, - root_ca_key: String, // PEM Encoded OpenSSL Key - root_ca_cert: String, // PEM Encoded OpenSSL X509 Certificate - ui: Value, // JSON Value + tor_key: Base32<[u8; 64]>, // Base32 Encoded Ed25519 Expanded Secret Key + root_ca_key: Pem>, // PEM Encoded OpenSSL Key + root_ca_cert: Pem, // PEM Encoded OpenSSL X509 Certificate + ui: Value, // JSON Value } impl OsBackupV0 { fn project(self) -> Result { @@ -74,9 +78,13 @@ impl OsBackupV0 { server_id: generate_id(), hostname: generate_hostname(), password: Default::default(), - key: Key::new(None), - root_ca_key: PKey::private_key_from_pem(self.root_ca_key.as_bytes())?, - root_ca_cert: X509::from_pem(self.root_ca_cert.as_bytes())?, + root_ca_key: self.root_ca_key.0, + root_ca_cert: self.root_ca_cert.0, + ssh_key: ssh_key::PrivateKey::random( + &mut rand::thread_rng(), + ssh_key::Algorithm::Ed25519, + )?, + tor_key: TorSecretKeyV3::from(self.tor_key.0), }, ui: self.ui, }) @@ -87,36 +95,67 @@ impl OsBackupV0 { #[derive(Deserialize, Serialize)] #[serde(rename = "kebab-case")] struct OsBackupV1 { - server_id: String, // uuidv4 - hostname: String, // embassy-- - net_key: Base64<[u8; 32]>, // Ed25519 Secret Key - root_ca_key: String, // PEM Encoded OpenSSL Key - root_ca_cert: String, // PEM Encoded OpenSSL X509 Certificate - ui: Value, // JSON Value - // TODO add more + server_id: String, // uuidv4 + hostname: String, // embassy-- + net_key: Base64<[u8; 32]>, // Ed25519 Secret Key + root_ca_key: Pem>, // PEM Encoded OpenSSL Key + root_ca_cert: Pem, // PEM Encoded OpenSSL X509 Certificate + ui: Value, // JSON Value } impl OsBackupV1 { - fn project(self) -> Result { - Ok(OsBackup { + fn project(self) -> OsBackup { + OsBackup { account: AccountInfo { server_id: self.server_id, hostname: Hostname(self.hostname), password: Default::default(), - key: Key::from_bytes(None, self.net_key.0), - root_ca_key: PKey::private_key_from_pem(self.root_ca_key.as_bytes())?, - root_ca_cert: X509::from_pem(self.root_ca_cert.as_bytes())?, + root_ca_key: self.root_ca_key.0, + root_ca_cert: self.root_ca_cert.0, + ssh_key: ssh_key::PrivateKey::from(Ed25519Keypair::from_seed(&self.net_key.0)), + tor_key: TorSecretKeyV3::from(ed25519_expand_key(&self.net_key.0)), }, ui: self.ui, - }) + } + } +} + +/// V2 +#[derive(Deserialize, Serialize)] +#[serde(rename = "kebab-case")] + +struct OsBackupV2 { + server_id: String, // uuidv4 + hostname: String, // - + root_ca_key: Pem>, // PEM Encoded OpenSSL Key + root_ca_cert: Pem, // PEM Encoded OpenSSL X509 Certificate + ssh_key: Pem, // PEM Encoded OpenSSH Key + tor_key: TorSecretKeyV3, // Base64 Encoded Ed25519 Expanded Secret Key + ui: Value, // JSON Value +} +impl OsBackupV2 { + fn project(self) -> OsBackup { + OsBackup { + account: AccountInfo { + server_id: self.server_id, + hostname: Hostname(self.hostname), + password: Default::default(), + root_ca_key: 
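// `OsBackup` is (de)serialized through a small version envelope: an integer
// `version` plus the remaining payload (`rest`), with one struct per on-disk
// schema generation (`OsBackupV0`/`V1`/`V2`) projected into the current
// in-memory type, and unknown versions rejected. A self-contained sketch of
// that envelope idea with `serde_json` (field names, version numbers, and the
// use of `flatten` below are illustrative, not the real backup schema):

use serde::de::Error as _;
use serde::Deserialize;

#[derive(Deserialize)]
struct Envelope {
    version: u64,
    #[serde(flatten)]
    rest: serde_json::Value, // everything except `version`
}

#[derive(Deserialize)]
struct V1Sketch { hostname: String }

#[derive(Deserialize)]
struct V2Sketch { hostname: String, ssh_key: String }

#[allow(dead_code)]
struct CurrentSketch { hostname: String, ssh_key: Option<String> }

fn project(raw: &str) -> Result<CurrentSketch, serde_json::Error> {
    let env: Envelope = serde_json::from_str(raw)?;
    match env.version {
        1 => {
            let v: V1Sketch = serde_json::from_value(env.rest)?;
            Ok(CurrentSketch { hostname: v.hostname, ssh_key: None })
        }
        2 => {
            let v: V2Sketch = serde_json::from_value(env.rest)?;
            Ok(CurrentSketch { hostname: v.hostname, ssh_key: Some(v.ssh_key) })
        }
        v => Err(serde_json::Error::custom(format!("unknown backup version {v}"))),
    }
}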
self.root_ca_key.0, + root_ca_cert: self.root_ca_cert.0, + ssh_key: self.ssh_key.0, + tor_key: self.tor_key, + }, + ui: self.ui, + } } - fn unproject(backup: &OsBackup) -> Result { - Ok(Self { + fn unproject(backup: &OsBackup) -> Self { + Self { server_id: backup.account.server_id.clone(), hostname: backup.account.hostname.0.clone(), - net_key: Base64(backup.account.key.as_bytes()), - root_ca_key: String::from_utf8(backup.account.root_ca_key.private_key_to_pem_pkcs8()?)?, - root_ca_cert: String::from_utf8(backup.account.root_ca_cert.to_pem()?)?, + root_ca_key: Pem(backup.account.root_ca_key.clone()), + root_ca_cert: Pem(backup.account.root_ca_cert.clone()), + ssh_key: Pem(backup.account.ssh_key.clone()), + tor_key: backup.account.tor_key.clone(), ui: backup.ui.clone(), - }) + } } } diff --git a/core/startos/src/backup/restore.rs b/core/startos/src/backup/restore.rs index b72b319e2f..70695b42ee 100644 --- a/core/startos/src/backup/restore.rs +++ b/core/startos/src/backup/restore.rs @@ -1,175 +1,76 @@ use std::collections::BTreeMap; -use std::path::Path; -use std::sync::atomic::Ordering; use std::sync::Arc; -use std::time::Duration; -use clap::ArgMatches; -use futures::future::BoxFuture; -use futures::{stream, FutureExt, StreamExt}; +use clap::Parser; +use futures::{stream, StreamExt}; +use models::PackageId; use openssl::x509::X509; -use rpc_toolkit::command; -use sqlx::Connection; -use tokio::fs::File; +use patch_db::json_ptr::ROOT; +use serde::{Deserialize, Serialize}; use torut::onion::OnionAddressV3; use tracing::instrument; use super::target::BackupTargetId; use crate::backup::os::OsBackup; -use crate::backup::BackupMetadata; -use crate::context::rpc::RpcContextConfig; use crate::context::{RpcContext, SetupContext}; -use crate::db::model::{PackageDataEntry, PackageDataEntryRestoring, StaticFiles}; -use crate::disk::mount::backup::{BackupMountGuard, PackageBackupMountGuard}; +use crate::db::model::Database; +use crate::disk::mount::backup::BackupMountGuard; use crate::disk::mount::filesystem::ReadWrite; -use crate::disk::mount::guard::TmpMountGuard; +use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::hostname::Hostname; use crate::init::init; -use crate::install::progress::InstallProgress; -use crate::install::{download_install_s9pk, PKG_PUBLIC_DIR}; -use crate::notifications::NotificationLevel; use crate::prelude::*; -use crate::s9pk::manifest::{Manifest, PackageId}; -use crate::s9pk::reader::S9pkReader; -use crate::setup::SetupStatus; -use crate::util::display_none; -use crate::util::io::dir_size; +use crate::s9pk::S9pk; +use crate::service::service_map::DownloadInstallFuture; use crate::util::serde::IoFormat; -use crate::volume::{backup_dir, BACKUP_DIR, PKG_VOLUME_DIR}; -fn parse_comma_separated(arg: &str, _: &ArgMatches) -> Result, Error> { - arg.split(',') - .map(|s| s.trim().parse().map_err(Error::from)) - .collect() +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct RestorePackageParams { + pub ids: Vec, + pub target_id: BackupTargetId, + pub password: String, } -#[command(rename = "restore", display(display_none))] +// #[command(rename = "restore", display(display_none))] #[instrument(skip(ctx, password))] pub async fn restore_packages_rpc( - #[context] ctx: RpcContext, - #[arg(parse(parse_comma_separated))] ids: Vec, - #[arg(rename = "target-id")] target_id: BackupTargetId, - #[arg] password: String, + ctx: RpcContext, + RestorePackageParams { + ids, + target_id, + 
password, + }: RestorePackageParams, ) -> Result<(), Error> { - let fs = target_id - .load(ctx.secret_store.acquire().await?.as_mut()) - .await?; + let fs = target_id.load(&ctx.db.peek().await)?; let backup_guard = BackupMountGuard::mount(TmpMountGuard::mount(&fs, ReadWrite).await?, &password).await?; - let (backup_guard, tasks, _) = restore_packages(&ctx, backup_guard, ids).await?; + let tasks = restore_packages(&ctx, backup_guard, ids).await?; tokio::spawn(async move { - stream::iter(tasks.into_iter().map(|x| (x, ctx.clone()))) - .for_each_concurrent(5, |(res, ctx)| async move { - match res.await { - (Ok(_), _) => (), - (Err(err), package_id) => { - if let Err(err) = ctx - .notification_manager - .notify( - ctx.db.clone(), - Some(package_id.clone()), - NotificationLevel::Error, - "Restoration Failure".to_string(), - format!("Error restoring package {}: {}", package_id, err), - (), - None, - ) - .await - { - tracing::error!("Failed to notify: {}", err); - tracing::debug!("{:?}", err); - }; - tracing::error!("Error restoring package {}: {}", package_id, err); + stream::iter(tasks) + .for_each_concurrent(5, |(id, res)| async move { + match async { res.await?.await }.await { + Ok(_) => (), + Err(err) => { + tracing::error!("Error restoring package {}: {}", id, err); tracing::debug!("{:?}", err); } } }) .await; - if let Err(e) = backup_guard.unmount().await { - tracing::error!("Error unmounting backup drive: {}", e); - tracing::debug!("{:?}", e); - } }); Ok(()) } -async fn approximate_progress( - rpc_ctx: &RpcContext, - progress: &mut ProgressInfo, -) -> Result<(), Error> { - for (id, size) in &mut progress.target_volume_size { - let dir = rpc_ctx.datadir.join(PKG_VOLUME_DIR).join(id).join("data"); - if tokio::fs::metadata(&dir).await.is_err() { - *size = 0; - } else { - *size = dir_size(&dir, None).await?; - } - } - Ok(()) -} - -async fn approximate_progress_loop( - ctx: &SetupContext, - rpc_ctx: &RpcContext, - mut starting_info: ProgressInfo, -) { - loop { - if let Err(e) = approximate_progress(rpc_ctx, &mut starting_info).await { - tracing::error!("Failed to approximate restore progress: {}", e); - tracing::debug!("{:?}", e); - } else { - *ctx.setup_status.write().await = Some(Ok(starting_info.flatten())); - } - tokio::time::sleep(Duration::from_secs(1)).await; - } -} - -#[derive(Debug, Default)] -struct ProgressInfo { - package_installs: BTreeMap>, - src_volume_size: BTreeMap, - target_volume_size: BTreeMap, -} -impl ProgressInfo { - fn flatten(&self) -> SetupStatus { - let mut total_bytes = 0; - let mut bytes_transferred = 0; - - for progress in self.package_installs.values() { - total_bytes += ((progress.size.unwrap_or(0) as f64) * 2.2) as u64; - bytes_transferred += progress.downloaded.load(Ordering::SeqCst); - bytes_transferred += ((progress.validated.load(Ordering::SeqCst) as f64) * 0.2) as u64; - bytes_transferred += progress.unpacked.load(Ordering::SeqCst); - } - - for size in self.src_volume_size.values() { - total_bytes += *size; - } - - for size in self.target_volume_size.values() { - bytes_transferred += *size; - } - - if bytes_transferred > total_bytes { - bytes_transferred = total_bytes; - } - - SetupStatus { - total_bytes: Some(total_bytes), - bytes_transferred, - complete: false, - } - } -} - #[instrument(skip(ctx))] pub async fn recover_full_embassy( ctx: SetupContext, disk_guid: Arc, - embassy_password: String, + start_os_password: String, recovery_source: TmpMountGuard, recovery_password: Option, ) -> Result<(Arc, Hostname, OnionAddressV3, X509), Error> { @@ -179,7 +80,7 
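// The restore path above fans the per-package install futures out through
// `stream::iter(tasks).for_each_concurrent(5, ...)`: at most five packages are
// restored at once, and a failure is logged per package instead of aborting the
// whole batch. A self-contained sketch of that bounded-concurrency pattern with
// the `futures` crate (the job type here is a placeholder):

use futures::{stream, StreamExt};

async fn run_restores(jobs: Vec<(String, Result<(), String>)>) {
    stream::iter(jobs)
        .for_each_concurrent(5, |(id, result)| async move {
            if let Err(err) = result {
                // log and continue with the remaining packages
                eprintln!("Error restoring package {id}: {err}");
            }
        })
        .await;
}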
@@ pub async fn recover_full_embassy( ) .await?; - let os_backup_path = backup_guard.as_ref().join("os-backup.cbor"); + let os_backup_path = backup_guard.path().join("os-backup.cbor"); let mut os_backup: OsBackup = IoFormat::Cbor.from_slice( &tokio::fs::read(&os_backup_path) .await @@ -187,23 +88,19 @@ pub async fn recover_full_embassy( )?; os_backup.account.password = argon2::hash_encoded( - embassy_password.as_bytes(), + start_os_password.as_bytes(), &rand::random::<[u8; 16]>()[..], &argon2::Config::rfc9106_low_mem(), ) .with_kind(ErrorKind::PasswordHashGeneration)?; - let secret_store = ctx.secret_store().await?; + let db = ctx.db().await?; + db.put(&ROOT, &Database::init(&os_backup.account)?).await?; + drop(db); - os_backup.account.save(&secret_store).await?; + init(&ctx.config).await?; - secret_store.close().await; - - let cfg = RpcContextConfig::load(ctx.config_path.clone()).await?; - - init(&cfg).await?; - - let rpc_ctx = RpcContext::init(ctx.config_path.clone(), disk_guid.clone()).await?; + let rpc_ctx = RpcContext::init(&ctx.config, disk_guid.clone()).await?; let ids: Vec<_> = backup_guard .metadata @@ -211,43 +108,25 @@ pub async fn recover_full_embassy( .keys() .cloned() .collect(); - let (backup_guard, tasks, progress_info) = - restore_packages(&rpc_ctx, backup_guard, ids).await?; - let task_consumer_rpc_ctx = rpc_ctx.clone(); - tokio::select! { - _ = async move { - stream::iter(tasks.into_iter().map(|x| (x, task_consumer_rpc_ctx.clone()))) - .for_each_concurrent(5, |(res, ctx)| async move { - match res.await { - (Ok(_), _) => (), - (Err(err), package_id) => { - if let Err(err) = ctx.notification_manager.notify( - ctx.db.clone(), - Some(package_id.clone()), - NotificationLevel::Error, - "Restoration Failure".to_string(), format!("Error restoring package {}: {}", package_id,err), (), None).await{ - tracing::error!("Failed to notify: {}", err); - tracing::debug!("{:?}", err); - }; - tracing::error!("Error restoring package {}: {}", package_id, err); - tracing::debug!("{:?}", err); - }, - } - }).await; - - } => { - - }, - _ = approximate_progress_loop(&ctx, &rpc_ctx, progress_info) => unreachable!(concat!(module_path!(), "::approximate_progress_loop should not terminate")), - } + let tasks = restore_packages(&rpc_ctx, backup_guard, ids).await?; + stream::iter(tasks) + .for_each_concurrent(5, |(id, res)| async move { + match async { res.await?.await }.await { + Ok(_) => (), + Err(err) => { + tracing::error!("Error restoring package {}: {}", id, err); + tracing::debug!("{:?}", err); + } + } + }) + .await; - backup_guard.unmount().await?; rpc_ctx.shutdown().await?; Ok(( disk_guid, os_backup.account.hostname, - os_backup.account.key.tor_address(), + os_backup.account.tor_key.public().get_onion_address(), os_backup.account.root_ca_cert, )) } @@ -257,205 +136,25 @@ async fn restore_packages( ctx: &RpcContext, backup_guard: BackupMountGuard, ids: Vec, -) -> Result< - ( - BackupMountGuard, - Vec, PackageId)>>, - ProgressInfo, - ), - Error, -> { - let guards = assure_restoring(ctx, ids, &backup_guard).await?; - - let mut progress_info = ProgressInfo::default(); - - let mut tasks = Vec::with_capacity(guards.len()); - for (manifest, guard) in guards { - let id = manifest.id.clone(); - let (progress, task) = restore_package(ctx.clone(), manifest, guard).await?; - progress_info - .package_installs - .insert(id.clone(), progress.clone()); - progress_info - .src_volume_size - .insert(id.clone(), dir_size(backup_dir(&id), None).await?); - progress_info.target_volume_size.insert(id.clone(), 0); - 
let package_id = id.clone(); - tasks.push( - async move { - if let Err(e) = task.await { - tracing::error!("Error restoring package {}: {}", id, e); - tracing::debug!("{:?}", e); - Err(e) - } else { - Ok(()) - } - } - .map(|x| (x, package_id)) - .boxed(), - ); - } - - Ok((backup_guard, tasks, progress_info)) -} - -#[instrument(skip(ctx, backup_guard))] -async fn assure_restoring( - ctx: &RpcContext, - ids: Vec, - backup_guard: &BackupMountGuard, -) -> Result, Error> { - let mut guards = Vec::with_capacity(ids.len()); - - let mut insert_packages = BTreeMap::new(); - +) -> Result, Error> { + let backup_guard = Arc::new(backup_guard); + let mut tasks = BTreeMap::new(); for id in ids { - let peek = ctx.db.peek().await; - - let model = peek.as_package_data().as_idx(&id); - - if !model.is_none() { - return Err(Error::new( - eyre!("Can't restore over existing package: {}", id), - crate::ErrorKind::InvalidRequest, - )); - } - let guard = backup_guard.mount_package_backup(&id).await?; - let s9pk_path = Path::new(BACKUP_DIR).join(&id).join(format!("{}.s9pk", id)); - let mut rdr = S9pkReader::open(&s9pk_path, false).await?; - - let manifest = rdr.manifest().await?; - let version = manifest.version.clone(); - let progress = Arc::new(InstallProgress::new(Some( - tokio::fs::metadata(&s9pk_path).await?.len(), - ))); - - let public_dir_path = ctx - .datadir - .join(PKG_PUBLIC_DIR) - .join(&id) - .join(version.as_str()); - tokio::fs::create_dir_all(&public_dir_path).await?; - - let license_path = public_dir_path.join("LICENSE.md"); - let mut dst = File::create(&license_path).await?; - tokio::io::copy(&mut rdr.license().await?, &mut dst).await?; - dst.sync_all().await?; - - let instructions_path = public_dir_path.join("INSTRUCTIONS.md"); - let mut dst = File::create(&instructions_path).await?; - tokio::io::copy(&mut rdr.instructions().await?, &mut dst).await?; - dst.sync_all().await?; - - let icon_path = Path::new("icon").with_extension(&manifest.assets.icon_type()); - let icon_path = public_dir_path.join(&icon_path); - let mut dst = File::create(&icon_path).await?; - tokio::io::copy(&mut rdr.icon().await?, &mut dst).await?; - dst.sync_all().await?; - insert_packages.insert( - id.clone(), - PackageDataEntry::Restoring(PackageDataEntryRestoring { - install_progress: progress.clone(), - static_files: StaticFiles::local(&id, &version, manifest.assets.icon_type()), - manifest: manifest.clone(), - }), - ); - - guards.push((manifest, guard)); - } - ctx.db - .mutate(|db| { - for (id, package) in insert_packages { - db.as_package_data_mut().insert(&id, &package)?; - } - Ok(()) - }) - .await?; - Ok(guards) -} - -#[instrument(skip(ctx, guard))] -async fn restore_package<'a>( - ctx: RpcContext, - manifest: Manifest, - guard: PackageBackupMountGuard, -) -> Result<(Arc, BoxFuture<'static, Result<(), Error>>), Error> { - let id = manifest.id.clone(); - let s9pk_path = Path::new(BACKUP_DIR) - .join(&manifest.id) - .join(format!("{}.s9pk", id)); - - let metadata_path = Path::new(BACKUP_DIR).join(&id).join("metadata.cbor"); - let metadata: BackupMetadata = IoFormat::Cbor.from_slice( - &tokio::fs::read(&metadata_path) - .await - .with_ctx(|_| (ErrorKind::Filesystem, metadata_path.display().to_string()))?, - )?; - - let mut secrets = ctx.secret_store.acquire().await?; - let mut secrets_tx = secrets.begin().await?; - for (iface, key) in metadata.network_keys { - let k = key.0.as_slice(); - sqlx::query!( - "INSERT INTO network_keys (package, interface, key) VALUES ($1, $2, $3) ON CONFLICT (package, interface) DO NOTHING", - 
id.to_string(), - iface.to_string(), - k, - ) - .execute(secrets_tx.as_mut()).await?; - } - // DEPRECATED - for (iface, key) in metadata.tor_keys { - let k = key.0.as_slice(); - sqlx::query!( - "INSERT INTO tor (package, interface, key) VALUES ($1, $2, $3) ON CONFLICT (package, interface) DO NOTHING", - id.to_string(), - iface.to_string(), - k, - ) - .execute(secrets_tx.as_mut()).await?; - } - secrets_tx.commit().await?; - drop(secrets); - - let len = tokio::fs::metadata(&s9pk_path) - .await - .with_ctx(|_| (ErrorKind::Filesystem, s9pk_path.display().to_string()))? - .len(); - let file = File::open(&s9pk_path) - .await - .with_ctx(|_| (ErrorKind::Filesystem, s9pk_path.display().to_string()))?; - - let progress = InstallProgress::new(Some(len)); - let marketplace_url = metadata.marketplace_url; - - let progress = Arc::new(progress); - - ctx.db - .mutate(|db| { - db.as_package_data_mut().insert( - &id, - &PackageDataEntry::Restoring(PackageDataEntryRestoring { - install_progress: progress.clone(), - static_files: StaticFiles::local( - &id, - &manifest.version, - manifest.assets.icon_type(), - ), - manifest: manifest.clone(), - }), + let backup_dir = backup_guard.clone().package_backup(&id); + let task = ctx + .services + .install( + ctx.clone(), + S9pk::open( + backup_dir.path().join(&id).with_extension("s9pk"), + Some(&id), + ) + .await?, + Some(backup_dir), ) - }) - .await?; - Ok(( - progress.clone(), - async move { - download_install_s9pk(ctx, manifest, marketplace_url, progress, file, None).await?; - - guard.unmount().await?; + .await?; + tasks.insert(id, task); + } - Ok(()) - } - .boxed(), - )) + Ok(tasks) } diff --git a/core/startos/src/backup/target/cifs.rs b/core/startos/src/backup/target/cifs.rs index 3f3251535e..a3bbb9759a 100644 --- a/core/startos/src/backup/target/cifs.rs +++ b/core/startos/src/backup/target/cifs.rs @@ -1,63 +1,114 @@ +use std::collections::BTreeMap; use std::path::{Path, PathBuf}; +use clap::Parser; use color_eyre::eyre::eyre; -use futures::TryStreamExt; -use rpc_toolkit::command; +use imbl_value::InternedString; +use rpc_toolkit::{command, from_fn_async, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; -use sqlx::{Executor, Postgres}; use super::{BackupTarget, BackupTargetId}; -use crate::context::RpcContext; +use crate::context::{CliContext, RpcContext}; +use crate::db::model::DatabaseModel; use crate::disk::mount::filesystem::cifs::Cifs; use crate::disk::mount::filesystem::ReadOnly; -use crate::disk::mount::guard::TmpMountGuard; +use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::disk::util::{recovery_info, EmbassyOsRecoveryInfo}; use crate::prelude::*; -use crate::util::display_none; use crate::util::serde::KeyVal; +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct CifsTargets(pub BTreeMap); +impl CifsTargets { + pub fn new() -> Self { + Self(BTreeMap::new()) + } +} +impl Map for CifsTargets { + type Key = u32; + type Value = Cifs; + fn key_str(key: &Self::Key) -> Result, Error> { + Self::key_string(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(InternedString::from_display(key)) + } +} + #[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct CifsBackupTarget { hostname: String, path: PathBuf, username: String, mountable: bool, - embassy_os: Option, + start_os: Option, } -#[command(subcommands(add, update, remove))] -pub fn cifs() -> Result<(), Error> { - Ok(()) +pub fn cifs() -> ParentHandler { + ParentHandler::new() + 
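// `restore_packages` above now returns a map of `PackageId -> DownloadInstallFuture`,
// and both call sites drive each entry with `res.await?.await`, i.e. the value
// appears to resolve in two stages (going by the name, presumably download first
// and install second; the exact split is not shown in this diff). A minimal
// self-contained sketch of a two-stage future like that:

use futures::future::BoxFuture;

async fn stage_one() -> Result<BoxFuture<'static, Result<(), String>>, String> {
    // ...first stage would run here...
    Ok(Box::pin(async {
        // ...second stage would run here...
        Ok::<(), String>(())
    }))
}

async fn drive() -> Result<(), String> {
    stage_one().await?.await // mirrors `res.await?.await` in the restore loops
}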
.subcommand( + "add", + from_fn_async(add) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "update", + from_fn_async(update) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "remove", + from_fn_async(remove) + .no_display() + .with_remote_cli::(), + ) +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct AddParams { + pub hostname: String, + pub path: PathBuf, + pub username: String, + pub password: Option, } -#[command(display(display_none))] pub async fn add( - #[context] ctx: RpcContext, - #[arg] hostname: String, - #[arg] path: PathBuf, - #[arg] username: String, - #[arg] password: Option, + ctx: RpcContext, + AddParams { + hostname, + path, + username, + password, + }: AddParams, ) -> Result, Error> { let cifs = Cifs { hostname, - path, + path: Path::new("/").join(path), username, password, }; let guard = TmpMountGuard::mount(&cifs, ReadOnly).await?; - let embassy_os = recovery_info(&guard).await?; + let start_os = recovery_info(guard.path()).await?; guard.unmount().await?; - let path_string = Path::new("/").join(&cifs.path).display().to_string(); - let id: i32 = sqlx::query!( - "INSERT INTO cifs_shares (hostname, path, username, password) VALUES ($1, $2, $3, $4) RETURNING id", - cifs.hostname, - path_string, - cifs.username, - cifs.password, - ) - .fetch_one(&ctx.secret_store) - .await?.id; + let id = ctx + .db + .mutate(|db| { + let id = db + .as_private() + .as_cifs() + .keys()? + .into_iter() + .max() + .map_or(0, |a| a + 1); + db.as_private_mut().as_cifs_mut().insert(&id, &cifs)?; + Ok(id) + }) + .await?; Ok(KeyVal { key: BackupTargetId::Cifs { id }, value: BackupTarget::Cifs(CifsBackupTarget { @@ -65,19 +116,31 @@ pub async fn add( path: cifs.path, username: cifs.username, mountable: true, - embassy_os, + start_os, }), }) } -#[command(display(display_none))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct UpdateParams { + pub id: BackupTargetId, + pub hostname: String, + pub path: PathBuf, + pub username: String, + pub password: Option, +} + pub async fn update( - #[context] ctx: RpcContext, - #[arg] id: BackupTargetId, - #[arg] hostname: String, - #[arg] path: PathBuf, - #[arg] username: String, - #[arg] password: Option, + ctx: RpcContext, + UpdateParams { + id, + hostname, + path, + username, + password, + }: UpdateParams, ) -> Result, Error> { let id = if let BackupTargetId::Cifs { id } = id { id @@ -89,32 +152,27 @@ pub async fn update( }; let cifs = Cifs { hostname, - path, + path: Path::new("/").join(path), username, password, }; let guard = TmpMountGuard::mount(&cifs, ReadOnly).await?; - let embassy_os = recovery_info(&guard).await?; + let start_os = recovery_info(guard.path()).await?; guard.unmount().await?; - let path_string = Path::new("/").join(&cifs.path).display().to_string(); - if sqlx::query!( - "UPDATE cifs_shares SET hostname = $1, path = $2, username = $3, password = $4 WHERE id = $5", - cifs.hostname, - path_string, - cifs.username, - cifs.password, - id, - ) - .execute(&ctx.secret_store) - .await? - .rows_affected() - == 0 - { - return Err(Error::new( - eyre!("Backup Target ID {} Not Found", BackupTargetId::Cifs { id }), - ErrorKind::NotFound, - )); - }; + ctx.db + .mutate(|db| { + db.as_private_mut() + .as_cifs_mut() + .as_idx_mut(&id) + .ok_or_else(|| { + Error::new( + eyre!("Backup Target ID {} Not Found", BackupTargetId::Cifs { id }), + ErrorKind::NotFound, + ) + })? 
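// `add` above replaces Postgres' `INSERT ... RETURNING id` by allocating the
// next key inside the same `db.mutate` closure that inserts the share: current
// max key plus one (or 0 for the first entry), so, since the whole closure runs
// as one mutation, concurrent calls are not handed the same id. The same
// allocation on a plain `BTreeMap`:

use std::collections::BTreeMap;

fn insert_with_next_id<V>(map: &mut BTreeMap<u32, V>, value: V) -> u32 {
    // mirrors `.keys()?.into_iter().max().map_or(0, |a| a + 1)` in the hunk above
    let id = map.keys().max().map_or(0, |a| a + 1);
    map.insert(id, value);
    id
}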
+ .ser(&cifs) + }) + .await?; Ok(KeyVal { key: BackupTargetId::Cifs { id }, value: BackupTarget::Cifs(CifsBackupTarget { @@ -122,13 +180,19 @@ pub async fn update( path: cifs.path, username: cifs.username, mountable: true, - embassy_os, + start_os, }), }) } -#[command(display(display_none))] -pub async fn remove(#[context] ctx: RpcContext, #[arg] id: BackupTargetId) -> Result<(), Error> { +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct RemoveParams { + pub id: BackupTargetId, +} + +pub async fn remove(ctx: RpcContext, RemoveParams { id }: RemoveParams) -> Result<(), Error> { let id = if let BackupTargetId::Cifs { id } = id { id } else { @@ -137,74 +201,46 @@ pub async fn remove(#[context] ctx: RpcContext, #[arg] id: BackupTargetId) -> Re ErrorKind::NotFound, )); }; - if sqlx::query!("DELETE FROM cifs_shares WHERE id = $1", id) - .execute(&ctx.secret_store) - .await? - .rows_affected() - == 0 - { - return Err(Error::new( - eyre!("Backup Target ID {} Not Found", BackupTargetId::Cifs { id }), - ErrorKind::NotFound, - )); - }; + ctx.db + .mutate(|db| db.as_private_mut().as_cifs_mut().remove(&id)) + .await?; Ok(()) } -pub async fn load(secrets: &mut Ex, id: i32) -> Result -where - for<'a> &'a mut Ex: Executor<'a, Database = Postgres>, -{ - let record = sqlx::query!( - "SELECT hostname, path, username, password FROM cifs_shares WHERE id = $1", - id - ) - .fetch_one(secrets) - .await?; - - Ok(Cifs { - hostname: record.hostname, - path: PathBuf::from(record.path), - username: record.username, - password: record.password, - }) +pub fn load(db: &DatabaseModel, id: u32) -> Result { + db.as_private() + .as_cifs() + .as_idx(&id) + .ok_or_else(|| { + Error::new( + eyre!("Backup Target ID {} Not Found", id), + ErrorKind::NotFound, + ) + })? + .de() } -pub async fn list(secrets: &mut Ex) -> Result, Error> -where - for<'a> &'a mut Ex: Executor<'a, Database = Postgres>, -{ - let mut records = - sqlx::query!("SELECT id, hostname, path, username, password FROM cifs_shares") - .fetch_many(secrets); - +pub async fn list(db: &DatabaseModel) -> Result, Error> { let mut cifs = Vec::new(); - while let Some(query_result) = records.try_next().await? { - if let Some(record) = query_result.right() { - let mount_info = Cifs { - hostname: record.hostname, - path: PathBuf::from(record.path), - username: record.username, - password: record.password, - }; - let embassy_os = async { - let guard = TmpMountGuard::mount(&mount_info, ReadOnly).await?; - let embassy_os = recovery_info(&guard).await?; - guard.unmount().await?; - Ok::<_, Error>(embassy_os) - } - .await; - cifs.push(( - record.id, - CifsBackupTarget { - hostname: mount_info.hostname, - path: mount_info.path, - username: mount_info.username, - mountable: embassy_os.is_ok(), - embassy_os: embassy_os.ok().and_then(|a| a), - }, - )); + for (id, model) in db.as_private().as_cifs().as_entries()? 
{ + let mount_info = model.de()?; + let start_os = async { + let guard = TmpMountGuard::mount(&mount_info, ReadOnly).await?; + let start_os = recovery_info(guard.path()).await?; + guard.unmount().await?; + Ok::<_, Error>(start_os) } + .await; + cifs.push(( + id, + CifsBackupTarget { + hostname: mount_info.hostname, + path: mount_info.path, + username: mount_info.username, + mountable: start_os.is_ok(), + start_os: start_os.ok().and_then(|a| a), + }, + )); } Ok(cifs) diff --git a/core/startos/src/backup/target/mod.rs b/core/startos/src/backup/target/mod.rs index 93e56c2d39..ac48d75a6e 100644 --- a/core/startos/src/backup/target/mod.rs +++ b/core/startos/src/backup/target/mod.rs @@ -1,39 +1,42 @@ use std::collections::BTreeMap; use std::path::{Path, PathBuf}; -use async_trait::async_trait; use chrono::{DateTime, Utc}; -use clap::ArgMatches; +use clap::builder::ValueParserFactory; +use clap::Parser; use color_eyre::eyre::eyre; use digest::generic_array::GenericArray; use digest::OutputSizeUser; -use rpc_toolkit::command; +use models::PackageId; +use rpc_toolkit::{command, from_fn_async, AnyContext, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use sha2::Sha256; -use sqlx::{Executor, Postgres}; use tokio::sync::Mutex; use tracing::instrument; use self::cifs::CifsBackupTarget; -use crate::context::RpcContext; +use crate::context::{CliContext, RpcContext}; +use crate::db::model::DatabaseModel; use crate::disk::mount::backup::BackupMountGuard; use crate::disk::mount::filesystem::block_dev::BlockDev; use crate::disk::mount::filesystem::cifs::Cifs; use crate::disk::mount::filesystem::{FileSystem, MountType, ReadWrite}; -use crate::disk::mount::guard::TmpMountGuard; +use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::disk::util::PartitionInfo; use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::util::serde::{deserialize_from_str, display_serializable, serialize_display}; -use crate::util::{display_none, Version}; +use crate::util::clap::FromStrParser; +use crate::util::serde::{ + deserialize_from_str, display_serializable, serialize_display, HandlerExtSerde, WithIoFormat, +}; +use crate::util::Version; pub mod cifs; #[derive(Debug, Deserialize, Serialize)] #[serde(tag = "type")] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum BackupTarget { - #[serde(rename_all = "kebab-case")] + #[serde(rename_all = "camelCase")] Disk { vendor: Option, model: Option, @@ -46,18 +49,15 @@ pub enum BackupTarget { #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub enum BackupTargetId { Disk { logicalname: PathBuf }, - Cifs { id: i32 }, + Cifs { id: u32 }, } impl BackupTargetId { - pub async fn load(self, secrets: &mut Ex) -> Result - where - for<'a> &'a mut Ex: Executor<'a, Database = Postgres>, - { + pub fn load(self, db: &DatabaseModel) -> Result { Ok(match self { BackupTargetId::Disk { logicalname } => { BackupTargetFS::Disk(BlockDev::new(logicalname)) } - BackupTargetId::Cifs { id } => BackupTargetFS::Cifs(cifs::load(secrets, id).await?), + BackupTargetId::Cifs { id } => BackupTargetFS::Cifs(cifs::load(db, id)?), }) } } @@ -84,6 +84,12 @@ impl std::str::FromStr for BackupTargetId { } } } +impl ValueParserFactory for BackupTargetId { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} impl<'de> Deserialize<'de> for BackupTargetId { fn deserialize(deserializer: D) -> Result where @@ -103,14 +109,13 @@ impl Serialize for BackupTargetId { #[derive(Debug, 
Deserialize, Serialize)] #[serde(tag = "type")] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum BackupTargetFS { Disk(BlockDev), Cifs(Cifs), } -#[async_trait] impl FileSystem for BackupTargetFS { - async fn mount + Send + Sync>( + async fn mount + Send>( &self, mountpoint: P, mount_type: MountType, @@ -130,19 +135,33 @@ impl FileSystem for BackupTargetFS { } } -#[command(subcommands(cifs::cifs, list, info, mount, umount))] -pub fn target() -> Result<(), Error> { - Ok(()) +// #[command(subcommands(cifs::cifs, list, info, mount, umount))] +pub fn target() -> ParentHandler { + ParentHandler::new() + .subcommand("cifs", cifs::cifs()) + .subcommand( + "list", + from_fn_async(list) + .with_display_serializable() + .with_remote_cli::(), + ) + .subcommand( + "info", + from_fn_async(info) + .with_display_serializable() + .with_custom_display_fn::(|params, info| { + Ok(display_backup_info(params.params, info)) + }) + .with_remote_cli::(), + ) } -#[command(display(display_serializable))] -pub async fn list( - #[context] ctx: RpcContext, -) -> Result, Error> { - let mut sql_handle = ctx.secret_store.acquire().await?; +// #[command(display(display_serializable))] +pub async fn list(ctx: RpcContext) -> Result, Error> { + let peek = ctx.db.peek().await; let (disks_res, cifs) = tokio::try_join!( crate::disk::util::list(&ctx.os_partitions), - cifs::list(sql_handle.as_mut()), + cifs::list(&peek), )?; Ok(disks_res .into_iter() @@ -171,7 +190,7 @@ pub async fn list( } #[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct BackupInfo { pub version: Version, pub timestamp: Option>, @@ -179,7 +198,7 @@ pub struct BackupInfo { } #[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct PackageBackupInfo { pub title: String, pub version: Version, @@ -187,11 +206,11 @@ pub struct PackageBackupInfo { pub timestamp: DateTime, } -fn display_backup_info(info: BackupInfo, matches: &ArgMatches) { +fn display_backup_info(params: WithIoFormat, info: BackupInfo) { use prettytable::*; - if matches.is_present("format") { - return display_serializable(info, matches); + if let Some(format) = params.format { + return display_serializable(format, info); } let mut table = Table::new(); @@ -223,21 +242,24 @@ fn display_backup_info(info: BackupInfo, matches: &ArgMatches) { table.print_tty(false).unwrap(); } -#[command(display(display_backup_info))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct InfoParams { + target_id: BackupTargetId, + password: String, +} + #[instrument(skip(ctx, password))] pub async fn info( - #[context] ctx: RpcContext, - #[arg(rename = "target-id")] target_id: BackupTargetId, - #[arg] password: String, + ctx: RpcContext, + InfoParams { + target_id, + password, + }: InfoParams, ) -> Result { let guard = BackupMountGuard::mount( - TmpMountGuard::mount( - &target_id - .load(ctx.secret_store.acquire().await?.as_mut()) - .await?, - ReadWrite, - ) - .await?, + TmpMountGuard::mount(&target_id.load(&ctx.db.peek().await)?, ReadWrite).await?, &password, ) .await?; @@ -254,45 +276,51 @@ lazy_static::lazy_static! 
{ Mutex::new(BTreeMap::new()); } -#[command] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct MountParams { + target_id: BackupTargetId, + password: String, +} + #[instrument(skip_all)] pub async fn mount( - #[context] ctx: RpcContext, - #[arg(rename = "target-id")] target_id: BackupTargetId, - #[arg] password: String, + ctx: RpcContext, + MountParams { + target_id, + password, + }: MountParams, ) -> Result { let mut mounts = USER_MOUNTS.lock().await; if let Some(existing) = mounts.get(&target_id) { - return Ok(existing.as_ref().display().to_string()); + return Ok(existing.path().display().to_string()); } let guard = BackupMountGuard::mount( - TmpMountGuard::mount( - &target_id - .clone() - .load(ctx.secret_store.acquire().await?.as_mut()) - .await?, - ReadWrite, - ) - .await?, + TmpMountGuard::mount(&target_id.clone().load(&ctx.db.peek().await)?, ReadWrite).await?, &password, ) .await?; - let res = guard.as_ref().display().to_string(); + let res = guard.path().display().to_string(); mounts.insert(target_id, guard); Ok(res) } -#[command(display(display_none))] + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct UmountParams { + target_id: Option, +} + #[instrument(skip_all)] -pub async fn umount( - #[context] _ctx: RpcContext, - #[arg(rename = "target-id")] target_id: Option, -) -> Result<(), Error> { - let mut mounts = USER_MOUNTS.lock().await; +pub async fn umount(_: RpcContext, UmountParams { target_id }: UmountParams) -> Result<(), Error> { + let mut mounts = USER_MOUNTS.lock().await; // TODO: move to context if let Some(target_id) = target_id { if let Some(existing) = mounts.remove(&target_id) { existing.unmount().await?; diff --git a/core/startos/src/bins/avahi_alias.rs b/core/startos/src/bins/avahi_alias.rs deleted file mode 100644 index 3c4a4fe7e7..0000000000 --- a/core/startos/src/bins/avahi_alias.rs +++ /dev/null @@ -1,163 +0,0 @@ -use avahi_sys::{ - self, avahi_client_errno, avahi_entry_group_add_service, avahi_entry_group_commit, - avahi_strerror, AvahiClient, -}; - -fn log_str_error(action: &str, e: i32) { - unsafe { - let e_str = avahi_strerror(e); - eprintln!( - "Could not {}: {:?}", - action, - std::ffi::CStr::from_ptr(e_str) - ); - } -} - -pub fn main() { - let aliases: Vec<_> = std::env::args().skip(1).collect(); - unsafe { - let simple_poll = avahi_sys::avahi_simple_poll_new(); - let poll = avahi_sys::avahi_simple_poll_get(simple_poll); - let mut box_err = Box::pin(0 as i32); - let err_c: *mut i32 = box_err.as_mut().get_mut(); - let avahi_client = avahi_sys::avahi_client_new( - poll, - avahi_sys::AvahiClientFlags::AVAHI_CLIENT_NO_FAIL, - Some(client_callback), - std::ptr::null_mut(), - err_c, - ); - if avahi_client == std::ptr::null_mut::() { - log_str_error("create Avahi client", *box_err); - panic!("Failed to create Avahi Client"); - } - let group = avahi_sys::avahi_entry_group_new( - avahi_client, - Some(entry_group_callback), - std::ptr::null_mut(), - ); - if group == std::ptr::null_mut() { - log_str_error("create Avahi entry group", avahi_client_errno(avahi_client)); - panic!("Failed to create Avahi Entry Group"); - } - let mut hostname_buf = vec![0]; - let hostname_raw = avahi_sys::avahi_client_get_host_name_fqdn(avahi_client); - hostname_buf.extend_from_slice(std::ffi::CStr::from_ptr(hostname_raw).to_bytes_with_nul()); - let buflen = hostname_buf.len(); - debug_assert!(hostname_buf.ends_with(b".local\0")); 
- debug_assert!(!hostname_buf[..(buflen - 7)].contains(&b'.')); - // assume fixed length prefix on hostname due to local address - hostname_buf[0] = (buflen - 8) as u8; // set the prefix length to len - 8 (leading byte, .local, nul) for the main address - hostname_buf[buflen - 7] = 5; // set the prefix length to 5 for "local" - let mut res; - let http_tcp_cstr = - std::ffi::CString::new("_http._tcp").expect("Could not cast _http._tcp to c string"); - res = avahi_entry_group_add_service( - group, - avahi_sys::AVAHI_IF_UNSPEC, - avahi_sys::AVAHI_PROTO_UNSPEC, - avahi_sys::AvahiPublishFlags_AVAHI_PUBLISH_USE_MULTICAST, - hostname_raw, - http_tcp_cstr.as_ptr(), - std::ptr::null(), - std::ptr::null(), - 443, - // below is a secret final argument that the type signature of this function does not tell you that it - // needs. This is because the C lib function takes a variable number of final arguments indicating the - // desired TXT records to add to this service entry. The way it decides when to stop taking arguments - // from the stack and dereferencing them is when it finds a null pointer...because fuck you, that's why. - // The consequence of this is that forgetting this last argument will cause segfaults or other undefined - // behavior. Welcome back to the stone age motherfucker. - std::ptr::null::(), - ); - if res < avahi_sys::AVAHI_OK { - log_str_error("add service to Avahi entry group", res); - panic!("Failed to load Avahi services"); - } - eprintln!("Published {:?}", std::ffi::CStr::from_ptr(hostname_raw)); - for alias in aliases { - let lan_address = alias + ".local"; - let lan_address_ptr = std::ffi::CString::new(lan_address) - .expect("Could not cast lan address to c string"); - res = avahi_sys::avahi_entry_group_add_record( - group, - avahi_sys::AVAHI_IF_UNSPEC, - avahi_sys::AVAHI_PROTO_UNSPEC, - avahi_sys::AvahiPublishFlags_AVAHI_PUBLISH_USE_MULTICAST - | avahi_sys::AvahiPublishFlags_AVAHI_PUBLISH_ALLOW_MULTIPLE, - lan_address_ptr.as_ptr(), - avahi_sys::AVAHI_DNS_CLASS_IN as u16, - avahi_sys::AVAHI_DNS_TYPE_CNAME as u16, - avahi_sys::AVAHI_DEFAULT_TTL, - hostname_buf.as_ptr().cast(), - hostname_buf.len(), - ); - if res < avahi_sys::AVAHI_OK { - log_str_error("add CNAME record to Avahi entry group", res); - panic!("Failed to load Avahi services"); - } - eprintln!("Published {:?}", lan_address_ptr); - } - let commit_err = avahi_entry_group_commit(group); - if commit_err < avahi_sys::AVAHI_OK { - log_str_error("reset Avahi entry group", commit_err); - panic!("Failed to load Avahi services: reset"); - } - } - std::thread::park() -} - -unsafe extern "C" fn entry_group_callback( - _group: *mut avahi_sys::AvahiEntryGroup, - state: avahi_sys::AvahiEntryGroupState, - _userdata: *mut core::ffi::c_void, -) { - match state { - avahi_sys::AvahiEntryGroupState_AVAHI_ENTRY_GROUP_FAILURE => { - eprintln!("AvahiCallback: EntryGroupState = AVAHI_ENTRY_GROUP_FAILURE"); - } - avahi_sys::AvahiEntryGroupState_AVAHI_ENTRY_GROUP_COLLISION => { - eprintln!("AvahiCallback: EntryGroupState = AVAHI_ENTRY_GROUP_COLLISION"); - } - avahi_sys::AvahiEntryGroupState_AVAHI_ENTRY_GROUP_UNCOMMITED => { - eprintln!("AvahiCallback: EntryGroupState = AVAHI_ENTRY_GROUP_UNCOMMITED"); - } - avahi_sys::AvahiEntryGroupState_AVAHI_ENTRY_GROUP_ESTABLISHED => { - eprintln!("AvahiCallback: EntryGroupState = AVAHI_ENTRY_GROUP_ESTABLISHED"); - } - avahi_sys::AvahiEntryGroupState_AVAHI_ENTRY_GROUP_REGISTERING => { - eprintln!("AvahiCallback: EntryGroupState = AVAHI_ENTRY_GROUP_REGISTERING"); - } - other => { - 
eprintln!("AvahiCallback: EntryGroupState = {}", other); - } - } -} - -unsafe extern "C" fn client_callback( - _group: *mut avahi_sys::AvahiClient, - state: avahi_sys::AvahiClientState, - _userdata: *mut core::ffi::c_void, -) { - match state { - avahi_sys::AvahiClientState_AVAHI_CLIENT_FAILURE => { - eprintln!("AvahiCallback: ClientState = AVAHI_CLIENT_FAILURE"); - } - avahi_sys::AvahiClientState_AVAHI_CLIENT_S_RUNNING => { - eprintln!("AvahiCallback: ClientState = AVAHI_CLIENT_S_RUNNING"); - } - avahi_sys::AvahiClientState_AVAHI_CLIENT_CONNECTING => { - eprintln!("AvahiCallback: ClientState = AVAHI_CLIENT_CONNECTING"); - } - avahi_sys::AvahiClientState_AVAHI_CLIENT_S_COLLISION => { - eprintln!("AvahiCallback: ClientState = AVAHI_CLIENT_S_COLLISION"); - } - avahi_sys::AvahiClientState_AVAHI_CLIENT_S_REGISTERING => { - eprintln!("AvahiCallback: ClientState = AVAHI_CLIENT_S_REGISTERING"); - } - other => { - eprintln!("AvahiCallback: ClientState = {}", other); - } - } -} diff --git a/core/startos/src/bins/container_cli.rs b/core/startos/src/bins/container_cli.rs new file mode 100644 index 0000000000..a33a99131d --- /dev/null +++ b/core/startos/src/bins/container_cli.rs @@ -0,0 +1,38 @@ +use std::ffi::OsString; + +use rpc_toolkit::CliApp; +use serde_json::Value; + +use crate::service::cli::{ContainerCliContext, ContainerClientConfig}; +use crate::util::logger::EmbassyLogger; +use crate::version::{Current, VersionT}; + +lazy_static::lazy_static! { + static ref VERSION_STRING: String = Current::new().semver().to_string(); +} + +pub fn main(args: impl IntoIterator) { + EmbassyLogger::init(); + if let Err(e) = CliApp::new( + |cfg: ContainerClientConfig| Ok(ContainerCliContext::init(cfg)), + crate::service::service_effect_handler::service_effect_handler(), + ) + .run(args) + { + match e.data { + Some(Value::String(s)) => eprintln!("{}: {}", e.message, s), + Some(Value::Object(o)) => { + if let Some(Value::String(s)) = o.get("details") { + eprintln!("{}: {}", e.message, s); + if let Some(Value::String(s)) = o.get("debug") { + tracing::debug!("{}", s) + } + } + } + Some(a) => eprintln!("{}: {}", e.message, a), + None => eprintln!("{}", e.message), + } + + std::process::exit(e.code); + } +} diff --git a/core/startos/src/bins/mod.rs b/core/startos/src/bins/mod.rs index f9c88cae92..68f2802e0b 100644 --- a/core/startos/src/bins/mod.rs +++ b/core/startos/src/bins/mod.rs @@ -1,49 +1,54 @@ +use std::collections::VecDeque; +use std::ffi::OsString; use std::path::Path; -#[cfg(feature = "avahi-alias")] -pub mod avahi_alias; +#[cfg(feature = "container-runtime")] +pub mod container_cli; pub mod deprecated; #[cfg(feature = "cli")] pub mod start_cli; -#[cfg(feature = "js-engine")] -pub mod start_deno; #[cfg(feature = "daemon")] pub mod start_init; -#[cfg(feature = "sdk")] -pub mod start_sdk; #[cfg(feature = "daemon")] pub mod startd; -fn select_executable(name: &str) -> Option { +fn select_executable(name: &str) -> Option)> { match name { - #[cfg(feature = "avahi-alias")] - "avahi-alias" => Some(avahi_alias::main), - #[cfg(feature = "js-engine")] - "start-deno" => Some(start_deno::main), #[cfg(feature = "cli")] "start-cli" => Some(start_cli::main), - #[cfg(feature = "sdk")] - "start-sdk" => Some(start_sdk::main), + #[cfg(feature = "container-runtime")] + "start-cli" => Some(container_cli::main), #[cfg(feature = "daemon")] "startd" => Some(startd::main), - "embassy-cli" => Some(|| deprecated::renamed("embassy-cli", "start-cli")), - "embassy-sdk" => Some(|| deprecated::renamed("embassy-sdk", "start-sdk")), - 
"embassyd" => Some(|| deprecated::renamed("embassyd", "startd")), - "embassy-init" => Some(|| deprecated::removed("embassy-init")), + "embassy-cli" => Some(|_| deprecated::renamed("embassy-cli", "start-cli")), + "embassy-sdk" => Some(|_| deprecated::renamed("embassy-sdk", "start-sdk")), + "embassyd" => Some(|_| deprecated::renamed("embassyd", "startd")), + "embassy-init" => Some(|_| deprecated::removed("embassy-init")), _ => None, } } pub fn startbox() { - let args = std::env::args().take(2).collect::>(); - let executable = args - .get(0) - .and_then(|s| Path::new(&*s).file_name()) - .and_then(|s| s.to_str()); - if let Some(x) = executable.and_then(|s| select_executable(&s)) { - x() - } else { - eprintln!("unknown executable: {}", executable.unwrap_or("N/A")); - std::process::exit(1); + let mut args = std::env::args_os().collect::>(); + for _ in 0..2 { + if let Some(s) = args.pop_front() { + if let Some(x) = Path::new(&*s) + .file_name() + .and_then(|s| s.to_str()) + .and_then(|s| select_executable(&s)) + { + args.push_front(s); + return x(args); + } + } } + let args = std::env::args().collect::>(); + eprintln!( + "unknown executable: {}", + args.get(1) + .or_else(|| args.get(0)) + .map(|s| s.as_str()) + .unwrap_or("N/A") + ); + std::process::exit(1); } diff --git a/core/startos/src/bins/start_cli.rs b/core/startos/src/bins/start_cli.rs index 3ef64096ea..374247f2e8 100644 --- a/core/startos/src/bins/start_cli.rs +++ b/core/startos/src/bins/start_cli.rs @@ -1,62 +1,39 @@ -use clap::Arg; -use rpc_toolkit::run_cli; -use rpc_toolkit::yajrc::RpcError; +use std::ffi::OsString; + +use rpc_toolkit::CliApp; use serde_json::Value; +use crate::context::config::ClientConfig; use crate::context::CliContext; use crate::util::logger::EmbassyLogger; use crate::version::{Current, VersionT}; -use crate::Error; lazy_static::lazy_static! { static ref VERSION_STRING: String = Current::new().semver().to_string(); } -fn inner_main() -> Result<(), Error> { - run_cli!({ - command: crate::main_api, - app: app => app - .name("StartOS CLI") - .version(&**VERSION_STRING) - .arg( - clap::Arg::with_name("config") - .short('c') - .long("config") - .takes_value(true), - ) - .arg(Arg::with_name("host").long("host").short('h').takes_value(true)) - .arg(Arg::with_name("proxy").long("proxy").short('p').takes_value(true)), - context: matches => { - EmbassyLogger::init(); - CliContext::init(matches)? 
- }, - exit: |e: RpcError| { - match e.data { - Some(Value::String(s)) => eprintln!("{}: {}", e.message, s), - Some(Value::Object(o)) => if let Some(Value::String(s)) = o.get("details") { +pub fn main(args: impl IntoIterator) { + EmbassyLogger::init(); + if let Err(e) = CliApp::new( + |cfg: ClientConfig| Ok(CliContext::init(cfg.load()?)?), + crate::main_api(), + ) + .run(args) + { + match e.data { + Some(Value::String(s)) => eprintln!("{}: {}", e.message, s), + Some(Value::Object(o)) => { + if let Some(Value::String(s)) = o.get("details") { eprintln!("{}: {}", e.message, s); if let Some(Value::String(s)) = o.get("debug") { tracing::debug!("{}", s) } } - Some(a) => eprintln!("{}: {}", e.message, a), - None => eprintln!("{}", e.message), } - - std::process::exit(e.code); + Some(a) => eprintln!("{}: {}", e.message, a), + None => eprintln!("{}", e.message), } - }); - Ok(()) -} -pub fn main() { - match inner_main() { - Ok(_) => (), - Err(e) => { - eprintln!("{}", e.source); - tracing::debug!("{:?}", e.source); - drop(e.source); - std::process::exit(e.kind as i32) - } + std::process::exit(e.code); } } diff --git a/core/startos/src/bins/start_deno.rs b/core/startos/src/bins/start_deno.rs deleted file mode 100644 index 8f5a1451a0..0000000000 --- a/core/startos/src/bins/start_deno.rs +++ /dev/null @@ -1,140 +0,0 @@ -use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{command, run_cli, Context}; -use serde_json::Value; - -use crate::procedure::js_scripts::ExecuteArgs; -use crate::s9pk::manifest::PackageId; -use crate::util::serde::{display_serializable, parse_stdin_deserializable, IoFormat}; -use crate::version::{Current, VersionT}; -use crate::Error; - -lazy_static::lazy_static! { - static ref VERSION_STRING: String = Current::new().semver().to_string(); -} - -struct DenoContext; -impl Context for DenoContext {} - -#[command(subcommands(execute, sandbox))] -fn deno_api() -> Result<(), Error> { - Ok(()) -} - -#[command(cli_only, display(display_serializable))] -async fn execute( - #[arg(stdin, parse(parse_stdin_deserializable))] arg: ExecuteArgs, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result, Error> { - let ExecuteArgs { - procedure, - directory, - pkg_id, - pkg_version, - name, - volumes, - input, - } = arg; - PackageLogger::init(&pkg_id); - procedure - .execute_impl(&directory, &pkg_id, &pkg_version, name, &volumes, input) - .await -} -#[command(cli_only, display(display_serializable))] -async fn sandbox( - #[arg(stdin, parse(parse_stdin_deserializable))] arg: ExecuteArgs, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result, Error> { - let ExecuteArgs { - procedure, - directory, - pkg_id, - pkg_version, - name, - volumes, - input, - } = arg; - PackageLogger::init(&pkg_id); - procedure - .sandboxed_impl(&directory, &pkg_id, &pkg_version, &volumes, input, name) - .await -} - -use tracing::Subscriber; -use tracing_subscriber::util::SubscriberInitExt; - -#[derive(Clone)] -struct PackageLogger {} - -impl PackageLogger { - fn base_subscriber(id: &PackageId) -> impl Subscriber { - use tracing_error::ErrorLayer; - use tracing_subscriber::prelude::*; - use tracing_subscriber::{fmt, EnvFilter}; - - let filter_layer = EnvFilter::default().add_directive( - format!("{}=warn", std::module_path!().split("::").next().unwrap()) - .parse() - .unwrap(), - ); - let fmt_layer = fmt::layer().with_writer(std::io::stderr).with_target(true); - let journald_layer = tracing_journald::layer() - .unwrap() - 
.with_syslog_identifier(format!("{id}.embassy")); - - let sub = tracing_subscriber::registry() - .with(filter_layer) - .with(fmt_layer) - .with(journald_layer) - .with(ErrorLayer::default()); - - sub - } - pub fn init(id: &PackageId) -> Self { - Self::base_subscriber(id).init(); - color_eyre::install().unwrap_or_else(|_| tracing::warn!("tracing too many times")); - - Self {} - } -} - -fn inner_main() -> Result<(), Error> { - run_cli!({ - command: deno_api, - app: app => app - .name("StartOS Deno Executor") - .version(&**VERSION_STRING), - context: _m => DenoContext, - exit: |e: RpcError| { - match e.data { - Some(Value::String(s)) => eprintln!("{}: {}", e.message, s), - Some(Value::Object(o)) => if let Some(Value::String(s)) = o.get("details") { - eprintln!("{}: {}", e.message, s); - if let Some(Value::String(s)) = o.get("debug") { - tracing::debug!("{}", s) - } - } - Some(a) => eprintln!("{}: {}", e.message, a), - None => eprintln!("{}", e.message), - } - - std::process::exit(e.code); - } - }); - Ok(()) -} - -pub fn main() { - match inner_main() { - Ok(_) => (), - Err(e) => { - eprintln!("{}", e.source); - tracing::debug!("{:?}", e.source); - drop(e.source); - std::process::exit(e.kind as i32) - } - } -} diff --git a/core/startos/src/bins/start_init.rs b/core/startos/src/bins/start_init.rs index 1cb0708512..284748339d 100644 --- a/core/startos/src/bins/start_init.rs +++ b/core/startos/src/bins/start_init.rs @@ -1,5 +1,5 @@ use std::net::{Ipv6Addr, SocketAddr}; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::sync::Arc; use std::time::Duration; @@ -7,7 +7,7 @@ use helpers::NonDetachingJoinHandle; use tokio::process::Command; use tracing::instrument; -use crate::context::rpc::RpcContextConfig; +use crate::context::config::ServerConfig; use crate::context::{DiagnosticContext, InstallContext, SetupContext}; use crate::disk::fsck::{RepairStrategy, RequiresReboot}; use crate::disk::main::DEFAULT_PASSWORD; @@ -21,7 +21,7 @@ use crate::util::Invoke; use crate::{Error, ErrorKind, ResultExt, PLATFORM}; #[instrument(skip_all)] -async fn setup_or_init(cfg_path: Option) -> Result, Error> { +async fn setup_or_init(config: &ServerConfig) -> Result, Error> { let song = NonDetachingJoinHandle::from(tokio::spawn(async { loop { BEP.play().await.unwrap(); @@ -82,13 +82,12 @@ async fn setup_or_init(cfg_path: Option) -> Result, Er .invoke(crate::ErrorKind::OpenSsh) .await?; - let ctx = InstallContext::init(cfg_path).await?; + let ctx = InstallContext::init().await?; let server = WebServer::install( SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), ctx.clone(), - ) - .await?; + )?; drop(song); tokio::time::sleep(Duration::from_secs(1)).await; // let the record state that I hate this @@ -109,26 +108,24 @@ async fn setup_or_init(cfg_path: Option) -> Result, Er .await .is_err() { - let ctx = SetupContext::init(cfg_path).await?; + let ctx = SetupContext::init(config)?; let server = WebServer::setup( SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), ctx.clone(), - ) - .await?; + )?; drop(song); tokio::time::sleep(Duration::from_secs(1)).await; // let the record state that I hate this CHIME.play().await?; - ctx.shutdown - .subscribe() - .recv() - .await - .expect("context dropped"); + let mut shutdown = ctx.shutdown.subscribe(); + shutdown.recv().await.expect("context dropped"); server.shutdown().await; + drop(shutdown); + tokio::task::yield_now().await; if let Err(e) = Command::new("killall") .arg("firefox-esr") @@ -139,13 +136,12 @@ async fn setup_or_init(cfg_path: Option) -> Result, Er 
tracing::debug!("{:?}", e); } } else { - let cfg = RpcContextConfig::load(cfg_path).await?; let guid_string = tokio::fs::read_to_string("/media/embassy/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy .await?; let guid = guid_string.trim(); let requires_reboot = crate::disk::main::import( guid, - cfg.datadir(), + config.datadir(), if tokio::fs::metadata(REPAIR_DISK_PATH).await.is_ok() { RepairStrategy::Aggressive } else { @@ -164,13 +160,13 @@ async fn setup_or_init(cfg_path: Option) -> Result, Er .with_ctx(|_| (crate::ErrorKind::Filesystem, REPAIR_DISK_PATH))?; } if requires_reboot.0 { - crate::disk::main::export(guid, cfg.datadir()).await?; + crate::disk::main::export(guid, config.datadir()).await?; Command::new("reboot") .invoke(crate::ErrorKind::Unknown) .await?; } tracing::info!("Loaded Disk"); - crate::init::init(&cfg).await?; + crate::init::init(config).await?; drop(song); } @@ -196,7 +192,7 @@ async fn run_script_if_exists>(path: P) { } #[instrument(skip_all)] -async fn inner_main(cfg_path: Option) -> Result, Error> { +async fn inner_main(config: &ServerConfig) -> Result, Error> { if &*PLATFORM == "raspberrypi" && tokio::fs::metadata(STANDBY_MODE_PATH).await.is_ok() { tokio::fs::remove_file(STANDBY_MODE_PATH).await?; Command::new("sync").invoke(ErrorKind::Filesystem).await?; @@ -208,7 +204,7 @@ async fn inner_main(cfg_path: Option) -> Result, Error run_script_if_exists("/media/embassy/config/preinit.sh").await; - let res = match setup_or_init(cfg_path.clone()).await { + let res = match setup_or_init(config).await { Err(e) => { async move { tracing::error!("{}", e.source); @@ -216,7 +212,7 @@ async fn inner_main(cfg_path: Option) -> Result, Error crate::sound::BEETHOVEN.play().await?; let ctx = DiagnosticContext::init( - cfg_path, + config, if tokio::fs::metadata("/media/embassy/config/disk.guid") .await .is_ok() @@ -231,14 +227,12 @@ async fn inner_main(cfg_path: Option) -> Result, Error None }, e, - ) - .await?; + )?; let server = WebServer::diagnostic( SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), ctx.clone(), - ) - .await?; + )?; let shutdown = ctx.shutdown.subscribe().recv().await.unwrap(); @@ -256,23 +250,13 @@ async fn inner_main(cfg_path: Option) -> Result, Error res } -pub fn main() { - let matches = clap::App::new("start-init") - .arg( - clap::Arg::with_name("config") - .short('c') - .long("config") - .takes_value(true), - ) - .get_matches(); - - let cfg_path = matches.value_of("config").map(|p| Path::new(p).to_owned()); +pub fn main(config: &ServerConfig) { let res = { let rt = tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .expect("failed to initialize runtime"); - rt.block_on(inner_main(cfg_path)) + rt.block_on(inner_main(config)) }; match res { diff --git a/core/startos/src/bins/start_sdk.rs b/core/startos/src/bins/start_sdk.rs deleted file mode 100644 index 10219c4853..0000000000 --- a/core/startos/src/bins/start_sdk.rs +++ /dev/null @@ -1,61 +0,0 @@ -use rpc_toolkit::run_cli; -use rpc_toolkit::yajrc::RpcError; -use serde_json::Value; - -use crate::context::SdkContext; -use crate::util::logger::EmbassyLogger; -use crate::version::{Current, VersionT}; -use crate::Error; - -lazy_static::lazy_static! 
{ - static ref VERSION_STRING: String = Current::new().semver().to_string(); -} - -fn inner_main() -> Result<(), Error> { - run_cli!({ - command: crate::portable_api, - app: app => app - .name("StartOS SDK") - .version(&**VERSION_STRING) - .arg( - clap::Arg::with_name("config") - .short('c') - .long("config") - .takes_value(true), - ), - context: matches => { - if let Err(_) = std::env::var("RUST_LOG") { - std::env::set_var("RUST_LOG", "embassy=warn,js_engine=warn"); - } - EmbassyLogger::init(); - SdkContext::init(matches)? - }, - exit: |e: RpcError| { - match e.data { - Some(Value::String(s)) => eprintln!("{}: {}", e.message, s), - Some(Value::Object(o)) => if let Some(Value::String(s)) = o.get("details") { - eprintln!("{}: {}", e.message, s); - if let Some(Value::String(s)) = o.get("debug") { - tracing::debug!("{}", s) - } - } - Some(a) => eprintln!("{}: {}", e.message, a), - None => eprintln!("{}", e.message), - } - std::process::exit(e.code); - } - }); - Ok(()) -} - -pub fn main() { - match inner_main() { - Ok(_) => (), - Err(e) => { - eprintln!("{}", e.source); - tracing::debug!("{:?}", e.source); - drop(e.source); - std::process::exit(e.kind as i32) - } - } -} diff --git a/core/startos/src/bins/startd.rs b/core/startos/src/bins/startd.rs index a773dd99a4..3e571d6b2e 100644 --- a/core/startos/src/bins/startd.rs +++ b/core/startos/src/bins/startd.rs @@ -1,12 +1,15 @@ +use std::ffi::OsString; use std::net::{Ipv6Addr, SocketAddr}; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::sync::Arc; +use clap::Parser; use color_eyre::eyre::eyre; use futures::{FutureExt, TryFutureExt}; use tokio::signal::unix::signal; use tracing::instrument; +use crate::context::config::ServerConfig; use crate::context::{DiagnosticContext, RpcContext}; use crate::net::web_server::WebServer; use crate::shutdown::Shutdown; @@ -15,10 +18,10 @@ use crate::util::logger::EmbassyLogger; use crate::{Error, ErrorKind, ResultExt}; #[instrument(skip_all)] -async fn inner_main(cfg_path: Option) -> Result, Error> { +async fn inner_main(config: &ServerConfig) -> Result, Error> { let (rpc_ctx, server, shutdown) = async { let rpc_ctx = RpcContext::init( - cfg_path, + config, Arc::new( tokio::fs::read_to_string("/media/embassy/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy .await? 
@@ -31,8 +34,7 @@ async fn inner_main(cfg_path: Option) -> Result, Error let server = WebServer::main( SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), rpc_ctx.clone(), - ) - .await?; + )?; let mut shutdown_recv = rpc_ctx.shutdown.subscribe(); @@ -102,32 +104,23 @@ async fn inner_main(cfg_path: Option) -> Result, Error Ok(shutdown) } -pub fn main() { +pub fn main(args: impl IntoIterator) { EmbassyLogger::init(); + let config = ServerConfig::parse_from(args).load().unwrap(); + if !Path::new("/run/embassy/initialized").exists() { - super::start_init::main(); + super::start_init::main(&config); std::fs::write("/run/embassy/initialized", "").unwrap(); } - let matches = clap::App::new("startd") - .arg( - clap::Arg::with_name("config") - .short('c') - .long("config") - .takes_value(true), - ) - .get_matches(); - - let cfg_path = matches.value_of("config").map(|p| Path::new(p).to_owned()); - let res = { let rt = tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .expect("failed to initialize runtime"); rt.block_on(async { - match inner_main(cfg_path.clone()).await { + match inner_main(&config).await { Ok(a) => Ok(a), Err(e) => { async { @@ -135,7 +128,7 @@ pub fn main() { tracing::debug!("{:?}", e.source); crate::sound::BEETHOVEN.play().await?; let ctx = DiagnosticContext::init( - cfg_path, + &config, if tokio::fs::metadata("/media/embassy/config/disk.guid") .await .is_ok() @@ -150,14 +143,12 @@ pub fn main() { None }, e, - ) - .await?; + )?; let server = WebServer::diagnostic( SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), ctx.clone(), - ) - .await?; + )?; let mut shutdown = ctx.shutdown.subscribe(); diff --git a/core/startos/src/config/action.rs b/core/startos/src/config/action.rs index 27cd1683fa..ef26571a76 100644 --- a/core/startos/src/config/action.rs +++ b/core/startos/src/config/action.rs @@ -1,116 +1,22 @@ use std::collections::{BTreeMap, BTreeSet}; -use color_eyre::eyre::eyre; -use models::ImageId; -use patch_db::HasModel; +use models::PackageId; use serde::{Deserialize, Serialize}; -use tracing::instrument; use super::{Config, ConfigSpec}; -use crate::context::RpcContext; -use crate::dependencies::Dependencies; +#[allow(unused_imports)] use crate::prelude::*; -use crate::procedure::docker::DockerContainers; -use crate::procedure::{PackageProcedure, ProcedureName}; -use crate::s9pk::manifest::PackageId; use crate::status::health_check::HealthCheckId; -use crate::util::Version; -use crate::volume::Volumes; -use crate::{Error, ResultExt}; #[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct ConfigRes { pub config: Option, pub spec: ConfigSpec, } -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[model = "Model"] -pub struct ConfigActions { - pub get: PackageProcedure, - pub set: PackageProcedure, -} -impl ConfigActions { - #[instrument(skip_all)] - pub fn validate( - &self, - _container: &Option, - eos_version: &Version, - volumes: &Volumes, - image_ids: &BTreeSet, - ) -> Result<(), Error> { - self.get - .validate(eos_version, volumes, image_ids, true) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, "Config Get"))?; - self.set - .validate(eos_version, volumes, image_ids, true) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, "Config Set"))?; - Ok(()) - } - #[instrument(skip_all)] - pub async fn get( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - ) -> Result { - self.get - .execute( - ctx, - pkg_id, - pkg_version, - 
ProcedureName::GetConfig, - volumes, - None::<()>, - None, - ) - .await - .and_then(|res| { - res.map_err(|e| Error::new(eyre!("{}", e.1), crate::ErrorKind::ConfigGen)) - }) - } - - #[instrument(skip_all)] - pub async fn set( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - dependencies: &Dependencies, - volumes: &Volumes, - input: &Config, - ) -> Result { - let res: SetResult = self - .set - .execute( - ctx, - pkg_id, - pkg_version, - ProcedureName::SetConfig, - volumes, - Some(input), - None, - ) - .await - .and_then(|res| { - res.map_err(|e| { - Error::new(eyre!("{}", e.1), crate::ErrorKind::ConfigRulesViolation) - }) - })?; - Ok(SetResult { - depends_on: res - .depends_on - .into_iter() - .filter(|(pkg, _)| dependencies.0.contains_key(pkg)) - .collect(), - }) - } -} - #[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct SetResult { pub depends_on: BTreeMap>, } diff --git a/core/startos/src/config/mod.rs b/core/startos/src/config/mod.rs index 06e7770b06..18f67b05ac 100644 --- a/core/startos/src/config/mod.rs +++ b/core/startos/src/config/mod.rs @@ -1,34 +1,31 @@ -use std::collections::BTreeMap; -use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; +use clap::Parser; use color_eyre::eyre::eyre; -use indexmap::IndexSet; +use indexmap::{IndexMap, IndexSet}; use itertools::Itertools; -use models::{ErrorKind, OptionExt}; +use models::{ErrorKind, OptionExt, PackageId}; use patch_db::value::InternedString; use patch_db::Value; use regex::Regex; -use rpc_toolkit::command; +use rpc_toolkit::{from_fn_async, Empty, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; use tracing::instrument; -use crate::context::RpcContext; +use crate::context::{CliContext, RpcContext}; use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::util::display_none; -use crate::util::serde::{display_serializable, parse_stdin_deserializable, IoFormat}; -use crate::Error; +use crate::util::serde::{HandlerExtSerde, StdinDeserializable}; + +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +pub struct ConfigSpec(pub IndexMap); pub mod action; -pub mod spec; pub mod util; -pub use spec::{ConfigSpec, Defaultable}; use util::NumRange; use self::action::ConfigRes; -use self::spec::ValueSpecPointer; pub type Config = patch_db::value::InOMap; pub trait TypeOf { @@ -55,8 +52,6 @@ pub enum ConfigurationError { NoMatch(#[from] NoMatchWithPath), #[error("System Error: {0}")] SystemError(Error), - #[error("Permission Denied: {0}")] - PermissionDenied(ValueSpecPointer), } impl From for Error { fn from(err: ConfigurationError) -> Self { @@ -124,164 +119,101 @@ pub enum MatchError { PropertyMatchesUnionTag(InternedString, String), #[error("Name of Property {0:?} Conflicts With Map Tag Name")] PropertyNameMatchesMapTag(String), - #[error("Pointer Is Invalid: {0}")] - InvalidPointer(spec::ValueSpecPointer), #[error("Object Key Is Invalid: {0}")] InvalidKey(String), #[error("Value In List Is Not Unique")] ListUniquenessViolation, } -#[command(rename = "config-spec", cli_only, blocking, display(display_none))] -pub fn verify_spec(#[arg] path: PathBuf) -> Result<(), Error> { - let mut file = std::fs::File::open(&path)?; - let format = match path.extension().and_then(|s| s.to_str()) { - Some("yaml") | Some("yml") => IoFormat::Yaml, - Some("json") => IoFormat::Json, - Some("toml") => IoFormat::Toml, - Some("cbor") => IoFormat::Cbor, - _ => { - return Err(Error::new( - eyre!("Unknown file 
format. Expected one of yaml, json, toml, cbor."), - crate::ErrorKind::Deserialization, - )); - } - }; - let _: ConfigSpec = format.from_reader(&mut file)?; - - Ok(()) +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ConfigParams { + pub id: PackageId, } -#[command(subcommands(get, set))] -pub fn config(#[arg] id: PackageId) -> Result { - Ok(id) +// #[command(subcommands(get, set))] +pub fn config() -> ParentHandler { + ParentHandler::new() + .subcommand( + "get", + from_fn_async(get) + .with_inherited(|ConfigParams { id }, _| id) + .with_display_serializable() + .with_remote_cli::(), + ) + .subcommand("set", set().with_inherited(|ConfigParams { id }, _| id)) } -#[command(display(display_serializable))] #[instrument(skip_all)] -pub async fn get( - #[context] ctx: RpcContext, - #[parent_data] id: PackageId, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result { - let db = ctx.db.peek().await; - let manifest = db - .as_package_data() - .as_idx(&id) - .or_not_found(&id)? - .as_installed() - .or_not_found(&id)? - .as_manifest(); - let action = manifest - .as_config() - .de()? - .ok_or_else(|| Error::new(eyre!("{} has no config", id), crate::ErrorKind::NotFound))?; - - let volumes = manifest.as_volumes().de()?; - let version = manifest.as_version().de()?; - action.get(&ctx, &id, &version, &volumes).await +pub async fn get(ctx: RpcContext, _: Empty, id: PackageId) -> Result { + ctx.services + .get(&id) + .await + .as_ref() + .or_not_found(lazy_format!("Manager for {id}"))? + .get_config() + .await } -#[command( - subcommands(self(set_impl(async, context(RpcContext))), set_dry), - display(display_none), - metadata(sync_db = true) -)] -#[instrument(skip_all)] -pub fn set( - #[parent_data] id: PackageId, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, - #[arg(long = "timeout")] timeout: Option, - #[arg(stdin, parse(parse_stdin_deserializable))] config: Option, -) -> Result<(PackageId, Option, Option), Error> { - Ok((id, config, timeout.map(|d| *d))) +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +pub struct SetParams { + #[arg(long = "timeout")] + pub timeout: Option, + #[command(flatten)] + pub config: StdinDeserializable>, } -#[command(rename = "dry", display(display_serializable))] +// #[command( +// subcommands(self(set_impl(async, context(RpcContext))), set_dry), +// display(display_none), +// metadata(sync_db = true) +// )] #[instrument(skip_all)] -pub async fn set_dry( - #[context] ctx: RpcContext, - #[parent_data] (id, config, timeout): (PackageId, Option, Option), -) -> Result, Error> { - let breakages = BTreeMap::new(); - let overrides = Default::default(); - - let configure_context = ConfigureContext { - breakages, - timeout, - config, - dry_run: true, - overrides, - }; - let breakages = configure(&ctx, &id, configure_context).await?; - - Ok(breakages) +pub fn set() -> ParentHandler { + ParentHandler::new().root_handler( + from_fn_async(set_impl) + .with_metadata("sync_db", Value::Bool(true)) + .with_inherited(|set_params, id| (id, set_params)) + .no_display() + .with_remote_cli::(), + ) } +#[derive(Default)] pub struct ConfigureContext { - pub breakages: BTreeMap, pub timeout: Option, pub config: Option, - pub overrides: BTreeMap, - pub dry_run: bool, } #[instrument(skip_all)] pub async fn set_impl( ctx: RpcContext, - (id, config, timeout): (PackageId, Option, Option), + _: Empty, + ( + id, + SetParams { + timeout, + 
config: StdinDeserializable(config), + }, + ): (PackageId, SetParams), ) -> Result<(), Error> { - let breakages = BTreeMap::new(); - let overrides = Default::default(); - let configure_context = ConfigureContext { - breakages, - timeout, + timeout: timeout.map(|t| *t), config, - dry_run: false, - overrides, }; - configure(&ctx, &id, configure_context).await?; - Ok(()) -} - -#[instrument(skip_all)] -pub async fn configure( - ctx: &RpcContext, - id: &PackageId, - configure_context: ConfigureContext, -) -> Result, Error> { - let db = ctx.db.peek().await; - let package = db - .as_package_data() - .as_idx(id) - .or_not_found(&id)? - .as_installed() - .or_not_found(&id)?; - let version = package.as_manifest().as_version().de()?; - ctx.managers - .get(&(id.clone(), version.clone())) + ctx.services + .get(&id) .await + .as_ref() .ok_or_else(|| { Error::new( - eyre!("There is no manager running for {id:?} and {version:?}"), + eyre!("There is no manager running for {id}"), ErrorKind::Unknown, ) })? .configure(configure_context) - .await -} - -macro_rules! not_found { - ($x:expr) => { - crate::Error::new( - color_eyre::eyre::eyre!("Could not find {} at {}:{}", $x, module_path!(), line!()), - crate::ErrorKind::Incoherent, - ) - }; + .await?; + Ok(()) } -pub(crate) use not_found; diff --git a/core/startos/src/config/spec.rs b/core/startos/src/config/spec.rs deleted file mode 100644 index a98ad888d4..0000000000 --- a/core/startos/src/config/spec.rs +++ /dev/null @@ -1,2013 +0,0 @@ -use std::borrow::Cow; -use std::collections::{BTreeMap, BTreeSet}; -use std::fmt; -use std::fmt::Debug; -use std::hash::{Hash, Hasher}; -use std::iter::FromIterator; -use std::ops::RangeBounds; -use std::sync::Arc; -use std::time::Duration; - -use async_trait::async_trait; -use imbl::Vector; -use imbl_value::InternedString; -use indexmap::{IndexMap, IndexSet}; -use itertools::Itertools; -use jsonpath_lib::Compiled as CompiledJsonPath; -use patch_db::value::{Number, Value}; -use rand::{CryptoRng, Rng}; -use regex::Regex; -use serde::de::{MapAccess, Visitor}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use sqlx::PgPool; - -use super::util::{self, CharSet, NumRange, UniqueBy, STATIC_NULL}; -use super::{Config, MatchError, NoMatchWithPath, TimeoutError, TypeOf}; -use crate::config::ConfigurationError; -use crate::context::RpcContext; -use crate::net::interface::InterfaceId; -use crate::net::keys::Key; -use crate::prelude::*; -use crate::s9pk::manifest::{Manifest, PackageId}; - -// Config Value Specifications -#[async_trait] -pub trait ValueSpec { - // This function defines whether the value supplied in the argument is - // consistent with the spec in &self - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath>; - // This function checks whether the value spec is consistent with itself, - // since not all inVariant can be checked by the type - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath>; - // update is to fill in values for environment pointers recursively - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError>; - // returns all pointers that are live in the provided config - fn pointers(&self, value: &Value) -> Result, NoMatchWithPath>; - // requires returns whether the app id is the target of a pointer within it - fn requires(&self, id: &PackageId, value: &Value) -> bool; - // defines if 2 values of this type are equal for the purpose of uniqueness - fn eq(&self, lhs: 
&Value, rhs: &Value) -> bool; -} - -// Config Value Default Generation -// -// This behavior is defined by two independent traits as well as a third that -// represents a conjunction of those two traits: -// -// DefaultableWith - defines an associated type describing the information it -// needs to be able to generate a default value, as well as a function for -// extracting relevant pieces of that information and using it to actually -// generate the default value -// -// HasDefaultSpec - only purpose is to summon the default spec for the type -// -// Defaultable - this is a redundant trait that may replace 'DefaultableWith' -// and 'HasDefaultSpec'. -pub trait DefaultableWith { - type DefaultSpec: Sync; - type Error: std::error::Error; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - rng: &mut R, - timeout: &Option, - ) -> Result; -} -pub trait HasDefaultSpec: DefaultableWith { - fn default_spec(&self) -> &Self::DefaultSpec; -} - -pub trait Defaultable { - type Error; - - fn gen( - &self, - rng: &mut R, - timeout: &Option, - ) -> Result; -} -impl Defaultable for T -where - T: HasDefaultSpec + DefaultableWith + Sync, - E: std::error::Error, -{ - type Error = E; - - fn gen( - &self, - rng: &mut R, - timeout: &Option, - ) -> Result { - self.gen_with(self.default_spec(), rng, timeout) - } -} - -// WithDefault - trivial wrapper that pairs a 'DefaultableWith' type with a -// default spec -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct WithDefault { - #[serde(flatten)] - pub inner: T, - pub default: T::DefaultSpec, -} -impl DefaultableWith for WithDefault -where - T: DefaultableWith + Sync + Send, - T::DefaultSpec: Send, -{ - type DefaultSpec = T::DefaultSpec; - type Error = T::Error; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - rng: &mut R, - timeout: &Option, - ) -> Result { - self.inner.gen_with(spec, rng, timeout) - } -} -impl HasDefaultSpec for WithDefault -where - T: DefaultableWith + Sync + Send, - T::DefaultSpec: Send, -{ - fn default_spec(&self) -> &Self::DefaultSpec { - &self.default - } -} -#[async_trait] -impl ValueSpec for WithDefault -where - T: ValueSpec + DefaultableWith + Send + Sync, - Self: Send + Sync, -{ - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - self.inner.matches(value) - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - self.inner.validate(manifest) - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - self.inner - .update(ctx, manifest, config_overrides, value) - .await - } - fn pointers(&self, value: &Value) -> Result, NoMatchWithPath> { - self.inner.pointers(value) - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - self.inner.requires(id, value) - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - self.inner.eq(lhs, rhs) - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct WithNullable { - #[serde(flatten)] - pub inner: T, - pub nullable: bool, -} -#[async_trait] -impl ValueSpec for WithNullable -where - T: ValueSpec + Send + Sync, - Self: Send + Sync, -{ - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match (self.nullable, value) { - (true, &Value::Null) => Ok(()), - _ => self.inner.matches(value), - } - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - self.inner.validate(manifest) - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: 
&BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - self.inner - .update(ctx, manifest, config_overrides, value) - .await - } - fn pointers(&self, value: &Value) -> Result, NoMatchWithPath> { - self.inner.pointers(value) - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - self.inner.requires(id, value) - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - self.inner.eq(lhs, rhs) - } -} - -impl DefaultableWith for WithNullable -where - T: DefaultableWith + Sync + Send, -{ - type DefaultSpec = T::DefaultSpec; - type Error = T::Error; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - rng: &mut R, - timeout: &Option, - ) -> Result { - self.inner.gen_with(spec, rng, timeout) - } -} - -impl Defaultable for WithNullable -where - T: Defaultable + Sync + Send, -{ - type Error = T::Error; - - fn gen( - &self, - rng: &mut R, - timeout: &Option, - ) -> Result { - self.inner.gen(rng, timeout) - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct WithDescription { - #[serde(flatten)] - pub inner: T, - pub description: Option, - pub name: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub warning: Option, -} -#[async_trait] -impl ValueSpec for WithDescription -where - T: ValueSpec + Sync + Send, - Self: Sync + Send, -{ - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - self.inner.matches(value) - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - self.inner.validate(manifest) - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - self.inner - .update(ctx, manifest, config_overrides, value) - .await - } - fn pointers(&self, value: &Value) -> Result, NoMatchWithPath> { - self.inner.pointers(value) - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - self.inner.requires(id, value) - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - self.inner.eq(lhs, rhs) - } -} - -impl DefaultableWith for WithDescription -where - T: DefaultableWith + Sync + Send, -{ - type DefaultSpec = T::DefaultSpec; - type Error = T::Error; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - rng: &mut R, - timeout: &Option, - ) -> Result { - self.inner.gen_with(spec, rng, timeout) - } -} - -impl Defaultable for WithDescription -where - T: Defaultable + Sync + Send, -{ - type Error = T::Error; - - fn gen( - &self, - rng: &mut R, - timeout: &Option, - ) -> Result { - self.inner.gen(rng, timeout) - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -#[serde(tag = "type")] -pub enum ValueSpecAny { - Boolean(WithDescription>), - Enum(WithDescription>), - List(ValueSpecList), - Number(WithDescription>>), - Object(WithDescription), - String(WithDescription>>), - Union(WithDescription>), - Pointer(WithDescription), -} -impl ValueSpecAny { - pub fn name(&self) -> &'_ str { - match self { - ValueSpecAny::Boolean(b) => b.name.as_str(), - ValueSpecAny::Enum(e) => e.name.as_str(), - ValueSpecAny::List(l) => match l { - ValueSpecList::Enum(e) => e.name.as_str(), - ValueSpecList::Number(n) => n.name.as_str(), - ValueSpecList::Object(o) => o.name.as_str(), - ValueSpecList::String(s) => s.name.as_str(), - ValueSpecList::Union(u) => u.name.as_str(), - }, - ValueSpecAny::Number(n) => n.name.as_str(), - ValueSpecAny::Object(o) => o.name.as_str(), - ValueSpecAny::Pointer(p) => p.name.as_str(), - ValueSpecAny::String(s) => 
s.name.as_str(), - ValueSpecAny::Union(u) => u.name.as_str(), - } - } -} -#[async_trait] -impl ValueSpec for ValueSpecAny { - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match self { - ValueSpecAny::Boolean(a) => a.matches(value), - ValueSpecAny::Enum(a) => a.matches(value), - ValueSpecAny::List(a) => a.matches(value), - ValueSpecAny::Number(a) => a.matches(value), - ValueSpecAny::Object(a) => a.matches(value), - ValueSpecAny::String(a) => a.matches(value), - ValueSpecAny::Union(a) => a.matches(value), - ValueSpecAny::Pointer(a) => a.matches(value), - } - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - match self { - ValueSpecAny::Boolean(a) => a.validate(manifest), - ValueSpecAny::Enum(a) => a.validate(manifest), - ValueSpecAny::List(a) => a.validate(manifest), - ValueSpecAny::Number(a) => a.validate(manifest), - ValueSpecAny::Object(a) => a.validate(manifest), - ValueSpecAny::String(a) => a.validate(manifest), - ValueSpecAny::Union(a) => a.validate(manifest), - ValueSpecAny::Pointer(a) => a.validate(manifest), - } - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - match self { - ValueSpecAny::Boolean(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecAny::Enum(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecAny::List(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecAny::Number(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecAny::Object(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecAny::String(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecAny::Union(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecAny::Pointer(a) => a.update(ctx, manifest, config_overrides, value).await, - } - } - fn pointers(&self, value: &Value) -> Result, NoMatchWithPath> { - match self { - ValueSpecAny::Boolean(a) => a.pointers(value), - ValueSpecAny::Enum(a) => a.pointers(value), - ValueSpecAny::List(a) => a.pointers(value), - ValueSpecAny::Number(a) => a.pointers(value), - ValueSpecAny::Object(a) => a.pointers(value), - ValueSpecAny::String(a) => a.pointers(value), - ValueSpecAny::Union(a) => a.pointers(value), - ValueSpecAny::Pointer(a) => a.pointers(value), - } - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - match self { - ValueSpecAny::Boolean(a) => a.requires(id, value), - ValueSpecAny::Enum(a) => a.requires(id, value), - ValueSpecAny::List(a) => a.requires(id, value), - ValueSpecAny::Number(a) => a.requires(id, value), - ValueSpecAny::Object(a) => a.requires(id, value), - ValueSpecAny::String(a) => a.requires(id, value), - ValueSpecAny::Union(a) => a.requires(id, value), - ValueSpecAny::Pointer(a) => a.requires(id, value), - } - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match self { - ValueSpecAny::Boolean(a) => a.eq(lhs, rhs), - ValueSpecAny::Enum(a) => a.eq(lhs, rhs), - ValueSpecAny::List(a) => a.eq(lhs, rhs), - ValueSpecAny::Number(a) => a.eq(lhs, rhs), - ValueSpecAny::Object(a) => a.eq(lhs, rhs), - ValueSpecAny::String(a) => a.eq(lhs, rhs), - ValueSpecAny::Union(a) => a.eq(lhs, rhs), - ValueSpecAny::Pointer(a) => a.eq(lhs, rhs), - } - } -} -impl Defaultable for ValueSpecAny { - type Error = ConfigurationError; - - fn gen( - &self, - rng: &mut R, - timeout: &Option, - ) -> Result { - match self { - ValueSpecAny::Boolean(a) => 
a.gen(rng, timeout).map_err(crate::util::Never::absurd), - ValueSpecAny::Enum(a) => a.gen(rng, timeout).map_err(crate::util::Never::absurd), - ValueSpecAny::List(a) => a.gen(rng, timeout), - ValueSpecAny::Number(a) => a.gen(rng, timeout).map_err(crate::util::Never::absurd), - ValueSpecAny::Object(a) => a.gen(rng, timeout), - ValueSpecAny::String(a) => a.gen(rng, timeout).map_err(ConfigurationError::from), - ValueSpecAny::Union(a) => a.gen(rng, timeout), - ValueSpecAny::Pointer(a) => a.gen(rng, timeout), - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ValueSpecBoolean {} -#[async_trait] -impl ValueSpec for ValueSpecBoolean { - fn matches(&self, val: &Value) -> Result<(), NoMatchWithPath> { - match val { - Value::Bool(_) => Ok(()), - Value::Null => Err(NoMatchWithPath::new(MatchError::NotNullable)), - a => Err(NoMatchWithPath::new(MatchError::InvalidType( - "boolean", - a.type_of(), - ))), - } - } - fn validate(&self, _manifest: &Manifest) -> Result<(), NoMatchWithPath> { - Ok(()) - } - async fn update( - &self, - _ctx: &RpcContext, - _manifest: &Manifest, - _config_overrides: &BTreeMap, - _value: &mut Value, - ) -> Result<(), ConfigurationError> { - Ok(()) - } - fn pointers(&self, _value: &Value) -> Result, NoMatchWithPath> { - Ok(BTreeSet::new()) - } - fn requires(&self, _id: &PackageId, _value: &Value) -> bool { - false - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match (lhs, rhs) { - (Value::Bool(lhs), Value::Bool(rhs)) => lhs == rhs, - _ => false, - } - } -} -impl DefaultableWith for ValueSpecBoolean { - type DefaultSpec = bool; - type Error = crate::util::Never; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - _rng: &mut R, - _timeout: &Option, - ) -> Result { - Ok(Value::Bool(*spec)) - } -} - -#[derive(Clone, Debug, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct ValueSpecEnum { - pub values: IndexSet, - pub value_names: BTreeMap, -} -impl<'de> serde::de::Deserialize<'de> for ValueSpecEnum { - fn deserialize>(deserializer: D) -> Result { - #[derive(Deserialize)] - #[serde(rename_all = "kebab-case")] - pub struct _ValueSpecEnum { - pub values: IndexSet, - #[serde(default)] - pub value_names: BTreeMap, - } - - let mut r#enum = _ValueSpecEnum::deserialize(deserializer)?; - for name in &r#enum.values { - if !r#enum.value_names.contains_key(name) { - r#enum.value_names.insert(name.clone(), name.clone()); - } - } - Ok(ValueSpecEnum { - values: r#enum.values, - value_names: r#enum.value_names, - }) - } -} -#[async_trait] -impl ValueSpec for ValueSpecEnum { - fn matches(&self, val: &Value) -> Result<(), NoMatchWithPath> { - match val { - Value::String(b) => { - if self.values.contains(&**b) { - Ok(()) - } else { - Err(NoMatchWithPath::new(MatchError::Enum( - b.clone(), - self.values.clone(), - ))) - } - } - Value::Null => Err(NoMatchWithPath::new(MatchError::NotNullable)), - a => Err(NoMatchWithPath::new(MatchError::InvalidType( - "string", - a.type_of(), - ))), - } - } - fn validate(&self, _manifest: &Manifest) -> Result<(), NoMatchWithPath> { - Ok(()) - } - async fn update( - &self, - _ctx: &RpcContext, - _manifest: &Manifest, - _config_overrides: &BTreeMap, - _value: &mut Value, - ) -> Result<(), ConfigurationError> { - Ok(()) - } - fn pointers(&self, _value: &Value) -> Result, NoMatchWithPath> { - Ok(BTreeSet::new()) - } - fn requires(&self, _id: &PackageId, _value: &Value) -> bool { - false - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match (lhs, rhs) { - (Value::String(lhs), Value::String(rhs)) => lhs == rhs, - _ => 
false, - } - } -} -impl DefaultableWith for ValueSpecEnum { - type DefaultSpec = Arc; - type Error = crate::util::Never; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - _rng: &mut R, - _timeout: &Option, - ) -> Result { - Ok(Value::String(spec.clone())) - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ListSpec { - pub spec: T, - pub range: NumRange, -} -#[async_trait] -impl ValueSpec for ListSpec -where - T: ValueSpec + Sync + Send, - Self: Sync + Send, -{ - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match value { - Value::Array(l) => { - if !self.range.contains(&l.len()) { - Err(NoMatchWithPath { - path: Vec::new(), - error: MatchError::LengthMismatch(self.range.clone(), l.len()), - }) - } else { - l.iter() - .enumerate() - .map(|(i, v)| { - self.spec - .matches(v) - .map_err(|e| e.prepend(InternedString::from_display(&i)))?; - if l.iter() - .enumerate() - .any(|(i2, v2)| i != i2 && self.spec.eq(v, v2)) - { - Err(NoMatchWithPath::new(MatchError::ListUniquenessViolation) - .prepend(InternedString::from_display(&i))) - } else { - Ok(()) - } - }) - .collect() - } - } - Value::Null => Err(NoMatchWithPath::new(MatchError::NotNullable)), - a => Err(NoMatchWithPath::new(MatchError::InvalidType( - "list", - a.type_of(), - ))), - } - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - self.spec.validate(manifest) - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - if let Value::Array(ref mut ls) = value { - for (i, val) in ls.iter_mut().enumerate() { - match self.spec.update(ctx, manifest, config_overrides, val).await { - Err(ConfigurationError::NoMatch(e)) => Err(ConfigurationError::NoMatch( - e.prepend(InternedString::from_display(&i)), - )), - a => a, - }?; - } - Ok(()) - } else { - Err(ConfigurationError::NoMatch(NoMatchWithPath::new( - MatchError::InvalidType("list", value.type_of()), - ))) - } - } - fn pointers(&self, _value: &Value) -> Result, NoMatchWithPath> { - Ok(BTreeSet::new()) - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - if let Value::Array(ref ls) = value { - ls.into_iter().any(|v| self.spec.requires(id, v)) - } else { - false - } - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match (lhs, rhs) { - (Value::Array(lhs), Value::Array(rhs)) => { - lhs.iter().zip_longest(rhs.iter()).all(|zip| match zip { - itertools::EitherOrBoth::Both(lhs, rhs) => lhs == rhs, - _ => false, - }) - } - _ => false, - } - } -} - -impl DefaultableWith for ListSpec -where - T: DefaultableWith + Sync + Send, -{ - type DefaultSpec = Vec; - type Error = T::Error; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - rng: &mut R, - timeout: &Option, - ) -> Result { - let mut res = Vector::new(); - for spec_member in spec.iter() { - res.push_back(self.spec.gen_with(spec_member, rng, timeout)?); - } - Ok(Value::Array(res)) - } -} - -unsafe impl Sync for ValueSpecObject {} // TODO: remove -unsafe impl Send for ValueSpecObject {} // TODO: remove -unsafe impl Sync for ValueSpecUnion {} // TODO: remove -unsafe impl Send for ValueSpecUnion {} // TODO: remove - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -#[serde(tag = "subtype")] -pub enum ValueSpecList { - Enum(WithDescription>>), - Number(WithDescription>>), - Object(WithDescription>>), - String(WithDescription>>), - Union(WithDescription>>>), -} -#[async_trait] -impl ValueSpec for ValueSpecList { - 
fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match self { - ValueSpecList::Enum(a) => a.matches(value), - ValueSpecList::Number(a) => a.matches(value), - ValueSpecList::Object(a) => a.matches(value), - ValueSpecList::String(a) => a.matches(value), - ValueSpecList::Union(a) => a.matches(value), - } - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - match self { - ValueSpecList::Enum(a) => a.validate(manifest), - ValueSpecList::Number(a) => a.validate(manifest), - ValueSpecList::Object(a) => a.validate(manifest), - ValueSpecList::String(a) => a.validate(manifest), - ValueSpecList::Union(a) => a.validate(manifest), - } - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - match self { - ValueSpecList::Enum(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecList::Number(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecList::Object(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecList::String(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecList::Union(a) => a.update(ctx, manifest, config_overrides, value).await, - } - } - fn pointers(&self, value: &Value) -> Result, NoMatchWithPath> { - match self { - ValueSpecList::Enum(a) => a.pointers(value), - ValueSpecList::Number(a) => a.pointers(value), - ValueSpecList::Object(a) => a.pointers(value), - ValueSpecList::String(a) => a.pointers(value), - ValueSpecList::Union(a) => a.pointers(value), - } - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - match self { - ValueSpecList::Enum(a) => a.requires(id, value), - ValueSpecList::Number(a) => a.requires(id, value), - ValueSpecList::Object(a) => a.requires(id, value), - ValueSpecList::String(a) => a.requires(id, value), - ValueSpecList::Union(a) => a.requires(id, value), - } - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match self { - ValueSpecList::Enum(a) => a.eq(lhs, rhs), - ValueSpecList::Number(a) => a.eq(lhs, rhs), - ValueSpecList::Object(a) => a.eq(lhs, rhs), - ValueSpecList::String(a) => a.eq(lhs, rhs), - ValueSpecList::Union(a) => a.eq(lhs, rhs), - } - } -} - -impl Defaultable for ValueSpecList { - type Error = ConfigurationError; - - fn gen( - &self, - rng: &mut R, - timeout: &Option, - ) -> Result { - match self { - ValueSpecList::Enum(a) => a.gen(rng, timeout).map_err(crate::util::Never::absurd), - ValueSpecList::Number(a) => a.gen(rng, timeout).map_err(crate::util::Never::absurd), - ValueSpecList::Object(a) => { - let mut ret = match a.gen(rng, timeout).unwrap() { - Value::Array(l) => l, - a => { - return Err(ConfigurationError::NoMatch(NoMatchWithPath::new( - MatchError::InvalidType("list", a.type_of()), - ))) - } - }; - while !( - a.inner.inner.range.start_bound(), - std::ops::Bound::Unbounded, - ) - .contains(&ret.len()) - { - ret.push_back( - a.inner - .inner - .spec - .gen(rng, timeout) - .map_err(ConfigurationError::from)?, - ); - } - Ok(Value::Array(ret)) - } - ValueSpecList::String(a) => a.gen(rng, timeout).map_err(ConfigurationError::from), - ValueSpecList::Union(a) => a.gen(rng, timeout).map_err(ConfigurationError::from), - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ValueSpecNumber { - range: Option>, - #[serde(default)] - integral: bool, - #[serde(skip_serializing_if = "Option::is_none")] - units: Option, - #[serde(skip_serializing_if = "Option::is_none")] - 
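// A simplified, std-only sketch (hypothetical names, not code from this diff) of the loop
// in the Object arm of Defaultable for ValueSpecList above: entries are generated and
// appended until the list reaches the lower bound of its length range.
fn fill_to_lower_bound<T>(
    mut list: Vec<T>,
    min_len: usize,
    mut gen_entry: impl FnMut() -> T,
) -> Vec<T> {
    while list.len() < min_len {
        list.push(gen_entry());
    }
    list
}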
#[serde(default)] - pub placeholder: Option, -} -#[async_trait] -impl ValueSpec for ValueSpecNumber { - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match value { - Value::Number(n) => { - let n = n.as_f64().unwrap(); - if self.integral && n.floor() != n { - return Err(NoMatchWithPath::new(MatchError::NonIntegral(n))); - } - if let Some(range) = &self.range { - if !range.contains(&n) { - return Err(NoMatchWithPath::new(MatchError::OutOfRange( - range.clone(), - n, - ))); - } - } - Ok(()) - } - Value::Null => Err(NoMatchWithPath::new(MatchError::NotNullable)), - a => Err(NoMatchWithPath::new(MatchError::InvalidType( - "object", - a.type_of(), - ))), - } - } - fn validate(&self, _manifest: &Manifest) -> Result<(), NoMatchWithPath> { - Ok(()) - } - async fn update( - &self, - _ctx: &RpcContext, - _manifest: &Manifest, - _config_overrides: &BTreeMap, - _value: &mut Value, - ) -> Result<(), ConfigurationError> { - Ok(()) - } - fn pointers(&self, _value: &Value) -> Result, NoMatchWithPath> { - Ok(BTreeSet::new()) - } - fn requires(&self, _id: &PackageId, _value: &Value) -> bool { - false - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match (lhs, rhs) { - (Value::Number(lhs), Value::Number(rhs)) => lhs == rhs, - _ => false, - } - } -} -impl DefaultableWith for ValueSpecNumber { - type DefaultSpec = Option; - type Error = crate::util::Never; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - _rng: &mut R, - _timeout: &Option, - ) -> Result { - Ok(spec - .clone() - .map(|s| Value::Number(s)) - .unwrap_or(Value::Null)) - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct ValueSpecObject { - pub spec: ConfigSpec, - pub display_as: Option, - #[serde(default)] - pub unique_by: UniqueBy, -} -#[async_trait] -impl ValueSpec for ValueSpecObject { - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match value { - Value::Object(o) => self.spec.matches(o), - Value::Null => Err(NoMatchWithPath::new(MatchError::NotNullable)), - a => Err(NoMatchWithPath::new(MatchError::InvalidType( - "object", - a.type_of(), - ))), - } - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - self.spec.validate(manifest) - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - if let Value::Object(o) = value { - self.spec.update(ctx, manifest, config_overrides, o).await - } else { - Err(ConfigurationError::NoMatch(NoMatchWithPath::new( - MatchError::InvalidType("object", value.type_of()), - ))) - } - } - fn pointers(&self, value: &Value) -> Result, NoMatchWithPath> { - if let Value::Object(o) = value { - self.spec.pointers(o) - } else { - Err(NoMatchWithPath::new(MatchError::InvalidType( - "object", - value.type_of(), - ))) - } - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - if let Value::Object(o) = value { - self.spec.requires(id, o) - } else { - false - } - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match (lhs, rhs) { - (Value::Object(lhs), Value::Object(rhs)) => self.unique_by.eq(lhs, rhs), - _ => false, - } - } -} -impl DefaultableWith for ValueSpecObject { - type DefaultSpec = Config; - type Error = crate::util::Never; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - _rng: &mut R, - _timeout: &Option, - ) -> Result { - Ok(Value::Object(spec.clone())) - } -} -impl Defaultable for ValueSpecObject { - type Error = ConfigurationError; - - fn gen( 
- &self, - rng: &mut R, - timeout: &Option, - ) -> Result { - self.spec.gen(rng, timeout).map(Value::Object) - } -} - -#[derive(Clone, Debug, Default, Serialize, Deserialize)] -pub struct ConfigSpec(pub IndexMap); -impl ConfigSpec { - pub fn matches(&self, value: &Config) -> Result<(), NoMatchWithPath> { - for (key, val) in self.0.iter() { - if let Some(v) = value.get(&**key) { - val.matches(v).map_err(|e| e.prepend(key.clone()))?; - } else { - val.matches(&Value::Null) - .map_err(|e| e.prepend(key.clone()))?; - } - } - Ok(()) - } - - pub fn gen( - &self, - rng: &mut R, - timeout: &Option, - ) -> Result { - let mut res = Config::new(); - for (key, val) in self.0.iter() { - res.insert(key.clone(), val.gen(rng, timeout)?); - } - Ok(res) - } - - pub fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - for (name, val) in &self.0 { - val.validate(manifest) - .map_err(|e| e.prepend(name.clone()))?; - } - Ok(()) - } - - pub async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - cfg: &mut Config, - ) -> Result<(), ConfigurationError> { - for (k, vs) in self.0.iter() { - match cfg.get_mut(k) { - None => { - let mut v = Value::Null; - vs.update(ctx, manifest, config_overrides, &mut v).await?; - cfg.insert(k.clone(), v); - } - Some(v) => match vs.update(ctx, manifest, config_overrides, v).await { - Err(ConfigurationError::NoMatch(e)) => { - Err(ConfigurationError::NoMatch(e.prepend(k.clone()))) - } - a => a, - }?, - }; - } - Ok(()) - } - - pub fn pointers(&self, cfg: &Config) -> Result, NoMatchWithPath> { - cfg.iter() - .filter_map(|(k, v)| self.0.get(k).map(|vs| (k, vs.pointers(v)))) - .fold(Ok(BTreeSet::::new()), |acc, v| { - match (acc, v) { - // propagate existing errors - (Err(e), _) => Err(e), - // create new error case - (Ok(_), (k, Err(e))) => Err(e.prepend(k.clone())), - // combine sets - (Ok(s0), (_, Ok(s1))) => Ok(BTreeSet::from_iter(s0.union(&s1).cloned())), - } - }) - } - - pub fn requires(&self, id: &PackageId, cfg: &Config) -> bool { - self.0 - .iter() - .any(|(k, v)| v.requires(id, cfg.get(k).unwrap_or(&STATIC_NULL))) - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct Pattern { - #[serde(with = "util::serde_regex")] - pub pattern: Regex, - pub pattern_description: String, -} - -#[derive(Clone, Debug, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct ValueSpecString { - #[serde(flatten)] - pub pattern: Option, - pub textarea: bool, - pub copyable: bool, - pub masked: bool, - #[serde(skip_serializing_if = "Option::is_none")] - pub placeholder: Option, -} -impl<'de> Deserialize<'de> for ValueSpecString { - fn deserialize>(deserializer: D) -> Result { - struct ValueSpecStringVisitor; - impl<'de> Visitor<'de> for ValueSpecStringVisitor { - type Value = ValueSpecString; - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("struct ValueSpecString") - } - fn visit_map>(self, mut map: V) -> Result { - let mut pattern = None; - let mut pattern_description = None; - let mut textarea = false; - let mut copyable = false; - let mut masked = false; - let mut placeholder = None; - while let Some::(key) = map.next_key()? { - if &key == "pattern" { - if pattern.is_some() { - return Err(serde::de::Error::duplicate_field("pattern")); - } else { - pattern = Some( - Regex::new(&map.next_value::()?) 
- .map_err(serde::de::Error::custom)?, - ); - } - } else if &key == "pattern-description" { - if pattern_description.is_some() { - return Err(serde::de::Error::duplicate_field("pattern-description")); - } else { - pattern_description = Some(map.next_value()?); - } - } else if &key == "textarea" { - textarea = map.next_value()?; - } else if &key == "copyable" { - copyable = map.next_value()?; - } else if &key == "masked" { - masked = map.next_value()?; - } else if &key == "placeholder" { - if placeholder.is_some() { - return Err(serde::de::Error::duplicate_field("placeholder")); - } else { - placeholder = Some(map.next_value()?); - } - } - } - let regex = match (pattern, pattern_description) { - (None, None) => None, - (Some(p), Some(d)) => Some(Pattern { - pattern: p, - pattern_description: d, - }), - (Some(_), None) => { - return Err(serde::de::Error::missing_field("pattern-description")); - } - (None, Some(_)) => { - return Err(serde::de::Error::missing_field("pattern")); - } - }; - Ok(ValueSpecString { - pattern: regex, - textarea, - copyable, - masked, - placeholder, - }) - } - } - const FIELDS: &[&str] = &[ - "pattern", - "pattern-description", - "textarea", - "copyable", - "masked", - "placeholder", - ]; - deserializer.deserialize_struct("ValueSpecString", FIELDS, ValueSpecStringVisitor) - } -} -#[async_trait] -impl ValueSpec for ValueSpecString { - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match value { - Value::String(s) => { - if let Some(pattern) = &self.pattern { - if pattern.pattern.is_match(s) { - Ok(()) - } else { - Err(NoMatchWithPath::new(MatchError::Pattern( - s.clone(), - pattern.pattern.clone(), - ))) - } - } else { - Ok(()) - } - } - Value::Null => Err(NoMatchWithPath::new(MatchError::NotNullable)), - a => Err(NoMatchWithPath::new(MatchError::InvalidType( - "string", - a.type_of(), - ))), - } - } - fn validate(&self, _manifest: &Manifest) -> Result<(), NoMatchWithPath> { - Ok(()) - } - async fn update( - &self, - _ctx: &RpcContext, - _manifest: &Manifest, - _config_overrides: &BTreeMap, - _value: &mut Value, - ) -> Result<(), ConfigurationError> { - Ok(()) - } - fn pointers(&self, _value: &Value) -> Result, NoMatchWithPath> { - Ok(BTreeSet::new()) - } - fn requires(&self, _id: &PackageId, _value: &Value) -> bool { - false - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match (lhs, rhs) { - (Value::String(lhs), Value::String(rhs)) => lhs == rhs, - _ => false, - } - } -} -impl DefaultableWith for ValueSpecString { - type DefaultSpec = Option; - type Error = TimeoutError; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - rng: &mut R, - timeout: &Option, - ) -> Result { - if let Some(spec) = spec { - let now = timeout.as_ref().map(|_| std::time::Instant::now()); - loop { - let candidate = spec.gen(rng); - match (spec, &self.pattern) { - (DefaultString::Entropy(_), Some(pattern)) - if !pattern.pattern.is_match(&candidate) => {} - _ => { - return Ok(Value::String(candidate)); - } - } - if let (Some(now), Some(timeout)) = (now, timeout) { - if &now.elapsed() > timeout { - return Err(TimeoutError); - } - } else { - return Ok(Value::String(candidate)); - } - } - } else { - Ok(Value::Null) - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(untagged)] -pub enum DefaultString { - Literal(String), - Entropy(Entropy), -} -impl DefaultString { - pub fn gen(&self, rng: &mut R) -> Arc { - Arc::new(match self { - DefaultString::Literal(s) => s.clone(), - DefaultString::Entropy(e) => e.gen(rng), - }) - } -} - -#[derive(Clone, 
Debug, Serialize, Deserialize)] -pub struct Entropy { - pub charset: Option, - pub len: usize, -} -impl Entropy { - pub fn gen(&self, rng: &mut R) -> String { - let len = self.len; - let set = self - .charset - .as_ref() - .map(|cs| Cow::Borrowed(cs)) - .unwrap_or_else(|| Cow::Owned(Default::default())); - std::iter::repeat_with(|| set.gen(rng)).take(len).collect() - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct UnionTag { - pub id: InternedString, - pub name: String, - pub description: Option, - pub variant_names: BTreeMap, -} - -#[derive(Clone, Debug, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct ValueSpecUnion { - pub tag: UnionTag, - pub variants: BTreeMap, - pub display_as: Option, - pub unique_by: UniqueBy, -} - -impl<'de> serde::de::Deserialize<'de> for ValueSpecUnion { - fn deserialize>(deserializer: D) -> Result { - #[derive(Deserialize)] - #[serde(rename_all = "kebab-case")] - #[serde(untagged)] - pub enum _UnionTag { - Old(InternedString), - New(UnionTag), - } - #[derive(Deserialize)] - #[serde(rename_all = "kebab-case")] - pub struct _ValueSpecUnion { - pub variants: BTreeMap, - pub tag: _UnionTag, - pub display_as: Option, - #[serde(default)] - pub unique_by: UniqueBy, - } - - let u = _ValueSpecUnion::deserialize(deserializer)?; - Ok(ValueSpecUnion { - tag: match u.tag { - _UnionTag::Old(id) => UnionTag { - id: id.clone(), - name: id.to_string(), - description: None, - variant_names: u - .variants - .keys() - .map(|k| (k.to_owned(), k.to_owned())) - .collect(), - }, - _UnionTag::New(UnionTag { - id, - name, - description, - mut variant_names, - }) => UnionTag { - id, - name, - description, - variant_names: { - let mut iter = u.variants.keys(); - while variant_names.len() < u.variants.len() { - if let Some(variant) = iter.next() { - variant_names.insert(variant.to_owned(), variant.to_owned()); - } else { - break; - } - } - variant_names - }, - }, - }, - variants: u.variants, - display_as: u.display_as, - unique_by: u.unique_by, - }) - } -} - -#[async_trait] -impl ValueSpec for ValueSpecUnion { - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match value { - Value::Object(o) => { - if let Some(Value::String(ref tag)) = o.get(&*self.tag.id) { - if let Some(obj_spec) = self.variants.get(&**tag) { - let mut without_tag = o.clone(); - without_tag.remove(&*self.tag.id); - obj_spec.matches(&without_tag) - } else { - Err(NoMatchWithPath::new(MatchError::Union( - tag.clone(), - self.variants.keys().cloned().collect(), - ))) - } - } else { - Err(NoMatchWithPath::new(MatchError::MissingTag( - self.tag.id.clone(), - ))) - } - } - Value::Null => Err(NoMatchWithPath::new(MatchError::NotNullable)), - a => Err(NoMatchWithPath::new(MatchError::InvalidType( - "object", - a.type_of(), - ))), - } - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - for (name, variant) in &self.variants { - if variant.0.get(&*self.tag.id).is_some() { - return Err(NoMatchWithPath::new(MatchError::PropertyMatchesUnionTag( - self.tag.id.clone(), - name.clone(), - ))); - } - variant.validate(manifest)?; - } - Ok(()) - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - if let Value::Object(o) = value { - match o.get(&*self.tag.id) { - None => Err(ConfigurationError::NoMatch(NoMatchWithPath::new( - MatchError::MissingTag(self.tag.id.clone()), - ))), - Some(Value::String(tag)) => match 
self.variants.get(&**tag) { - None => Err(ConfigurationError::NoMatch(NoMatchWithPath::new( - MatchError::Union(tag.clone(), self.variants.keys().cloned().collect()), - ))), - Some(spec) => spec.update(ctx, manifest, config_overrides, o).await, - }, - Some(other) => Err(ConfigurationError::NoMatch( - NoMatchWithPath::new(MatchError::InvalidType("string", other.type_of())) - .prepend(self.tag.id.clone()), - )), - } - } else { - Err(ConfigurationError::NoMatch(NoMatchWithPath::new( - MatchError::InvalidType("object", value.type_of()), - ))) - } - } - fn pointers(&self, value: &Value) -> Result, NoMatchWithPath> { - if let Value::Object(o) = value { - match o.get(&*self.tag.id) { - None => Err(NoMatchWithPath::new(MatchError::MissingTag( - self.tag.id.clone(), - ))), - Some(Value::String(tag)) => match self.variants.get(&**tag) { - None => Err(NoMatchWithPath::new(MatchError::Union( - tag.clone(), - self.variants.keys().cloned().collect(), - ))), - Some(spec) => spec.pointers(o), - }, - Some(other) => Err(NoMatchWithPath::new(MatchError::InvalidType( - "string", - other.type_of(), - )) - .prepend(self.tag.id.clone())), - } - } else { - Err(NoMatchWithPath::new(MatchError::InvalidType( - "object", - value.type_of(), - ))) - } - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - if let Value::Object(o) = value { - match o.get(&*self.tag.id) { - Some(Value::String(tag)) => match self.variants.get(&**tag) { - None => false, - Some(spec) => spec.requires(id, o), - }, - _ => false, - } - } else { - false - } - } - fn eq(&self, lhs: &Value, rhs: &Value) -> bool { - match (lhs, rhs) { - (Value::Object(lhs), Value::Object(rhs)) => self.unique_by.eq(lhs, rhs), - _ => false, - } - } -} -impl DefaultableWith for ValueSpecUnion { - type DefaultSpec = Arc; - type Error = ConfigurationError; - - fn gen_with( - &self, - spec: &Self::DefaultSpec, - rng: &mut R, - timeout: &Option, - ) -> Result { - let variant = if let Some(v) = self.variants.get(&**spec) { - v - } else { - return Err(ConfigurationError::NoMatch(NoMatchWithPath::new( - MatchError::Union(spec.clone(), self.variants.keys().cloned().collect()), - ))); - }; - let cfg_res = variant.gen(rng, timeout)?; - - let mut tagged_cfg = Config::new(); - tagged_cfg.insert(self.tag.id.clone(), Value::String(spec.clone())); - tagged_cfg.extend(cfg_res.into_iter()); - - Ok(Value::Object(tagged_cfg)) - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(tag = "subtype")] -#[serde(rename_all = "kebab-case")] -pub enum ValueSpecPointer { - Package(PackagePointerSpec), - System(SystemPointerSpec), -} -impl fmt::Display for ValueSpecPointer { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ValueSpecPointer::Package(p) => write!(f, "{}", p), - ValueSpecPointer::System(p) => write!(f, "{}", p), - } - } -} -impl Defaultable for ValueSpecPointer { - type Error = ConfigurationError; - fn gen( - &self, - _rng: &mut R, - _timeout: &Option, - ) -> Result { - Ok(Value::Null) - } -} -#[async_trait] -impl ValueSpec for ValueSpecPointer { - fn matches(&self, value: &Value) -> Result<(), NoMatchWithPath> { - match self { - ValueSpecPointer::Package(a) => a.matches(value), - ValueSpecPointer::System(a) => a.matches(value), - } - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - match self { - ValueSpecPointer::Package(a) => a.validate(manifest), - ValueSpecPointer::System(a) => a.validate(manifest), - } - } - async fn update( - &self, - ctx: &RpcContext, - 
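// A std-only sketch (hypothetical names, not code from this diff) of how DefaultableWith
// for ValueSpecUnion above assembles a default value: the tag id is written into the
// object alongside the generated defaults of the selected variant.
use std::collections::BTreeMap;

fn tagged_default(
    tag_key: &str,
    variant: &str,
    defaults: BTreeMap<String, String>,
) -> BTreeMap<String, String> {
    let mut obj = BTreeMap::new();
    obj.insert(tag_key.to_owned(), variant.to_owned());
    obj.extend(defaults);
    obj
}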
manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - match self { - ValueSpecPointer::Package(a) => a.update(ctx, manifest, config_overrides, value).await, - ValueSpecPointer::System(a) => a.update(ctx, manifest, config_overrides, value).await, - } - } - fn pointers(&self, _value: &Value) -> Result, NoMatchWithPath> { - let mut pointers = BTreeSet::new(); - pointers.insert(self.clone()); - Ok(pointers) - } - fn requires(&self, id: &PackageId, value: &Value) -> bool { - match self { - ValueSpecPointer::Package(a) => a.requires(id, value), - ValueSpecPointer::System(a) => a.requires(id, value), - } - } - fn eq(&self, _lhs: &Value, _rhs: &Value) -> bool { - false - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(tag = "target")] -#[serde(rename_all = "kebab-case")] -pub enum PackagePointerSpec { - TorKey(TorKeyPointer), - TorAddress(TorAddressPointer), - LanAddress(LanAddressPointer), - Config(ConfigPointer), -} -impl PackagePointerSpec { - pub fn package_id(&self) -> &PackageId { - match self { - PackagePointerSpec::TorKey(TorKeyPointer { package_id, .. }) => package_id, - PackagePointerSpec::TorAddress(TorAddressPointer { package_id, .. }) => package_id, - PackagePointerSpec::LanAddress(LanAddressPointer { package_id, .. }) => package_id, - PackagePointerSpec::Config(ConfigPointer { package_id, .. }) => package_id, - } - } - async fn deref( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - ) -> Result { - match &self { - PackagePointerSpec::TorKey(key) => key.deref(&manifest.id, &ctx.secret_store).await, - PackagePointerSpec::TorAddress(tor) => tor.deref(ctx).await, - PackagePointerSpec::LanAddress(lan) => lan.deref(ctx).await, - PackagePointerSpec::Config(cfg) => cfg.deref(ctx, config_overrides).await, - } - } -} -impl fmt::Display for PackagePointerSpec { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - PackagePointerSpec::TorKey(key) => write!(f, "{}", key), - PackagePointerSpec::TorAddress(tor) => write!(f, "{}", tor), - PackagePointerSpec::LanAddress(lan) => write!(f, "{}", lan), - PackagePointerSpec::Config(cfg) => write!(f, "{}", cfg), - } - } -} -impl Defaultable for PackagePointerSpec { - type Error = ConfigurationError; - fn gen( - &self, - _rng: &mut R, - _timeout: &Option, - ) -> Result { - Ok(Value::Null) - } -} -#[async_trait] -impl ValueSpec for PackagePointerSpec { - fn matches(&self, _value: &Value) -> Result<(), NoMatchWithPath> { - Ok(()) - } - fn validate(&self, manifest: &Manifest) -> Result<(), NoMatchWithPath> { - if &manifest.id != self.package_id() - && !manifest.dependencies.0.contains_key(self.package_id()) - { - return Err(NoMatchWithPath::new(MatchError::InvalidPointer( - ValueSpecPointer::Package(self.clone()), - ))); - } - match self { - _ => Ok(()), - } - } - async fn update( - &self, - ctx: &RpcContext, - manifest: &Manifest, - config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - *value = self.deref(ctx, manifest, config_overrides).await?; - Ok(()) - } - fn pointers(&self, _value: &Value) -> Result, NoMatchWithPath> { - let mut pointers = BTreeSet::new(); - pointers.insert(ValueSpecPointer::Package(self.clone())); - Ok(pointers) - } - fn requires(&self, id: &PackageId, _value: &Value) -> bool { - self.package_id() == id - } - fn eq(&self, _lhs: &Value, _rhs: &Value) -> bool { - false - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, 
Ord, Hash, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct TorAddressPointer { - pub package_id: PackageId, - interface: InterfaceId, -} -impl TorAddressPointer { - async fn deref(&self, ctx: &RpcContext) -> Result { - let addr = ctx - .db - .peek() - .await - .as_package_data() - .as_idx(&self.package_id) - .and_then(|pde| pde.as_installed()) - .and_then(|i| i.as_interface_addresses().as_idx(&self.interface)) - .and_then(|a| a.as_tor_address().de().transpose()) - .transpose() - .map_err(|e| ConfigurationError::SystemError(e))?; - Ok(addr.map(Arc::new).map(Value::String).unwrap_or(Value::Null)) - } -} -impl fmt::Display for TorAddressPointer { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - TorAddressPointer { - package_id, - interface, - } => write!(f, "{}: tor-address: {}", package_id, interface), - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct LanAddressPointer { - pub package_id: PackageId, - interface: InterfaceId, -} -impl fmt::Display for LanAddressPointer { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let LanAddressPointer { - package_id, - interface, - } = self; - write!(f, "{}: lan-address: {}", package_id, interface) - } -} -impl LanAddressPointer { - async fn deref(&self, ctx: &RpcContext) -> Result { - let addr = ctx - .db - .peek() - .await - .as_package_data() - .as_idx(&self.package_id) - .and_then(|pde| pde.as_installed()) - .and_then(|i| i.as_interface_addresses().as_idx(&self.interface)) - .and_then(|a| a.as_lan_address().de().transpose()) - .transpose() - .map_err(|e| ConfigurationError::SystemError(e))?; - Ok(addr - .to_owned() - .map(Arc::new) - .map(Value::String) - .unwrap_or(Value::Null)) - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct ConfigPointer { - package_id: PackageId, - selector: Arc, - multi: bool, -} -impl ConfigPointer { - pub fn select(&self, val: &Value) -> Value { - self.selector.select(self.multi, val) - } - async fn deref( - &self, - ctx: &RpcContext, - config_overrides: &BTreeMap, - ) -> Result { - if let Some(cfg) = config_overrides.get(&self.package_id) { - Ok(self.select(&Value::Object(cfg.clone()))) - } else { - let id = &self.package_id; - let db = ctx.db.peek().await; - let manifest = db.as_package_data().as_idx(id).map(|pde| pde.as_manifest()); - let cfg_actions = manifest.and_then(|m| m.as_config().transpose_ref()); - if let (Some(manifest), Some(cfg_actions)) = (manifest, cfg_actions) { - let cfg_res = cfg_actions - .de() - .map_err(|e| ConfigurationError::SystemError(e))? - .get( - ctx, - &self.package_id, - &manifest - .as_version() - .de() - .map_err(|e| ConfigurationError::SystemError(e))?, - &manifest - .as_volumes() - .de() - .map_err(|e| ConfigurationError::SystemError(e))?, - ) - .await - .map_err(|e| ConfigurationError::SystemError(e))?; - if let Some(cfg) = cfg_res.config { - Ok(self.select(&Value::Object(cfg))) - } else { - Ok(Value::Null) - } - } else { - Ok(Value::Null) - } - } - } -} -impl fmt::Display for ConfigPointer { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let ConfigPointer { - package_id, - selector, - .. 
- } = self; - write!(f, "{}: config: {}", package_id, selector) - } -} - -#[derive(Clone, Debug)] -pub struct ConfigSelector { - src: String, - compiled: CompiledJsonPath, -} -impl ConfigSelector { - fn select(&self, multi: bool, val: &Value) -> Value { - let selected = self.compiled.select(&val).ok().unwrap_or_else(Vector::new); - if multi { - Value::Array(selected.into_iter().cloned().collect()) - } else { - selected.get(0).map(|v| (*v).clone()).unwrap_or(Value::Null) - } - } -} -impl fmt::Display for ConfigSelector { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.src) - } -} -impl Serialize for ConfigSelector { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(&self.src) - } -} -impl<'de> Deserialize<'de> for ConfigSelector { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let src: String = Deserialize::deserialize(deserializer)?; - let compiled = CompiledJsonPath::compile(&src).map_err(serde::de::Error::custom)?; - Ok(Self { src, compiled }) - } -} -impl PartialEq for ConfigSelector { - fn eq(&self, other: &ConfigSelector) -> bool { - self.src == other.src - } -} -impl Eq for ConfigSelector {} -impl PartialOrd for ConfigSelector { - fn partial_cmp(&self, other: &Self) -> Option { - self.src.partial_cmp(&other.src) - } -} -impl Ord for ConfigSelector { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.src.cmp(&other.src) - } -} -impl Hash for ConfigSelector { - fn hash(&self, state: &mut H) { - self.src.hash(state) - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct TorKeyPointer { - package_id: PackageId, - interface: InterfaceId, -} -impl TorKeyPointer { - async fn deref( - &self, - source_package: &PackageId, - secrets: &PgPool, - ) -> Result { - if &self.package_id != source_package { - return Err(ConfigurationError::PermissionDenied( - ValueSpecPointer::Package(PackagePointerSpec::TorKey(self.clone())), - )); - } - let key = Key::for_interface( - secrets - .acquire() - .await - .map_err(|e| ConfigurationError::SystemError(e.into()))? 
- .as_mut(), - Some((self.package_id.clone(), self.interface.clone())), - ) - .await - .map_err(ConfigurationError::SystemError)?; - Ok(Value::String(Arc::new(base32::encode( - base32::Alphabet::RFC4648 { padding: false }, - &key.tor_key().as_bytes(), - )))) - } -} -impl fmt::Display for TorKeyPointer { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}: tor-key: {}", self.package_id, self.interface) - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -#[serde(tag = "target")] -pub enum SystemPointerSpec {} -impl fmt::Display for SystemPointerSpec { - fn fmt(&self, _f: &mut fmt::Formatter) -> fmt::Result { - // write!(f, "SYSTEM: {}", match *self {}) - Ok(()) - } -} -impl SystemPointerSpec { - async fn deref(&self, _ctx: &RpcContext) -> Result { - #[allow(unreachable_code)] - Ok(match *self {}) - } -} -impl Defaultable for SystemPointerSpec { - type Error = ConfigurationError; - fn gen( - &self, - _rng: &mut R, - _timeout: &Option, - ) -> Result { - Ok(Value::Null) - } -} -#[async_trait] -impl ValueSpec for SystemPointerSpec { - fn matches(&self, _value: &Value) -> Result<(), NoMatchWithPath> { - Ok(()) - } - fn validate(&self, _manifest: &Manifest) -> Result<(), NoMatchWithPath> { - Ok(()) - } - async fn update( - &self, - ctx: &RpcContext, - _manifest: &Manifest, - _config_overrides: &BTreeMap, - value: &mut Value, - ) -> Result<(), ConfigurationError> { - *value = self.deref(ctx).await?; - Ok(()) - } - fn pointers(&self, _value: &Value) -> Result, NoMatchWithPath> { - let mut pointers = BTreeSet::new(); - pointers.insert(ValueSpecPointer::System(self.clone())); - #[allow(unreachable_code)] - Ok(pointers) - } - fn requires(&self, _id: &PackageId, _value: &Value) -> bool { - false - } - fn eq(&self, _lhs: &Value, _rhs: &Value) -> bool { - false - } -} - -#[test] -fn invalid_regex_produces_error() { - assert!( - serde_yaml::from_reader::<_, ConfigSpec>(std::io::Cursor::new(include_bytes!( - "../../test/config-spec/lnd-invalid-regex.yaml" - ))) - .is_err() - ) -} - -#[test] -fn missing_pattern_description_produces_error() { - assert!( - serde_yaml::from_reader::<_, ConfigSpec>(std::io::Cursor::new(include_bytes!( - "../../test/config-spec/lnd-missing-pattern-description.yaml" - ))) - .is_err() - ) -} - -#[test] -fn missing_pattern_produces_error() { - assert!( - serde_yaml::from_reader::<_, ConfigSpec>(std::io::Cursor::new(include_bytes!( - "../../test/config-spec/lnd-missing-pattern.yaml" - ))) - .is_err() - ) -} - -#[test] -fn regex_control() { - let spec = serde_yaml::from_reader::<_, ConfigSpec>(std::io::Cursor::new(include_bytes!( - "../../test/config-spec/lnd-correct.yaml" - ))) - .unwrap(); - println!("{}", serde_json::to_string_pretty(&spec).unwrap()); -} diff --git a/core/startos/src/context/cli.rs b/core/startos/src/context/cli.rs index 020b73459d..cc2fe232be 100644 --- a/core/startos/src/context/cli.rs +++ b/core/startos/src/context/cli.rs @@ -1,43 +1,37 @@ use std::fs::File; use std::io::BufReader; -use std::net::Ipv4Addr; use std::path::{Path, PathBuf}; use std::sync::Arc; -use clap::ArgMatches; -use color_eyre::eyre::eyre; use cookie_store::{CookieStore, RawCookie}; use josekit::jwk::Jwk; +use once_cell::sync::OnceCell; use reqwest::Proxy; use reqwest_cookie_store::CookieStoreMutex; use rpc_toolkit::reqwest::{Client, Url}; -use rpc_toolkit::url::Host; -use rpc_toolkit::Context; -use serde::Deserialize; +use rpc_toolkit::yajrc::RpcError; +use 
rpc_toolkit::{call_remote_http, CallRemote, Context}; +use tokio::net::TcpStream; +use tokio::runtime::Runtime; +use tokio_tungstenite::{MaybeTlsStream, WebSocketStream}; use tracing::instrument; use super::setup::CURRENT_SECRET; +use crate::context::config::{local_config_path, ClientConfig}; +use crate::core::rpc_continuations::RequestGuid; use crate::middleware::auth::LOCAL_AUTH_COOKIE_PATH; -use crate::util::config::{load_config_from_paths, local_config_path}; -use crate::ResultExt; - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct CliContextConfig { - pub host: Option, - #[serde(deserialize_with = "crate::util::serde::deserialize_from_str_opt")] - #[serde(default)] - pub proxy: Option, - pub cookie_path: Option, -} +use crate::prelude::*; #[derive(Debug)] pub struct CliContextSeed { + pub runtime: OnceCell, pub base_url: Url, pub rpc_url: Url, pub client: Client, pub cookie_store: Arc, pub cookie_path: PathBuf, + pub developer_key_path: PathBuf, + pub developer_key: OnceCell, } impl Drop for CliContextSeed { fn drop(&mut self) { @@ -60,42 +54,22 @@ impl Drop for CliContextSeed { } } -const DEFAULT_HOST: Host<&'static str> = Host::Ipv4(Ipv4Addr::new(127, 0, 0, 1)); -const DEFAULT_PORT: u16 = 5959; - #[derive(Debug, Clone)] pub struct CliContext(Arc); impl CliContext { /// BLOCKING #[instrument(skip_all)] - pub fn init(matches: &ArgMatches) -> Result { - let local_config_path = local_config_path(); - let base: CliContextConfig = load_config_from_paths( - matches - .values_of("config") - .into_iter() - .flatten() - .map(|p| Path::new(p)) - .chain(local_config_path.as_deref().into_iter()) - .chain(std::iter::once(Path::new(crate::util::config::CONFIG_PATH))), - )?; - let mut url = if let Some(host) = matches.value_of("host") { - host.parse()? - } else if let Some(host) = base.host { + pub fn init(config: ClientConfig) -> Result { + let mut url = if let Some(host) = config.host { host } else { "http://localhost".parse()? }; - let proxy = if let Some(proxy) = matches.value_of("proxy") { - Some(proxy.parse()?) - } else { - base.proxy - }; - let cookie_path = base.cookie_path.unwrap_or_else(|| { - local_config_path + let cookie_path = config.cookie_path.unwrap_or_else(|| { + local_config_path() .as_deref() - .unwrap_or_else(|| Path::new(crate::util::config::CONFIG_PATH)) + .unwrap_or_else(|| Path::new(super::config::CONFIG_PATH)) .parent() .unwrap_or(Path::new("/")) .join(".cookies.json") @@ -120,6 +94,7 @@ impl CliContext { })); Ok(CliContext(Arc::new(CliContextSeed { + runtime: OnceCell::new(), base_url: url.clone(), rpc_url: { url.path_segments_mut() @@ -131,7 +106,7 @@ impl CliContext { }, client: { let mut builder = Client::builder().cookie_provider(cookie_store.clone()); - if let Some(proxy) = proxy { + if let Some(proxy) = config.proxy { builder = builder.proxy(Proxy::all(proxy).with_kind(crate::ErrorKind::ParseUrl)?) } @@ -139,8 +114,90 @@ impl CliContext { }, cookie_store, cookie_path, + developer_key_path: config.developer_key_path.unwrap_or_else(|| { + local_config_path() + .as_deref() + .unwrap_or_else(|| Path::new(super::config::CONFIG_PATH)) + .parent() + .unwrap_or(Path::new("/")) + .join("developer.key.pem") + }), + developer_key: OnceCell::new(), }))) } + + /// BLOCKING + #[instrument(skip_all)] + pub fn developer_key(&self) -> Result<&ed25519_dalek::SigningKey, Error> { + self.developer_key.get_or_try_init(|| { + if !self.developer_key_path.exists() { + return Err(Error::new(eyre!("Developer Key does not exist! 
Please run `start-cli init` before running this command."), crate::ErrorKind::Uninitialized)); + } + let pair = ::from_pkcs8_pem( + &std::fs::read_to_string(&self.developer_key_path)?, + ) + .with_kind(crate::ErrorKind::Pem)?; + let secret = ed25519_dalek::SecretKey::try_from(&pair.secret_key[..]).map_err(|_| { + Error::new( + eyre!("pkcs8 key is of incorrect length"), + ErrorKind::OpenSsl, + ) + })?; + Ok(secret.into()) + }) + } + + pub async fn ws_continuation( + &self, + guid: RequestGuid, + ) -> Result>, Error> { + let mut url = self.base_url.clone(); + let ws_scheme = match url.scheme() { + "https" => "wss", + "http" => "ws", + _ => { + return Err(Error::new( + eyre!("Cannot parse scheme from base URL"), + crate::ErrorKind::ParseUrl, + ) + .into()) + } + }; + url.set_scheme(ws_scheme) + .map_err(|_| Error::new(eyre!("Cannot set URL scheme"), crate::ErrorKind::ParseUrl))?; + url.path_segments_mut() + .map_err(|_| eyre!("Url cannot be base")) + .with_kind(crate::ErrorKind::ParseUrl)? + .push("ws") + .push("rpc") + .push(guid.as_ref()); + let (stream, _) = + // base_url is "http://127.0.0.1/", with a trailing slash, so we don't put a leading slash in this path: + tokio_tungstenite::connect_async(url).await.with_kind(ErrorKind::Network)?; + Ok(stream) + } + + pub async fn rest_continuation( + &self, + guid: RequestGuid, + body: reqwest::Body, + headers: reqwest::header::HeaderMap, + ) -> Result { + let mut url = self.base_url.clone(); + url.path_segments_mut() + .map_err(|_| eyre!("Url cannot be base")) + .with_kind(crate::ErrorKind::ParseUrl)? + .push("rest") + .push("rpc") + .push(guid.as_ref()); + self.client + .post(url) + .headers(headers) + .body(body) + .send() + .await + .with_kind(ErrorKind::Network) + } } impl AsRef for CliContext { fn as_ref(&self) -> &Jwk { @@ -154,32 +211,33 @@ impl std::ops::Deref for CliContext { } } impl Context for CliContext { - fn protocol(&self) -> &str { - self.0.base_url.scheme() - } - fn host(&self) -> Host<&str> { - self.0.base_url.host().unwrap_or(DEFAULT_HOST) + fn runtime(&self) -> tokio::runtime::Handle { + self.runtime + .get_or_init(|| { + tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .unwrap() + }) + .handle() + .clone() } - fn port(&self) -> u16 { - self.0.base_url.port().unwrap_or(DEFAULT_PORT) - } - fn path(&self) -> &str { - self.0.rpc_url.path() - } - fn url(&self) -> Url { - self.0.rpc_url.clone() - } - fn client(&self) -> &Client { - &self.0.client +} +#[async_trait::async_trait] +impl CallRemote for CliContext { + async fn call_remote(&self, method: &str, params: Value) -> Result { + call_remote_http(&self.client, self.rpc_url.clone(), method, params).await } } -/// When we had an empty proxy the system wasn't working like it used to, which allowed empty proxy + #[test] -fn test_cli_proxy_empty() { - serde_yaml::from_str::( - " - bind_rpc: - ", - ) - .unwrap(); +fn test() { + let ctx = CliContext::init(ClientConfig::default()).unwrap(); + ctx.runtime().block_on(async { + reqwest::Client::new() + .get("http://example.com") + .send() + .await + .unwrap(); + }); } diff --git a/core/startos/src/context/config.rs b/core/startos/src/context/config.rs new file mode 100644 index 0000000000..55065e8164 --- /dev/null +++ b/core/startos/src/context/config.rs @@ -0,0 +1,169 @@ +use std::fs::File; +use std::net::SocketAddr; +use std::path::{Path, PathBuf}; + +use clap::Parser; +use reqwest::Url; +use serde::de::DeserializeOwned; +use serde::{Deserialize, Serialize}; +use sqlx::postgres::PgConnectOptions; +use 
sqlx::PgPool; + +use crate::disk::OsPartitionInfo; +use crate::init::init_postgres; +use crate::prelude::*; +use crate::util::serde::IoFormat; + +pub const DEVICE_CONFIG_PATH: &str = "/media/embassy/config/config.yaml"; // "/media/startos/config/config.yaml"; +pub const CONFIG_PATH: &str = "/etc/startos/config.yaml"; +pub const CONFIG_PATH_LOCAL: &str = ".startos/config.yaml"; + +pub fn local_config_path() -> Option { + if let Ok(home) = std::env::var("HOME") { + Some(Path::new(&home).join(CONFIG_PATH_LOCAL)) + } else { + None + } +} + +pub trait ContextConfig: DeserializeOwned + Default { + fn next(&mut self) -> Option; + fn merge_with(&mut self, other: Self); + fn from_path(path: impl AsRef) -> Result { + let format: IoFormat = path + .as_ref() + .extension() + .and_then(|s| s.to_str()) + .map(|f| f.parse()) + .transpose()? + .unwrap_or_default(); + format.from_reader(File::open(path)?) + } + fn load_path_rec(&mut self, path: Option>) -> Result<(), Error> { + if let Some(path) = path.filter(|p| p.as_ref().exists()) { + let mut other = Self::from_path(path)?; + let path = other.next(); + self.merge_with(other); + self.load_path_rec(path)?; + } + Ok(()) + } +} + +#[derive(Debug, Default, Deserialize, Serialize, Parser)] +#[serde(rename_all = "kebab-case")] +#[command(rename_all = "kebab-case")] +pub struct ClientConfig { + #[arg(short = 'c', long = "config")] + pub config: Option, + #[arg(short = 'h', long = "host")] + pub host: Option, + #[arg(short = 'p', long = "proxy")] + pub proxy: Option, + #[arg(long = "cookie-path")] + pub cookie_path: Option, + #[arg(long = "developer-key-path")] + pub developer_key_path: Option, +} +impl ContextConfig for ClientConfig { + fn next(&mut self) -> Option { + self.config.take() + } + fn merge_with(&mut self, other: Self) { + self.host = self.host.take().or(other.host); + self.proxy = self.proxy.take().or(other.proxy); + self.cookie_path = self.cookie_path.take().or(other.cookie_path); + } +} +impl ClientConfig { + pub fn load(mut self) -> Result { + let path = self.next(); + self.load_path_rec(path)?; + self.load_path_rec(local_config_path())?; + self.load_path_rec(Some(CONFIG_PATH))?; + Ok(self) + } +} + +#[derive(Debug, Clone, Default, Deserialize, Serialize, Parser)] +#[serde(rename_all = "kebab-case")] +#[command(rename_all = "kebab-case")] +pub struct ServerConfig { + #[arg(short = 'c', long = "config")] + pub config: Option, + #[arg(long = "wifi-interface")] + pub wifi_interface: Option, + #[arg(long = "ethernet-interface")] + pub ethernet_interface: Option, + #[arg(skip)] + pub os_partitions: Option, + #[arg(long = "bind-rpc")] + pub bind_rpc: Option, + #[arg(long = "tor-control")] + pub tor_control: Option, + #[arg(long = "tor-socks")] + pub tor_socks: Option, + #[arg(long = "dns-bind")] + pub dns_bind: Option>, + #[arg(long = "revision-cache-size")] + pub revision_cache_size: Option, + #[arg(short = 'd', long = "datadir")] + pub datadir: Option, + #[arg(long = "disable-encryption")] + pub disable_encryption: Option, +} +impl ContextConfig for ServerConfig { + fn next(&mut self) -> Option { + self.config.take() + } + fn merge_with(&mut self, other: Self) { + self.wifi_interface = self.wifi_interface.take().or(other.wifi_interface); + self.ethernet_interface = self.ethernet_interface.take().or(other.ethernet_interface); + self.os_partitions = self.os_partitions.take().or(other.os_partitions); + self.bind_rpc = self.bind_rpc.take().or(other.bind_rpc); + self.tor_control = self.tor_control.take().or(other.tor_control); + self.tor_socks = 
self.tor_socks.take().or(other.tor_socks); + self.dns_bind = self.dns_bind.take().or(other.dns_bind); + self.revision_cache_size = self + .revision_cache_size + .take() + .or(other.revision_cache_size); + self.datadir = self.datadir.take().or(other.datadir); + self.disable_encryption = self.disable_encryption.take().or(other.disable_encryption); + } +} + +impl ServerConfig { + pub fn load(mut self) -> Result { + let path = self.next(); + self.load_path_rec(path)?; + self.load_path_rec(Some(DEVICE_CONFIG_PATH))?; + self.load_path_rec(Some(CONFIG_PATH))?; + Ok(self) + } + pub fn datadir(&self) -> &Path { + self.datadir + .as_deref() + .unwrap_or_else(|| Path::new("/embassy-data")) + } + pub async fn db(&self) -> Result { + let db_path = self.datadir().join("main").join("embassy.db"); + let db = PatchDb::open(&db_path) + .await + .with_ctx(|_| (crate::ErrorKind::Filesystem, db_path.display().to_string()))?; + + Ok(db) + } + #[instrument(skip_all)] + pub async fn secret_store(&self) -> Result { + init_postgres(self.datadir()).await?; + let secret_store = + PgPool::connect_with(PgConnectOptions::new().database("secrets").username("root")) + .await?; + sqlx::migrate!() + .run(&secret_store) + .await + .with_kind(crate::ErrorKind::Database)?; + Ok(secret_store) + } +} diff --git a/core/startos/src/context/diagnostic.rs b/core/startos/src/context/diagnostic.rs index 151948d7c9..117e560611 100644 --- a/core/startos/src/context/diagnostic.rs +++ b/core/startos/src/context/diagnostic.rs @@ -1,47 +1,16 @@ use std::ops::Deref; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::sync::Arc; use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::Context; -use serde::Deserialize; use tokio::sync::broadcast::Sender; use tracing::instrument; +use crate::context::config::ServerConfig; use crate::shutdown::Shutdown; -use crate::util::config::load_config_from_paths; use crate::Error; -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct DiagnosticContextConfig { - pub datadir: Option, -} -impl DiagnosticContextConfig { - #[instrument(skip_all)] - pub async fn load + Send + 'static>(path: Option
<P>
) -> Result { - tokio::task::spawn_blocking(move || { - load_config_from_paths( - path.as_ref() - .into_iter() - .map(|p| p.as_ref()) - .chain(std::iter::once(Path::new( - crate::util::config::DEVICE_CONFIG_PATH, - ))) - .chain(std::iter::once(Path::new(crate::util::config::CONFIG_PATH))), - ) - }) - .await - .unwrap() - } - - pub fn datadir(&self) -> &Path { - self.datadir - .as_deref() - .unwrap_or_else(|| Path::new("/embassy-data")) - } -} - pub struct DiagnosticContextSeed { pub datadir: PathBuf, pub shutdown: Sender>, @@ -53,20 +22,18 @@ pub struct DiagnosticContextSeed { pub struct DiagnosticContext(Arc); impl DiagnosticContext { #[instrument(skip_all)] - pub async fn init + Send + 'static>( - path: Option
<P>
, + pub fn init( + config: &ServerConfig, disk_guid: Option>, error: Error, ) -> Result { tracing::error!("Error: {}: Starting diagnostic UI", error); tracing::debug!("{:?}", error); - let cfg = DiagnosticContextConfig::load(path).await?; - let (shutdown, _) = tokio::sync::broadcast::channel(1); Ok(Self(Arc::new(DiagnosticContextSeed { - datadir: cfg.datadir().to_owned(), + datadir: config.datadir().to_owned(), shutdown, disk_guid, error: Arc::new(error.into()), diff --git a/core/startos/src/context/install.rs b/core/startos/src/context/install.rs index 87484b7e57..d4717d2b05 100644 --- a/core/startos/src/context/install.rs +++ b/core/startos/src/context/install.rs @@ -1,35 +1,13 @@ use std::ops::Deref; -use std::path::Path; use std::sync::Arc; use rpc_toolkit::Context; -use serde::Deserialize; use tokio::sync::broadcast::Sender; use tracing::instrument; use crate::net::utils::find_eth_iface; -use crate::util::config::load_config_from_paths; use crate::Error; -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct InstallContextConfig {} -impl InstallContextConfig { - #[instrument(skip_all)] - pub async fn load + Send + 'static>(path: Option
<P>
) -> Result { - tokio::task::spawn_blocking(move || { - load_config_from_paths( - path.as_ref() - .into_iter() - .map(|p| p.as_ref()) - .chain(std::iter::once(Path::new(crate::util::config::CONFIG_PATH))), - ) - }) - .await - .unwrap() - } -} - pub struct InstallContextSeed { pub ethernet_interface: String, pub shutdown: Sender<()>, @@ -39,8 +17,7 @@ pub struct InstallContextSeed { pub struct InstallContext(Arc); impl InstallContext { #[instrument(skip_all)] - pub async fn init + Send + 'static>(path: Option
<P>
) -> Result { - let _cfg = InstallContextConfig::load(path.as_ref().map(|p| p.as_ref().to_owned())).await?; + pub async fn init() -> Result { let (shutdown, _) = tokio::sync::broadcast::channel(1); Ok(Self(Arc::new(InstallContextSeed { ethernet_interface: find_eth_iface().await?, diff --git a/core/startos/src/context/mod.rs b/core/startos/src/context/mod.rs index c4e8e7757b..77f54f26c9 100644 --- a/core/startos/src/context/mod.rs +++ b/core/startos/src/context/mod.rs @@ -1,44 +1,12 @@ pub mod cli; +pub mod config; pub mod diagnostic; pub mod install; pub mod rpc; -pub mod sdk; pub mod setup; pub use cli::CliContext; pub use diagnostic::DiagnosticContext; pub use install::InstallContext; pub use rpc::RpcContext; -pub use sdk::SdkContext; pub use setup::SetupContext; - -impl From for () { - fn from(_: CliContext) -> Self { - () - } -} -impl From for () { - fn from(_: DiagnosticContext) -> Self { - () - } -} -impl From for () { - fn from(_: RpcContext) -> Self { - () - } -} -impl From for () { - fn from(_: SdkContext) -> Self { - () - } -} -impl From for () { - fn from(_: SetupContext) -> Self { - () - } -} -impl From for () { - fn from(_: InstallContext) -> Self { - () - } -} diff --git a/core/startos/src/context/rpc.rs b/core/startos/src/context/rpc.rs index 5358a59ba2..6450eb561d 100644 --- a/core/startos/src/context/rpc.rs +++ b/core/startos/src/context/rpc.rs @@ -1,107 +1,38 @@ use std::collections::BTreeMap; use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; use std::ops::Deref; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Duration; -use helpers::to_tmp_path; +use imbl_value::InternedString; use josekit::jwk::Jwk; -use patch_db::json_ptr::JsonPointer; use patch_db::PatchDb; -use reqwest::{Client, Proxy, Url}; +use reqwest::{Client, Proxy}; use rpc_toolkit::Context; -use serde::Deserialize; -use sqlx::postgres::PgConnectOptions; -use sqlx::PgPool; use tokio::sync::{broadcast, oneshot, Mutex, RwLock}; use tokio::time::Instant; use tracing::instrument; use super::setup::CURRENT_SECRET; use crate::account::AccountInfo; -use crate::core::rpc_continuations::{RequestGuid, RestHandler, RpcContinuation}; -use crate::db::model::{CurrentDependents, Database, PackageDataEntryMatchModelRef}; +use crate::context::config::ServerConfig; +use crate::core::rpc_continuations::{RequestGuid, RestHandler, RpcContinuation, WebSocketHandler}; use crate::db::prelude::PatchDbExt; use crate::dependencies::compute_dependency_config_errs; use crate::disk::OsPartitionInfo; -use crate::init::{check_time_is_synchronized, init_postgres}; -use crate::install::cleanup::{cleanup_failed, uninstall}; -use crate::manager::ManagerMap; +use crate::init::check_time_is_synchronized; +use crate::lxc::{LxcContainer, LxcManager}; use crate::middleware::auth::HashSessionToken; use crate::net::net_controller::NetController; -use crate::net::ssl::{root_ca_start_time, SslManager}; +use crate::net::utils::find_eth_iface; use crate::net::wifi::WpaCli; -use crate::notifications::NotificationManager; +use crate::prelude::*; +use crate::service::ServiceMap; use crate::shutdown::Shutdown; -use crate::status::MainStatus; use crate::system::get_mem_info; -use crate::util::config::load_config_from_paths; use crate::util::lshw::{lshw, LshwDevice}; -use crate::{Error, ErrorKind, ResultExt}; - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct RpcContextConfig { - pub wifi_interface: Option, - pub 
ethernet_interface: String, - pub os_partitions: OsPartitionInfo, - pub migration_batch_rows: Option, - pub migration_prefetch_rows: Option, - pub bind_rpc: Option, - pub tor_control: Option, - pub tor_socks: Option, - pub dns_bind: Option>, - pub revision_cache_size: Option, - pub datadir: Option, - pub log_server: Option, -} -impl RpcContextConfig { - pub async fn load + Send + 'static>(path: Option
<P>
) -> Result { - tokio::task::spawn_blocking(move || { - load_config_from_paths( - path.as_ref() - .into_iter() - .map(|p| p.as_ref()) - .chain(std::iter::once(Path::new( - crate::util::config::DEVICE_CONFIG_PATH, - ))) - .chain(std::iter::once(Path::new(crate::util::config::CONFIG_PATH))), - ) - }) - .await - .unwrap() - } - pub fn datadir(&self) -> &Path { - self.datadir - .as_deref() - .unwrap_or_else(|| Path::new("/embassy-data")) - } - pub async fn db(&self, account: &AccountInfo) -> Result { - let db_path = self.datadir().join("main").join("embassy.db"); - let db = PatchDb::open(&db_path) - .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, db_path.display().to_string()))?; - if !db.exists(&::default()).await { - db.put(&::default(), &Database::init(account)) - .await?; - } - Ok(db) - } - #[instrument(skip_all)] - pub async fn secret_store(&self) -> Result { - init_postgres(self.datadir()).await?; - let secret_store = - PgPool::connect_with(PgConnectOptions::new().database("secrets").username("root")) - .await?; - sqlx::migrate!() - .run(&secret_store) - .await - .with_kind(crate::ErrorKind::Database)?; - Ok(secret_store) - } -} pub struct RpcContextSeed { is_closed: AtomicBool, @@ -111,14 +42,13 @@ pub struct RpcContextSeed { pub datadir: PathBuf, pub disk_guid: Arc, pub db: PatchDb, - pub secret_store: PgPool, pub account: RwLock, pub net_controller: Arc, - pub managers: ManagerMap, + pub services: ServiceMap, pub metrics_cache: RwLock>, pub shutdown: broadcast::Sender>, pub tor_socks: SocketAddr, - pub notification_manager: NotificationManager, + pub lxc_manager: Arc, pub open_authed_websockets: Mutex>>>, pub rpc_stream_continuations: Mutex>, pub wifi_manager: Option>>, @@ -126,6 +56,11 @@ pub struct RpcContextSeed { pub client: Client, pub hardware: Hardware, pub start_time: Instant, + pub dev: Dev, +} + +pub struct Dev { + pub lxc: Mutex>, } pub struct Hardware { @@ -137,77 +72,95 @@ pub struct Hardware { pub struct RpcContext(Arc); impl RpcContext { #[instrument(skip_all)] - pub async fn init + Send + Sync + 'static>( - cfg_path: Option
<P>
, - disk_guid: Arc, - ) -> Result { - let base = RpcContextConfig::load(cfg_path).await?; + pub async fn init(config: &ServerConfig, disk_guid: Arc) -> Result { tracing::info!("Loaded Config"); - let tor_proxy = base.tor_socks.unwrap_or(SocketAddr::V4(SocketAddrV4::new( + let tor_proxy = config.tor_socks.unwrap_or(SocketAddr::V4(SocketAddrV4::new( Ipv4Addr::new(127, 0, 0, 1), 9050, ))); let (shutdown, _) = tokio::sync::broadcast::channel(1); - let secret_store = base.secret_store().await?; - tracing::info!("Opened Pg DB"); - let account = AccountInfo::load(&secret_store).await?; - let db = base.db(&account).await?; + + let db = config.db().await?; + let peek = db.peek().await; + let account = AccountInfo::load(&peek)?; tracing::info!("Opened PatchDB"); let net_controller = Arc::new( NetController::init( - base.tor_control + db.clone(), + config + .tor_control .unwrap_or(SocketAddr::from(([127, 0, 0, 1], 9051))), tor_proxy, - base.dns_bind + config + .dns_bind .as_deref() .unwrap_or(&[SocketAddr::from(([127, 0, 0, 1], 53))]), - SslManager::new(&account, root_ca_start_time().await?)?, &account.hostname, - &account.key, + account.tor_key.clone(), ) .await?, ); tracing::info!("Initialized Net Controller"); - let managers = ManagerMap::default(); + let services = ServiceMap::default(); let metrics_cache = RwLock::>::new(None); - let notification_manager = NotificationManager::new(secret_store.clone()); tracing::info!("Initialized Notification Manager"); let tor_proxy_url = format!("socks5h://{tor_proxy}"); let devices = lshw().await?; let ram = get_mem_info().await?.total.0 as u64 * 1024 * 1024; - if !db.peek().await.as_server_info().as_ntp_synced().de()? { + if !db + .peek() + .await + .as_public() + .as_server_info() + .as_ntp_synced() + .de()? + { let db = db.clone(); tokio::spawn(async move { while !check_time_is_synchronized().await.unwrap() { tokio::time::sleep(Duration::from_secs(30)).await; } - db.mutate(|v| v.as_server_info_mut().as_ntp_synced_mut().ser(&true)) - .await - .unwrap() + db.mutate(|v| { + v.as_public_mut() + .as_server_info_mut() + .as_ntp_synced_mut() + .ser(&true) + }) + .await + .unwrap() }); } let seed = Arc::new(RpcContextSeed { is_closed: AtomicBool::new(false), - datadir: base.datadir().to_path_buf(), - os_partitions: base.os_partitions, - wifi_interface: base.wifi_interface.clone(), - ethernet_interface: base.ethernet_interface, + datadir: config.datadir().to_path_buf(), + os_partitions: config.os_partitions.clone().ok_or_else(|| { + Error::new( + eyre!("OS Partition Information Missing"), + ErrorKind::Filesystem, + ) + })?, + wifi_interface: config.wifi_interface.clone(), + ethernet_interface: if let Some(eth) = config.ethernet_interface.clone() { + eth + } else { + find_eth_iface().await? 
+ }, disk_guid, db, - secret_store, account: RwLock::new(account), net_controller, - managers, + services, metrics_cache, shutdown, tor_socks: tor_proxy, - notification_manager, + lxc_manager: Arc::new(LxcManager::new()), open_authed_websockets: Mutex::new(BTreeMap::new()), rpc_stream_continuations: Mutex::new(BTreeMap::new()), - wifi_manager: base + wifi_manager: config .wifi_interface + .clone() .map(|i| Arc::new(RwLock::new(WpaCli::init(i)))), current_secret: Arc::new( Jwk::generate_ec_key(josekit::jwk::alg::ec::EcCurve::P256).map_err(|e| { @@ -231,6 +184,9 @@ impl RpcContext { .with_kind(crate::ErrorKind::ParseUrl)?, hardware: Hardware { devices, ram }, start_time: Instant::now(), + dev: Dev { + lxc: Mutex::new(BTreeMap::new()), + }, }); let res = Self(seed.clone()); @@ -241,8 +197,7 @@ impl RpcContext { #[instrument(skip_all)] pub async fn shutdown(self) -> Result<(), Error> { - self.managers.empty().await?; - self.secret_store.close().await; + self.services.shutdown_all().await?; self.is_closed.store(true, Ordering::SeqCst); tracing::info!("RPC Context is shutdown"); // TODO: shutdown http servers @@ -251,139 +206,33 @@ impl RpcContext { #[instrument(skip(self))] pub async fn cleanup_and_initialize(&self) -> Result<(), Error> { - self.db - .mutate(|f| { - let mut current_dependents = f - .as_package_data() - .keys()? - .into_iter() - .map(|k| (k.clone(), BTreeMap::new())) - .collect::>(); - for (package_id, package) in f.as_package_data_mut().as_entries_mut()? { - for (k, v) in package - .as_installed_mut() - .into_iter() - .flat_map(|i| i.clone().into_current_dependencies().into_entries()) - .flatten() - { - let mut entry: BTreeMap<_, _> = - current_dependents.remove(&k).unwrap_or_default(); - entry.insert(package_id.clone(), v.de()?); - current_dependents.insert(k, entry); - } - } - for (package_id, current_dependents) in current_dependents { - if let Some(deps) = f - .as_package_data_mut() - .as_idx_mut(&package_id) - .and_then(|pde| pde.expect_as_installed_mut().ok()) - .map(|i| i.as_installed_mut().as_current_dependents_mut()) - { - deps.ser(&CurrentDependents(current_dependents))?; - } else if let Some(deps) = f - .as_package_data_mut() - .as_idx_mut(&package_id) - .and_then(|pde| pde.expect_as_removing_mut().ok()) - .map(|i| i.as_removing_mut().as_current_dependents_mut()) - { - deps.ser(&CurrentDependents(current_dependents))?; - } - } - Ok(()) - }) - .await?; - - let peek = self.db.peek().await; - - for (package_id, package) in peek.as_package_data().as_entries()?.into_iter() { - let action = match package.as_match() { - PackageDataEntryMatchModelRef::Installing(_) - | PackageDataEntryMatchModelRef::Restoring(_) - | PackageDataEntryMatchModelRef::Updating(_) => { - cleanup_failed(self, &package_id).await - } - PackageDataEntryMatchModelRef::Removing(_) => { - uninstall( - self, - self.secret_store.acquire().await?.as_mut(), - &package_id, - ) - .await - } - PackageDataEntryMatchModelRef::Installed(m) => { - let version = m.as_manifest().as_version().clone().de()?; - let volumes = m.as_manifest().as_volumes().de()?; - for (volume_id, volume_info) in &*volumes { - let tmp_path = to_tmp_path(volume_info.path_for( - &self.datadir, - &package_id, - &version, - volume_id, - )) - .with_kind(ErrorKind::Filesystem)?; - if tokio::fs::metadata(&tmp_path).await.is_ok() { - tokio::fs::remove_dir_all(&tmp_path).await?; - } - } - Ok(()) - } - _ => continue, - }; - if let Err(e) = action { - tracing::error!("Failed to clean up package {}: {}", package_id, e); - tracing::debug!("{:?}", e); - 
} - } - let peek = self - .db - .mutate(|v| { - for (_, pde) in v.as_package_data_mut().as_entries_mut()? { - let status = pde - .expect_as_installed_mut()? - .as_installed_mut() - .as_status_mut() - .as_main_mut(); - let running = status.clone().de()?.running(); - status.ser(&if running { - MainStatus::Starting - } else { - MainStatus::Stopped - })?; - } - Ok(v.clone()) - }) - .await?; - self.managers.init(self.clone(), peek.clone()).await?; + self.services.init(&self).await?; tracing::info!("Initialized Package Managers"); let mut all_dependency_config_errs = BTreeMap::new(); - for (package_id, package) in peek.as_package_data().as_entries()?.into_iter() { + let peek = self.db.peek().await; + for (package_id, package) in peek.as_public().as_package_data().as_entries()?.into_iter() { let package = package.clone(); - if let Some(current_dependencies) = package - .as_installed() - .and_then(|x| x.as_current_dependencies().de().ok()) - { - let manifest = package.as_manifest().de()?; - all_dependency_config_errs.insert( - package_id.clone(), - compute_dependency_config_errs( - self, - &peek, - &manifest, - ¤t_dependencies, - &Default::default(), - ) - .await?, - ); - } + let current_dependencies = package.as_current_dependencies().de()?; + all_dependency_config_errs.insert( + package_id.clone(), + compute_dependency_config_errs( + self, + &peek, + &package_id, + ¤t_dependencies, + &Default::default(), + ) + .await?, + ); } self.db .mutate(|v| { for (package_id, errs) in all_dependency_config_errs { if let Some(config_errors) = v + .as_public_mut() .as_package_data_mut() .as_idx_mut(&package_id) - .and_then(|pde| pde.as_installed_mut()) .map(|i| i.as_status_mut().as_dependency_config_errors_mut()) { config_errors.ser(&errs)?; @@ -419,33 +268,30 @@ impl RpcContext { .insert(guid, handler); } - pub async fn get_continuation_handler(&self, guid: &RequestGuid) -> Option { + pub async fn get_ws_continuation_handler( + &self, + guid: &RequestGuid, + ) -> Option { let mut continuations = self.rpc_stream_continuations.lock().await; - if let Some(cont) = continuations.remove(guid) { - cont.into_handler().await - } else { - None - } - } - - pub async fn get_ws_continuation_handler(&self, guid: &RequestGuid) -> Option { - let continuations = self.rpc_stream_continuations.lock().await; - if matches!(continuations.get(guid), Some(RpcContinuation::WebSocket(_))) { - drop(continuations); - self.get_continuation_handler(guid).await - } else { - None + if !matches!(continuations.get(guid), Some(RpcContinuation::WebSocket(_))) { + return None; } + let Some(RpcContinuation::WebSocket(x)) = continuations.remove(guid) else { + return None; + }; + x.get().await } pub async fn get_rest_continuation_handler(&self, guid: &RequestGuid) -> Option { - let continuations = self.rpc_stream_continuations.lock().await; - if matches!(continuations.get(guid), Some(RpcContinuation::Rest(_))) { - drop(continuations); - self.get_continuation_handler(guid).await - } else { - None + let mut continuations: tokio::sync::MutexGuard<'_, BTreeMap> = + self.rpc_stream_continuations.lock().await; + if !matches!(continuations.get(guid), Some(RpcContinuation::Rest(_))) { + return None; } + let Some(RpcContinuation::Rest(x)) = continuations.remove(guid) else { + return None; + }; + x.get().await } } impl AsRef for RpcContext { diff --git a/core/startos/src/context/sdk.rs b/core/startos/src/context/sdk.rs index 7ba7a6bfa2..fb5d995721 100644 --- a/core/startos/src/context/sdk.rs +++ b/core/startos/src/context/sdk.rs @@ -8,13 +8,6 @@ use 
serde::Deserialize; use tracing::instrument; use crate::prelude::*; -use crate::util::config::{load_config_from_paths, local_config_path}; - -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct SdkContextConfig { - pub developer_key_path: Option, -} #[derive(Debug)] pub struct SdkContextSeed { @@ -26,7 +19,7 @@ pub struct SdkContext(Arc); impl SdkContext { /// BLOCKING #[instrument(skip_all)] - pub fn init(matches: &ArgMatches) -> Result { + pub fn init(config: ) -> Result { let local_config_path = local_config_path(); let base: SdkContextConfig = load_config_from_paths( matches @@ -48,24 +41,7 @@ impl SdkContext { }), }))) } - /// BLOCKING - #[instrument(skip_all)] - pub fn developer_key(&self) -> Result { - if !self.developer_key_path.exists() { - return Err(Error::new(eyre!("Developer Key does not exist! Please run `start-sdk init` before running this command."), crate::ErrorKind::Uninitialized)); - } - let pair = ::from_pkcs8_pem( - &std::fs::read_to_string(&self.developer_key_path)?, - ) - .with_kind(crate::ErrorKind::Pem)?; - let secret = ed25519_dalek::SecretKey::try_from(&pair.secret_key[..]).map_err(|_| { - Error::new( - eyre!("pkcs8 key is of incorrect length"), - ErrorKind::OpenSsl, - ) - })?; - Ok(secret.into()) - } + } impl std::ops::Deref for SdkContext { type Target = SdkContextSeed; diff --git a/core/startos/src/context/setup.rs b/core/startos/src/context/setup.rs index 7ae161b01d..013dc060b0 100644 --- a/core/startos/src/context/setup.rs +++ b/core/startos/src/context/setup.rs @@ -1,9 +1,8 @@ use std::ops::Deref; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::sync::Arc; use josekit::jwk::Jwk; -use patch_db::json_ptr::JsonPointer; use patch_db::PatchDb; use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::Context; @@ -14,13 +13,11 @@ use tokio::sync::broadcast::Sender; use tokio::sync::RwLock; use tracing::instrument; -use crate::account::AccountInfo; -use crate::db::model::Database; +use crate::context::config::ServerConfig; use crate::disk::OsPartitionInfo; use crate::init::init_postgres; +use crate::prelude::*; use crate::setup::SetupStatus; -use crate::util::config::load_config_from_paths; -use crate::{Error, ResultExt}; lazy_static::lazy_static! { pub static ref CURRENT_SECRET: Jwk = Jwk::generate_ec_key(josekit::jwk::alg::ec::EcCurve::P256).unwrap_or_else(|e| { @@ -31,52 +28,16 @@ lazy_static::lazy_static! { } #[derive(Clone, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct SetupResult { pub tor_address: String, pub lan_address: String, pub root_ca: String, } -#[derive(Debug, Default, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub struct SetupContextConfig { - pub os_partitions: OsPartitionInfo, - pub migration_batch_rows: Option, - pub migration_prefetch_rows: Option, - pub datadir: Option, - #[serde(default)] - pub disable_encryption: bool, -} -impl SetupContextConfig { - #[instrument(skip_all)] - pub async fn load + Send + 'static>(path: Option
) -> Result<Self, Error> {
-        tokio::task::spawn_blocking(move || {
-            load_config_from_paths(
-                path.as_ref()
-                    .into_iter()
-                    .map(|p| p.as_ref())
-                    .chain(std::iter::once(Path::new(
-                        crate::util::config::DEVICE_CONFIG_PATH,
-                    )))
-                    .chain(std::iter::once(Path::new(crate::util::config::CONFIG_PATH))),
-            )
-        })
-        .await
-        .unwrap()
-    }
-    pub fn datadir(&self) -> &Path {
-        self.datadir
-            .as_deref()
-            .unwrap_or_else(|| Path::new("/embassy-data"))
-    }
-}
-
 pub struct SetupContextSeed {
+    pub config: ServerConfig,
     pub os_partitions: OsPartitionInfo,
-    pub config_path: Option<PathBuf>,
-    pub migration_batch_rows: usize,
-    pub migration_prefetch_rows: usize,
     pub disable_encryption: bool,
     pub shutdown: Sender<()>,
     pub datadir: PathBuf,
@@ -96,16 +57,18 @@ impl AsRef for SetupContextSeed {
 pub struct SetupContext(Arc<SetupContextSeed>);
 impl SetupContext {
     #[instrument(skip_all)]
-    pub async fn init<P: AsRef<Path> + Send + 'static>(path: Option<P>
) -> Result { - let cfg = SetupContextConfig::load(path.as_ref().map(|p| p.as_ref().to_owned())).await?; + pub fn init(config: &ServerConfig) -> Result { let (shutdown, _) = tokio::sync::broadcast::channel(1); - let datadir = cfg.datadir().to_owned(); + let datadir = config.datadir().to_owned(); Ok(Self(Arc::new(SetupContextSeed { - os_partitions: cfg.os_partitions, - config_path: path.as_ref().map(|p| p.as_ref().to_owned()), - migration_batch_rows: cfg.migration_batch_rows.unwrap_or(25000), - migration_prefetch_rows: cfg.migration_prefetch_rows.unwrap_or(100_000), - disable_encryption: cfg.disable_encryption, + config: config.clone(), + os_partitions: config.os_partitions.clone().ok_or_else(|| { + Error::new( + eyre!("missing required configuration: `os-partitions`"), + ErrorKind::NotFound, + ) + })?, + disable_encryption: config.disable_encryption.unwrap_or(false), shutdown, datadir, selected_v2_drive: RwLock::new(None), @@ -115,15 +78,11 @@ impl SetupContext { }))) } #[instrument(skip_all)] - pub async fn db(&self, account: &AccountInfo) -> Result { + pub async fn db(&self) -> Result { let db_path = self.datadir.join("main").join("embassy.db"); let db = PatchDb::open(&db_path) .await .with_ctx(|_| (crate::ErrorKind::Filesystem, db_path.display().to_string()))?; - if !db.exists(&::default()).await { - db.put(&::default(), &Database::init(account)) - .await?; - } Ok(db) } #[instrument(skip_all)] diff --git a/core/startos/src/control.rs b/core/startos/src/control.rs index 58e39ac14a..e37c88ee4d 100644 --- a/core/startos/src/control.rs +++ b/core/startos/src/control.rs @@ -1,92 +1,55 @@ +use clap::Parser; use color_eyre::eyre::eyre; +use models::PackageId; use rpc_toolkit::command; +use serde::{Deserialize, Serialize}; use tracing::instrument; use crate::context::RpcContext; use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::status::MainStatus; -use crate::util::display_none; use crate::Error; -#[command(display(display_none), metadata(sync_db = true))] -#[instrument(skip_all)] -pub async fn start(#[context] ctx: RpcContext, #[arg] id: PackageId) -> Result<(), Error> { - let peek = ctx.db.peek().await; - let version = peek - .as_package_data() - .as_idx(&id) - .or_not_found(&id)? - .as_installed() - .or_not_found(&id)? - .as_manifest() - .as_version() - .de()?; +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ControlParams { + pub id: PackageId, +} - ctx.managers - .get(&(id, version)) +#[instrument(skip_all)] +pub async fn start(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> { + ctx.services + .get(&id) .await - .ok_or_else(|| Error::new(eyre!("Manager not found"), crate::ErrorKind::InvalidRequest))? + .as_ref() + .or_not_found(lazy_format!("Manager for {id}"))? .start() - .await; + .await?; Ok(()) } -#[command(display(display_none), metadata(sync_db = true))] -pub async fn stop(#[context] ctx: RpcContext, #[arg] id: PackageId) -> Result { - let peek = ctx.db.peek().await; - let version = peek - .as_package_data() - .as_idx(&id) - .or_not_found(&id)? - .as_installed() - .or_not_found(&id)? - .as_manifest() - .as_version() - .de()?; - - let last_statuts = ctx - .db - .mutate(|v| { - v.as_package_data_mut() - .as_idx_mut(&id) - .and_then(|x| x.as_installed_mut()) - .ok_or_else(|| Error::new(eyre!("{} is not installed", id), ErrorKind::NotFound))? 
- .as_status_mut() - .as_main_mut() - .replace(&MainStatus::Stopping) - }) - .await?; - - ctx.managers - .get(&(id, version)) +pub async fn stop(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> { + // TODO: why did this return last_status before? + ctx.services + .get(&id) .await + .as_ref() .ok_or_else(|| Error::new(eyre!("Manager not found"), crate::ErrorKind::InvalidRequest))? .stop() - .await; + .await?; - Ok(last_statuts) + Ok(()) } -#[command(display(display_none), metadata(sync_db = true))] -pub async fn restart(#[context] ctx: RpcContext, #[arg] id: PackageId) -> Result<(), Error> { - let peek = ctx.db.peek().await; - let version = peek - .as_package_data() - .as_idx(&id) - .or_not_found(&id)? - .expect_as_installed()? - .as_manifest() - .as_version() - .de()?; - - ctx.managers - .get(&(id, version)) +pub async fn restart(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> { + ctx.services + .get(&id) .await + .as_ref() .ok_or_else(|| Error::new(eyre!("Manager not found"), crate::ErrorKind::InvalidRequest))? .restart() - .await; + .await?; Ok(()) } diff --git a/core/startos/src/core/rpc_continuations.rs b/core/startos/src/core/rpc_continuations.rs index 45a1c1b059..9a82cb1fe5 100644 --- a/core/startos/src/core/rpc_continuations.rs +++ b/core/startos/src/core/rpc_continuations.rs @@ -1,27 +1,21 @@ -use std::sync::Arc; use std::time::Duration; +use axum::extract::ws::WebSocket; +use axum::extract::Request; +use axum::response::Response; use futures::future::BoxFuture; -use futures::FutureExt; use helpers::TimedResource; -use hyper::upgrade::Upgraded; -use hyper::{Body, Error as HyperError, Request, Response}; -use rand::RngCore; -use tokio::task::JoinError; -use tokio_tungstenite::WebSocketStream; +use imbl_value::InternedString; -use crate::{Error, ResultExt}; +#[allow(unused_imports)] +use crate::prelude::*; +use crate::util::new_guid; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)] -pub struct RequestGuid = String>(Arc); +pub struct RequestGuid(InternedString); impl RequestGuid { pub fn new() -> Self { - let mut buf = [0; 40]; - rand::thread_rng().fill_bytes(&mut buf); - RequestGuid(Arc::new(base32::encode( - base32::Alphabet::RFC4648 { padding: false }, - &buf, - ))) + Self(new_guid()) } pub fn from(r: &str) -> Option { @@ -33,9 +27,15 @@ impl RequestGuid { return None; } } - Some(RequestGuid(Arc::new(r.to_owned()))) + Some(RequestGuid(InternedString::intern(r))) } } +impl AsRef for RequestGuid { + fn as_ref(&self) -> &str { + self.0.as_ref() + } +} + #[test] fn parse_guid() { println!( @@ -44,22 +44,16 @@ fn parse_guid() { ) } -impl> std::fmt::Display for RequestGuid { +impl std::fmt::Display for RequestGuid { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - (&*self.0).as_ref().fmt(f) + self.0.fmt(f) } } -pub type RestHandler = Box< - dyn FnOnce(Request) -> BoxFuture<'static, Result, crate::Error>> + Send, ->; +pub type RestHandler = + Box BoxFuture<'static, Result> + Send>; -pub type WebSocketHandler = Box< - dyn FnOnce( - BoxFuture<'static, Result, HyperError>, JoinError>>, - ) -> BoxFuture<'static, Result<(), Error>> - + Send, ->; +pub type WebSocketHandler = Box BoxFuture<'static, ()> + Send>; pub enum RpcContinuation { Rest(TimedResource), @@ -78,39 +72,4 @@ impl RpcContinuation { RpcContinuation::WebSocket(a) => a.is_timed_out(), } } - pub async fn into_handler(self) -> Option { - match self { - RpcContinuation::Rest(handler) => handler.get().await, - 
RpcContinuation::WebSocket(handler) => { - if let Some(handler) = handler.get().await { - Some(Box::new( - |req: Request| -> BoxFuture<'static, Result, Error>> { - async move { - let (parts, body) = req.into_parts(); - let req = Request::from_parts(parts, body); - let (res, ws_fut) = hyper_ws_listener::create_ws(req) - .with_kind(crate::ErrorKind::Network)?; - if let Some(ws_fut) = ws_fut { - tokio::task::spawn(async move { - match handler(ws_fut.boxed()).await { - Ok(()) => (), - Err(e) => { - tracing::error!("WebSocket Closed: {}", e); - tracing::debug!("{:?}", e); - } - } - }); - } - - Ok(res) - } - .boxed() - }, - )) - } else { - None - } - } - } - } } diff --git a/core/startos/src/db/mod.rs b/core/startos/src/db/mod.rs index 03ad94338f..99602ba5a3 100644 --- a/core/startos/src/db/mod.rs +++ b/core/startos/src/db/mod.rs @@ -1,61 +1,56 @@ pub mod model; -pub mod package; pub mod prelude; -use std::future::Future; use std::path::PathBuf; use std::sync::Arc; -use futures::{FutureExt, SinkExt, StreamExt}; -use patch_db::json_ptr::JsonPointer; +use axum::extract::ws::{self, WebSocket}; +use axum::extract::WebSocketUpgrade; +use axum::response::Response; +use clap::Parser; +use futures::{FutureExt, StreamExt}; +use http::header::COOKIE; +use http::HeaderMap; +use patch_db::json_ptr::{JsonPointer, ROOT}; use patch_db::{Dump, Revision}; -use rpc_toolkit::command; -use rpc_toolkit::hyper::upgrade::Upgraded; -use rpc_toolkit::hyper::{Body, Error as HyperError, Request, Response}; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{command, from_fn_async, CallRemote, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use serde_json::Value; use tokio::sync::oneshot; -use tokio::task::JoinError; -use tokio_tungstenite::tungstenite::protocol::frame::coding::CloseCode; -use tokio_tungstenite::tungstenite::protocol::CloseFrame; -use tokio_tungstenite::tungstenite::Message; -use tokio_tungstenite::WebSocketStream; use tracing::instrument; use crate::context::{CliContext, RpcContext}; use crate::middleware::auth::{HasValidSession, HashSessionToken}; use crate::prelude::*; -use crate::util::display_none; -use crate::util::serde::{display_serializable, IoFormat}; +use crate::util::serde::{apply_expr, HandlerExtSerde}; + +lazy_static::lazy_static! { + static ref PUBLIC: JsonPointer = "/public".parse().unwrap(); +} #[instrument(skip_all)] -async fn ws_handler< - WSFut: Future, HyperError>, JoinError>>, ->( +async fn ws_handler( ctx: RpcContext, session: Option<(HasValidSession, HashSessionToken)>, - ws_fut: WSFut, + mut stream: WebSocket, ) -> Result<(), Error> { - let (dump, sub) = ctx.db.dump_and_sub().await; - let mut stream = ws_fut - .await - .with_kind(ErrorKind::Network)? 
- .with_kind(ErrorKind::Unknown)?; + let (dump, sub) = ctx.db.dump_and_sub(PUBLIC.clone()).await; if let Some((session, token)) = session { let kill = subscribe_to_session_kill(&ctx, token).await; - send_dump(session, &mut stream, dump).await?; + send_dump(session.clone(), &mut stream, dump).await?; deal_with_messages(session, kill, sub, stream).await?; } else { stream - .close(Some(CloseFrame { - code: CloseCode::Error, + .send(ws::Message::Close(Some(ws::CloseFrame { + code: ws::close_code::ERROR, reason: "UNAUTHORIZED".into(), - })) + }))) .await .with_kind(ErrorKind::Network)?; + drop(stream); } Ok(()) @@ -80,7 +75,7 @@ async fn deal_with_messages( _has_valid_authentication: HasValidSession, mut kill: oneshot::Receiver<()>, mut sub: patch_db::Subscriber, - mut stream: WebSocketStream, + mut stream: WebSocket, ) -> Result<(), Error> { let mut timer = tokio::time::interval(tokio::time::Duration::from_secs(5)); @@ -89,18 +84,18 @@ async fn deal_with_messages( _ = (&mut kill).fuse() => { tracing::info!("Closing WebSocket: Reason: Session Terminated"); stream - .close(Some(CloseFrame { - code: CloseCode::Error, - reason: "UNAUTHORIZED".into(), - })) - .await - .with_kind(ErrorKind::Network)?; + .send(ws::Message::Close(Some(ws::CloseFrame { + code: ws::close_code::ERROR, + reason: "UNAUTHORIZED".into(), + }))).await + .with_kind(ErrorKind::Network)?; + drop(stream); return Ok(()) } new_rev = sub.recv().fuse() => { let rev = new_rev.expect("UNREACHABLE: patch-db is dropped"); stream - .send(Message::Text(serde_json::to_string(&rev).with_kind(ErrorKind::Serialization)?)) + .send(ws::Message::Text(serde_json::to_string(&rev).with_kind(ErrorKind::Serialization)?)) .await .with_kind(ErrorKind::Network)?; } @@ -117,7 +112,7 @@ async fn deal_with_messages( // This is trying to give a health checks to the home to keep the ui alive. 
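                // Keep-alive: the timer arm below fires every 5 seconds and sends a WebSocket ping so the UI's idle connection is not dropped.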
_ = timer.tick().fuse() => { stream - .send(Message::Ping(vec![])) + .send(ws::Message::Ping(vec![])) .await .with_kind(crate::ErrorKind::Network)?; } @@ -127,11 +122,11 @@ async fn deal_with_messages( async fn send_dump( _has_valid_authentication: HasValidSession, - stream: &mut WebSocketStream, + stream: &mut WebSocket, dump: Dump, ) -> Result<(), Error> { stream - .send(Message::Text( + .send(ws::Message::Text( serde_json::to_string(&dump).with_kind(ErrorKind::Serialization)?, )) .await @@ -139,11 +134,14 @@ async fn send_dump( Ok(()) } -pub async fn subscribe(ctx: RpcContext, req: Request) -> Result, Error> { - let (parts, body) = req.into_parts(); +pub async fn subscribe( + ctx: RpcContext, + headers: HeaderMap, + ws: WebSocketUpgrade, +) -> Result { let session = match async { - let token = HashSessionToken::from_request_parts(&parts)?; - let session = HasValidSession::from_request_parts(&parts, &ctx).await?; + let token = HashSessionToken::from_header(headers.get(COOKIE))?; + let session = HasValidSession::from_header(headers.get(COOKIE), &ctx).await?; Ok::<_, Error>((session, token)) } .await @@ -157,26 +155,24 @@ pub async fn subscribe(ctx: RpcContext, req: Request) -> Result (), - Err(e) => { - tracing::error!("WebSocket Closed: {}", e); - tracing::debug!("{:?}", e); - } + Ok(ws.on_upgrade(|ws| async move { + match ws_handler(ctx, session, ws).await { + Ok(()) => (), + Err(e) => { + tracing::error!("WebSocket Closed: {}", e); + tracing::debug!("{:?}", e); } - }); - } - - Ok(res) + } + })) } -#[command(subcommands(dump, put, apply))] -pub fn db() -> Result<(), RpcError> { - Ok(()) +pub fn db() -> ParentHandler { + ParentHandler::new() + .subcommand("dump", from_fn_async(cli_dump).with_display_serializable()) + .subcommand("dump", from_fn_async(dump).no_cli()) + .subcommand("put", put()) + .subcommand("apply", from_fn_async(cli_apply).no_display()) + .subcommand("apply", from_fn_async(apply).no_cli()) } #[derive(Deserialize, Serialize)] @@ -186,97 +182,64 @@ pub enum RevisionsRes { Dump(Dump), } +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct CliDumpParams { + #[arg(long = "include-private", short = 'p')] + #[serde(default)] + include_private: bool, + path: Option, +} + #[instrument(skip_all)] async fn cli_dump( ctx: CliContext, - _format: Option, - path: Option, + CliDumpParams { + path, + include_private, + }: CliDumpParams, ) -> Result { let dump = if let Some(path) = path { - PatchDb::open(path).await?.dump().await + PatchDb::open(path).await?.dump(&ROOT).await } else { - rpc_toolkit::command_helpers::call_remote( - ctx, - "db.dump", - serde_json::json!({}), - std::marker::PhantomData::, - ) - .await? - .result? + from_value::( + ctx.call_remote( + "db.dump", + imbl_value::json!({ "includePrivate":include_private }), + ) + .await?, + )? 
}; Ok(dump) } -#[command( - custom_cli(cli_dump(async, context(CliContext))), - display(display_serializable) -)] -pub async fn dump( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, - #[allow(unused_variables)] - #[arg] - path: Option, -) -> Result { - Ok(ctx.db.dump().await) +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct DumpParams { + #[arg(long = "include-private", short = 'p')] + #[serde(default)] + include_private: bool, } -fn apply_expr(input: jaq_core::Val, expr: &str) -> Result { - let (expr, errs) = jaq_core::parse::parse(expr, jaq_core::parse::main()); - - let Some(expr) = expr else { - return Err(Error::new( - eyre!("Failed to parse expression: {:?}", errs), - crate::ErrorKind::InvalidRequest, - )); - }; - - let mut errs = Vec::new(); - - let mut defs = jaq_core::Definitions::core(); - for def in jaq_std::std() { - defs.insert(def, &mut errs); - } - - let filter = defs.finish(expr, Vec::new(), &mut errs); - - if !errs.is_empty() { - return Err(Error::new( - eyre!("Failed to compile expression: {:?}", errs), - crate::ErrorKind::InvalidRequest, - )); - }; - - let inputs = jaq_core::RcIter::new(std::iter::empty()); - let mut res_iter = filter.run(jaq_core::Ctx::new([], &inputs), input); - - let Some(res) = res_iter - .next() - .transpose() - .map_err(|e| eyre!("{e}")) - .with_kind(crate::ErrorKind::Deserialization)? - else { - return Err(Error::new( - eyre!("expr returned no results"), - crate::ErrorKind::InvalidRequest, - )); - }; - - if res_iter.next().is_some() { - return Err(Error::new( - eyre!("expr returned too many results"), - crate::ErrorKind::InvalidRequest, - )); - } - - Ok(res) +pub async fn dump( + ctx: RpcContext, + DumpParams { include_private }: DumpParams, +) -> Result { + Ok(if include_private { + ctx.db.dump(&ROOT).await + } else { + ctx.db.dump(&PUBLIC).await + }) } #[instrument(skip_all)] -async fn cli_apply(ctx: CliContext, expr: String, path: Option) -> Result<(), RpcError> { +async fn cli_apply( + ctx: CliContext, + ApplyParams { expr, path }: ApplyParams, +) -> Result<(), RpcError> { if let Some(path) = path { PatchDb::open(path) .await? @@ -301,30 +264,22 @@ async fn cli_apply(ctx: CliContext, expr: String, path: Option) -> Resu }) .await?; } else { - rpc_toolkit::command_helpers::call_remote( - ctx, - "db.apply", - serde_json::json!({ "expr": expr }), - std::marker::PhantomData::<()>, - ) - .await? - .result?; + ctx.call_remote("db.apply", imbl_value::json!({ "expr": expr })) + .await?; } Ok(()) } -#[command( - custom_cli(cli_apply(async, context(CliContext))), - display(display_none) -)] -pub async fn apply( - #[context] ctx: RpcContext, - #[arg] expr: String, - #[allow(unused_variables)] - #[arg] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ApplyParams { + expr: String, path: Option, -) -> Result<(), Error> { +} + +pub async fn apply(ctx: RpcContext, ApplyParams { expr, .. 
}: ApplyParams) -> Result<(), Error> { ctx.db .mutate(|db| { let res = apply_expr( @@ -346,21 +301,25 @@ pub async fn apply( .await } -#[command(subcommands(ui))] -pub fn put() -> Result<(), RpcError> { - Ok(()) +pub fn put() -> ParentHandler { + ParentHandler::new().subcommand( + "ui", + from_fn_async(ui) + .with_display_serializable() + .with_remote_cli::(), + ) +} +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct UiParams { + pointer: JsonPointer, + value: Value, } -#[command(display(display_serializable))] +// #[command(display(display_serializable))] #[instrument(skip_all)] -pub async fn ui( - #[context] ctx: RpcContext, - #[arg] pointer: JsonPointer, - #[arg] value: Value, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result<(), Error> { +pub async fn ui(ctx: RpcContext, UiParams { pointer, value, .. }: UiParams) -> Result<(), Error> { let ptr = "/ui" .parse::() .with_kind(ErrorKind::Database)? diff --git a/core/startos/src/db/model.rs b/core/startos/src/db/model.rs deleted file mode 100644 index 344d5abb37..0000000000 --- a/core/startos/src/db/model.rs +++ /dev/null @@ -1,530 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; -use std::net::{Ipv4Addr, Ipv6Addr}; -use std::sync::Arc; - -use chrono::{DateTime, Utc}; -use emver::VersionRange; -use imbl_value::InternedString; -use ipnet::{Ipv4Net, Ipv6Net}; -use isocountry::CountryCode; -use itertools::Itertools; -use models::{DataUrl, HealthCheckId, InterfaceId}; -use openssl::hash::MessageDigest; -use patch_db::{HasModel, Value}; -use reqwest::Url; -use serde::{Deserialize, Serialize}; -use ssh_key::public::Ed25519PublicKey; - -use crate::account::AccountInfo; -use crate::config::spec::PackagePointerSpec; -use crate::install::progress::InstallProgress; -use crate::net::utils::{get_iface_ipv4_addr, get_iface_ipv6_addr}; -use crate::prelude::*; -use crate::s9pk::manifest::{Manifest, PackageId}; -use crate::status::Status; -use crate::util::cpupower::{Governor}; -use crate::util::Version; -use crate::version::{Current, VersionT}; -use crate::{ARCH, PLATFORM}; - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -// #[macro_debug] -pub struct Database { - pub server_info: ServerInfo, - pub package_data: AllPackageData, - pub ui: Value, -} -impl Database { - pub fn init(account: &AccountInfo) -> Self { - let lan_address = account.hostname.lan_address().parse().unwrap(); - Database { - server_info: ServerInfo { - arch: get_arch(), - platform: get_platform(), - id: account.server_id.clone(), - version: Current::new().semver().into(), - hostname: account.hostname.no_dot_host_name(), - last_backup: None, - last_wifi_region: None, - eos_version_compat: Current::new().compat().clone(), - lan_address, - tor_address: format!("https://{}", account.key.tor_address()) - .parse() - .unwrap(), - ip_info: BTreeMap::new(), - status_info: ServerStatus { - backup_progress: None, - updated: false, - update_progress: None, - shutting_down: false, - restarting: false, - }, - wifi: WifiInfo { - ssids: Vec::new(), - connected: None, - selected: None, - }, - unread_notification_count: 0, - connection_addresses: ConnectionAddresses { - tor: Vec::new(), - clearnet: Vec::new(), - }, - password_hash: account.password.clone(), - pubkey: ssh_key::PublicKey::from(Ed25519PublicKey::from(&account.key.ssh_key())) - .to_openssh() - .unwrap(), - ca_fingerprint: account - .root_ca_cert - 
.digest(MessageDigest::sha256()) - .unwrap() - .iter() - .map(|x| format!("{x:X}")) - .join(":"), - ntp_synced: false, - zram: true, - governor: None, - }, - package_data: AllPackageData::default(), - ui: serde_json::from_str(include_str!(concat!( - env!("CARGO_MANIFEST_DIR"), - "/../../web/patchdb-ui-seed.json" - ))) - .unwrap(), - } - } -} - -pub type DatabaseModel = Model; - -fn get_arch() -> InternedString { - (*ARCH).into() -} - -fn get_platform() -> InternedString { - (&*PLATFORM).into() -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct ServerInfo { - #[serde(default = "get_arch")] - pub arch: InternedString, - #[serde(default = "get_platform")] - pub platform: InternedString, - pub id: String, - pub hostname: String, - pub version: Version, - pub last_backup: Option>, - /// Used in the wifi to determine the region to set the system to - pub last_wifi_region: Option, - pub eos_version_compat: VersionRange, - pub lan_address: Url, - pub tor_address: Url, - pub ip_info: BTreeMap, - #[serde(default)] - pub status_info: ServerStatus, - pub wifi: WifiInfo, - pub unread_notification_count: u64, - pub connection_addresses: ConnectionAddresses, - pub password_hash: String, - pub pubkey: String, - pub ca_fingerprint: String, - #[serde(default)] - pub ntp_synced: bool, - #[serde(default)] - pub zram: bool, - pub governor: Option, -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct IpInfo { - pub ipv4_range: Option, - pub ipv4: Option, - pub ipv6_range: Option, - pub ipv6: Option, -} -impl IpInfo { - pub async fn for_interface(iface: &str) -> Result { - let (ipv4, ipv4_range) = get_iface_ipv4_addr(iface).await?.unzip(); - let (ipv6, ipv6_range) = get_iface_ipv6_addr(iface).await?.unzip(); - Ok(Self { - ipv4_range, - ipv4, - ipv6_range, - ipv6, - }) - } -} - -#[derive(Debug, Default, Deserialize, Serialize, HasModel)] -#[model = "Model"] -pub struct BackupProgress { - pub complete: bool, -} - -#[derive(Debug, Default, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct ServerStatus { - pub backup_progress: Option>, - pub updated: bool, - pub update_progress: Option, - #[serde(default)] - pub shutting_down: bool, - #[serde(default)] - pub restarting: bool, -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct UpdateProgress { - pub size: Option, - pub downloaded: u64, -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct WifiInfo { - pub ssids: Vec, - pub selected: Option, - pub connected: Option, -} - -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct ServerSpecs { - pub cpu: String, - pub disk: String, - pub memory: String, -} - -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct ConnectionAddresses { - pub tor: Vec, - pub clearnet: Vec, -} - -#[derive(Debug, Default, Deserialize, Serialize)] -pub struct AllPackageData(pub BTreeMap); -impl Map for AllPackageData { - type Key = PackageId; - type Value = PackageDataEntry; -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct StaticFiles { - license: String, - instructions: String, - icon: String, -} -impl StaticFiles { - pub fn local(id: &PackageId, version: 
&Version, icon_type: &str) -> Self { - StaticFiles { - license: format!("/public/package-data/{}/{}/LICENSE.md", id, version), - instructions: format!("/public/package-data/{}/{}/INSTRUCTIONS.md", id, version), - icon: format!("/public/package-data/{}/{}/icon.{}", id, version, icon_type), - } - } -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct PackageDataEntryInstalling { - pub static_files: StaticFiles, - pub manifest: Manifest, - pub install_progress: Arc, -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct PackageDataEntryUpdating { - pub static_files: StaticFiles, - pub manifest: Manifest, - pub installed: InstalledPackageInfo, - pub install_progress: Arc, -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct PackageDataEntryRestoring { - pub static_files: StaticFiles, - pub manifest: Manifest, - pub install_progress: Arc, -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct PackageDataEntryRemoving { - pub static_files: StaticFiles, - pub manifest: Manifest, - pub removing: InstalledPackageInfo, -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct PackageDataEntryInstalled { - pub static_files: StaticFiles, - pub manifest: Manifest, - pub installed: InstalledPackageInfo, -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(tag = "state")] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -// #[macro_debug] -pub enum PackageDataEntry { - Installing(PackageDataEntryInstalling), - Updating(PackageDataEntryUpdating), - Restoring(PackageDataEntryRestoring), - Removing(PackageDataEntryRemoving), - Installed(PackageDataEntryInstalled), -} -impl Model { - pub fn expect_into_installed(self) -> Result, Error> { - if let PackageDataEntryMatchModel::Installed(a) = self.into_match() { - Ok(a) - } else { - Err(Error::new( - eyre!("package is not in installed state"), - ErrorKind::InvalidRequest, - )) - } - } - pub fn expect_as_installed(&self) -> Result<&Model, Error> { - if let PackageDataEntryMatchModelRef::Installed(a) = self.as_match() { - Ok(a) - } else { - Err(Error::new( - eyre!("package is not in installed state"), - ErrorKind::InvalidRequest, - )) - } - } - pub fn expect_as_installed_mut( - &mut self, - ) -> Result<&mut Model, Error> { - if let PackageDataEntryMatchModelMut::Installed(a) = self.as_match_mut() { - Ok(a) - } else { - Err(Error::new( - eyre!("package is not in installed state"), - ErrorKind::InvalidRequest, - )) - } - } - pub fn expect_into_removing(self) -> Result, Error> { - if let PackageDataEntryMatchModel::Removing(a) = self.into_match() { - Ok(a) - } else { - Err(Error::new( - eyre!("package is not in removing state"), - ErrorKind::InvalidRequest, - )) - } - } - pub fn expect_as_removing(&self) -> Result<&Model, Error> { - if let PackageDataEntryMatchModelRef::Removing(a) = self.as_match() { - Ok(a) - } else { - Err(Error::new( - eyre!("package is not in removing state"), - ErrorKind::InvalidRequest, - )) - } - } - pub fn expect_as_removing_mut( - &mut self, - ) -> Result<&mut Model, Error> { - if let PackageDataEntryMatchModelMut::Removing(a) = self.as_match_mut() { - Ok(a) - } else { - Err(Error::new( - eyre!("package is not in removing state"), - ErrorKind::InvalidRequest, - )) - } - 
} - pub fn expect_as_installing_mut( - &mut self, - ) -> Result<&mut Model, Error> { - if let PackageDataEntryMatchModelMut::Installing(a) = self.as_match_mut() { - Ok(a) - } else { - Err(Error::new( - eyre!("package is not in installing state"), - ErrorKind::InvalidRequest, - )) - } - } - pub fn into_manifest(self) -> Model { - match self.into_match() { - PackageDataEntryMatchModel::Installing(a) => a.into_manifest(), - PackageDataEntryMatchModel::Updating(a) => a.into_installed().into_manifest(), - PackageDataEntryMatchModel::Restoring(a) => a.into_manifest(), - PackageDataEntryMatchModel::Removing(a) => a.into_manifest(), - PackageDataEntryMatchModel::Installed(a) => a.into_manifest(), - PackageDataEntryMatchModel::Error(_) => Model::from(Value::Null), - } - } - pub fn as_manifest(&self) -> &Model { - match self.as_match() { - PackageDataEntryMatchModelRef::Installing(a) => a.as_manifest(), - PackageDataEntryMatchModelRef::Updating(a) => a.as_installed().as_manifest(), - PackageDataEntryMatchModelRef::Restoring(a) => a.as_manifest(), - PackageDataEntryMatchModelRef::Removing(a) => a.as_manifest(), - PackageDataEntryMatchModelRef::Installed(a) => a.as_manifest(), - PackageDataEntryMatchModelRef::Error(_) => (&Value::Null).into(), - } - } - pub fn into_installed(self) -> Option> { - match self.into_match() { - PackageDataEntryMatchModel::Installing(_) => None, - PackageDataEntryMatchModel::Updating(a) => Some(a.into_installed()), - PackageDataEntryMatchModel::Restoring(_) => None, - PackageDataEntryMatchModel::Removing(_) => None, - PackageDataEntryMatchModel::Installed(a) => Some(a.into_installed()), - PackageDataEntryMatchModel::Error(_) => None, - } - } - pub fn as_installed(&self) -> Option<&Model> { - match self.as_match() { - PackageDataEntryMatchModelRef::Installing(_) => None, - PackageDataEntryMatchModelRef::Updating(a) => Some(a.as_installed()), - PackageDataEntryMatchModelRef::Restoring(_) => None, - PackageDataEntryMatchModelRef::Removing(_) => None, - PackageDataEntryMatchModelRef::Installed(a) => Some(a.as_installed()), - PackageDataEntryMatchModelRef::Error(_) => None, - } - } - pub fn as_installed_mut(&mut self) -> Option<&mut Model> { - match self.as_match_mut() { - PackageDataEntryMatchModelMut::Installing(_) => None, - PackageDataEntryMatchModelMut::Updating(a) => Some(a.as_installed_mut()), - PackageDataEntryMatchModelMut::Restoring(_) => None, - PackageDataEntryMatchModelMut::Removing(_) => None, - PackageDataEntryMatchModelMut::Installed(a) => Some(a.as_installed_mut()), - PackageDataEntryMatchModelMut::Error(_) => None, - } - } - pub fn as_install_progress(&self) -> Option<&Model>> { - match self.as_match() { - PackageDataEntryMatchModelRef::Installing(a) => Some(a.as_install_progress()), - PackageDataEntryMatchModelRef::Updating(a) => Some(a.as_install_progress()), - PackageDataEntryMatchModelRef::Restoring(a) => Some(a.as_install_progress()), - PackageDataEntryMatchModelRef::Removing(_) => None, - PackageDataEntryMatchModelRef::Installed(_) => None, - PackageDataEntryMatchModelRef::Error(_) => None, - } - } - pub fn as_install_progress_mut(&mut self) -> Option<&mut Model>> { - match self.as_match_mut() { - PackageDataEntryMatchModelMut::Installing(a) => Some(a.as_install_progress_mut()), - PackageDataEntryMatchModelMut::Updating(a) => Some(a.as_install_progress_mut()), - PackageDataEntryMatchModelMut::Restoring(a) => Some(a.as_install_progress_mut()), - PackageDataEntryMatchModelMut::Removing(_) => None, - PackageDataEntryMatchModelMut::Installed(_) => None, - 
PackageDataEntryMatchModelMut::Error(_) => None, - } - } -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct InstalledPackageInfo { - pub status: Status, - pub marketplace_url: Option, - #[serde(default)] - #[serde(with = "crate::util::serde::ed25519_pubkey")] - pub developer_key: ed25519_dalek::VerifyingKey, - pub manifest: Manifest, - pub last_backup: Option>, - pub dependency_info: BTreeMap, - pub current_dependents: CurrentDependents, - pub current_dependencies: CurrentDependencies, - pub interface_addresses: InterfaceAddressMap, -} - -#[derive(Debug, Clone, Default, Deserialize, Serialize)] -pub struct CurrentDependents(pub BTreeMap); -impl CurrentDependents { - pub fn map( - mut self, - transform: impl Fn( - BTreeMap, - ) -> BTreeMap, - ) -> Self { - self.0 = transform(self.0); - self - } -} -impl Map for CurrentDependents { - type Key = PackageId; - type Value = CurrentDependencyInfo; -} - -#[derive(Debug, Clone, Default, Deserialize, Serialize)] -pub struct CurrentDependencies(pub BTreeMap); -impl CurrentDependencies { - pub fn map( - mut self, - transform: impl Fn( - BTreeMap, - ) -> BTreeMap, - ) -> Self { - self.0 = transform(self.0); - self - } -} -impl Map for CurrentDependencies { - type Key = PackageId; - type Value = CurrentDependencyInfo; -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct StaticDependencyInfo { - pub title: String, - pub icon: DataUrl<'static>, -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct CurrentDependencyInfo { - #[serde(default)] - pub pointers: BTreeSet, - pub health_checks: BTreeSet, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct InterfaceAddressMap(pub BTreeMap); -impl Map for InterfaceAddressMap { - type Key = InterfaceId; - type Value = InterfaceAddresses; -} - -#[derive(Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct InterfaceAddresses { - pub tor_address: Option, - pub lan_address: Option, -} diff --git a/core/startos/src/db/model/mod.rs b/core/startos/src/db/model/mod.rs new file mode 100644 index 0000000000..9be0f8b686 --- /dev/null +++ b/core/startos/src/db/model/mod.rs @@ -0,0 +1,49 @@ +use std::collections::BTreeMap; + +use patch_db::HasModel; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::account::AccountInfo; +use crate::auth::Sessions; +use crate::backup::target::cifs::CifsTargets; +use crate::db::model::private::Private; +use crate::db::model::public::Public; +use crate::net::forward::AvailablePorts; +use crate::net::keys::KeyStore; +use crate::notifications::Notifications; +use crate::prelude::*; +use crate::ssh::SshKeys; +use crate::util::serde::Pem; + +pub mod package; +pub mod private; +pub mod public; + +#[derive(Debug, Deserialize, Serialize, HasModel)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +pub struct Database { + pub public: Public, + pub private: Private, +} +impl Database { + pub fn init(account: &AccountInfo) -> Result { + Ok(Self { + public: Public::init(account)?, + private: Private { + key_store: KeyStore::new(account)?, + password: account.password.clone(), + ssh_privkey: Pem(account.ssh_key.clone()), + ssh_pubkeys: SshKeys::new(), + available_ports: AvailablePorts::new(), + sessions: Sessions::new(), + notifications: Notifications::new(), + cifs: CifsTargets::new(), + 
package_stores: BTreeMap::new(), + }, // TODO + }) + } +} + +pub type DatabaseModel = Model; diff --git a/core/startos/src/db/model/package.rs b/core/startos/src/db/model/package.rs new file mode 100644 index 0000000000..83a35d086b --- /dev/null +++ b/core/startos/src/db/model/package.rs @@ -0,0 +1,417 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use chrono::{DateTime, Utc}; +use emver::VersionRange; +use imbl_value::InternedString; +use models::{ActionId, DataUrl, HealthCheckId, HostId, PackageId, ServiceInterfaceId}; +use patch_db::json_ptr::JsonPointer; +use patch_db::HasModel; +use reqwest::Url; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::net::host::HostInfo; +use crate::net::service_interface::ServiceInterfaceWithHostInfo; +use crate::prelude::*; +use crate::progress::FullProgress; +use crate::s9pk::manifest::Manifest; +use crate::status::Status; +use crate::util::serde::Pem; + +#[derive(Debug, Default, Deserialize, Serialize, TS)] +#[ts(export)] +pub struct AllPackageData(pub BTreeMap); +impl Map for AllPackageData { + type Key = PackageId; + type Value = PackageDataEntry; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(key.clone().into()) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum ManifestPreference { + Old, + New, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[serde(tag = "state")] +#[model = "Model"] +#[ts(export)] +pub enum PackageState { + Installing(InstallingState), + Restoring(InstallingState), + Updating(UpdatingState), + Installed(InstalledState), + Removing(InstalledState), +} +impl PackageState { + pub fn expect_installed(&self) -> Result<&InstalledState, Error> { + match self { + Self::Installed(a) => Ok(a), + a => Err(Error::new( + eyre!( + "Package {} is not in installed state", + self.as_manifest(ManifestPreference::Old).id + ), + ErrorKind::InvalidRequest, + )), + } + } + pub fn into_installing_info(self) -> Option { + match self { + Self::Installing(InstallingState { installing_info }) + | Self::Restoring(InstallingState { installing_info }) => Some(installing_info), + Self::Updating(UpdatingState { + installing_info, .. + }) => Some(installing_info), + Self::Installed(_) | Self::Removing(_) => None, + } + } + pub fn as_installing_info(&self) -> Option<&InstallingInfo> { + match self { + Self::Installing(InstallingState { installing_info }) + | Self::Restoring(InstallingState { installing_info }) => Some(installing_info), + Self::Updating(UpdatingState { + installing_info, .. + }) => Some(installing_info), + Self::Installed(_) | Self::Removing(_) => None, + } + } + pub fn as_installing_info_mut(&mut self) -> Option<&mut InstallingInfo> { + match self { + Self::Installing(InstallingState { installing_info }) + | Self::Restoring(InstallingState { installing_info }) => Some(installing_info), + Self::Updating(UpdatingState { + installing_info, .. + }) => Some(installing_info), + Self::Installed(_) | Self::Removing(_) => None, + } + } + pub fn into_manifest(self, preference: ManifestPreference) -> Manifest { + match self { + Self::Installing(InstallingState { + installing_info: InstallingInfo { new_manifest, .. }, + }) + | Self::Restoring(InstallingState { + installing_info: InstallingInfo { new_manifest, .. }, + }) => new_manifest, + Self::Updating(UpdatingState { manifest, .. 
}) + if preference == ManifestPreference::Old => + { + manifest + } + Self::Updating(UpdatingState { + installing_info: InstallingInfo { new_manifest, .. }, + .. + }) => new_manifest, + Self::Installed(InstalledState { manifest }) + | Self::Removing(InstalledState { manifest }) => manifest, + } + } + pub fn as_manifest(&self, preference: ManifestPreference) -> &Manifest { + match self { + Self::Installing(InstallingState { + installing_info: InstallingInfo { new_manifest, .. }, + }) + | Self::Restoring(InstallingState { + installing_info: InstallingInfo { new_manifest, .. }, + }) => new_manifest, + Self::Updating(UpdatingState { manifest, .. }) + if preference == ManifestPreference::Old => + { + manifest + } + Self::Updating(UpdatingState { + installing_info: InstallingInfo { new_manifest, .. }, + .. + }) => new_manifest, + Self::Installed(InstalledState { manifest }) + | Self::Removing(InstalledState { manifest }) => manifest, + } + } + pub fn as_manifest_mut(&mut self, preference: ManifestPreference) -> &mut Manifest { + match self { + Self::Installing(InstallingState { + installing_info: InstallingInfo { new_manifest, .. }, + }) + | Self::Restoring(InstallingState { + installing_info: InstallingInfo { new_manifest, .. }, + }) => new_manifest, + Self::Updating(UpdatingState { manifest, .. }) + if preference == ManifestPreference::Old => + { + manifest + } + Self::Updating(UpdatingState { + installing_info: InstallingInfo { new_manifest, .. }, + .. + }) => new_manifest, + Self::Installed(InstalledState { manifest }) + | Self::Removing(InstalledState { manifest }) => manifest, + } + } +} +impl Model { + pub fn expect_installed(&self) -> Result<&Model, Error> { + match self.as_match() { + PackageStateMatchModelRef::Installed(a) => Ok(a), + a => Err(Error::new( + eyre!( + "Package {} is not in installed state", + self.as_manifest(ManifestPreference::Old).as_id().de()? 
+ ), + ErrorKind::InvalidRequest, + )), + } + } + pub fn into_installing_info(self) -> Option> { + match self.into_match() { + PackageStateMatchModel::Installing(s) | PackageStateMatchModel::Restoring(s) => { + Some(s.into_installing_info()) + } + PackageStateMatchModel::Updating(s) => Some(s.into_installing_info()), + PackageStateMatchModel::Installed(_) | PackageStateMatchModel::Removing(_) => None, + PackageStateMatchModel::Error(_) => None, + } + } + pub fn as_installing_info(&self) -> Option<&Model> { + match self.as_match() { + PackageStateMatchModelRef::Installing(s) | PackageStateMatchModelRef::Restoring(s) => { + Some(s.as_installing_info()) + } + PackageStateMatchModelRef::Updating(s) => Some(s.as_installing_info()), + PackageStateMatchModelRef::Installed(_) | PackageStateMatchModelRef::Removing(_) => { + None + } + PackageStateMatchModelRef::Error(_) => None, + } + } + pub fn as_installing_info_mut(&mut self) -> Option<&mut Model> { + match self.as_match_mut() { + PackageStateMatchModelMut::Installing(s) | PackageStateMatchModelMut::Restoring(s) => { + Some(s.as_installing_info_mut()) + } + PackageStateMatchModelMut::Updating(s) => Some(s.as_installing_info_mut()), + PackageStateMatchModelMut::Installed(_) | PackageStateMatchModelMut::Removing(_) => { + None + } + PackageStateMatchModelMut::Error(_) => None, + } + } + pub fn into_manifest(self, preference: ManifestPreference) -> Model { + match self.into_match() { + PackageStateMatchModel::Installing(s) | PackageStateMatchModel::Restoring(s) => { + s.into_installing_info().into_new_manifest() + } + PackageStateMatchModel::Updating(s) if preference == ManifestPreference::Old => { + s.into_manifest() + } + PackageStateMatchModel::Updating(s) => s.into_installing_info().into_new_manifest(), + PackageStateMatchModel::Installed(s) | PackageStateMatchModel::Removing(s) => { + s.into_manifest() + } + PackageStateMatchModel::Error(_) => Value::Null.into(), + } + } + pub fn as_manifest(&self, preference: ManifestPreference) -> &Model { + match self.as_match() { + PackageStateMatchModelRef::Installing(s) | PackageStateMatchModelRef::Restoring(s) => { + s.as_installing_info().as_new_manifest() + } + PackageStateMatchModelRef::Updating(s) if preference == ManifestPreference::Old => { + s.as_manifest() + } + PackageStateMatchModelRef::Updating(s) => s.as_installing_info().as_new_manifest(), + PackageStateMatchModelRef::Installed(s) | PackageStateMatchModelRef::Removing(s) => { + s.as_manifest() + } + PackageStateMatchModelRef::Error(_) => (&Value::Null).into(), + } + } + pub fn as_manifest_mut( + &mut self, + preference: ManifestPreference, + ) -> Result<&mut Model, Error> { + Ok(match self.as_match_mut() { + PackageStateMatchModelMut::Installing(s) | PackageStateMatchModelMut::Restoring(s) => { + s.as_installing_info_mut().as_new_manifest_mut() + } + PackageStateMatchModelMut::Updating(s) if preference == ManifestPreference::Old => { + s.as_manifest_mut() + } + PackageStateMatchModelMut::Updating(s) => { + s.as_installing_info_mut().as_new_manifest_mut() + } + PackageStateMatchModelMut::Installed(s) | PackageStateMatchModelMut::Removing(s) => { + s.as_manifest_mut() + } + PackageStateMatchModelMut::Error(s) => { + return Err(Error::new( + eyre!("could not determine package state to get manifest"), + ErrorKind::Database, + )) + } + }) + } +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct InstallingState { + pub installing_info: InstallingInfo, +} + 
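// A minimal, self-contained sketch of the old-vs-new manifest selection implemented by the
// ManifestPreference accessors above. The types here (StateStub, ManifestStub, Preference)
// are simplified stand-ins for illustration only, not the real PackageState/Manifest.
#[derive(Debug)]
struct ManifestStub {
    version: &'static str,
}

#[derive(PartialEq)]
enum Preference {
    Old,
    New,
}

enum StateStub {
    Installed { manifest: ManifestStub },
    Updating { manifest: ManifestStub, new_manifest: ManifestStub },
}

impl StateStub {
    fn manifest(&self, preference: Preference) -> &ManifestStub {
        match self {
            StateStub::Installed { manifest } => manifest,
            // During an update, Preference::Old selects the manifest of the currently installed version...
            StateStub::Updating { manifest, .. } if preference == Preference::Old => manifest,
            // ...and Preference::New selects the manifest of the version being installed.
            StateStub::Updating { new_manifest, .. } => new_manifest,
        }
    }
}

fn main() {
    let installed = StateStub::Installed {
        manifest: ManifestStub { version: "0.3.5" },
    };
    let updating = StateStub::Updating {
        manifest: ManifestStub { version: "0.3.5" },
        new_manifest: ManifestStub { version: "0.3.6" },
    };
    println!("{:?}", installed.manifest(Preference::New)); // ManifestStub { version: "0.3.5" }
    println!("{:?}", updating.manifest(Preference::Old)); // ManifestStub { version: "0.3.5" }
    println!("{:?}", updating.manifest(Preference::New)); // ManifestStub { version: "0.3.6" }
}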
+#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct UpdatingState { + pub manifest: Manifest, + pub installing_info: InstallingInfo, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct InstalledState { + pub manifest: Manifest, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct InstallingInfo { + pub new_manifest: Manifest, + pub progress: FullProgress, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub enum AllowedStatuses { + OnlyRunning, // onlyRunning + OnlyStopped, + Any, +} + +#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +pub struct ActionMetadata { + pub name: String, + pub description: String, + pub warning: Option, + #[ts(type = "any")] + pub input: Value, + pub disabled: bool, + pub allowed_statuses: AllowedStatuses, + pub group: Option, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct PackageDataEntry { + pub state_info: PackageState, + pub status: Status, + #[ts(type = "string | null")] + pub marketplace_url: Option, + #[ts(type = "string")] + pub developer_key: Pem, + pub icon: DataUrl<'static>, + #[ts(type = "string | null")] + pub last_backup: Option>, + pub current_dependencies: CurrentDependencies, + pub actions: BTreeMap, + pub service_interfaces: BTreeMap, + pub hosts: HostInfo, + #[ts(type = "string[]")] + pub store_exposed_dependents: Vec, +} +impl AsRef for PackageDataEntry { + fn as_ref(&self) -> &PackageDataEntry { + self + } +} + +#[derive(Debug, Clone, Default, Deserialize, Serialize, TS)] +#[ts(export)] +pub struct CurrentDependencies(pub BTreeMap); +impl CurrentDependencies { + pub fn map( + mut self, + transform: impl Fn( + BTreeMap, + ) -> BTreeMap, + ) -> Self { + self.0 = transform(self.0); + self + } +} +impl Map for CurrentDependencies { + type Key = PackageId; + type Value = CurrentDependencyInfo; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(key.clone().into()) + } +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +pub struct CurrentDependencyInfo { + #[serde(flatten)] + pub kind: CurrentDependencyKind, + pub title: String, + pub icon: DataUrl<'static>, + #[ts(type = "string")] + pub registry_url: Url, + #[ts(type = "string")] + pub version_spec: VersionRange, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[serde(tag = "kind")] +pub enum CurrentDependencyKind { + Exists, + #[serde(rename_all = "camelCase")] + Running { + #[serde(default)] + #[ts(type = "string[]")] + health_checks: BTreeSet, + }, +} + +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct InterfaceAddressMap(pub BTreeMap); +impl Map for InterfaceAddressMap { + type Key = HostId; + type Value = InterfaceAddresses; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(key.clone().into()) + } +} + +#[derive(Debug, Deserialize, Serialize, HasModel)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +pub struct InterfaceAddresses { + pub tor_address: 
Option, + pub lan_address: Option, +} diff --git a/core/startos/src/db/model/private.rs b/core/startos/src/db/model/private.rs new file mode 100644 index 0000000000..2b8c55dbdd --- /dev/null +++ b/core/startos/src/db/model/private.rs @@ -0,0 +1,30 @@ +use std::collections::BTreeMap; + +use models::PackageId; +use patch_db::{HasModel, Value}; +use serde::{Deserialize, Serialize}; + +use crate::auth::Sessions; +use crate::backup::target::cifs::CifsTargets; +use crate::net::forward::AvailablePorts; +use crate::net::keys::KeyStore; +use crate::notifications::Notifications; +use crate::prelude::*; +use crate::ssh::SshKeys; +use crate::util::serde::Pem; + +#[derive(Debug, Deserialize, Serialize, HasModel)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +pub struct Private { + pub key_store: KeyStore, + pub password: String, // argon2 hash + pub ssh_privkey: Pem, + pub ssh_pubkeys: SshKeys, + pub available_ports: AvailablePorts, + pub sessions: Sessions, + pub notifications: Notifications, + pub cifs: CifsTargets, + #[serde(default)] + pub package_stores: BTreeMap, +} diff --git a/core/startos/src/db/model/public.rs b/core/startos/src/db/model/public.rs new file mode 100644 index 0000000000..d1eec5443b --- /dev/null +++ b/core/startos/src/db/model/public.rs @@ -0,0 +1,221 @@ +use std::collections::BTreeMap; +use std::net::{Ipv4Addr, Ipv6Addr}; + +use chrono::{DateTime, Utc}; +use emver::VersionRange; +use imbl_value::InternedString; +use ipnet::{Ipv4Net, Ipv6Net}; +use isocountry::CountryCode; +use itertools::Itertools; +use models::PackageId; +use openssl::hash::MessageDigest; +use patch_db::{HasModel, Value}; +use reqwest::Url; +use serde::{Deserialize, Serialize}; +use torut::onion::OnionAddressV3; +use ts_rs::TS; + +use crate::account::AccountInfo; +use crate::db::model::package::AllPackageData; +use crate::net::utils::{get_iface_ipv4_addr, get_iface_ipv6_addr}; +use crate::prelude::*; +use crate::util::cpupower::Governor; +use crate::util::Version; +use crate::version::{Current, VersionT}; +use crate::{ARCH, PLATFORM}; + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct Public { + pub server_info: ServerInfo, + pub package_data: AllPackageData, + #[ts(type = "any")] + pub ui: Value, +} +impl Public { + pub fn init(account: &AccountInfo) -> Result { + let lan_address = account.hostname.lan_address().parse().unwrap(); + Ok(Self { + server_info: ServerInfo { + arch: get_arch(), + platform: get_platform(), + id: account.server_id.clone(), + version: Current::new().semver().into(), + hostname: account.hostname.no_dot_host_name(), + last_backup: None, + last_wifi_region: None, + eos_version_compat: Current::new().compat().clone(), + lan_address, + onion_address: account.tor_key.public().get_onion_address(), + tor_address: format!("https://{}", account.tor_key.public().get_onion_address()) + .parse() + .unwrap(), + ip_info: BTreeMap::new(), + status_info: ServerStatus { + backup_progress: None, + updated: false, + update_progress: None, + shutting_down: false, + restarting: false, + }, + wifi: WifiInfo { + ssids: Vec::new(), + connected: None, + selected: None, + }, + unread_notification_count: 0, + password_hash: account.password.clone(), + pubkey: ssh_key::PublicKey::from(&account.ssh_key) + .to_openssh() + .unwrap(), + ca_fingerprint: account + .root_ca_cert + .digest(MessageDigest::sha256()) + .unwrap() + .iter() + .map(|x| format!("{x:X}")) + .join(":"), + ntp_synced: false, + zram: true, + 
governor: None, + }, + package_data: AllPackageData::default(), + ui: serde_json::from_str(include_str!(concat!( + env!("CARGO_MANIFEST_DIR"), + "/../../web/patchdb-ui-seed.json" + ))) + .with_kind(ErrorKind::Deserialization)?, + }) + } +} + +fn get_arch() -> InternedString { + (*ARCH).into() +} + +fn get_platform() -> InternedString { + (&*PLATFORM).into() +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct ServerInfo { + #[serde(default = "get_arch")] + #[ts(type = "string")] + pub arch: InternedString, + #[serde(default = "get_platform")] + #[ts(type = "string")] + pub platform: InternedString, + pub id: String, + pub hostname: String, + #[ts(type = "string")] + pub version: Version, + #[ts(type = "string | null")] + pub last_backup: Option>, + /// Used in the wifi to determine the region to set the system to + #[ts(type = "string | null")] + pub last_wifi_region: Option, + #[ts(type = "string")] + pub eos_version_compat: VersionRange, + #[ts(type = "string")] + pub lan_address: Url, + #[ts(type = "string")] + pub onion_address: OnionAddressV3, + /// for backwards compatibility + #[ts(type = "string")] + pub tor_address: Url, + pub ip_info: BTreeMap, + #[serde(default)] + pub status_info: ServerStatus, + pub wifi: WifiInfo, + #[ts(type = "number")] + pub unread_notification_count: u64, + pub password_hash: String, + pub pubkey: String, + pub ca_fingerprint: String, + #[serde(default)] + pub ntp_synced: bool, + #[serde(default)] + pub zram: bool, + pub governor: Option, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct IpInfo { + #[ts(type = "string | null")] + pub ipv4_range: Option, + pub ipv4: Option, + #[ts(type = "string | null")] + pub ipv6_range: Option, + pub ipv6: Option, +} +impl IpInfo { + pub async fn for_interface(iface: &str) -> Result { + let (ipv4, ipv4_range) = get_iface_ipv4_addr(iface).await?.unzip(); + let (ipv6, ipv6_range) = get_iface_ipv6_addr(iface).await?.unzip(); + Ok(Self { + ipv4_range, + ipv4, + ipv6_range, + ipv6, + }) + } +} + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[model = "Model"] +#[ts(export)] +pub struct BackupProgress { + pub complete: bool, +} + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct ServerStatus { + pub backup_progress: Option>, + pub updated: bool, + pub update_progress: Option, + #[serde(default)] + pub shutting_down: bool, + #[serde(default)] + pub restarting: bool, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct UpdateProgress { + #[ts(type = "number | null")] + pub size: Option, + #[ts(type = "number")] + pub downloaded: u64, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct WifiInfo { + pub ssids: Vec, + pub selected: Option, + pub connected: Option, +} + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct ServerSpecs { + pub cpu: String, + pub disk: String, + pub memory: String, +} diff --git a/core/startos/src/db/package.rs b/core/startos/src/db/package.rs deleted file mode 100644 index fe6f938092..0000000000 --- a/core/startos/src/db/package.rs +++ /dev/null @@ -1,22 +0,0 @@ -use 
models::Version; - -use crate::prelude::*; -use crate::s9pk::manifest::PackageId; - -pub fn get_packages(db: Peeked) -> Result, Error> { - Ok(db - .as_package_data() - .keys()? - .into_iter() - .flat_map(|package_id| { - let version = db - .as_package_data() - .as_idx(&package_id)? - .as_manifest() - .as_version() - .de() - .ok()?; - Some((package_id, version)) - }) - .collect()) -} diff --git a/core/startos/src/db/prelude.rs b/core/startos/src/db/prelude.rs index 922a475006..43dd590029 100644 --- a/core/startos/src/db/prelude.rs +++ b/core/startos/src/db/prelude.rs @@ -1,11 +1,16 @@ use std::collections::BTreeMap; +use std::future::Future; use std::marker::PhantomData; use std::panic::UnwindSafe; +use std::str::FromStr; +use chrono::{DateTime, Utc}; +pub use imbl_value::Value; +use patch_db::json_ptr::ROOT; use patch_db::value::InternedString; -pub use patch_db::{HasModel, PatchDb, Value}; +pub use patch_db::{HasModel, PatchDb}; use serde::de::DeserializeOwned; -use serde::Serialize; +use serde::{Deserialize, Serialize}; use crate::db::model::DatabaseModel; use crate::prelude::*; @@ -26,22 +31,20 @@ where patch_db::value::from_value(value).with_kind(ErrorKind::Deserialization) } -#[async_trait::async_trait] pub trait PatchDbExt { - async fn peek(&self) -> DatabaseModel; - async fn mutate( + fn peek(&self) -> impl Future + Send; + fn mutate( &self, f: impl FnOnce(&mut DatabaseModel) -> Result + UnwindSafe + Send, - ) -> Result; - async fn map_mutate( + ) -> impl Future> + Send; + fn map_mutate( &self, f: impl FnOnce(DatabaseModel) -> Result + UnwindSafe + Send, - ) -> Result; + ) -> impl Future> + Send; } -#[async_trait::async_trait] impl PatchDbExt for PatchDb { async fn peek(&self) -> DatabaseModel { - DatabaseModel::from(self.dump().await.value) + DatabaseModel::from(self.dump(&ROOT).await.value) } async fn mutate( &self, @@ -90,12 +93,43 @@ impl Model { } } +impl Serialize for Model { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + self.value.serialize(serializer) + } +} + +impl<'de, T: Serialize + Deserialize<'de>> Deserialize<'de> for Model { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error; + Self::new(&T::deserialize(deserializer)?).map_err(D::Error::custom) + } +} + impl Model { pub fn replace(&mut self, value: &T) -> Result { let orig = self.de()?; self.ser(value)?; Ok(orig) } + pub fn mutate(&mut self, f: impl FnOnce(&mut T) -> Result) -> Result { + let mut orig = self.de()?; + let res = f(&mut orig)?; + self.ser(&orig)?; + Ok(res) + } + pub fn map_mutate(&mut self, f: impl FnOnce(T) -> Result) -> Result { + let mut orig = self.de()?; + let res = f(orig)?; + self.ser(&res)?; + Ok(res) + } } impl Clone for Model { fn clone(&self) -> Self { @@ -179,20 +213,38 @@ impl Model> { pub trait Map: DeserializeOwned + Serialize { type Key; type Value; + fn key_str(key: &Self::Key) -> Result, Error>; + fn key_string(key: &Self::Key) -> Result { + Ok(InternedString::intern(Self::key_str(key)?.as_ref())) + } } impl Map for BTreeMap +where + A: serde::Serialize + serde::de::DeserializeOwned + Ord + AsRef, + B: serde::Serialize + serde::de::DeserializeOwned, +{ + type Key = A; + type Value = B; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key.as_ref()) + } +} + +impl Map for BTreeMap, B> where A: serde::Serialize + serde::de::DeserializeOwned + Ord, B: serde::Serialize + serde::de::DeserializeOwned, { type Key = A; type Value = B; + fn key_str(key: &Self::Key) -> Result, Error> { + 
serde_json::to_string(key).with_kind(ErrorKind::Serialization) + } } impl Model where - T::Key: AsRef, T::Value: Serialize, { pub fn insert(&mut self, key: &T::Key, value: &T::Value) -> Result<(), Error> { @@ -200,7 +252,7 @@ where let v = patch_db::value::to_value(value)?; match &mut self.value { Value::Object(o) => { - o.insert(InternedString::intern(key.as_ref()), v); + o.insert(T::key_string(key)?, v); Ok(()) } v => Err(patch_db::value::Error { @@ -210,13 +262,40 @@ where .into()), } } + pub fn upsert(&mut self, key: &T::Key, value: F) -> Result<&mut Model, Error> + where + F: FnOnce() -> D, + D: AsRef, + { + use serde::ser::Error; + match &mut self.value { + Value::Object(o) => { + use patch_db::ModelExt; + let s = T::key_str(key)?; + let exists = o.contains_key(s.as_ref()); + let res = self.transmute_mut(|v| { + use patch_db::value::index::Index; + s.as_ref().index_or_insert(v) + }); + if !exists { + res.ser(value().as_ref())?; + } + Ok(res) + } + v => Err(patch_db::value::Error { + source: patch_db::value::ErrorSource::custom(format!("expected object found {v}")), + kind: patch_db::value::ErrorKind::Serialization, + } + .into()), + } + } pub fn insert_model(&mut self, key: &T::Key, value: Model) -> Result<(), Error> { use patch_db::ModelExt; use serde::ser::Error; let v = value.into_value(); match &mut self.value { Value::Object(o) => { - o.insert(InternedString::intern(key.as_ref()), v); + o.insert(T::key_string(key)?, v); Ok(()) } v => Err(patch_db::value::Error { @@ -230,25 +309,16 @@ where impl Model where - T::Key: DeserializeOwned + Ord + Clone, + T::Key: FromStr + Ord + Clone, + Error: From<::Err>, { pub fn keys(&self) -> Result, Error> { use serde::de::Error; - use serde::Deserialize; match &self.value { Value::Object(o) => o .keys() .cloned() - .map(|k| { - T::Key::deserialize(patch_db::value::de::InternedStringDeserializer::from(k)) - .map_err(|e| { - patch_db::value::Error { - kind: patch_db::value::ErrorKind::Deserialization, - source: e, - } - .into() - }) - }) + .map(|k| Ok(T::Key::from_str(&*k)?)) .collect(), v => Err(patch_db::value::Error { source: patch_db::value::ErrorSource::custom(format!("expected object found {v}")), @@ -261,19 +331,10 @@ where pub fn into_entries(self) -> Result)>, Error> { use patch_db::ModelExt; use serde::de::Error; - use serde::Deserialize; match self.value { Value::Object(o) => o .into_iter() - .map(|(k, v)| { - Ok(( - T::Key::deserialize(patch_db::value::de::InternedStringDeserializer::from( - k, - )) - .with_kind(ErrorKind::Deserialization)?, - Model::from_value(v), - )) - }) + .map(|(k, v)| Ok((T::Key::from_str(&*k)?, Model::from_value(v)))) .collect(), v => Err(patch_db::value::Error { source: patch_db::value::ErrorSource::custom(format!("expected object found {v}")), @@ -285,19 +346,10 @@ where pub fn as_entries(&self) -> Result)>, Error> { use patch_db::ModelExt; use serde::de::Error; - use serde::Deserialize; match &self.value { Value::Object(o) => o .iter() - .map(|(k, v)| { - Ok(( - T::Key::deserialize(patch_db::value::de::InternedStringDeserializer::from( - k.clone(), - )) - .with_kind(ErrorKind::Deserialization)?, - Model::value_as(v), - )) - }) + .map(|(k, v)| Ok((T::Key::from_str(&**k)?, Model::value_as(v)))) .collect(), v => Err(patch_db::value::Error { source: patch_db::value::ErrorSource::custom(format!("expected object found {v}")), @@ -309,19 +361,10 @@ where pub fn as_entries_mut(&mut self) -> Result)>, Error> { use patch_db::ModelExt; use serde::de::Error; - use serde::Deserialize; match &mut self.value { 
Value::Object(o) => o .iter_mut() - .map(|(k, v)| { - Ok(( - T::Key::deserialize(patch_db::value::de::InternedStringDeserializer::from( - k.clone(), - )) - .with_kind(ErrorKind::Deserialization)?, - Model::value_as_mut(v), - )) - }) + .map(|(k, v)| Ok((T::Key::from_str(&**k)?, Model::value_as_mut(v)))) .collect(), v => Err(patch_db::value::Error { source: patch_db::value::ErrorSource::custom(format!("expected object found {v}")), @@ -331,36 +374,36 @@ where } } } -impl Model -where - T::Key: AsRef, -{ +impl Model { pub fn into_idx(self, key: &T::Key) -> Option> { use patch_db::ModelExt; + let s = T::key_str(key).ok()?; match &self.value { - Value::Object(o) if o.contains_key(key.as_ref()) => Some(self.transmute(|v| { + Value::Object(o) if o.contains_key(s.as_ref()) => Some(self.transmute(|v| { use patch_db::value::index::Index; - key.as_ref().index_into_owned(v).unwrap() + s.as_ref().index_into_owned(v).unwrap() })), _ => None, } } pub fn as_idx<'a>(&'a self, key: &T::Key) -> Option<&'a Model> { use patch_db::ModelExt; + let s = T::key_str(key).ok()?; match &self.value { - Value::Object(o) if o.contains_key(key.as_ref()) => Some(self.transmute_ref(|v| { + Value::Object(o) if o.contains_key(s.as_ref()) => Some(self.transmute_ref(|v| { use patch_db::value::index::Index; - key.as_ref().index_into(v).unwrap() + s.as_ref().index_into(v).unwrap() })), _ => None, } } pub fn as_idx_mut<'a>(&'a mut self, key: &T::Key) -> Option<&'a mut Model> { use patch_db::ModelExt; + let s = T::key_str(key).ok()?; match &mut self.value { - Value::Object(o) if o.contains_key(key.as_ref()) => Some(self.transmute_mut(|v| { + Value::Object(o) if o.contains_key(s.as_ref()) => Some(self.transmute_mut(|v| { use patch_db::value::index::Index; - key.as_ref().index_or_insert(v) + s.as_ref().index_or_insert(v) })), _ => None, } @@ -369,7 +412,7 @@ where use serde::ser::Error; match &mut self.value { Value::Object(o) => { - let v = o.remove(key.as_ref()); + let v = o.remove(T::key_str(key)?.as_ref()); Ok(v.map(patch_db::ModelExt::from_value)) } v => Err(patch_db::value::Error { @@ -380,3 +423,90 @@ where } } } + +#[repr(transparent)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub struct JsonKey(pub T); +impl From for JsonKey { + fn from(value: T) -> Self { + Self::new(value) + } +} +impl JsonKey { + pub fn new(value: T) -> Self { + Self(value) + } + pub fn unwrap(self) -> T { + self.0 + } + pub fn new_ref(value: &T) -> &Self { + unsafe { std::mem::transmute(value) } + } + pub fn new_mut(value: &mut T) -> &mut Self { + unsafe { std::mem::transmute(value) } + } +} +impl std::ops::Deref for JsonKey { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.0 + } +} +impl std::ops::DerefMut for JsonKey { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} +impl Serialize for JsonKey { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + use serde::ser::Error; + serde_json::to_string(&self.0) + .map_err(S::Error::custom)? 
+ .serialize(serializer) + } +} +// { "foo": "bar" } -> "{ \"foo\": \"bar\" }" +impl<'de, T: Serialize + DeserializeOwned> Deserialize<'de> for JsonKey { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error; + let string = String::deserialize(deserializer)?; + Ok(Self( + serde_json::from_str(&string).map_err(D::Error::custom)?, + )) + } +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct WithTimeData { + pub created_at: DateTime, + pub updated_at: DateTime, + pub value: T, +} +impl WithTimeData { + pub fn new(value: T) -> Self { + let now = Utc::now(); + Self { + created_at: now, + updated_at: now, + value, + } + } +} +impl std::ops::Deref for WithTimeData { + type Target = T; + fn deref(&self) -> &Self::Target { + &self.value + } +} +impl std::ops::DerefMut for WithTimeData { + fn deref_mut(&mut self) -> &mut Self::Target { + self.updated_at = Utc::now(); + &mut self.value + } +} diff --git a/core/startos/src/dependencies.rs b/core/startos/src/dependencies.rs index dfddecd937..d973688e94 100644 --- a/core/startos/src/dependencies.rs +++ b/core/startos/src/dependencies.rs @@ -1,362 +1,195 @@ use std::collections::BTreeMap; use std::time::Duration; -use color_eyre::eyre::eyre; -use emver::VersionRange; -use models::OptionExt; -use rand::SeedableRng; -use rpc_toolkit::command; +use clap::Parser; +use models::PackageId; +use rpc_toolkit::{command, from_fn_async, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tracing::instrument; +use ts_rs::TS; -use crate::config::action::ConfigRes; -use crate::config::spec::PackagePointerSpec; -use crate::config::{not_found, Config, ConfigSpec, ConfigureContext}; +use crate::config::{Config, ConfigSpec, ConfigureContext}; use crate::context::RpcContext; -use crate::db::model::{CurrentDependencies, Database}; +use crate::db::model::package::CurrentDependencies; use crate::prelude::*; -use crate::procedure::{NoOutput, PackageProcedure, ProcedureName}; -use crate::s9pk::manifest::{Manifest, PackageId}; use crate::status::DependencyConfigErrors; -use crate::util::serde::display_serializable; -use crate::util::{display_none, Version}; -use crate::volume::Volumes; use crate::Error; -#[command(subcommands(configure))] -pub fn dependency() -> Result<(), Error> { - Ok(()) +pub fn dependency() -> ParentHandler { + ParentHandler::new().subcommand("configure", configure()) } -#[derive(Clone, Debug, Default, Deserialize, Serialize, HasModel)] +#[derive(Clone, Debug, Default, Deserialize, Serialize, HasModel, TS)] #[model = "Model"] +#[ts(export)] pub struct Dependencies(pub BTreeMap); impl Map for Dependencies { type Key = PackageId; type Value = DepInfo; -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -#[serde(tag = "type")] -pub enum DependencyRequirement { - OptIn { how: String }, - OptOut { how: String }, - Required, -} -impl DependencyRequirement { - pub fn required(&self) -> bool { - matches!(self, &DependencyRequirement::Required) + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(key.clone().into()) } } -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] +#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] #[model = "Model"] +#[ts(export)] pub struct DepInfo { - pub version: VersionRange, - pub requirement: DependencyRequirement, pub description: Option, - 
#[serde(default)] - pub config: Option, + pub optional: bool, } -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct DependencyConfig { - check: PackageProcedure, - auto_configure: PackageProcedure, -} -impl DependencyConfig { - pub async fn check( - &self, - ctx: &RpcContext, - dependent_id: &PackageId, - dependent_version: &Version, - dependent_volumes: &Volumes, - dependency_id: &PackageId, - dependency_config: &Config, - ) -> Result, Error> { - Ok(self - .check - .sandboxed( - ctx, - dependent_id, - dependent_version, - dependent_volumes, - Some(dependency_config), - None, - ProcedureName::Check(dependency_id.clone()), - ) - .await? - .map_err(|(_, e)| e)) - } - pub async fn auto_configure( - &self, - ctx: &RpcContext, - dependent_id: &PackageId, - dependent_version: &Version, - dependent_volumes: &Volumes, - old: &Config, - ) -> Result { - self.auto_configure - .sandboxed( - ctx, - dependent_id, - dependent_version, - dependent_volumes, - Some(old), - None, - ProcedureName::AutoConfig(dependent_id.clone()), - ) - .await? - .map_err(|e| Error::new(eyre!("{}", e.1), crate::ErrorKind::AutoConfigure)) - } +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ConfigureParams { + dependent_id: PackageId, + dependency_id: PackageId, } - -#[command( - subcommands(self(configure_impl(async)), configure_dry), - display(display_none) -)] -pub async fn configure( - #[arg(rename = "dependent-id")] dependent_id: PackageId, - #[arg(rename = "dependency-id")] dependency_id: PackageId, -) -> Result<(PackageId, PackageId), Error> { - Ok((dependent_id, dependency_id)) +pub fn configure() -> ParentHandler { + ParentHandler::new().root_handler( + from_fn_async(configure_impl) + .with_inherited(|params, _| params) + .no_cli(), + ) } pub async fn configure_impl( ctx: RpcContext, - (pkg_id, dep_id): (PackageId, PackageId), + _: Empty, + ConfigureParams { + dependent_id, + dependency_id, + }: ConfigureParams, ) -> Result<(), Error> { - let breakages = BTreeMap::new(); - let overrides = Default::default(); let ConfigDryRes { old_config: _, new_config, spec: _, - } = configure_logic(ctx.clone(), (pkg_id, dep_id.clone())).await?; + } = configure_logic(ctx.clone(), (dependent_id, dependency_id.clone())).await?; let configure_context = ConfigureContext { - breakages, timeout: Some(Duration::from_secs(3).into()), config: Some(new_config), - dry_run: false, - overrides, }; - crate::config::configure(&ctx, &dep_id, configure_context).await?; + ctx.services + .get(&dependency_id) + .await + .as_ref() + .ok_or_else(|| { + Error::new( + eyre!("There is no manager running for {dependency_id}"), + ErrorKind::Unknown, + ) + })? 
+ .configure(configure_context) + .await?; Ok(()) } #[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct ConfigDryRes { pub old_config: Config, pub new_config: Config, pub spec: ConfigSpec, } -#[command(rename = "dry", display(display_serializable))] -#[instrument(skip_all)] -pub async fn configure_dry( - #[context] ctx: RpcContext, - #[parent_data] (pkg_id, dependency_id): (PackageId, PackageId), -) -> Result { - configure_logic(ctx, (pkg_id, dependency_id)).await -} - pub async fn configure_logic( ctx: RpcContext, - (pkg_id, dependency_id): (PackageId, PackageId), + (dependent_id, dependency_id): (PackageId, PackageId), ) -> Result { - let db = ctx.db.peek().await; - let pkg = db - .as_package_data() - .as_idx(&pkg_id) - .or_not_found(&pkg_id)? - .as_installed() - .or_not_found(&pkg_id)?; - let pkg_version = pkg.as_manifest().as_version().de()?; - let pkg_volumes = pkg.as_manifest().as_volumes().de()?; - let dependency = db - .as_package_data() - .as_idx(&dependency_id) - .or_not_found(&dependency_id)? - .as_installed() - .or_not_found(&dependency_id)?; - let dependency_config_action = dependency - .as_manifest() - .as_config() - .de()? - .ok_or_else(|| not_found!("Manifest Config"))?; - let dependency_version = dependency.as_manifest().as_version().de()?; - let dependency_volumes = dependency.as_manifest().as_volumes().de()?; - let dependency = pkg - .as_manifest() - .as_dependencies() - .as_idx(&dependency_id) - .or_not_found(&dependency_id)?; - - let ConfigRes { - config: maybe_config, - spec, - } = dependency_config_action - .get( - &ctx, - &dependency_id, - &dependency_version, - &dependency_volumes, - ) - .await?; - - let old_config = if let Some(config) = maybe_config { - config - } else { - spec.gen( - &mut rand::rngs::StdRng::from_entropy(), - &Some(Duration::new(10, 0)), - )? - }; - - let new_config = dependency - .as_config() - .de()? - .ok_or_else(|| not_found!("Config"))? - .auto_configure - .sandboxed( - &ctx, - &pkg_id, - &pkg_version, - &pkg_volumes, - Some(&old_config), - None, - ProcedureName::AutoConfig(dependency_id.clone()), - ) - .await? - .map_err(|e| Error::new(eyre!("{}", e.1), crate::ErrorKind::AutoConfigure))?; - - Ok(ConfigDryRes { - old_config, - new_config, - spec, - }) -} - -#[instrument(skip_all)] -pub fn add_dependent_to_current_dependents_lists( - db: &mut Model, - dependent_id: &PackageId, - current_dependencies: &CurrentDependencies, -) -> Result<(), Error> { - for (dependency, dep_info) in ¤t_dependencies.0 { - if let Some(dependency_dependents) = db - .as_package_data_mut() - .as_idx_mut(dependency) - .and_then(|pde| pde.as_installed_mut()) - .map(|i| i.as_current_dependents_mut()) - { - dependency_dependents.insert(dependent_id, dep_info)?; - } - } - Ok(()) -} - -pub fn set_dependents_with_live_pointers_to_needs_config( - db: &mut Peeked, - id: &PackageId, -) -> Result, Error> { - let mut res = Vec::new(); - for (dep, info) in db - .as_package_data() - .as_idx(id) - .or_not_found(id)? - .as_installed() - .or_not_found(id)? - .as_current_dependents() - .de()? 
- .0 - { - if info.pointers.iter().any(|ptr| match ptr { - // dependency id matches the package being uninstalled - PackagePointerSpec::TorAddress(ptr) => &ptr.package_id == id && &dep != id, - PackagePointerSpec::LanAddress(ptr) => &ptr.package_id == id && &dep != id, - // we never need to retarget these - PackagePointerSpec::TorKey(_) => false, - PackagePointerSpec::Config(_) => false, - }) { - let installed = db - .as_package_data_mut() - .as_idx_mut(&dep) - .or_not_found(&dep)? - .as_installed_mut() - .or_not_found(&dep)?; - let version = installed.as_manifest().as_version().de()?; - let configured = installed.as_status_mut().as_configured_mut(); - if configured.de()? { - configured.ser(&false)?; - res.push((dep, version)); - } - } - } - Ok(res) + // let db = ctx.db.peek().await; + // let pkg = db + // .as_package_data() + // .as_idx(&pkg_id) + // .or_not_found(&pkg_id)? + // .as_installed() + // .or_not_found(&pkg_id)?; + // let pkg_version = pkg.as_manifest().as_version().de()?; + // let pkg_volumes = pkg.as_manifest().as_volumes().de()?; + // let dependency = db + // .as_package_data() + // .as_idx(&dependency_id) + // .or_not_found(&dependency_id)? + // .as_installed() + // .or_not_found(&dependency_id)?; + // let dependency_config_action = dependency + // .as_manifest() + // .as_config() + // .de()? + // .ok_or_else(|| not_found!("Manifest Config"))?; + // let dependency_version = dependency.as_manifest().as_version().de()?; + // let dependency_volumes = dependency.as_manifest().as_volumes().de()?; + // let dependency = pkg + // .as_manifest() + // .as_dependencies() + // .as_idx(&dependency_id) + // .or_not_found(&dependency_id)?; + + // let ConfigRes { + // config: maybe_config, + // spec, + // } = dependency_config_action + // .get( + // &ctx, + // &dependency_id, + // &dependency_version, + // &dependency_volumes, + // ) + // .await?; + + // let old_config = if let Some(config) = maybe_config { + // config + // } else { + // spec.gen( + // &mut rand::rngs::StdRng::from_entropy(), + // &Some(Duration::new(10, 0)), + // )? + // }; + + // let new_config = dependency + // .as_config() + // .de()? + // .ok_or_else(|| not_found!("Config"))? + // .auto_configure + // .sandboxed( + // &ctx, + // &pkg_id, + // &pkg_version, + // &pkg_volumes, + // Some(&old_config), + // None, + // ProcedureName::AutoConfig(dependency_id.clone()), + // ) + // .await? + // .map_err(|e| Error::new(eyre!("{}", e.1), crate::ErrorKind::AutoConfigure))?; + + // Ok(ConfigDryRes { + // old_config, + // new_config, + // spec, + // }) + todo!() } #[instrument(skip_all)] pub async fn compute_dependency_config_errs( ctx: &RpcContext, db: &Peeked, - manifest: &Manifest, + id: &PackageId, current_dependencies: &CurrentDependencies, dependency_config: &BTreeMap, ) -> Result { let mut dependency_config_errs = BTreeMap::new(); - for (dependency, _dep_info) in current_dependencies - .0 - .iter() - .filter(|(dep_id, _)| dep_id != &&manifest.id) - { + for (dependency, _dep_info) in current_dependencies.0.iter() { // check if config passes dependency check - if let Some(cfg) = &manifest - .dependencies - .0 - .get(dependency) - .or_not_found(dependency)? - .config - { - if let Err(error) = cfg - .check( - ctx, - &manifest.id, - &manifest.version, - &manifest.volumes, - dependency, - &if let Some(config) = dependency_config.get(dependency) { - config.clone() - } else if let Some(manifest) = db - .as_package_data() - .as_idx(dependency) - .and_then(|pde| pde.as_installed()) - .map(|i| i.as_manifest().de()) - .transpose()? 
- { - if let Some(config) = &manifest.config { - config - .get(ctx, &manifest.id, &manifest.version, &manifest.volumes) - .await? - .config - .unwrap_or_default() - } else { - Config::default() - } - } else { - Config::default() - }, - ) - .await? - { - dependency_config_errs.insert(dependency.clone(), error); - } + if let Some(error) = todo!() { + dependency_config_errs.insert(dependency.clone(), error); } } Ok(DependencyConfigErrors(dependency_config_errs)) diff --git a/core/startos/src/developer/mod.rs b/core/startos/src/developer/mod.rs index 8722a4a110..5969574451 100644 --- a/core/startos/src/developer/mod.rs +++ b/core/startos/src/developer/mod.rs @@ -5,16 +5,13 @@ use std::path::Path; use ed25519::pkcs8::EncodePrivateKey; use ed25519::PublicKeyBytes; use ed25519_dalek::{SigningKey, VerifyingKey}; -use rpc_toolkit::command; use tracing::instrument; -use crate::context::SdkContext; -use crate::util::display_none; +use crate::context::CliContext; use crate::{Error, ResultExt}; -#[command(cli_only, blocking, display(display_none))] #[instrument(skip_all)] -pub fn init(#[context] ctx: SdkContext) -> Result<(), Error> { +pub fn init(ctx: CliContext) -> Result<(), Error> { if !ctx.developer_key_path.exists() { let parent = ctx.developer_key_path.parent().unwrap_or(Path::new("/")); if !parent.exists() { @@ -48,8 +45,3 @@ pub fn init(#[context] ctx: SdkContext) -> Result<(), Error> { } Ok(()) } - -#[command(subcommands(crate::s9pk::verify, crate::config::verify_spec))] -pub fn verify() -> Result<(), Error> { - Ok(()) -} diff --git a/core/startos/src/diagnostic.rs b/core/startos/src/diagnostic.rs index aad95a5e5a..b0820b2936 100644 --- a/core/startos/src/diagnostic.rs +++ b/core/startos/src/diagnostic.rs @@ -1,44 +1,70 @@ use std::path::Path; use std::sync::Arc; -use rpc_toolkit::command; +use clap::Parser; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{command, from_fn, from_fn_async, AnyContext, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; -use crate::context::DiagnosticContext; -use crate::disk::repair; +use crate::context::{CliContext, DiagnosticContext}; use crate::init::SYSTEM_REBUILD_PATH; use crate::logs::{fetch_logs, LogResponse, LogSource}; use crate::shutdown::Shutdown; -use crate::util::display_none; use crate::Error; -#[command(subcommands(error, logs, exit, restart, forget_disk, disk, rebuild))] -pub fn diagnostic() -> Result<(), Error> { - Ok(()) +pub fn diagnostic() -> ParentHandler { + ParentHandler::new() + .subcommand("error", from_fn(error).with_remote_cli::()) + .subcommand("logs", from_fn_async(logs).no_cli()) + .subcommand( + "exit", + from_fn(exit).no_display().with_remote_cli::(), + ) + .subcommand( + "restart", + from_fn(restart) + .no_display() + .with_remote_cli::(), + ) + .subcommand("disk", disk()) + .subcommand( + "rebuild", + from_fn_async(rebuild) + .no_display() + .with_remote_cli::(), + ) } -#[command] -pub fn error(#[context] ctx: DiagnosticContext) -> Result, Error> { +// #[command] +pub fn error(ctx: DiagnosticContext) -> Result, Error> { Ok(ctx.error.clone()) } -#[command(rpc_only)] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct LogsParams { + limit: Option, + cursor: Option, + before: bool, +} pub async fn logs( - #[arg] limit: Option, - #[arg] cursor: Option, - #[arg] before: bool, + _: AnyContext, + LogsParams { + limit, + cursor, + before, + }: LogsParams, ) -> Result { Ok(fetch_logs(LogSource::System, limit, cursor, before).await?) 
} -#[command(display(display_none))] -pub fn exit(#[context] ctx: DiagnosticContext) -> Result<(), Error> { +pub fn exit(ctx: DiagnosticContext) -> Result<(), Error> { ctx.shutdown.send(None).expect("receiver dropped"); Ok(()) } -#[command(display(display_none))] -pub fn restart(#[context] ctx: DiagnosticContext) -> Result<(), Error> { +pub fn restart(ctx: DiagnosticContext) -> Result<(), Error> { ctx.shutdown .send(Some(Shutdown { export_args: ctx @@ -50,20 +76,21 @@ pub fn restart(#[context] ctx: DiagnosticContext) -> Result<(), Error> { .expect("receiver dropped"); Ok(()) } - -#[command(display(display_none))] -pub async fn rebuild(#[context] ctx: DiagnosticContext) -> Result<(), Error> { +pub async fn rebuild(ctx: DiagnosticContext) -> Result<(), Error> { tokio::fs::write(SYSTEM_REBUILD_PATH, b"").await?; restart(ctx) } -#[command(subcommands(forget_disk, repair))] -pub fn disk() -> Result<(), Error> { - Ok(()) +pub fn disk() -> ParentHandler { + ParentHandler::new().subcommand( + "forget", + from_fn_async(forget_disk) + .no_display() + .with_remote_cli::(), + ) } -#[command(rename = "forget", display(display_none))] -pub async fn forget_disk() -> Result<(), Error> { +pub async fn forget_disk(_: AnyContext) -> Result<(), Error> { let disk_guid = Path::new("/media/embassy/config/disk.guid"); if tokio::fs::metadata(disk_guid).await.is_ok() { tokio::fs::remove_file(disk_guid).await?; diff --git a/core/startos/src/disk/main.rs b/core/startos/src/disk/main.rs index 74f6db73ca..a337a44732 100644 --- a/core/startos/src/disk/main.rs +++ b/core/startos/src/disk/main.rs @@ -7,8 +7,8 @@ use tracing::instrument; use super::fsck::{RepairStrategy, RequiresReboot}; use super::util::pvscan; -use crate::disk::mount::filesystem::block_dev::mount; -use crate::disk::mount::filesystem::ReadWrite; +use crate::disk::mount::filesystem::block_dev::BlockDev; +use crate::disk::mount::filesystem::{FileSystem, ReadWrite}; use crate::disk::mount::util::unmount; use crate::util::Invoke; use crate::{Error, ErrorKind, ResultExt}; @@ -142,7 +142,9 @@ pub async fn create_fs>( .arg(&blockdev_path) .invoke(crate::ErrorKind::DiskManagement) .await?; - mount(&blockdev_path, datadir.as_ref().join(name), ReadWrite).await?; + BlockDev::new(&blockdev_path) + .mount(datadir.as_ref().join(name), ReadWrite) + .await?; Ok(()) } @@ -318,7 +320,9 @@ pub async fn mount_fs>( tokio::fs::rename(&tmp_luks_bak, &luks_bak).await?; } - mount(&blockdev_path, datadir.as_ref().join(name), ReadWrite).await?; + BlockDev::new(&blockdev_path) + .mount(datadir.as_ref().join(name), ReadWrite) + .await?; Ok(reboot) } diff --git a/core/startos/src/disk/mod.rs b/core/startos/src/disk/mod.rs index 485d2570e6..f74c944a49 100644 --- a/core/startos/src/disk/mod.rs +++ b/core/startos/src/disk/mod.rs @@ -1,13 +1,11 @@ use std::path::{Path, PathBuf}; -use clap::ArgMatches; -use rpc_toolkit::command; +use rpc_toolkit::{from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; -use crate::context::RpcContext; +use crate::context::{CliContext, RpcContext}; use crate::disk::util::DiskInfo; -use crate::util::display_none; -use crate::util::serde::{display_serializable, IoFormat}; +use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; use crate::Error; pub mod fsck; @@ -19,7 +17,7 @@ pub const BOOT_RW_PATH: &str = "/media/boot-rw"; pub const REPAIR_DISK_PATH: &str = "/media/embassy/config/repair-disk"; #[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] 
+#[serde(rename_all = "camelCase")] pub struct OsPartitionInfo { pub efi: Option, pub bios: Option, @@ -42,16 +40,30 @@ impl OsPartitionInfo { } } -#[command(subcommands(list, repair))] -pub fn disk() -> Result<(), Error> { - Ok(()) +pub fn disk() -> ParentHandler { + ParentHandler::new() + .subcommand( + "list", + from_fn_async(list) + .with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(display_disk_info(handle.params, result)) + }) + .with_remote_cli::(), + ) + .subcommand( + "repair", + from_fn_async(repair) + .no_display() + .with_remote_cli::(), + ) } -fn display_disk_info(info: Vec, matches: &ArgMatches) { +fn display_disk_info(params: WithIoFormat, args: Vec) { use prettytable::*; - if matches.is_present("format") { - return display_serializable(info, matches); + if let Some(format) = params.format { + return display_serializable(format, args); } let mut table = Table::new(); @@ -60,9 +72,9 @@ fn display_disk_info(info: Vec, matches: &ArgMatches) { "LABEL", "CAPACITY", "USED", - "EMBASSY OS VERSION" + "STARTOS VERSION" ]); - for disk in info { + for disk in args { let row = row![ disk.logicalname.display(), "N/A", @@ -89,7 +101,7 @@ fn display_disk_info(info: Vec, matches: &ArgMatches) { } else { "N/A" }, - if let Some(eos) = part.embassy_os.as_ref() { + if let Some(eos) = part.start_os.as_ref() { eos.version.as_str() } else { "N/A" @@ -101,17 +113,11 @@ fn display_disk_info(info: Vec, matches: &ArgMatches) { table.print_tty(false).unwrap(); } -#[command(display(display_disk_info))] -pub async fn list( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg] - format: Option, -) -> Result, Error> { +// #[command(display(display_disk_info))] +pub async fn list(ctx: RpcContext, _: Empty) -> Result, Error> { crate::disk::util::list(&ctx.os_partitions).await } -#[command(display(display_none))] pub async fn repair() -> Result<(), Error> { tokio::fs::write(REPAIR_DISK_PATH, b"").await?; Ok(()) diff --git a/core/startos/src/disk/mount/backup.rs b/core/startos/src/disk/mount/backup.rs index a19056241a..5dbd80db3d 100644 --- a/core/startos/src/disk/mount/backup.rs +++ b/core/startos/src/disk/mount/backup.rs @@ -1,24 +1,24 @@ use std::path::{Path, PathBuf}; +use std::sync::Arc; use color_eyre::eyre::eyre; use helpers::AtomicFile; +use models::PackageId; use tokio::io::AsyncWriteExt; use tracing::instrument; use super::filesystem::ecryptfs::EcryptFS; use super::guard::{GenericMountGuard, TmpMountGuard}; -use super::util::{bind, unmount}; use crate::auth::check_password; use crate::backup::target::BackupInfo; use crate::disk::mount::filesystem::ReadWrite; +use crate::disk::mount::guard::SubPath; use crate::disk::util::EmbassyOsRecoveryInfo; -use crate::middleware::encrypt::{decrypt_slice, encrypt_slice}; -use crate::s9pk::manifest::PackageId; +use crate::util::crypto::{decrypt_slice, encrypt_slice}; use crate::util::serde::IoFormat; -use crate::util::FileLock; -use crate::volume::BACKUP_DIR; use crate::{Error, ErrorKind, ResultExt}; +#[derive(Clone, Debug)] pub struct BackupMountGuard { backup_disk_mount_guard: Option, encrypted_guard: Option, @@ -29,7 +29,7 @@ pub struct BackupMountGuard { impl BackupMountGuard { fn backup_disk_path(&self) -> &Path { if let Some(guard) = &self.backup_disk_mount_guard { - guard.as_ref() + guard.path() } else { unreachable!() } @@ -37,7 +37,7 @@ impl BackupMountGuard { #[instrument(skip_all)] pub async fn mount(backup_disk_mount_guard: G, password: &str) -> Result { - let backup_disk_path = 
backup_disk_mount_guard.as_ref(); + let backup_disk_path = backup_disk_mount_guard.path(); let unencrypted_metadata_path = backup_disk_path.join("EmbassyBackups/unencrypted-metadata.cbor"); let mut unencrypted_metadata: EmbassyOsRecoveryInfo = @@ -108,7 +108,7 @@ impl BackupMountGuard { let encrypted_guard = TmpMountGuard::mount(&EcryptFS::new(&crypt_path, &enc_key), ReadWrite).await?; - let metadata_path = encrypted_guard.as_ref().join("metadata.cbor"); + let metadata_path = encrypted_guard.path().join("metadata.cbor"); let metadata: BackupInfo = if tokio::fs::metadata(&metadata_path).await.is_ok() { IoFormat::Cbor.from_slice(&tokio::fs::read(&metadata_path).await.with_ctx(|_| { ( @@ -146,22 +146,13 @@ impl BackupMountGuard { } #[instrument(skip_all)] - pub async fn mount_package_backup( - &self, - id: &PackageId, - ) -> Result { - let lock = FileLock::new(Path::new(BACKUP_DIR).join(format!("{}.lock", id)), false).await?; - let mountpoint = Path::new(BACKUP_DIR).join(id); - bind(self.as_ref().join(id), &mountpoint, false).await?; - Ok(PackageBackupMountGuard { - mountpoint: Some(mountpoint), - lock: Some(lock), - }) + pub fn package_backup(self: &Arc, id: &PackageId) -> SubPath> { + SubPath::new(self.clone(), id) } #[instrument(skip_all)] pub async fn save(&self) -> Result<(), Error> { - let metadata_path = self.as_ref().join("metadata.cbor"); + let metadata_path = self.path().join("metadata.cbor"); let backup_disk_path = self.backup_disk_path(); let mut file = AtomicFile::new(&metadata_path, None::) .await @@ -180,17 +171,6 @@ impl BackupMountGuard { Ok(()) } - #[instrument(skip_all)] - pub async fn unmount(mut self) -> Result<(), Error> { - if let Some(guard) = self.encrypted_guard.take() { - guard.unmount().await?; - } - if let Some(guard) = self.backup_disk_mount_guard.take() { - guard.unmount().await?; - } - Ok(()) - } - #[instrument(skip_all)] pub async fn save_and_unmount(self) -> Result<(), Error> { self.save().await?; @@ -198,14 +178,24 @@ impl BackupMountGuard { Ok(()) } } -impl AsRef for BackupMountGuard { - fn as_ref(&self) -> &Path { +#[async_trait::async_trait] +impl GenericMountGuard for BackupMountGuard { + fn path(&self) -> &Path { if let Some(guard) = &self.encrypted_guard { - guard.as_ref() + guard.path() } else { unreachable!() } } + async fn unmount(mut self) -> Result<(), Error> { + if let Some(guard) = self.encrypted_guard.take() { + guard.unmount().await?; + } + if let Some(guard) = self.backup_disk_mount_guard.take() { + guard.unmount().await?; + } + Ok(()) + } } impl Drop for BackupMountGuard { fn drop(&mut self) { @@ -221,42 +211,3 @@ impl Drop for BackupMountGuard { }); } } - -pub struct PackageBackupMountGuard { - mountpoint: Option, - lock: Option, -} -impl PackageBackupMountGuard { - pub async fn unmount(mut self) -> Result<(), Error> { - if let Some(mountpoint) = self.mountpoint.take() { - unmount(&mountpoint).await?; - } - if let Some(lock) = self.lock.take() { - lock.unlock().await?; - } - Ok(()) - } -} -impl AsRef for PackageBackupMountGuard { - fn as_ref(&self) -> &Path { - if let Some(mountpoint) = &self.mountpoint { - mountpoint - } else { - unreachable!() - } - } -} -impl Drop for PackageBackupMountGuard { - fn drop(&mut self) { - let mountpoint = self.mountpoint.take(); - let lock = self.lock.take(); - tokio::spawn(async move { - if let Some(mountpoint) = mountpoint { - unmount(&mountpoint).await.unwrap(); - } - if let Some(lock) = lock { - lock.unlock().await.unwrap(); - } - }); - } -} diff --git a/core/startos/src/disk/mount/filesystem/bind.rs 
b/core/startos/src/disk/mount/filesystem/bind.rs index 8799372e5b..196e78a3dd 100644 --- a/core/startos/src/disk/mount/filesystem/bind.rs +++ b/core/startos/src/disk/mount/filesystem/bind.rs @@ -1,14 +1,12 @@ use std::os::unix::ffi::OsStrExt; use std::path::Path; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; use sha2::Sha256; -use super::{FileSystem, MountType, ReadOnly}; -use crate::disk::mount::util::bind; -use crate::{Error, ResultExt}; +use super::FileSystem; +use crate::prelude::*; pub struct Bind> { src_dir: SrcDir, @@ -18,19 +16,16 @@ impl> Bind { Self { src_dir } } } -#[async_trait] impl + Send + Sync> FileSystem for Bind { - async fn mount + Send + Sync>( - &self, - mountpoint: P, - mount_type: MountType, - ) -> Result<(), Error> { - bind( - self.src_dir.as_ref(), - mountpoint, - matches!(mount_type, ReadOnly), - ) - .await + async fn source(&self) -> Result>, Error> { + Ok(Some(&self.src_dir)) + } + fn extra_args(&self) -> impl IntoIterator> { + ["--bind"] + } + async fn pre_mount(&self) -> Result<(), Error> { + tokio::fs::create_dir_all(self.src_dir.as_ref()).await?; + Ok(()) } async fn source_hash( &self, diff --git a/core/startos/src/disk/mount/filesystem/block_dev.rs b/core/startos/src/disk/mount/filesystem/block_dev.rs index e21f0c42d3..96e9d79a7a 100644 --- a/core/startos/src/disk/mount/filesystem/block_dev.rs +++ b/core/startos/src/disk/mount/filesystem/block_dev.rs @@ -1,33 +1,16 @@ use std::os::unix::ffi::OsStrExt; use std::path::Path; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; use serde::{Deserialize, Serialize}; use sha2::Sha256; -use super::{FileSystem, MountType, ReadOnly}; -use crate::util::Invoke; -use crate::{Error, ResultExt}; - -pub async fn mount( - logicalname: impl AsRef, - mountpoint: impl AsRef, - mount_type: MountType, -) -> Result<(), Error> { - tokio::fs::create_dir_all(mountpoint.as_ref()).await?; - let mut cmd = tokio::process::Command::new("mount"); - cmd.arg(logicalname.as_ref()).arg(mountpoint.as_ref()); - if mount_type == ReadOnly { - cmd.arg("-o").arg("ro"); - } - cmd.invoke(crate::ErrorKind::Filesystem).await?; - Ok(()) -} +use super::FileSystem; +use crate::prelude::*; #[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct BlockDev> { logicalname: LogicalName, } @@ -36,14 +19,9 @@ impl> BlockDev { BlockDev { logicalname } } } -#[async_trait] impl + Send + Sync> FileSystem for BlockDev { - async fn mount + Send + Sync>( - &self, - mountpoint: P, - mount_type: MountType, - ) -> Result<(), Error> { - mount(self.logicalname.as_ref(), mountpoint, mount_type).await + async fn source(&self) -> Result>, Error> { + Ok(Some(&self.logicalname)) } async fn source_hash( &self, diff --git a/core/startos/src/disk/mount/filesystem/cifs.rs b/core/startos/src/disk/mount/filesystem/cifs.rs index 91b477fcf4..e125b9a902 100644 --- a/core/startos/src/disk/mount/filesystem/cifs.rs +++ b/core/startos/src/disk/mount/filesystem/cifs.rs @@ -2,7 +2,6 @@ use std::net::IpAddr; use std::os::unix::ffi::OsStrExt; use std::path::{Path, PathBuf}; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; use serde::{Deserialize, Serialize}; @@ -11,7 +10,7 @@ use tokio::process::Command; use tracing::instrument; use super::{FileSystem, MountType, ReadOnly}; -use crate::disk::mount::guard::TmpMountGuard; +use 
crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::util::Invoke; use crate::Error; @@ -64,7 +63,7 @@ pub async fn mount_cifs( } #[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct Cifs { pub hostname: String, pub path: PathBuf, @@ -78,9 +77,8 @@ impl Cifs { Ok(()) } } -#[async_trait] impl FileSystem for Cifs { - async fn mount + Send + Sync>( + async fn mount + Send>( &self, mountpoint: P, mount_type: MountType, diff --git a/core/startos/src/disk/mount/filesystem/ecryptfs.rs b/core/startos/src/disk/mount/filesystem/ecryptfs.rs index 78570f49b6..bf2dfe6c6f 100644 --- a/core/startos/src/disk/mount/filesystem/ecryptfs.rs +++ b/core/startos/src/disk/mount/filesystem/ecryptfs.rs @@ -1,33 +1,17 @@ +use std::fmt::Display; use std::os::unix::ffi::OsStrExt; use std::path::Path; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; +use lazy_format::lazy_format; use sha2::Sha256; +use tokio::process::Command; -use super::{FileSystem, MountType}; +use super::FileSystem; +use crate::disk::mount::filesystem::default_mount_command; +use crate::prelude::*; use crate::util::Invoke; -use crate::{Error, ResultExt}; - -pub async fn mount_ecryptfs, P1: AsRef>( - src: P0, - dst: P1, - key: &str, -) -> Result<(), Error> { - tokio::fs::create_dir_all(dst.as_ref()).await?; - tokio::process::Command::new("mount") - .arg("-t") - .arg("ecryptfs") - .arg(src.as_ref()) - .arg(dst.as_ref()) - .arg("-o") - // for more information `man ecryptfs` - .arg(format!("key=passphrase:passphrase_passwd={},ecryptfs_cipher=aes,ecryptfs_key_bytes=32,ecryptfs_passthrough=n,ecryptfs_enable_filename_crypto=y,no_sig_cache", key)) - .input(Some(&mut std::io::Cursor::new(b"\n"))) - .invoke(crate::ErrorKind::Filesystem).await?; - Ok(()) -} pub struct EcryptFS, Key: AsRef> { encrypted_dir: EncryptedDir, @@ -38,16 +22,45 @@ impl, Key: AsRef> EcryptFS { EcryptFS { encrypted_dir, key } } } -#[async_trait] impl + Send + Sync, Key: AsRef + Send + Sync> FileSystem for EcryptFS { - async fn mount + Send + Sync>( + fn mount_type(&self) -> Option> { + Some("ecryptfs") + } + async fn source(&self) -> Result>, Error> { + Ok(Some(&self.encrypted_dir)) + } + fn mount_options(&self) -> impl IntoIterator { + [ + Box::new(lazy_format!( + "key=passphrase:passphrase_passwd={}", + self.key.as_ref() + )) as Box, + Box::new("ecryptfs_cipher=aes"), + Box::new("ecryptfs_key_bytes=32"), + Box::new("ecryptfs_passthrough=n"), + Box::new("ecryptfs_enable_filename_crypto=y"), + Box::new("no_sig_cache"), + ] + } + async fn mount + Send>( &self, mountpoint: P, - _mount_type: MountType, // ignored - inherited from parent fs + mount_type: super::MountType, ) -> Result<(), Error> { - mount_ecryptfs(self.encrypted_dir.as_ref(), mountpoint, self.key.as_ref()).await + self.pre_mount().await?; + tokio::fs::create_dir_all(mountpoint.as_ref()).await?; + Command::new("mount") + .args( + default_mount_command(self, mountpoint, mount_type) + .await? 
+ .get_args(), + ) + .input(Some(&mut std::io::Cursor::new(b"\n"))) + .invoke(crate::ErrorKind::Filesystem) + .await?; + Ok(()) } async fn source_hash( &self, diff --git a/core/startos/src/disk/mount/filesystem/efivarfs.rs b/core/startos/src/disk/mount/filesystem/efivarfs.rs index ad9d79941c..4961b47163 100644 --- a/core/startos/src/disk/mount/filesystem/efivarfs.rs +++ b/core/startos/src/disk/mount/filesystem/efivarfs.rs @@ -1,33 +1,19 @@ use std::path::Path; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; use sha2::Sha256; -use super::{FileSystem, MountType, ReadOnly}; -use crate::util::Invoke; -use crate::Error; +use super::FileSystem; +use crate::prelude::*; pub struct EfiVarFs; -#[async_trait] impl FileSystem for EfiVarFs { - async fn mount + Send + Sync>( - &self, - mountpoint: P, - mount_type: MountType, - ) -> Result<(), Error> { - tokio::fs::create_dir_all(mountpoint.as_ref()).await?; - let mut cmd = tokio::process::Command::new("mount"); - cmd.arg("-t") - .arg("efivarfs") - .arg("efivarfs") - .arg(mountpoint.as_ref()); - if mount_type == ReadOnly { - cmd.arg("-o").arg("ro"); - } - cmd.invoke(crate::ErrorKind::Filesystem).await?; - Ok(()) + fn mount_type(&self) -> Option> { + Some("efivarfs") + } + async fn source(&self) -> Result>, Error> { + Ok(Some("efivarfs")) } async fn source_hash( &self, diff --git a/core/startos/src/disk/mount/filesystem/httpdirfs.rs b/core/startos/src/disk/mount/filesystem/httpdirfs.rs index fda437ec3f..df94167744 100644 --- a/core/startos/src/disk/mount/filesystem/httpdirfs.rs +++ b/core/startos/src/disk/mount/filesystem/httpdirfs.rs @@ -1,6 +1,5 @@ use std::path::Path; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; use reqwest::Url; @@ -23,7 +22,7 @@ pub async fn mount_httpdirfs(url: &Url, mountpoint: impl AsRef) -> Result< } #[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct HttpDirFS { url: Url, } @@ -32,9 +31,8 @@ impl HttpDirFS { HttpDirFS { url } } } -#[async_trait] impl FileSystem for HttpDirFS { - async fn mount + Send + Sync>( + async fn mount + Send>( &self, mountpoint: P, _mount_type: MountType, diff --git a/core/startos/src/disk/mount/filesystem/idmapped.rs b/core/startos/src/disk/mount/filesystem/idmapped.rs new file mode 100644 index 0000000000..dc6a6e9ab2 --- /dev/null +++ b/core/startos/src/disk/mount/filesystem/idmapped.rs @@ -0,0 +1,88 @@ +use std::ffi::OsStr; +use std::fmt::Display; +use std::path::Path; + +use digest::generic_array::GenericArray; +use digest::{Digest, OutputSizeUser}; +use serde::{Deserialize, Serialize}; +use sha2::Sha256; +use tokio::process::Command; + +use super::{FileSystem, MountType}; +use crate::disk::mount::filesystem::default_mount_command; +use crate::prelude::*; +use crate::util::Invoke; + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct IdMapped { + filesystem: Fs, + from_id: u32, + to_id: u32, + range: u32, +} +impl IdMapped { + pub fn new(filesystem: Fs, from_id: u32, to_id: u32, range: u32) -> Self { + Self { + filesystem, + from_id, + to_id, + range, + } + } +} +impl FileSystem for IdMapped { + fn mount_type(&self) -> Option> { + self.filesystem.mount_type() + } + fn extra_args(&self) -> impl IntoIterator> { + self.filesystem.extra_args() + } + fn mount_options(&self) -> impl IntoIterator { + self.filesystem + .mount_options() + .into_iter() + .map(|a| Box::new(a) 
as Box) + .chain(std::iter::once(Box::new(lazy_format!( + "X-mount.idmap=b:{}:{}:{}", + self.from_id, + self.to_id, + self.range, + )) as Box)) + } + async fn source(&self) -> Result>, Error> { + self.filesystem.source().await + } + async fn pre_mount(&self) -> Result<(), Error> { + self.filesystem.pre_mount().await + } + async fn mount + Send>( + &self, + mountpoint: P, + mount_type: MountType, + ) -> Result<(), Error> { + self.pre_mount().await?; + tokio::fs::create_dir_all(mountpoint.as_ref()).await?; + Command::new("mount.next") + .args( + default_mount_command(self, mountpoint, mount_type) + .await? + .get_args(), + ) + .invoke(ErrorKind::Filesystem) + .await?; + + Ok(()) + } + async fn source_hash( + &self, + ) -> Result::OutputSize>, Error> { + let mut sha = Sha256::new(); + sha.update("IdMapped"); + sha.update(self.filesystem.source_hash().await?); + sha.update(u32::to_be_bytes(self.from_id)); + sha.update(u32::to_be_bytes(self.to_id)); + sha.update(u32::to_be_bytes(self.range)); + Ok(sha.finalize()) + } +} diff --git a/core/startos/src/disk/mount/filesystem/label.rs b/core/startos/src/disk/mount/filesystem/label.rs index b1e4f72134..57312bf133 100644 --- a/core/startos/src/disk/mount/filesystem/label.rs +++ b/core/startos/src/disk/mount/filesystem/label.rs @@ -1,28 +1,11 @@ use std::path::Path; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; use sha2::Sha256; -use super::{FileSystem, MountType, ReadOnly}; -use crate::util::Invoke; -use crate::Error; - -pub async fn mount_label( - label: &str, - mountpoint: impl AsRef, - mount_type: MountType, -) -> Result<(), Error> { - tokio::fs::create_dir_all(mountpoint.as_ref()).await?; - let mut cmd = tokio::process::Command::new("mount"); - cmd.arg("-L").arg(label).arg(mountpoint.as_ref()); - if mount_type == ReadOnly { - cmd.arg("-o").arg("ro"); - } - cmd.invoke(crate::ErrorKind::Filesystem).await?; - Ok(()) -} +use super::FileSystem; +use crate::prelude::*; pub struct Label> { label: S, @@ -32,14 +15,12 @@ impl> Label { Label { label } } } -#[async_trait] impl + Send + Sync> FileSystem for Label { - async fn mount + Send + Sync>( - &self, - mountpoint: P, - mount_type: MountType, - ) -> Result<(), Error> { - mount_label(self.label.as_ref(), mountpoint, mount_type).await + fn extra_args(&self) -> impl IntoIterator> { + ["-L", self.label.as_ref()] + } + async fn source(&self) -> Result>, Error> { + Ok(None::<&Path>) } async fn source_hash( &self, diff --git a/core/startos/src/disk/mount/filesystem/loop_dev.rs b/core/startos/src/disk/mount/filesystem/loop_dev.rs index 28a18597dc..1728ef2c79 100644 --- a/core/startos/src/disk/mount/filesystem/loop_dev.rs +++ b/core/startos/src/disk/mount/filesystem/loop_dev.rs @@ -1,41 +1,18 @@ +use std::fmt::Display; use std::os::unix::ffi::OsStrExt; use std::path::Path; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; +use lazy_format::lazy_format; use serde::{Deserialize, Serialize}; use sha2::Sha256; -use super::{FileSystem, MountType, ReadOnly}; -use crate::util::Invoke; -use crate::{Error, ResultExt}; - -pub async fn mount( - logicalname: impl AsRef, - offset: u64, - size: u64, - mountpoint: impl AsRef, - mount_type: MountType, -) -> Result<(), Error> { - tokio::fs::create_dir_all(mountpoint.as_ref()).await?; - let mut opts = format!("loop,offset={offset},sizelimit={size}"); - if mount_type == ReadOnly { - opts += ",ro"; - } - - tokio::process::Command::new("mount") - 
.arg(logicalname.as_ref()) - .arg(mountpoint.as_ref()) - .arg("-o") - .arg(opts) - .invoke(crate::ErrorKind::Filesystem) - .await?; - Ok(()) -} +use super::FileSystem; +use crate::prelude::*; #[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct LoopDev> { logicalname: LogicalName, offset: u64, @@ -50,21 +27,18 @@ impl> LoopDev { } } } -#[async_trait] impl + Send + Sync> FileSystem for LoopDev { - async fn mount + Send + Sync>( - &self, - mountpoint: P, - mount_type: MountType, - ) -> Result<(), Error> { - mount( - self.logicalname.as_ref(), - self.offset, - self.size, - mountpoint, - mount_type, - ) - .await + async fn source(&self) -> Result>, Error> { + Ok(Some( + tokio::fs::canonicalize(self.logicalname.as_ref()).await?, + )) + } + fn mount_options(&self) -> impl IntoIterator { + [ + Box::new("loop") as Box, + Box::new(lazy_format!("offset={}", self.offset)), + Box::new(lazy_format!("sizelimit={}", self.size)), + ] } async fn source_hash( &self, diff --git a/core/startos/src/disk/mount/filesystem/mod.rs b/core/startos/src/disk/mount/filesystem/mod.rs index 11a6671df4..53157937c9 100644 --- a/core/startos/src/disk/mount/filesystem/mod.rs +++ b/core/startos/src/disk/mount/filesystem/mod.rs @@ -1,11 +1,15 @@ +use std::ffi::OsStr; +use std::fmt::{Display, Write}; use std::path::Path; -use async_trait::async_trait; use digest::generic_array::GenericArray; use digest::OutputSizeUser; +use futures::Future; use sha2::Sha256; +use tokio::process::Command; -use crate::Error; +use crate::prelude::*; +use crate::util::Invoke; pub mod bind; pub mod block_dev; @@ -13,8 +17,10 @@ pub mod cifs; pub mod ecryptfs; pub mod efivarfs; pub mod httpdirfs; +pub mod idmapped; pub mod label; pub mod loop_dev; +pub mod overlayfs; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum MountType { @@ -24,14 +30,78 @@ pub enum MountType { pub use MountType::*; -#[async_trait] -pub trait FileSystem { - async fn mount + Send + Sync>( +pub(self) async fn default_mount_command( + fs: &(impl FileSystem + ?Sized), + mountpoint: impl AsRef + Send, + mount_type: MountType, +) -> Result { + let mut cmd = std::process::Command::new("mount"); + if mount_type == ReadOnly { + cmd.arg("-r"); + } + cmd.args(fs.extra_args()); + if let Some(ty) = fs.mount_type() { + cmd.arg("-t").arg(ty.as_ref()); + } + if let Some(options) = fs + .mount_options() + .into_iter() + .fold(None, |acc: Option, x| match acc { + Some(mut s) => { + write!(s, ",{}", x).unwrap(); + Some(s) + } + None => Some(x.to_string()), + }) + { + cmd.arg("-o").arg(options); + } + if let Some(source) = fs.source().await? { + cmd.arg(source.as_ref()); + } + cmd.arg(mountpoint.as_ref()); + Ok(cmd) +} + +pub(self) async fn default_mount_impl( + fs: &(impl FileSystem + ?Sized), + mountpoint: impl AsRef + Send, + mount_type: MountType, +) -> Result<(), Error> { + fs.pre_mount().await?; + tokio::fs::create_dir_all(mountpoint.as_ref()).await?; + Command::from(default_mount_command(fs, mountpoint, mount_type).await?) 
+ .invoke(ErrorKind::Filesystem) + .await?; + + Ok(()) +} + +pub trait FileSystem: Send + Sync { + fn mount_type(&self) -> Option> { + None::<&str> + } + fn extra_args(&self) -> impl IntoIterator> { + [] as [&str; 0] + } + fn mount_options(&self) -> impl IntoIterator { + [] as [&str; 0] + } + fn source(&self) -> impl Future>, Error>> + Send { + async { Ok(None::<&Path>) } + } + fn pre_mount(&self) -> impl Future> + Send { + async { Ok(()) } + } + fn mount + Send>( &self, mountpoint: P, mount_type: MountType, - ) -> Result<(), Error>; - async fn source_hash( + ) -> impl Future> + Send { + default_mount_impl(self, mountpoint, mount_type) + } + fn source_hash( &self, - ) -> Result::OutputSize>, Error>; + ) -> impl Future::OutputSize>, Error>> + + Send; } diff --git a/core/startos/src/disk/mount/filesystem/overlayfs.rs b/core/startos/src/disk/mount/filesystem/overlayfs.rs new file mode 100644 index 0000000000..ad5eec5018 --- /dev/null +++ b/core/startos/src/disk/mount/filesystem/overlayfs.rs @@ -0,0 +1,153 @@ +use std::fmt::Display; +use std::os::unix::ffi::OsStrExt; +use std::path::Path; + +use digest::generic_array::GenericArray; +use digest::{Digest, OutputSizeUser}; +use sha2::Sha256; + +use crate::disk::mount::filesystem::{FileSystem, ReadOnly, ReadWrite}; +use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard}; +use crate::prelude::*; +use crate::util::io::TmpDir; + +struct OverlayFs, P1: AsRef> { + lower: P0, + upper: P1, +} +impl, P1: AsRef> OverlayFs { + pub fn new(lower: P0, upper: P1) -> Self { + Self { lower, upper } + } +} +impl + Send + Sync, P1: AsRef + Send + Sync> FileSystem + for OverlayFs +{ + fn mount_type(&self) -> Option> { + Some("overlay") + } + async fn source(&self) -> Result>, Error> { + Ok(Some("overlay")) + } + fn mount_options(&self) -> impl IntoIterator { + [ + Box::new(lazy_format!("lowerdir={}", self.lower.as_ref().display())) + as Box, + Box::new(lazy_format!( + "upperdir={}/upper", + self.upper.as_ref().display() + )), + Box::new(lazy_format!( + "workdir={}/work", + self.upper.as_ref().display() + )), + ] + } + async fn pre_mount(&self) -> Result<(), Error> { + tokio::fs::create_dir_all(self.upper.as_ref().join("upper")).await?; + tokio::fs::create_dir_all(self.upper.as_ref().join("work")).await?; + Ok(()) + } + async fn source_hash( + &self, + ) -> Result::OutputSize>, Error> { + let mut sha = Sha256::new(); + sha.update("OverlayFs"); + sha.update( + tokio::fs::canonicalize(self.lower.as_ref()) + .await + .with_ctx(|_| { + ( + crate::ErrorKind::Filesystem, + self.lower.as_ref().display().to_string(), + ) + })? + .as_os_str() + .as_bytes(), + ); + sha.update( + tokio::fs::canonicalize(self.upper.as_ref()) + .await + .with_ctx(|_| { + ( + crate::ErrorKind::Filesystem, + self.upper.as_ref().display().to_string(), + ) + })? 
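// Editor's sketch (not part of the patch): the refactored `FileSystem` trait above
// replaces the old async_trait `mount` with declarative hooks plus a default
// `mount` built on `default_mount_command`/`default_mount_impl`. The angle-bracketed
// generics were partly lost in this rendering of the diff; the signatures below are
// assumptions reconstructed from the visible defaults (`None::<&str>`,
// `Ok(None::<&Path>)`) and from the sibling implementations. `PlainExt4` is a
// hypothetical implementor, not a type introduced by the patch.
use std::os::unix::ffi::OsStrExt;
use std::path::{Path, PathBuf};

use digest::generic_array::GenericArray;
use digest::{Digest, OutputSizeUser};
use sha2::Sha256;

use crate::disk::mount::filesystem::FileSystem;
use crate::prelude::*;

pub struct PlainExt4 {
    logicalname: PathBuf,
}
impl FileSystem for PlainExt4 {
    // Only the declarative pieces are provided; `mount()` falls through to the
    // trait default, which shells out to `mount -t ext4 <source> <mountpoint>`.
    fn mount_type(&self) -> Option<impl AsRef<str>> {
        Some("ext4")
    }
    async fn source(&self) -> Result<Option<impl AsRef<Path>>, Error> {
        Ok(Some(self.logicalname.clone()))
    }
    async fn source_hash(
        &self,
    ) -> Result<GenericArray<u8, <Sha256 as OutputSizeUser>::OutputSize>, Error> {
        let mut sha = Sha256::new();
        sha.update("PlainExt4");
        sha.update(self.logicalname.as_os_str().as_bytes());
        Ok(sha.finalize())
    }
}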
+ .as_os_str() + .as_bytes(), + ); + Ok(sha.finalize()) + } +} + +#[derive(Debug)] +pub struct OverlayGuard { + lower: Option, + upper: Option, + inner_guard: MountGuard, +} +impl OverlayGuard { + pub async fn mount( + base: &impl FileSystem, + mountpoint: impl AsRef, + ) -> Result { + let lower = TmpMountGuard::mount(base, ReadOnly).await?; + let upper = TmpDir::new().await?; + let inner_guard = MountGuard::mount( + &OverlayFs::new(lower.path(), upper.as_ref()), + mountpoint, + ReadWrite, + ) + .await?; + Ok(Self { + lower: Some(lower), + upper: Some(upper), + inner_guard, + }) + } + pub async fn unmount(mut self, delete_mountpoint: bool) -> Result<(), Error> { + self.inner_guard.take().unmount(delete_mountpoint).await?; + if let Some(lower) = self.lower.take() { + lower.unmount().await?; + } + if let Some(upper) = self.upper.take() { + upper.delete().await?; + } + Ok(()) + } + pub fn take(&mut self) -> Self { + Self { + lower: self.lower.take(), + upper: self.upper.take(), + inner_guard: self.inner_guard.take(), + } + } +} +#[async_trait::async_trait] +impl GenericMountGuard for OverlayGuard { + fn path(&self) -> &Path { + self.inner_guard.path() + } + async fn unmount(mut self) -> Result<(), Error> { + self.unmount(false).await + } +} +impl Drop for OverlayGuard { + fn drop(&mut self) { + let lower = self.lower.take(); + let upper = self.upper.take(); + let guard = self.inner_guard.take(); + if lower.is_some() || upper.is_some() || guard.mounted { + tokio::spawn(async move { + guard.unmount(false).await.unwrap(); + if let Some(lower) = lower { + lower.unmount().await.unwrap(); + } + if let Some(upper) = upper { + upper.delete().await.unwrap(); + } + }); + } + } +} diff --git a/core/startos/src/disk/mount/guard.rs b/core/startos/src/disk/mount/guard.rs index 617afeb083..af46904fdf 100644 --- a/core/startos/src/disk/mount/guard.rs +++ b/core/startos/src/disk/mount/guard.rs @@ -9,20 +9,47 @@ use tracing::instrument; use super::filesystem::{FileSystem, MountType, ReadOnly, ReadWrite}; use super::util::unmount; -use crate::util::Invoke; +use crate::util::{Invoke, Never}; use crate::Error; pub const TMP_MOUNTPOINT: &'static str = "/media/embassy/tmp"; #[async_trait::async_trait] -pub trait GenericMountGuard: AsRef + std::fmt::Debug + Send + Sync + 'static { +pub trait GenericMountGuard: std::fmt::Debug + Send + Sync + 'static { + fn path(&self) -> &Path; async fn unmount(mut self) -> Result<(), Error>; } +#[async_trait::async_trait] +impl GenericMountGuard for Never { + fn path(&self) -> &Path { + match *self {} + } + async fn unmount(mut self) -> Result<(), Error> { + match self {} + } +} + +#[async_trait::async_trait] +impl GenericMountGuard for Arc +where + T: GenericMountGuard, +{ + fn path(&self) -> &Path { + (&**self).path() + } + async fn unmount(mut self) -> Result<(), Error> { + if let Ok(guard) = Arc::try_unwrap(self) { + guard.unmount().await?; + } + Ok(()) + } +} + #[derive(Debug)] pub struct MountGuard { mountpoint: PathBuf, - mounted: bool, + pub(super) mounted: bool, } impl MountGuard { pub async fn mount( @@ -37,6 +64,16 @@ impl MountGuard { mounted: true, }) } + fn as_unmounted(&self) -> Self { + Self { + mountpoint: self.mountpoint.clone(), + mounted: false, + } + } + pub fn take(&mut self) -> Self { + let unmounted = self.as_unmounted(); + std::mem::replace(self, unmounted) + } pub async fn unmount(mut self, delete_mountpoint: bool) -> Result<(), Error> { if self.mounted { unmount(&self.mountpoint).await?; @@ -57,11 +94,6 @@ impl MountGuard { Ok(()) } } -impl AsRef for 
MountGuard { - fn as_ref(&self) -> &Path { - &self.mountpoint - } -} impl Drop for MountGuard { fn drop(&mut self) { if self.mounted { @@ -72,6 +104,9 @@ impl Drop for MountGuard { } #[async_trait::async_trait] impl GenericMountGuard for MountGuard { + fn path(&self) -> &Path { + &self.mountpoint + } async fn unmount(mut self) -> Result<(), Error> { MountGuard::unmount(self, false).await } @@ -89,7 +124,7 @@ lazy_static! { Mutex::new(BTreeMap::new()); } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct TmpMountGuard { guard: Arc, } @@ -122,21 +157,42 @@ impl TmpMountGuard { Ok(TmpMountGuard { guard }) } } - pub async fn unmount(self) -> Result<(), Error> { - if let Ok(guard) = Arc::try_unwrap(self.guard) { - guard.unmount(true).await?; - } - Ok(()) + + pub fn take(&mut self) -> Self { + let unmounted = Self { + guard: Arc::new(self.guard.as_unmounted()), + }; + std::mem::replace(self, unmounted) + } +} +#[async_trait::async_trait] +impl GenericMountGuard for TmpMountGuard { + fn path(&self) -> &Path { + self.guard.path() + } + async fn unmount(mut self) -> Result<(), Error> { + self.guard.unmount().await } } -impl AsRef for TmpMountGuard { - fn as_ref(&self) -> &Path { - (&*self.guard).as_ref() + +#[derive(Debug)] +pub struct SubPath { + guard: G, + path: PathBuf, +} +impl SubPath { + pub fn new(guard: G, path: impl AsRef) -> Self { + let path = path.as_ref(); + let path = guard.path().join(path.strip_prefix("/").unwrap_or(path)); + Self { guard, path } } } #[async_trait::async_trait] -impl GenericMountGuard for TmpMountGuard { +impl GenericMountGuard for SubPath { + fn path(&self) -> &Path { + self.path.as_path() + } async fn unmount(mut self) -> Result<(), Error> { - TmpMountGuard::unmount(self).await + self.guard.unmount().await } } diff --git a/core/startos/src/disk/mount/util.rs b/core/startos/src/disk/mount/util.rs index 392e5d67af..e93ceb7dd3 100644 --- a/core/startos/src/disk/mount/util.rs +++ b/core/startos/src/disk/mount/util.rs @@ -44,7 +44,7 @@ pub async fn bind, P1: AsRef>( pub async fn unmount>(mountpoint: P) -> Result<(), Error> { tracing::debug!("Unmounting {}.", mountpoint.as_ref().display()); tokio::process::Command::new("umount") - .arg("-l") + .arg("-Rl") .arg(mountpoint.as_ref()) .invoke(crate::ErrorKind::Filesystem) .await?; diff --git a/core/startos/src/disk/util.rs b/core/startos/src/disk/util.rs index 7051026cd8..b0bc00a5d8 100644 --- a/core/startos/src/disk/util.rs +++ b/core/startos/src/disk/util.rs @@ -17,20 +17,21 @@ use tracing::instrument; use super::mount::filesystem::block_dev::BlockDev; use super::mount::filesystem::ReadOnly; use super::mount::guard::TmpMountGuard; +use crate::disk::mount::guard::GenericMountGuard; use crate::disk::OsPartitionInfo; use crate::util::serde::IoFormat; use crate::util::{Invoke, Version}; use crate::{Error, ResultExt as _}; #[derive(Clone, Copy, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum PartitionTable { Mbr, Gpt, } #[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct DiskInfo { pub logicalname: PathBuf, pub partition_table: Option, @@ -42,18 +43,18 @@ pub struct DiskInfo { } #[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct PartitionInfo { pub logicalname: PathBuf, pub label: Option, pub capacity: u64, pub used: Option, - pub embassy_os: Option, + pub start_os: Option, pub guid: Option, } 
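// Editor's sketch (not part of the patch): the serde attributes above move from
// kebab-case to camelCase and the `embassy_os` field becomes `start_os`. The
// trimmed-down struct below (only two fields, with `Option<String>` standing in
// for the recovery-info type) shows the resulting wire format.
use serde::{Deserialize, Serialize};

#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
struct PartitionInfoExample {
    logicalname: std::path::PathBuf,
    start_os: Option<String>,
}

fn camel_case_example() -> Result<(), serde_json::Error> {
    let json = serde_json::to_string(&PartitionInfoExample {
        logicalname: "/dev/sda1".into(),
        start_os: None,
    })?;
    // Previously (kebab-case) the second key would have been "embassy-os".
    assert_eq!(json, r#"{"logicalname":"/dev/sda1","startOs":null}"#);
    Ok(())
}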
#[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct EmbassyOsRecoveryInfo { pub version: Version, pub full: bool, @@ -389,7 +390,7 @@ async fn disk_info(disk: PathBuf) -> DiskInfo { } async fn part_info(part: PathBuf) -> PartitionInfo { - let mut embassy_os = None; + let mut start_os = None; let label = get_label(&part) .await .map_err(|e| tracing::warn!("Could not get label of {}: {}", part.display(), e.source)) @@ -403,20 +404,20 @@ async fn part_info(part: PathBuf) -> PartitionInfo { match TmpMountGuard::mount(&BlockDev::new(&part), ReadOnly).await { Err(e) => tracing::warn!("Could not collect usage information: {}", e.source), Ok(mount_guard) => { - used = get_used(&mount_guard) + used = get_used(mount_guard.path()) .await .map_err(|e| { tracing::warn!("Could not get usage of {}: {}", part.display(), e.source) }) .ok(); - if let Some(recovery_info) = match recovery_info(&mount_guard).await { + if let Some(recovery_info) = match recovery_info(mount_guard.path()).await { Ok(a) => a, Err(e) => { tracing::error!("Error fetching unencrypted backup metadata: {}", e); None } } { - embassy_os = Some(recovery_info) + start_os = Some(recovery_info) } if let Err(e) = mount_guard.unmount().await { tracing::error!("Error unmounting partition {}: {}", part.display(), e); @@ -429,7 +430,7 @@ async fn part_info(part: PathBuf) -> PartitionInfo { label, capacity, used, - embassy_os, + start_os, guid: None, } } diff --git a/core/startos/src/error.rs b/core/startos/src/error.rs index 2b769b03a2..9f0493f10b 100644 --- a/core/startos/src/error.rs +++ b/core/startos/src/error.rs @@ -1,4 +1,3 @@ -use color_eyre::eyre::eyre; pub use models::{Error, ErrorKind, OptionExt, ResultExt}; #[derive(Debug, Default)] @@ -18,11 +17,15 @@ impl ErrorCollection { } } - pub fn into_result(self) -> Result<(), Error> { - if self.0.is_empty() { - Ok(()) + pub fn into_result(mut self) -> Result<(), Error> { + if self.0.len() <= 1 { + if let Some(err) = self.0.pop() { + Err(err) + } else { + Ok(()) + } } else { - Err(Error::new(eyre!("{}", self), ErrorKind::MultipleErrors)) + Err(Error::new(self, ErrorKind::MultipleErrors)) } } } @@ -49,6 +52,7 @@ impl std::fmt::Display for ErrorCollection { Ok(()) } } +impl std::error::Error for ErrorCollection {} #[macro_export] macro_rules! 
ensure_code { diff --git a/core/startos/src/firmware.rs b/core/startos/src/firmware.rs index 7f9a4a2735..20347bcff6 100644 --- a/core/startos/src/firmware.rs +++ b/core/startos/src/firmware.rs @@ -2,8 +2,6 @@ use std::collections::BTreeSet; use std::path::Path; use async_compression::tokio::bufread::GzipDecoder; -use clap::ArgMatches; -use rpc_toolkit::command; use serde::{Deserialize, Serialize}; use tokio::fs::File; use tokio::io::BufReader; @@ -16,7 +14,7 @@ use crate::PLATFORM; /// Part of the Firmware, look there for more about #[derive(Clone, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct VersionMatcher { /// Strip this prefix on the version matcher semver_prefix: Option, @@ -30,7 +28,7 @@ pub struct VersionMatcher { /// wanted a structure that could help decide what to do /// for each of the firmware versions #[derive(Clone, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct Firmware { id: String, /// This is the platform(s) the firmware was built for @@ -43,8 +41,8 @@ pub struct Firmware { shasum: String, } -fn display_firmware_update_result(arg: RequiresReboot, _: &ArgMatches) { - if arg.0 { +pub fn display_firmware_update_result(result: RequiresReboot) { + if result.0 { println!("Firmware successfully updated! Reboot to apply changes."); } else { println!("No firmware update available."); @@ -55,7 +53,7 @@ fn display_firmware_update_result(arg: RequiresReboot, _: &ArgMatches) { /// that the firmware was the correct and updated for /// systems like the Pure System that a new firmware /// was released and the updates where pushed through the pure os. -#[command(rename = "update-firmware", display(display_firmware_update_result))] +// #[command(rename = "update-firmware", display(display_firmware_update_result))] pub async fn update_firmware() -> Result { let system_product_name = String::from_utf8( Command::new("dmidecode") diff --git a/core/startos/src/init.rs b/core/startos/src/init.rs index 74c3767e35..d1d5a99434 100644 --- a/core/startos/src/init.rs +++ b/core/startos/src/init.rs @@ -4,25 +4,19 @@ use std::path::Path; use std::time::{Duration, SystemTime}; use color_eyre::eyre::eyre; - use models::ResultExt; use rand::random; -use sqlx::{Pool, Postgres}; use tokio::process::Command; use tracing::instrument; use crate::account::AccountInfo; -use crate::context::rpc::RpcContextConfig; -use crate::db::model::ServerStatus; +use crate::context::config::ServerConfig; +use crate::db::model::public::ServerStatus; use crate::disk::mount::util::unmount; -use crate::install::PKG_ARCHIVE_DIR; use crate::middleware::auth::LOCAL_AUTH_COOKIE_PATH; use crate::prelude::*; - -use crate::util::cpupower::{ - get_available_governors, get_preferred_governor, set_governor, -}; -use crate::util::docker::{create_bridge_network, CONTAINER_DATADIR, CONTAINER_TOOL}; +use crate::ssh::SSH_AUTHORIZED_KEYS_FILE; +use crate::util::cpupower::{get_available_governors, get_preferred_governor, set_governor}; use crate::util::Invoke; use crate::{Error, ARCH}; @@ -185,12 +179,11 @@ pub async fn init_postgres(datadir: impl AsRef) -> Result<(), Error> { } pub struct InitResult { - pub secret_store: Pool, pub db: patch_db::PatchDb, } #[instrument(skip_all)] -pub async fn init(cfg: &RpcContextConfig) -> Result { +pub async fn init(cfg: &ServerConfig) -> Result { tokio::fs::create_dir_all("/run/embassy") .await .with_ctx(|_| (crate::ErrorKind::Filesystem, "mkdir -p /run/embassy"))?; @@ -214,17 +207,20 
@@ pub async fn init(cfg: &RpcContextConfig) -> Result { .await?; } - let secret_store = cfg.secret_store().await?; - tracing::info!("Opened Postgres"); + let db = cfg.db().await?; + let peek = db.peek().await; + tracing::info!("Opened PatchDB"); - crate::ssh::sync_keys_from_db(&secret_store, "/home/start9/.ssh/authorized_keys").await?; + crate::ssh::sync_keys( + &peek.as_private().as_ssh_pubkeys().de()?, + SSH_AUTHORIZED_KEYS_FILE, + ) + .await?; tracing::info!("Synced SSH Keys"); - let account = AccountInfo::load(&secret_store).await?; - let db = cfg.db(&account).await?; - tracing::info!("Opened PatchDB"); - let peek = db.peek().await; - let mut server_info = peek.as_server_info().de()?; + let account = AccountInfo::load(&peek)?; + + let mut server_info = peek.as_public().as_server_info().de()?; // write to ca cert store tokio::fs::write( @@ -292,77 +288,6 @@ pub async fn init(cfg: &RpcContextConfig) -> Result { tokio::fs::remove_dir_all(&tmp_var).await?; } crate::disk::mount::util::bind(&tmp_var, "/var/tmp", false).await?; - let tmp_docker = cfg - .datadir() - .join(format!("package-data/tmp/{CONTAINER_TOOL}")); - let tmp_docker_exists = tokio::fs::metadata(&tmp_docker).await.is_ok(); - if CONTAINER_TOOL == "docker" { - Command::new("systemctl") - .arg("stop") - .arg("docker") - .invoke(crate::ErrorKind::Docker) - .await?; - } - crate::disk::mount::util::bind(&tmp_docker, CONTAINER_DATADIR, false).await?; - - if CONTAINER_TOOL == "docker" { - Command::new("systemctl") - .arg("reset-failed") - .arg("docker") - .invoke(crate::ErrorKind::Docker) - .await?; - Command::new("systemctl") - .arg("start") - .arg("docker") - .invoke(crate::ErrorKind::Docker) - .await?; - } - tracing::info!("Mounted Docker Data"); - - if should_rebuild || !tmp_docker_exists { - if CONTAINER_TOOL == "docker" { - tracing::info!("Creating Docker Network"); - create_bridge_network("start9", "172.18.0.1/24", "br-start9").await?; - tracing::info!("Created Docker Network"); - } - - let datadir = cfg.datadir(); - tracing::info!("Loading System Docker Images"); - crate::install::rebuild_from("/usr/lib/startos/system-images", &datadir).await?; - tracing::info!("Loaded System Docker Images"); - - tracing::info!("Loading Package Docker Images"); - crate::install::rebuild_from(datadir.join(PKG_ARCHIVE_DIR), &datadir).await?; - tracing::info!("Loaded Package Docker Images"); - } - - if CONTAINER_TOOL == "podman" { - crate::util::docker::remove_container("netdummy", true).await?; - Command::new("podman") - .arg("run") - .arg("-d") - .arg("--rm") - .arg("--init") - .arg("--network=start9") - .arg("--name=netdummy") - .arg("start9/x_system/utils:latest") - .arg("sleep") - .arg("infinity") - .invoke(crate::ErrorKind::Docker) - .await?; - } - - tracing::info!("Enabling Docker QEMU Emulation"); - Command::new(CONTAINER_TOOL) - .arg("run") - .arg("--privileged") - .arg("--rm") - .arg("start9/x_system/binfmt") - .arg("--install") - .arg("all") - .invoke(crate::ErrorKind::Docker) - .await?; - tracing::info!("Enabled Docker QEMU Emulation"); let governor = if let Some(governor) = &server_info.governor { if get_available_governors().await?.contains(governor) { @@ -420,12 +345,12 @@ pub async fn init(cfg: &RpcContextConfig) -> Result { }; db.mutate(|v| { - v.as_server_info_mut().ser(&server_info)?; + v.as_public_mut().as_server_info_mut().ser(&server_info)?; Ok(()) }) .await?; - crate::version::init(&db, &secret_store).await?; + crate::version::init(&db).await?; db.mutate(|d| { let model = d.de()?; @@ -443,5 +368,5 @@ pub async fn 
init(cfg: &RpcContextConfig) -> Result { tracing::info!("System initialized."); - Ok(InitResult { secret_store, db }) + Ok(InitResult { db }) } diff --git a/core/startos/src/inspect.rs b/core/startos/src/inspect.rs index cd27bbb2db..3fe84d06f0 100644 --- a/core/startos/src/inspect.rs +++ b/core/startos/src/inspect.rs @@ -1,20 +1,36 @@ use std::path::PathBuf; -use rpc_toolkit::command; +use clap::Parser; +use rpc_toolkit::{command, from_fn_async, AnyContext, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use crate::context::CliContext; use crate::s9pk::manifest::Manifest; -use crate::s9pk::reader::S9pkReader; -use crate::util::display_none; -use crate::util::serde::{display_serializable, IoFormat}; +// use crate::s9pk::reader::S9pkReader; +use crate::util::serde::HandlerExtSerde; use crate::Error; -#[command(subcommands(hash, manifest, license, icon, instructions, docker_images))] -pub fn inspect() -> Result<(), Error> { - Ok(()) +pub fn inspect() -> ParentHandler { + ParentHandler::new() + .subcommand("hash", from_fn_async(hash)) + .subcommand( + "manifest", + from_fn_async(manifest).with_display_serializable(), + ) + .subcommand("license", from_fn_async(license).no_display()) + .subcommand("icon", from_fn_async(icon).no_display()) + .subcommand("instructions", from_fn_async(instructions).no_display()) + .subcommand("docker-images", from_fn_async(docker_images).no_display()) +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct HashParams { + path: PathBuf, } -#[command(cli_only)] -pub async fn hash(#[arg] path: PathBuf) -> Result { +pub async fn hash(_: CliContext, HashParams { path }: HashParams) -> Result { Ok(S9pkReader::open(path, true) .await? .hash_str() @@ -22,21 +38,36 @@ pub async fn hash(#[arg] path: PathBuf) -> Result { .to_owned()) } -#[command(cli_only, display(display_serializable))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ManifestParams { + path: PathBuf, + #[arg(long = "no-verify")] + no_verify: bool, +} + +// #[command(cli_only, display(display_serializable))] pub async fn manifest( - #[arg] path: PathBuf, - #[arg(rename = "no-verify", long = "no-verify")] no_verify: bool, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, + _: CliContext, + ManifestParams { .. 
}: ManifestParams, ) -> Result { - S9pkReader::open(path, !no_verify).await?.manifest().await + // S9pkReader::open(path, !no_verify).await?.manifest().await + todo!() +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct InspectParams { + path: PathBuf, + #[arg(long = "no-verify")] + no_verify: bool, } -#[command(cli_only, display(display_none))] pub async fn license( - #[arg] path: PathBuf, - #[arg(rename = "no-verify", long = "no-verify")] no_verify: bool, + _: AnyContext, + InspectParams { path, no_verify }: InspectParams, ) -> Result<(), Error> { tokio::io::copy( &mut S9pkReader::open(path, !no_verify).await?.license().await?, @@ -46,10 +77,9 @@ pub async fn license( Ok(()) } -#[command(cli_only, display(display_none))] pub async fn icon( - #[arg] path: PathBuf, - #[arg(rename = "no-verify", long = "no-verify")] no_verify: bool, + _: AnyContext, + InspectParams { path, no_verify }: InspectParams, ) -> Result<(), Error> { tokio::io::copy( &mut S9pkReader::open(path, !no_verify).await?.icon().await?, @@ -58,11 +88,18 @@ pub async fn icon( .await?; Ok(()) } +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct InstructionParams { + path: PathBuf, + #[arg(long = "no-verify")] + no_verify: bool, +} -#[command(cli_only, display(display_none))] pub async fn instructions( - #[arg] path: PathBuf, - #[arg(rename = "no-verify", long = "no-verify")] no_verify: bool, + _: CliContext, + InstructionParams { path, no_verify }: InstructionParams, ) -> Result<(), Error> { tokio::io::copy( &mut S9pkReader::open(path, !no_verify) @@ -74,11 +111,9 @@ pub async fn instructions( .await?; Ok(()) } - -#[command(cli_only, display(display_none), rename = "docker-images")] pub async fn docker_images( - #[arg] path: PathBuf, - #[arg(rename = "no-verify", long = "no-verify")] no_verify: bool, + _: AnyContext, + InspectParams { path, no_verify }: InspectParams, ) -> Result<(), Error> { tokio::io::copy( &mut S9pkReader::open(path, !no_verify) diff --git a/core/startos/src/install/cleanup.rs b/core/startos/src/install/cleanup.rs deleted file mode 100644 index d90ec502cf..0000000000 --- a/core/startos/src/install/cleanup.rs +++ /dev/null @@ -1,241 +0,0 @@ -use std::path::PathBuf; -use std::sync::Arc; - -use models::OptionExt; -use sqlx::{Executor, Postgres}; -use tracing::instrument; - -use super::PKG_ARCHIVE_DIR; -use crate::context::RpcContext; -use crate::db::model::{ - CurrentDependencies, Database, PackageDataEntry, PackageDataEntryInstalled, - PackageDataEntryMatchModelRef, -}; -use crate::error::ErrorCollection; -use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::util::{Apply, Version}; -use crate::volume::{asset_dir, script_dir}; -use crate::Error; - -#[instrument(skip_all)] -pub async fn cleanup(ctx: &RpcContext, id: &PackageId, version: &Version) -> Result<(), Error> { - let mut errors = ErrorCollection::new(); - ctx.managers.remove(&(id.clone(), version.clone())).await; - // docker images start9/$APP_ID/*:$VERSION -q | xargs docker rmi - let images = crate::util::docker::images_for(id, version).await?; - errors.extend( - futures::future::join_all(images.into_iter().map(|sha| async { - let sha = sha; // move into future - crate::util::docker::remove_image(&sha).await - })) - .await, - ); - let pkg_archive_dir = ctx - .datadir - .join(PKG_ARCHIVE_DIR) - .join(id) - .join(version.as_str()); - if 
tokio::fs::metadata(&pkg_archive_dir).await.is_ok() { - tokio::fs::remove_dir_all(&pkg_archive_dir) - .await - .apply(|res| errors.handle(res)); - } - let assets_path = asset_dir(&ctx.datadir, id, version); - if tokio::fs::metadata(&assets_path).await.is_ok() { - tokio::fs::remove_dir_all(&assets_path) - .await - .apply(|res| errors.handle(res)); - } - let scripts_path = script_dir(&ctx.datadir, id, version); - if tokio::fs::metadata(&scripts_path).await.is_ok() { - tokio::fs::remove_dir_all(&scripts_path) - .await - .apply(|res| errors.handle(res)); - } - - errors.into_result() -} - -#[instrument(skip_all)] -pub async fn cleanup_failed(ctx: &RpcContext, id: &PackageId) -> Result<(), Error> { - if let Some(version) = match ctx - .db - .peek() - .await - .as_package_data() - .as_idx(id) - .or_not_found(id)? - .as_match() - { - PackageDataEntryMatchModelRef::Installing(m) => Some(m.as_manifest().as_version().de()?), - PackageDataEntryMatchModelRef::Restoring(m) => Some(m.as_manifest().as_version().de()?), - PackageDataEntryMatchModelRef::Updating(m) => { - let manifest_version = m.as_manifest().as_version().de()?; - let installed = m.as_installed().as_manifest().as_version().de()?; - if manifest_version != installed { - Some(manifest_version) - } else { - None // do not remove existing data - } - } - _ => { - tracing::warn!("{}: Nothing to clean up!", id); - None - } - } { - cleanup(ctx, id, &version).await?; - } - - ctx.db - .mutate(|v| { - match v - .clone() - .into_package_data() - .into_idx(id) - .or_not_found(id)? - .as_match() - { - PackageDataEntryMatchModelRef::Installing(_) - | PackageDataEntryMatchModelRef::Restoring(_) => { - v.as_package_data_mut().remove(id)?; - } - PackageDataEntryMatchModelRef::Updating(pde) => { - v.as_package_data_mut() - .as_idx_mut(id) - .or_not_found(id)? - .ser(&PackageDataEntry::Installed(PackageDataEntryInstalled { - manifest: pde.as_installed().as_manifest().de()?, - static_files: pde.as_static_files().de()?, - installed: pde.as_installed().de()?, - }))?; - } - _ => (), - } - Ok(()) - }) - .await -} - -#[instrument(skip_all)] -pub fn remove_from_current_dependents_lists( - db: &mut Model, - id: &PackageId, - current_dependencies: &CurrentDependencies, -) -> Result<(), Error> { - for dep in current_dependencies.0.keys().chain(std::iter::once(id)) { - if let Some(current_dependents) = db - .as_package_data_mut() - .as_idx_mut(dep) - .and_then(|d| d.as_installed_mut()) - .map(|i| i.as_current_dependents_mut()) - { - current_dependents.remove(id)?; - } - } - Ok(()) -} - -#[instrument(skip_all)] -pub async fn uninstall(ctx: &RpcContext, secrets: &mut Ex, id: &PackageId) -> Result<(), Error> -where - for<'a> &'a mut Ex: Executor<'a, Database = Postgres>, -{ - let db = ctx.db.peek().await; - let entry = db - .as_package_data() - .as_idx(id) - .or_not_found(id)? - .expect_as_removing()?; - - let dependents_paths: Vec = entry - .as_removing() - .as_current_dependents() - .keys()? 
- .into_iter() - .filter(|x| x != id) - .flat_map(|x| db.as_package_data().as_idx(&x)) - .flat_map(|x| x.as_installed()) - .flat_map(|x| x.as_manifest().as_volumes().de()) - .flat_map(|x| x.values().cloned().collect::>()) - .flat_map(|x| x.pointer_path(&ctx.datadir)) - .collect(); - - let volume_dir = ctx - .datadir - .join(crate::volume::PKG_VOLUME_DIR) - .join(&*entry.as_manifest().as_id().de()?); - let version = entry.as_removing().as_manifest().as_version().de()?; - tracing::debug!( - "Cleaning up {:?} except for {:?}", - volume_dir, - dependents_paths - ); - cleanup(ctx, id, &version).await?; - cleanup_folder(volume_dir, Arc::new(dependents_paths)).await; - remove_network_keys(secrets, id).await?; - - ctx.db - .mutate(|d| { - d.as_package_data_mut().remove(id)?; - remove_from_current_dependents_lists( - d, - id, - &entry.as_removing().as_current_dependencies().de()?, - ) - }) - .await -} - -#[instrument(skip_all)] -pub async fn remove_network_keys(secrets: &mut Ex, id: &PackageId) -> Result<(), Error> -where - for<'a> &'a mut Ex: Executor<'a, Database = Postgres>, -{ - sqlx::query!("DELETE FROM network_keys WHERE package = $1", &*id) - .execute(&mut *secrets) - .await?; - sqlx::query!("DELETE FROM tor WHERE package = $1", &*id) - .execute(&mut *secrets) - .await?; - Ok(()) -} - -/// Needed to remove, without removing the folders that are mounted in the other docker containers -pub fn cleanup_folder( - path: PathBuf, - dependents_volumes: Arc>, -) -> futures::future::BoxFuture<'static, ()> { - Box::pin(async move { - let meta_data = match tokio::fs::metadata(&path).await { - Ok(a) => a, - Err(_e) => { - return; - } - }; - if !meta_data.is_dir() { - tracing::error!("is_not dir, remove {:?}", path); - let _ = tokio::fs::remove_file(&path).await; - return; - } - if !dependents_volumes - .iter() - .any(|v| v.starts_with(&path) || v == &path) - { - tracing::error!("No parents, remove {:?}", path); - let _ = tokio::fs::remove_dir_all(&path).await; - return; - } - let mut read_dir = match tokio::fs::read_dir(&path).await { - Ok(a) => a, - Err(_e) => { - return; - } - }; - tracing::error!("Parents, recurse {:?}", path); - while let Some(entry) = read_dir.next_entry().await.ok().flatten() { - let entry_path = entry.path(); - cleanup_folder(entry_path, dependents_volumes.clone()).await; - } - }) -} diff --git a/core/startos/src/install/mod.rs b/core/startos/src/install/mod.rs index 01f405e7bd..72a7fe9eed 100644 --- a/core/startos/src/install/mod.rs +++ b/core/startos/src/install/mod.rs @@ -1,99 +1,70 @@ -use std::collections::BTreeMap; -use std::io::SeekFrom; -use std::marker::PhantomData; -use std::path::{Path, PathBuf}; -use std::sync::atomic::Ordering; -use std::sync::Arc; +use std::path::PathBuf; use std::time::Duration; +use clap::builder::ValueParserFactory; +use clap::{value_parser, CommandFactory, FromArgMatches, Parser}; use color_eyre::eyre::eyre; use emver::VersionRange; -use futures::future::BoxFuture; -use futures::{FutureExt, StreamExt, TryStreamExt}; -use http::header::CONTENT_LENGTH; -use http::{Request, Response, StatusCode}; -use hyper::Body; -use models::{mime, DataUrl}; +use futures::{FutureExt, StreamExt}; +use patch_db::json_ptr::JsonPointer; +use reqwest::header::{HeaderMap, CONTENT_LENGTH}; use reqwest::Url; -use rpc_toolkit::command; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::CallRemote; +use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; -use tokio::fs::{File, OpenOptions}; -use tokio::io::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWriteExt}; 
-use tokio::process::Command; use tokio::sync::oneshot; -use tokio_stream::wrappers::ReadDirStream; use tracing::instrument; -use self::cleanup::{cleanup_failed, remove_from_current_dependents_lists}; -use crate::config::ConfigureContext; use crate::context::{CliContext, RpcContext}; use crate::core::rpc_continuations::{RequestGuid, RpcContinuation}; -use crate::db::model::{ - CurrentDependencies, CurrentDependencyInfo, CurrentDependents, InstalledPackageInfo, - PackageDataEntry, PackageDataEntryInstalled, PackageDataEntryInstalling, - PackageDataEntryMatchModelRef, PackageDataEntryRemoving, PackageDataEntryRestoring, - PackageDataEntryUpdating, StaticDependencyInfo, StaticFiles, -}; -use crate::dependencies::{ - add_dependent_to_current_dependents_lists, compute_dependency_config_errs, - set_dependents_with_live_pointers_to_needs_config, -}; -use crate::install::cleanup::cleanup; -use crate::install::progress::{InstallProgress, InstallProgressTracker}; -use crate::notifications::NotificationLevel; +use crate::db::model::package::{ManifestPreference, PackageState, PackageStateMatchModelRef}; use crate::prelude::*; -use crate::registry::marketplace::with_query_params; -use crate::s9pk::manifest::{Manifest, PackageId}; -use crate::s9pk::reader::S9pkReader; -use crate::status::{MainStatus, Status}; -use crate::util::docker::CONTAINER_TOOL; -use crate::util::io::response_to_reader; -use crate::util::serde::{display_serializable, Port}; -use crate::util::{display_none, AsyncFileExt, Invoke, Version}; -use crate::volume::{asset_dir, script_dir}; -use crate::{Error, ErrorKind, ResultExt}; - -pub mod cleanup; -pub mod progress; +use crate::progress::{FullProgress, PhasedProgressBar}; +use crate::s9pk::manifest::PackageId; +use crate::s9pk::merkle_archive::source::http::HttpSource; +use crate::s9pk::S9pk; +use crate::upload::upload; +use crate::util::clap::FromStrParser; +use crate::util::Never; pub const PKG_ARCHIVE_DIR: &str = "package-data/archive"; pub const PKG_PUBLIC_DIR: &str = "package-data/public"; pub const PKG_WASM_DIR: &str = "package-data/wasm"; -#[command(display(display_serializable))] -pub async fn list(#[context] ctx: RpcContext) -> Result { - Ok(ctx.db.peek().await.as_package_data().as_entries()? +// #[command(display(display_serializable))] +pub async fn list(ctx: RpcContext) -> Result { + Ok(ctx.db.peek().await.as_public().as_package_data().as_entries()? 
.iter() .filter_map(|(id, pde)| { - let status = match pde.as_match() { - PackageDataEntryMatchModelRef::Installed(_) => { + let status = match pde.as_state_info().as_match() { + PackageStateMatchModelRef::Installed(_) => { "installed" } - PackageDataEntryMatchModelRef::Installing(_) => { + PackageStateMatchModelRef::Installing(_) => { "installing" } - PackageDataEntryMatchModelRef::Updating(_) => { + PackageStateMatchModelRef::Updating(_) => { "updating" } - PackageDataEntryMatchModelRef::Restoring(_) => { + PackageStateMatchModelRef::Restoring(_) => { "restoring" } - PackageDataEntryMatchModelRef::Removing(_) => { + PackageStateMatchModelRef::Removing(_) => { "removing" } - PackageDataEntryMatchModelRef::Error(_) => { + PackageStateMatchModelRef::Error(_) => { "error" } }; - serde_json::to_value(json!({ "status":status, "id": id.clone(), "version": pde.as_manifest().as_version().de().ok()?})) + serde_json::to_value(json!({ "status": status, "id": id.clone(), "version": pde.as_state_info().as_manifest(ManifestPreference::Old).as_version().de().ok()?})) .ok() }) .collect()) } #[derive(Debug, Clone, Copy, serde::Deserialize, serde::Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum MinMax { Min, Max, @@ -116,6 +87,12 @@ impl std::str::FromStr for MinMax { } } } +impl ValueParserFactory for MinMax { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} impl std::fmt::Display for MinMax { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -125,21 +102,31 @@ impl std::fmt::Display for MinMax { } } -#[command( - custom_cli(cli_install(async, context(CliContext))), - display(display_none), - metadata(sync_db = true) -)] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct InstallParams { + id: PackageId, + #[arg(short = 'm', long = "marketplace-url")] + marketplace_url: Option, + #[arg(short = 'v', long = "version-spec")] + version_spec: Option, + #[arg(long = "version-priority")] + version_priority: Option, +} + +// #[command( +// custom_cli(cli_install(async, context(CliContext))), +// )] #[instrument(skip_all)] pub async fn install( - #[context] ctx: RpcContext, - #[arg] id: String, - #[arg(short = 'm', long = "marketplace-url", rename = "marketplace-url")] - marketplace_url: Option, - #[arg(short = 'v', long = "version-spec", rename = "version-spec")] version_spec: Option< - String, - >, - #[arg(long = "version-priority", rename = "version-priority")] version_priority: Option, + ctx: RpcContext, + InstallParams { + id, + marketplace_url, + version_spec, + version_priority, + }: InstallParams, ) -> Result<(), Error> { let version_str = match &version_spec { None => "*", @@ -149,1169 +136,292 @@ pub async fn install( let marketplace_url = marketplace_url.unwrap_or_else(|| crate::DEFAULT_MARKETPLACE.parse().unwrap()); let version_priority = version_priority.unwrap_or_default(); - let man: Manifest = ctx - .client - .get(with_query_params( - ctx.clone(), + let s9pk = S9pk::deserialize( + &HttpSource::new( + ctx.client.clone(), format!( - "{}/package/v0/manifest/{}?spec={}&version-priority={}", + "{}/package/v0/{}.s9pk?spec={}&version-priority={}", marketplace_url, id, version, version_priority, ) .parse()?, - )) - .send() - .await - .with_kind(crate::ErrorKind::Registry)? - .error_for_status() - .with_kind(crate::ErrorKind::Registry)? 
- .json() - .await - .with_kind(crate::ErrorKind::Registry)?; - let s9pk = ctx - .client - .get(with_query_params( - ctx.clone(), - format!( - "{}/package/v0/{}.s9pk?spec=={}&version-priority={}", - marketplace_url, id, man.version, version_priority, - ) - .parse()?, - )) - .send() - .await - .with_kind(crate::ErrorKind::Registry)? - .error_for_status()?; - - if *man.id != *id || !man.version.satisfies(&version) { - return Err(Error::new( - eyre!("Fetched package does not match requested id and version"), - ErrorKind::Registry, - )); - } - - let public_dir_path = ctx - .datadir - .join(PKG_PUBLIC_DIR) - .join(&man.id) - .join(man.version.as_str()); - tokio::fs::create_dir_all(&public_dir_path).await?; + ) + .await?, + ) + .await?; - let icon_type = man.assets.icon_type(); - let (license_res, instructions_res, icon_res) = tokio::join!( - async { - tokio::io::copy( - &mut response_to_reader( - ctx.client - .get(with_query_params( - ctx.clone(), - format!( - "{}/package/v0/license/{}?spec=={}", - marketplace_url, id, man.version, - ) - .parse()?, - )) - .send() - .await? - .error_for_status()?, - ), - &mut File::create(public_dir_path.join("LICENSE.md")).await?, - ) - .await?; - Ok::<_, color_eyre::eyre::Report>(()) - }, - async { - tokio::io::copy( - &mut response_to_reader( - ctx.client - .get(with_query_params( - ctx.clone(), - format!( - "{}/package/v0/instructions/{}?spec=={}", - marketplace_url, id, man.version, - ) - .parse()?, - )) - .send() - .await? - .error_for_status()?, - ), - &mut File::create(public_dir_path.join("INSTRUCTIONS.md")).await?, - ) - .await?; - Ok::<_, color_eyre::eyre::Report>(()) - }, - async { - tokio::io::copy( - &mut response_to_reader( - ctx.client - .get(with_query_params( - ctx.clone(), - format!( - "{}/package/v0/icon/{}?spec=={}", - marketplace_url, id, man.version, - ) - .parse()?, - )) - .send() - .await? - .error_for_status()?, - ), - &mut File::create(public_dir_path.join(format!("icon.{}", icon_type))).await?, - ) - .await?; - Ok::<_, color_eyre::eyre::Report>(()) - }, + ensure_code!( + &s9pk.as_manifest().id == &id, + ErrorKind::ValidateS9pk, + "manifest.id does not match expected" ); - if let Err(e) = license_res { - tracing::warn!("Failed to pre-download license: {}", e); - } - if let Err(e) = instructions_res { - tracing::warn!("Failed to pre-download instructions: {}", e); - } - if let Err(e) = icon_res { - tracing::warn!("Failed to pre-download icon: {}", e); - } - let progress = Arc::new(InstallProgress::new(s9pk.content_length())); - let static_files = StaticFiles::local(&man.id, &man.version, icon_type); - ctx.db - .mutate(|db| { - let pde = match db - .as_package_data() - .as_idx(&man.id) - .map(|x| x.de()) - .transpose()? - { - Some(PackageDataEntry::Installed(PackageDataEntryInstalled { - installed, - static_files, - .. 
- })) => PackageDataEntry::Updating(PackageDataEntryUpdating { - install_progress: progress.clone(), - static_files, - installed, - manifest: man.clone(), - }), - None => PackageDataEntry::Installing(PackageDataEntryInstalling { - install_progress: progress.clone(), - static_files, - manifest: man.clone(), - }), - _ => { - return Err(Error::new( - eyre!("Cannot install over a package in a transient state"), - crate::ErrorKind::InvalidRequest, - )) - } - }; - db.as_package_data_mut().insert(&man.id, &pde) - }) + let download = ctx + .services + .install(ctx.clone(), s9pk, None::) .await?; - - let downloading = download_install_s9pk( - ctx.clone(), - man.clone(), - Some(marketplace_url), - Arc::new(InstallProgress::new(s9pk.content_length())), - response_to_reader(s9pk), - None, - ); - tokio::spawn(async move { - if let Err(e) = downloading.await { - let err_str = format!("Install of {}@{} Failed: {}", man.id, man.version, e); - tracing::error!("{}", err_str); - tracing::debug!("{:?}", e); - if let Err(e) = ctx - .notification_manager - .notify( - ctx.db.clone(), - Some(man.id), - NotificationLevel::Error, - String::from("Install Failed"), - err_str, - (), - None, - ) - .await - { - tracing::error!("Failed to issue Notification: {}", e); - tracing::debug!("{:?}", e); - } - } - Ok::<_, String>(()) - }); + tokio::spawn(async move { download.await?.await }); Ok(()) } -#[command(rpc_only, display(display_none))] -#[instrument(skip_all)] -pub async fn sideload( - #[context] ctx: RpcContext, - #[arg] manifest: Manifest, - #[arg] icon: Option, -) -> Result { - let new_ctx = ctx.clone(); - let guid = RequestGuid::new(); - if let Some(icon) = icon { - use tokio::io::AsyncWriteExt; - - let public_dir_path = ctx - .datadir - .join(PKG_PUBLIC_DIR) - .join(&manifest.id) - .join(manifest.version.as_str()); - tokio::fs::create_dir_all(&public_dir_path).await?; - let invalid_data_url = - || Error::new(eyre!("Invalid Icon Data URL"), ErrorKind::InvalidRequest); - let data = icon - .strip_prefix(&format!( - "data:image/{};base64,", - manifest.assets.icon_type() - )) - .ok_or_else(&invalid_data_url)?; - let mut icon_file = - File::create(public_dir_path.join(format!("icon.{}", manifest.assets.icon_type()))) - .await?; - icon_file - .write_all(&base64::decode(data).with_kind(ErrorKind::InvalidRequest)?) - .await?; - icon_file.sync_all().await?; - } - - let handler = Box::new(|req: Request| { - async move { - let content_length = match req.headers().get(CONTENT_LENGTH).map(|a| a.to_str()) { - None => None, - Some(Err(_)) => { - return Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Invalid Content Length")) - .with_kind(ErrorKind::Network) - } - Some(Ok(a)) => match a.parse::() { - Err(_) => { - return Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Invalid Content Length")) - .with_kind(ErrorKind::Network) - } - Ok(a) => Some(a), - }, - }; - let progress = Arc::new(InstallProgress::new(content_length)); - let install_progress = progress.clone(); +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct SideloadResponse { + pub upload: RequestGuid, + pub progress: RequestGuid, +} - new_ctx - .db - .mutate(|db| { - let pde = match db - .as_package_data() - .as_idx(&manifest.id) - .map(|x| x.de()) - .transpose()? - { - Some(PackageDataEntry::Installed(PackageDataEntryInstalled { - installed, - static_files, - .. 
- })) => PackageDataEntry::Updating(PackageDataEntryUpdating { - install_progress, - installed, - manifest: manifest.clone(), - static_files, - }), - None => PackageDataEntry::Installing(PackageDataEntryInstalling { - install_progress, - static_files: StaticFiles::local( - &manifest.id, - &manifest.version, - &manifest.assets.icon_type(), - ), - manifest: manifest.clone(), - }), - _ => { - return Err(Error::new( - eyre!("Cannot install over a package in a transient state"), - crate::ErrorKind::InvalidRequest, - )) +#[instrument(skip_all)] +pub async fn sideload(ctx: RpcContext) -> Result { + let (upload, file) = upload(&ctx).await?; + let (id_send, id_recv) = oneshot::channel(); + let (err_send, err_recv) = oneshot::channel(); + let progress = RequestGuid::new(); + let db = ctx.db.clone(); + let mut sub = db + .subscribe( + "/package-data/{id}/install-progress" + .parse::() + .with_kind(ErrorKind::Database)?, + ) + .await; + ctx.add_continuation( + progress.clone(), + RpcContinuation::ws( + Box::new(|mut ws| { + use axum::extract::ws::Message; + async move { + if let Err(e) = async { + let id = id_recv.await.map_err(|_| { + Error::new( + eyre!("Could not get id to watch progress"), + ErrorKind::Cancelled, + ) + })?; + tokio::select! { + res = async { + while let Some(rev) = sub.recv().await { + if !rev.patch.0.is_empty() { // TODO: don't send empty patches? + ws.send(Message::Text( + serde_json::to_string(&if let Some(p) = db + .peek() + .await + .as_public() + .as_package_data() + .as_idx(&id) + .and_then(|e| e.as_state_info().as_installing_info()).map(|i| i.as_progress()) + { + Ok::<_, ()>(p.de()?) + } else { + let mut p = FullProgress::new(); + p.overall.complete(); + Ok(p) + }) + .with_kind(ErrorKind::Serialization)?, + )) + .await + .with_kind(ErrorKind::Network)?; + } + } + Ok::<_, Error>(()) + } => res?, + err = err_recv => { + if let Ok(e) = err { + ws.send(Message::Text( + serde_json::to_string(&Err::<(), _>(e)) + .with_kind(ErrorKind::Serialization)?, + )) + .await + .with_kind(ErrorKind::Network)?; + } + } } - }; - db.as_package_data_mut().insert(&manifest.id, &pde) - }) - .await?; - let (send, recv) = oneshot::channel(); + ws.close().await.with_kind(ErrorKind::Network)?; - tokio::spawn(async move { - if let Err(e) = download_install_s9pk( - new_ctx.clone(), - manifest.clone(), - None, - progress, - tokio_util::io::StreamReader::new(req.into_body().map_err(|e| { - std::io::Error::new( - match &e { - e if e.is_connect() => std::io::ErrorKind::ConnectionRefused, - e if e.is_timeout() => std::io::ErrorKind::TimedOut, - _ => std::io::ErrorKind::Other, - }, - e, - ) - })), - Some(send), - ) - .await - { - let err_str = format!( - "Install of {}@{} Failed: {}", - manifest.id, manifest.version, e - ); - tracing::error!("{}", err_str); - tracing::debug!("{:?}", e); - if let Err(e) = new_ctx - .notification_manager - .notify( - new_ctx.db.clone(), - Some(manifest.id.clone()), - NotificationLevel::Error, - String::from("Install Failed"), - err_str, - (), - None, - ) - .await + Ok::<_, Error>(()) + } + .await { - tracing::error!("Failed to issue Notification: {}", e); - tracing::debug!("{:?}", e); + tracing::error!("Error tracking sideload progress: {e}"); + tracing::debug!("{e:?}"); } } - }); - - if let Ok(_) = recv.await { - Response::builder() - .status(StatusCode::OK) - .body(Body::empty()) - .with_kind(ErrorKind::Network) - } else { - Response::builder() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .body(Body::from("installation aborted before upload completed")) - 
.with_kind(ErrorKind::Network) - } - } - .boxed() - }); - ctx.add_continuation( - guid.clone(), - RpcContinuation::rest(handler, Duration::from_secs(30)), + .boxed() + }), + Duration::from_secs(600), + ), ) .await; - Ok(guid) -} - -#[instrument(skip_all)] -async fn cli_install( - ctx: CliContext, - target: String, - marketplace_url: Option, - version_spec: Option, - version_priority: Option, -) -> Result<(), RpcError> { - if target.ends_with(".s9pk") { - let path = PathBuf::from(target); - - // inspect manifest no verify - let mut reader = S9pkReader::open(&path, false).await?; - let manifest = reader.manifest().await?; - let icon = reader.icon().await?.to_vec().await?; - let icon_str = format!( - "data:image/{};base64,{}", - manifest.assets.icon_type(), - base64::encode(&icon) - ); - - // rpc call remote sideload - tracing::debug!("calling package.sideload"); - let guid = rpc_toolkit::command_helpers::call_remote( - ctx.clone(), - "package.sideload", - serde_json::json!({ "manifest": manifest, "icon": icon_str }), - PhantomData::, - ) - .await? - .result?; - tracing::debug!("package.sideload succeeded {:?}", guid); - - // hit continuation api with guid that comes back - let file = tokio::fs::File::open(path).await?; - let content_length = file.metadata().await?.len(); - let body = Body::wrap_stream(tokio_util::io::ReaderStream::new(file)); - let res = ctx - .client - .post(format!("{}rest/rpc/{}", ctx.base_url, guid,)) - .header(CONTENT_LENGTH, content_length) - .body(body) - .send() - .await?; - if res.status().as_u16() == 200 { - tracing::info!("Package Uploaded") - } else { - tracing::info!("Package Upload failed: {}", res.text().await?) - } - } else { - let params = match (target.split_once("@"), version_spec) { - (Some((pkg, v)), None) => { - serde_json::json!({ "id": pkg, "marketplace-url": marketplace_url, "version-spec": v, "version-priority": version_priority }) - } - (Some(_), Some(_)) => { - return Err(crate::Error::new( - eyre!("Invalid package id {}", target), - ErrorKind::InvalidRequest, - ) - .into()) - } - (None, Some(v)) => { - serde_json::json!({ "id": target, "marketplace-url": marketplace_url, "version-spec": v, "version-priority": version_priority }) - } - (None, None) => { - serde_json::json!({ "id": target, "marketplace-url": marketplace_url, "version-priority": version_priority }) - } - }; - tracing::debug!("calling package.install"); - rpc_toolkit::command_helpers::call_remote( - ctx, - "package.install", - params, - PhantomData::<()>, - ) - .await? - .result?; - tracing::debug!("package.install succeeded"); - } - Ok(()) -} - -#[command(display(display_none), metadata(sync_db = true))] -pub async fn uninstall( - #[context] ctx: RpcContext, - #[arg] id: PackageId, -) -> Result { - ctx.db - .mutate(|db| { - let (manifest, static_files, installed) = - match db.as_package_data().as_idx(&id).or_not_found(&id)?.de()? 
{ - PackageDataEntry::Installed(PackageDataEntryInstalled { - manifest, - static_files, - installed, - }) => (manifest, static_files, installed), - _ => { - return Err(Error::new( - eyre!("Package is not installed."), - crate::ErrorKind::NotFound, - )); - } - }; - let pde = PackageDataEntry::Removing(PackageDataEntryRemoving { - manifest, - static_files, - removing: installed, - }); - db.as_package_data_mut().insert(&id, &pde) - }) - .await?; - - let return_id = id.clone(); - tokio::spawn(async move { if let Err(e) = async { - cleanup::uninstall(&ctx, ctx.secret_store.acquire().await?.as_mut(), &id).await + let s9pk = S9pk::deserialize(&file).await?; + let _ = id_send.send(s9pk.as_manifest().id.clone()); + ctx.services + .install(ctx.clone(), s9pk, None::) + .await? + .await? + .await?; + file.delete().await } .await { - let err_str = format!("Uninstall of {} Failed: {}", id, e); - tracing::error!("{}", err_str); - tracing::debug!("{:?}", e); - if let Err(e) = ctx - .notification_manager - .notify( - ctx.db.clone(), // allocating separate handle here because the lifetime of the previous one is the expression - Some(id), - NotificationLevel::Error, - String::from("Uninstall Failed"), - err_str, - (), - None, - ) - .await - { - tracing::error!("Failed to issue Notification: {}", e); - tracing::debug!("{:?}", e); - } + let _ = err_send.send(RpcError::from(e.clone_output())); + tracing::error!("Error sideloading package: {e}"); + tracing::debug!("{e:?}"); } }); - - Ok(return_id) + Ok(SideloadResponse { upload, progress }) } -#[instrument(skip_all)] -pub async fn download_install_s9pk( - ctx: RpcContext, - temp_manifest: Manifest, - marketplace_url: Option, - progress: Arc, - mut s9pk: impl AsyncRead + Unpin, - download_complete: Option>, -) -> Result<(), Error> { - let pkg_id = &temp_manifest.id; - let version = &temp_manifest.version; - let db = ctx.db.peek().await; - - if let Result::<(), Error>::Err(e) = { - let ctx = ctx.clone(); - async move { - // // Build set of existing manifests - let mut manifests = Vec::new(); - for (_id, pkg) in db.as_package_data().as_entries()? 
{ - let m = pkg.as_manifest().de()?; - manifests.push(m); - } - // Build map of current port -> ssl mappings - let port_map = ssl_port_status(&manifests); - tracing::info!("SSL Port Map: {:?}", &port_map); - - // if any of the requested interface lan configs conflict with current state, fail the install - for (_id, iface) in &temp_manifest.interfaces.0 { - if let Some(cfg) = &iface.lan_config { - for (p, lan) in cfg { - if p.0 == 80 && lan.ssl || p.0 == 443 && !lan.ssl { - return Err(Error::new( - eyre!("SSL Conflict with StartOS"), - ErrorKind::LanPortConflict, - )); - } - match port_map.get(&p) { - Some((ssl, pkg)) => { - if *ssl != lan.ssl { - return Err(Error::new( - eyre!("SSL Conflict with package: {}", pkg), - ErrorKind::LanPortConflict, - )); - } - } - None => { - continue; - } - } - } - } - } - - let pkg_archive_dir = ctx - .datadir - .join(PKG_ARCHIVE_DIR) - .join(pkg_id) - .join(version.as_str()); - tokio::fs::create_dir_all(&pkg_archive_dir).await?; - let pkg_archive = - pkg_archive_dir.join(AsRef::::as_ref(pkg_id).with_extension("s9pk")); - - File::delete(&pkg_archive).await?; - let mut dst = OpenOptions::new() - .create(true) - .write(true) - .read(true) - .open(&pkg_archive) - .await?; - - progress - .track_download_during(ctx.db.clone(), pkg_id, || async { - let mut progress_writer = - InstallProgressTracker::new(&mut dst, progress.clone()); - tokio::io::copy(&mut s9pk, &mut progress_writer).await?; - progress.download_complete(); - if let Some(complete) = download_complete { - complete.send(()).unwrap_or_default(); - } - Ok(()) - }) - .await?; - - dst.seek(SeekFrom::Start(0)).await?; - - let progress_reader = InstallProgressTracker::new(dst, progress.clone()); - let mut s9pk_reader = progress - .track_read_during(ctx.db.clone(), pkg_id, || { - S9pkReader::from_reader(progress_reader, true) - }) - .await?; - - install_s9pk( - ctx.clone(), - pkg_id, - version, - marketplace_url, - &mut s9pk_reader, - progress, +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum CliInstallParams { + Marketplace(InstallParams), + Sideload(PathBuf), +} +impl CommandFactory for CliInstallParams { + fn command() -> clap::Command { + use clap::{Arg, Command}; + Command::new("install") + .arg( + Arg::new("sideload") + .long("sideload") + .short('s') + .required_unless_present("id") + .value_parser(value_parser!(PathBuf)), ) - .await?; - - Ok(()) - } + .args(InstallParams::command().get_arguments().cloned().map(|a| { + if a.get_id() == "id" { + a.required(false).required_unless_present("sideload") + } else { + a + } + .conflicts_with("sideload") + })) } - .await - { - if let Err(e) = cleanup_failed(&ctx, pkg_id).await { - tracing::error!("Failed to clean up {}@{}: {}", pkg_id, version, e); - tracing::debug!("{:?}", e); - } - - Err(e) - } else { - Ok::<_, Error>(()) + fn command_for_update() -> clap::Command { + Self::command() } } - -#[instrument(skip_all)] -pub async fn install_s9pk( - ctx: RpcContext, - pkg_id: &PackageId, - version: &Version, - marketplace_url: Option, - rdr: &mut S9pkReader>, - progress: Arc, -) -> Result<(), Error> { - rdr.validate().await?; - rdr.validated(); - let developer_key = rdr.developer_key().clone(); - rdr.reset().await?; - let db = ctx.db.peek().await; - - tracing::info!("Install {}@{}: Unpacking Manifest", pkg_id, version); - let manifest = progress - .track_read_during(ctx.db.clone(), pkg_id, || rdr.manifest()) - .await?; - tracing::info!("Install {}@{}: Unpacked Manifest", pkg_id, version); - - tracing::info!("Install {}@{}: Fetching 
Dependency Info", pkg_id, version); - let mut dependency_info = BTreeMap::new(); - for (dep, info) in &manifest.dependencies.0 { - let manifest: Option = if let Some(local_man) = db - .as_package_data() - .as_idx(dep) - .map(|pde| pde.as_manifest().de()) - { - Some(local_man?) - } else if let Some(marketplace_url) = &marketplace_url { - match ctx - .client - .get(with_query_params( - ctx.clone(), - format!( - "{}/package/v0/manifest/{}?spec={}", - marketplace_url, dep, info.version, - ) - .parse()?, - )) - .send() - .await - .with_kind(crate::ErrorKind::Registry)? - .error_for_status() - { - Ok(a) => Ok(Some( - a.json() - .await - .with_kind(crate::ErrorKind::Deserialization)?, - )), - Err(e) - if e.status() == Some(StatusCode::BAD_REQUEST) - || e.status() == Some(StatusCode::NOT_FOUND) => - { - Ok(None) - } - Err(e) => Err(e), - } - .with_kind(crate::ErrorKind::Registry)? - } else { - None - }; - - let icon_path = if let Some(manifest) = &manifest { - let dir = ctx - .datadir - .join(PKG_PUBLIC_DIR) - .join(&manifest.id) - .join(manifest.version.as_str()); - let icon_path = dir.join(format!("icon.{}", manifest.assets.icon_type())); - if tokio::fs::metadata(&icon_path).await.is_err() { - if let Some(marketplace_url) = &marketplace_url { - tokio::fs::create_dir_all(&dir).await?; - let icon = ctx - .client - .get(with_query_params( - ctx.clone(), - format!( - "{}/package/v0/icon/{}?spec={}", - marketplace_url, dep, info.version, - ) - .parse()?, - )) - .send() - .await - .with_kind(crate::ErrorKind::Registry)?; - let mut dst = File::create(&icon_path).await?; - tokio::io::copy(&mut response_to_reader(icon), &mut dst).await?; - dst.sync_all().await?; - Some(icon_path) - } else { - None - } - } else { - Some(icon_path) - } +impl FromArgMatches for CliInstallParams { + fn from_arg_matches(matches: &clap::ArgMatches) -> Result { + if let Some(sideload) = matches.get_one::("sideload") { + Ok(Self::Sideload(sideload.clone())) } else { - None - }; - - dependency_info.insert( - dep.clone(), - StaticDependencyInfo { - title: manifest - .as_ref() - .map(|x| x.title.clone()) - .unwrap_or_else(|| dep.to_string()), - icon: if let Some(icon_path) = &icon_path { - DataUrl::from_path(icon_path).await? - } else { - DataUrl::from_slice("image/png", include_bytes!("./package-icon.png")) - }, - }, - ); - } - tracing::info!("Install {}@{}: Fetched Dependency Info", pkg_id, version); - - let icon = progress - .track_read_during(ctx.db.clone(), pkg_id, || { - unpack_s9pk(&ctx.datadir, &manifest, rdr) - }) - .await?; - - progress.unpack_complete.store(true, Ordering::SeqCst); - - progress - .track_read( - ctx.db.clone(), - pkg_id.clone(), - Arc::new(::std::sync::atomic::AtomicBool::new(true)), - ) - .await?; - - let peek = ctx.db.peek().await; - let prev = peek - .as_package_data() - .as_idx(pkg_id) - .or_not_found(pkg_id)? 
- .de()?; - let mut sql_tx = ctx.secret_store.begin().await?; - - tracing::info!("Install {}@{}: Creating volumes", pkg_id, version); - manifest.volumes.install(&ctx, pkg_id, version).await?; - tracing::info!("Install {}@{}: Created volumes", pkg_id, version); - - tracing::info!("Install {}@{}: Installing interfaces", pkg_id, version); - let interface_addresses = manifest.interfaces.install(sql_tx.as_mut(), pkg_id).await?; - tracing::info!( - "Install {}@{}: Installed interfaces {:?}", - pkg_id, - version, - interface_addresses - ); - - tracing::info!("Install {}@{}: Creating manager", pkg_id, version); - let manager = ctx.managers.add(ctx.clone(), manifest.clone()).await?; - tracing::info!("Install {}@{}: Created manager", pkg_id, version); - - let static_files = StaticFiles::local(pkg_id, version, manifest.assets.icon_type()); - let current_dependencies: CurrentDependencies = CurrentDependencies( - manifest - .dependencies - .0 - .iter() - .filter_map(|(id, info)| { - if info.requirement.required() { - Some((id.clone(), CurrentDependencyInfo::default())) - } else { - None - } - }) - .collect(), - ); - let mut dependents_static_dependency_info = BTreeMap::new(); - let current_dependents = { - let mut deps = BTreeMap::new(); - for package in db.as_package_data().keys()? { - if db - .as_package_data() - .as_idx(&package) - .or_not_found(&package)? - .as_installed() - .and_then(|i| i.as_dependency_info().as_idx(&pkg_id)) - .is_some() - { - dependents_static_dependency_info.insert(package.clone(), icon.clone()); - } - if let Some(dep) = db - .as_package_data() - .as_idx(&package) - .or_not_found(&package)? - .as_installed() - .and_then(|i| i.as_current_dependencies().as_idx(pkg_id)) - { - deps.insert(package, dep.de()?); - } + Ok(Self::Marketplace(InstallParams::from_arg_matches(matches)?)) } + } + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { + *self = Self::from_arg_matches(matches)?; + Ok(()) + } +} - CurrentDependents(deps) - }; - - let installed = InstalledPackageInfo { - status: Status { - configured: manifest.config.is_none(), - main: MainStatus::Stopped, - dependency_config_errors: compute_dependency_config_errs( - &ctx, - &peek, - &manifest, - ¤t_dependencies, - &Default::default(), - ) - .await?, - }, - marketplace_url, - developer_key, - manifest: manifest.clone(), - last_backup: match prev { - PackageDataEntry::Updating(PackageDataEntryUpdating { - installed: - InstalledPackageInfo { - last_backup: Some(time), - .. +#[instrument(skip_all)] +pub async fn cli_install(ctx: CliContext, params: CliInstallParams) -> Result<(), RpcError> { + match params { + CliInstallParams::Sideload(path) => { + let file = crate::s9pk::load(&ctx, path).await?; + + // rpc call remote sideload + let SideloadResponse { upload, progress } = from_value::( + ctx.call_remote("package.sideload", imbl_value::json!({})) + .await?, + )?; + + let upload = async { + let content_length = file.metadata().await?.len(); + ctx.rest_continuation( + upload, + reqwest::Body::wrap_stream(tokio_util::io::ReaderStream::new(file)), + { + let mut map = HeaderMap::new(); + map.insert(CONTENT_LENGTH, content_length.into()); + map }, - .. 
- }) => Some(time), - _ => None, - }, - dependency_info, - current_dependents: current_dependents.clone(), - current_dependencies: current_dependencies.clone(), - interface_addresses, - }; - let mut next = PackageDataEntryInstalled { - installed, - manifest: manifest.clone(), - static_files, - }; - - let mut auto_start = false; - let mut configured = false; - - let mut to_cleanup = None; + ) + .await? + .error_for_status() + .with_kind(ErrorKind::Network)?; + Ok::<_, Error>(()) + }; - if let PackageDataEntry::Updating(PackageDataEntryUpdating { - installed: prev, .. - }) = &prev - { - let prev_is_configured = prev.status.configured; - let prev_migration = prev - .manifest - .migrations - .to( - &ctx, - version, - pkg_id, - &prev.manifest.version, - &prev.manifest.volumes, - ) - .map(futures::future::Either::Left); - let migration = manifest - .migrations - .from( - &manifest.containers, - &ctx, - &prev.manifest.version, - pkg_id, - version, - &manifest.volumes, - ) - .map(futures::future::Either::Right); + let progress = async { + use tokio_tungstenite::tungstenite::Message; - let viable_migration = if prev.manifest.version > manifest.version { - prev_migration.or(migration) - } else { - migration.or(prev_migration) - }; + let mut bar = PhasedProgressBar::new("Sideloading"); - if let Some(f) = viable_migration { - configured = f.await?.configured && prev_is_configured; - } - if configured || manifest.config.is_none() { - auto_start = prev.status.main.running(); - } - if &prev.manifest.version != version { - to_cleanup = Some((prev.manifest.id.clone(), prev.manifest.version.clone())); - } - } else if let PackageDataEntry::Restoring(PackageDataEntryRestoring { .. }) = prev { - next.installed.marketplace_url = manifest - .backup - .restore(&ctx, pkg_id, version, &manifest.volumes) - .await?; - } + let mut ws = ctx.ws_continuation(progress).await?; - sql_tx.commit().await?; + let mut progress = FullProgress::new(); - let to_configure = ctx - .db - .mutate(|db| { - for (package, icon) in dependents_static_dependency_info { - db.as_package_data_mut() - .as_idx_mut(&package) - .or_not_found(&package)? - .as_installed_mut() - .or_not_found(&package)? - .as_dependency_info_mut() - .insert( - &pkg_id, - &StaticDependencyInfo { - icon, - title: manifest.title.clone(), + loop { + tokio::select! { + msg = ws.next() => { + if let Some(msg) = msg { + if let Message::Text(t) = msg.with_kind(ErrorKind::Network)? { + progress = + serde_json::from_str::>(&t) + .with_kind(ErrorKind::Deserialization)??; + bar.update(&progress); + } + } else { + break; + } + } + _ = tokio::time::sleep(Duration::from_millis(100)) => { + bar.update(&progress); }, - )?; - } - db.as_package_data_mut() - .insert(&pkg_id, &PackageDataEntry::Installed(next))?; - if let PackageDataEntry::Updating(PackageDataEntryUpdating { - installed: prev, .. 
- }) = &prev - { - remove_from_current_dependents_lists(db, pkg_id, &prev.current_dependencies)?; - } - add_dependent_to_current_dependents_lists(db, pkg_id, ¤t_dependencies)?; - - set_dependents_with_live_pointers_to_needs_config(db, pkg_id) - }) - .await?; - - if let Some((id, version)) = to_cleanup { - cleanup(&ctx, &id, &version).await?; - } - - if configured && manifest.config.is_some() { - let breakages = BTreeMap::new(); - let overrides = Default::default(); + } + } - let configure_context = ConfigureContext { - breakages, - timeout: None, - config: None, - dry_run: false, - overrides, - }; - manager.configure(configure_context).await?; - } + Ok::<_, Error>(()) + }; - for to_configure in to_configure.into_iter().filter(|(dep, _)| dep != pkg_id) { - if let Err(e) = async { - ctx.managers - .get(&to_configure) - .await - .or_not_found(format!("manager for {}", to_configure.0))? - .configure(ConfigureContext { - breakages: BTreeMap::new(), - timeout: None, - config: None, - overrides: BTreeMap::new(), - dry_run: false, - }) - .await + let (upload, progress) = tokio::join!(upload, progress); + progress?; + upload?; } - .await - { - tracing::error!("error configuring dependent: {e}"); - tracing::debug!("{e:?}") + CliInstallParams::Marketplace(params) => { + ctx.call_remote("package.install", to_value(¶ms)?) + .await?; } } - - if auto_start { - manager.start().await; - } - - tracing::info!("Install {}@{}: Complete", pkg_id, version); - Ok(()) } -#[instrument(skip_all)] -pub async fn unpack_s9pk( - datadir: impl AsRef, - manifest: &Manifest, - rdr: &mut S9pkReader, -) -> Result, Error> { - let datadir = datadir.as_ref(); - let pkg_id = &manifest.id; - let version = &manifest.version; - - let public_dir_path = datadir - .join(PKG_PUBLIC_DIR) - .join(pkg_id) - .join(version.as_str()); - tokio::fs::create_dir_all(&public_dir_path).await?; - - tracing::info!("Install {}@{}: Unpacking LICENSE.md", pkg_id, version); - let license_path = public_dir_path.join("LICENSE.md"); - let mut dst = File::create(&license_path).await?; - tokio::io::copy(&mut rdr.license().await?, &mut dst).await?; - dst.sync_all().await?; - tracing::info!("Install {}@{}: Unpacked LICENSE.md", pkg_id, version); - - tracing::info!("Install {}@{}: Unpacking INSTRUCTIONS.md", pkg_id, version); - let instructions_path = public_dir_path.join("INSTRUCTIONS.md"); - let mut dst = File::create(&instructions_path).await?; - tokio::io::copy(&mut rdr.instructions().await?, &mut dst).await?; - dst.sync_all().await?; - tracing::info!("Install {}@{}: Unpacked INSTRUCTIONS.md", pkg_id, version); - - let icon_filename = Path::new("icon").with_extension(manifest.assets.icon_type()); - let icon_path = public_dir_path.join(&icon_filename); - tracing::info!( - "Install {}@{}: Unpacking {}", - pkg_id, - version, - icon_path.display() - ); - let icon_buf = rdr.icon().await?.to_vec().await?; - let mut dst = File::create(&icon_path).await?; - dst.write_all(&icon_buf).await?; - dst.sync_all().await?; - let icon = DataUrl::from_vec( - mime(manifest.assets.icon_type()).unwrap_or("image/png"), - icon_buf, - ); - tracing::info!( - "Install {}@{}: Unpacked {}", - pkg_id, - version, - icon_filename.display() - ); - - tracing::info!("Install {}@{}: Unpacking Docker Images", pkg_id, version); - Command::new(CONTAINER_TOOL) - .arg("load") - .input(Some(&mut rdr.docker_images().await?)) - .invoke(ErrorKind::Docker) - .await?; - tracing::info!("Install {}@{}: Unpacked Docker Images", pkg_id, version,); - - tracing::info!("Install {}@{}: Unpacking Assets", 
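The progress side of `cli_install` above follows a common shape: read JSON progress frames from a websocket, but also redraw on a timer so the display stays live between messages. A stand-alone sketch of that loop, assuming tokio-tungstenite and a placeholder `Progress` type and `render` function in place of `FullProgress`/`PhasedProgressBar`:

use futures::StreamExt;
use serde::Deserialize;
use std::time::Duration;
use tokio_tungstenite::tungstenite::Message;

#[derive(Deserialize, Default, Debug)]
struct Progress {
    done: u64,
    total: Option<u64>,
}

async fn follow_progress(url: &str) -> Result<(), Box<dyn std::error::Error>> {
    let (mut ws, _) = tokio_tungstenite::connect_async(url).await?;
    let mut progress = Progress::default();
    loop {
        tokio::select! {
            msg = ws.next() => match msg {
                Some(Ok(Message::Text(t))) => {
                    progress = serde_json::from_str(&t)?;
                    render(&progress);
                }
                Some(Ok(_)) => {}            // ignore ping/binary frames
                Some(Err(e)) => return Err(e.into()),
                None => break,               // server closed: done
            },
            // Periodic redraw so the bar ticks even with no new messages.
            _ = tokio::time::sleep(Duration::from_millis(100)) => render(&progress),
        }
    }
    Ok(())
}

fn render(p: &Progress) {
    println!("{} / {}", p.done, p.total.map_or("?".into(), |t| t.to_string()));
}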
pkg_id, version); - let asset_dir = asset_dir(datadir, pkg_id, version); - if tokio::fs::metadata(&asset_dir).await.is_ok() { - tokio::fs::remove_dir_all(&asset_dir).await?; - } - tokio::fs::create_dir_all(&asset_dir).await?; - let mut tar = tokio_tar::Archive::new(rdr.assets().await?); - tar.unpack(asset_dir).await?; +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct UninstallParams { + id: PackageId, +} - let script_dir = script_dir(datadir, pkg_id, version); - if tokio::fs::metadata(&script_dir).await.is_err() { - tokio::fs::create_dir_all(&script_dir).await?; - } - if let Some(mut hdl) = rdr.scripts().await? { - tokio::io::copy( - &mut hdl, - &mut File::create(script_dir.join("embassy.js")).await?, - ) +pub async fn uninstall( + ctx: RpcContext, + UninstallParams { id }: UninstallParams, +) -> Result { + ctx.db + .mutate(|db| { + let entry = db + .as_public_mut() + .as_package_data_mut() + .as_idx_mut(&id) + .or_not_found(&id)?; + entry.as_state_info_mut().map_mutate(|s| match s { + PackageState::Installed(s) => Ok(PackageState::Removing(s)), + _ => Err(Error::new( + eyre!("Package {id} is not installed."), + crate::ErrorKind::NotFound, + )), + }) + }) .await?; - } - tracing::info!("Install {}@{}: Unpacked Assets", pkg_id, version); - Ok(icon) -} + let return_id = id.clone(); -#[instrument(skip_all)] -pub fn rebuild_from<'a>( - source: impl AsRef + 'a + Send + Sync, - datadir: impl AsRef + 'a + Send + Sync, -) -> BoxFuture<'a, Result<(), Error>> { - async move { - let source_dir = source.as_ref(); - let datadir = datadir.as_ref(); - if tokio::fs::metadata(&source_dir).await.is_ok() { - ReadDirStream::new(tokio::fs::read_dir(&source_dir).await?) - .map(|r| { - r.with_ctx(|_| (crate::ErrorKind::Filesystem, format!("{:?}", &source_dir))) - }) - .try_for_each(|entry| async move { - let m = entry.metadata().await?; - if m.is_file() { - let path = entry.path(); - let ext = path.extension().and_then(|ext| ext.to_str()); - if ext == Some("tar") || ext == Some("s9pk") { - if let Err(e) = async { - match ext { - Some("tar") => { - Command::new(CONTAINER_TOOL) - .arg("load") - .input(Some(&mut File::open(&path).await?)) - .invoke(ErrorKind::Docker) - .await?; - Ok::<_, Error>(()) - } - Some("s9pk") => { - let mut s9pk = S9pkReader::open(&path, true).await?; - unpack_s9pk(datadir, &s9pk.manifest().await?, &mut s9pk) - .await?; - Ok(()) - } - _ => unreachable!(), - } - } - .await - { - tracing::error!("Error unpacking {path:?}: {e}"); - tracing::debug!("{e:?}"); - } - Ok(()) - } else { - Ok(()) - } - } else if m.is_dir() { - rebuild_from(entry.path(), datadir).await?; - Ok(()) - } else { - Ok(()) - } - }) - .await - } else { - Ok(()) - } - } - .boxed() -} + tokio::spawn(async move { ctx.services.uninstall(&ctx, &id).await }); -fn ssl_port_status(manifests: &Vec) -> BTreeMap { - let mut ret = BTreeMap::new(); - for m in manifests { - for (_id, iface) in &m.interfaces.0 { - match &iface.lan_config { - None => {} - Some(cfg) => { - for (p, lan) in cfg { - ret.insert(p.clone(), (lan.ssl, m.id.clone())); - } - } - } - } - } - ret + Ok(return_id) } diff --git a/core/startos/src/install/progress.rs b/core/startos/src/install/progress.rs deleted file mode 100644 index 61e58e0e63..0000000000 --- a/core/startos/src/install/progress.rs +++ /dev/null @@ -1,228 +0,0 @@ -use std::future::Future; -use std::io::SeekFrom; -use std::pin::Pin; -use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; -use std::sync::Arc; -use 
std::task::{Context, Poll}; -use std::time::Duration; - -use models::{OptionExt, PackageId}; -use serde::{Deserialize, Serialize}; -use tokio::io::{AsyncRead, AsyncSeek, AsyncWrite}; - -use crate::db::model::Database; -use crate::prelude::*; - -#[derive(Debug, Deserialize, Serialize, HasModel, Default)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct InstallProgress { - pub size: Option, - pub downloaded: AtomicU64, - pub download_complete: AtomicBool, - pub validated: AtomicU64, - pub validation_complete: AtomicBool, - pub unpacked: AtomicU64, - pub unpack_complete: AtomicBool, -} -impl InstallProgress { - pub fn new(size: Option) -> Self { - InstallProgress { - size, - downloaded: AtomicU64::new(0), - download_complete: AtomicBool::new(false), - validated: AtomicU64::new(0), - validation_complete: AtomicBool::new(false), - unpacked: AtomicU64::new(0), - unpack_complete: AtomicBool::new(false), - } - } - pub fn download_complete(&self) { - self.download_complete.store(true, Ordering::SeqCst) - } - pub async fn track_download(self: Arc, db: PatchDb, id: PackageId) -> Result<(), Error> { - let update = |d: &mut Model| { - d.as_package_data_mut() - .as_idx_mut(&id) - .or_not_found(&id)? - .as_install_progress_mut() - .or_not_found("install-progress")? - .ser(&self) - }; - while !self.download_complete.load(Ordering::SeqCst) { - db.mutate(&update).await?; - tokio::time::sleep(Duration::from_millis(300)).await; - } - db.mutate(&update).await - } - pub async fn track_download_during< - F: FnOnce() -> Fut, - Fut: Future>, - T, - >( - self: &Arc, - db: PatchDb, - id: &PackageId, - f: F, - ) -> Result { - let tracker = tokio::spawn(self.clone().track_download(db.clone(), id.clone())); - let res = f().await; - self.download_complete.store(true, Ordering::SeqCst); - tracker.await.unwrap()?; - res - } - pub async fn track_read( - self: Arc, - db: PatchDb, - id: PackageId, - complete: Arc, - ) -> Result<(), Error> { - let update = |d: &mut Model| { - d.as_package_data_mut() - .as_idx_mut(&id) - .or_not_found(&id)? - .as_install_progress_mut() - .or_not_found("install-progress")? 
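`track_download` in the deleted progress module is a simple publisher loop: while a completion flag is unset, write the current counter somewhere visible every 300ms, then write once more so the final value lands. The generic form of that loop, with `publish` standing in for the patch-db write:

use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::Arc;
use std::time::Duration;

async fn publish_until_done(
    counter: Arc<AtomicU64>,
    done: Arc<AtomicBool>,
    mut publish: impl FnMut(u64),
) {
    while !done.load(Ordering::SeqCst) {
        publish(counter.load(Ordering::SeqCst));
        tokio::time::sleep(Duration::from_millis(300)).await;
    }
    // One final publish so the last increment is never lost.
    publish(counter.load(Ordering::SeqCst));
}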
- .ser(&self) - }; - while !complete.load(Ordering::SeqCst) { - db.mutate(&update).await?; - tokio::time::sleep(Duration::from_millis(300)).await; - } - db.mutate(&update).await - } - pub async fn track_read_during< - F: FnOnce() -> Fut, - Fut: Future>, - T, - >( - self: &Arc, - db: PatchDb, - id: &PackageId, - f: F, - ) -> Result { - let complete = Arc::new(AtomicBool::new(false)); - let tracker = tokio::spawn(self.clone().track_read( - db.clone(), - id.clone(), - complete.clone(), - )); - let res = f().await; - complete.store(true, Ordering::SeqCst); - tracker.await.unwrap()?; - res - } -} - -#[pin_project::pin_project] -#[derive(Debug)] -pub struct InstallProgressTracker { - #[pin] - inner: RW, - validating: bool, - progress: Arc, -} -impl InstallProgressTracker { - pub fn new(inner: RW, progress: Arc) -> Self { - InstallProgressTracker { - inner, - validating: true, - progress, - } - } - pub fn validated(&mut self) { - self.progress - .validation_complete - .store(true, Ordering::SeqCst); - self.validating = false; - } -} -impl AsyncWrite for InstallProgressTracker { - fn poll_write( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - buf: &[u8], - ) -> Poll> { - let this = self.project(); - match this.inner.poll_write(cx, buf) { - Poll::Ready(Ok(n)) => { - this.progress - .downloaded - .fetch_add(n as u64, Ordering::SeqCst); - Poll::Ready(Ok(n)) - } - a => a, - } - } - fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - let this = self.project(); - this.inner.poll_flush(cx) - } - fn poll_shutdown( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - ) -> Poll> { - let this = self.project(); - this.inner.poll_shutdown(cx) - } - fn poll_write_vectored( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - bufs: &[std::io::IoSlice<'_>], - ) -> Poll> { - let this = self.project(); - match this.inner.poll_write_vectored(cx, bufs) { - Poll::Ready(Ok(n)) => { - this.progress - .downloaded - .fetch_add(n as u64, Ordering::SeqCst); - Poll::Ready(Ok(n)) - } - a => a, - } - } -} -impl AsyncRead for InstallProgressTracker { - fn poll_read( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - buf: &mut tokio::io::ReadBuf<'_>, - ) -> Poll> { - let this = self.project(); - let prev = buf.filled().len() as u64; - match this.inner.poll_read(cx, buf) { - Poll::Ready(Ok(())) => { - if *this.validating { - &this.progress.validated - } else { - &this.progress.unpacked - } - .fetch_add(buf.filled().len() as u64 - prev, Ordering::SeqCst); - - Poll::Ready(Ok(())) - } - a => a, - } - } -} -impl AsyncSeek for InstallProgressTracker { - fn start_seek(self: Pin<&mut Self>, position: SeekFrom) -> std::io::Result<()> { - let this = self.project(); - this.inner.start_seek(position) - } - fn poll_complete(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - let this = self.project(); - match this.inner.poll_complete(cx) { - Poll::Ready(Ok(n)) => { - if *this.validating { - &this.progress.validated - } else { - &this.progress.unpacked - } - .store(n, Ordering::SeqCst); - Poll::Ready(Ok(n)) - } - a => a, - } - } -} diff --git a/core/startos/src/install/update.rs b/core/startos/src/install/update.rs index 6940512137..a0374fc801 100644 --- a/core/startos/src/install/update.rs +++ b/core/startos/src/install/update.rs @@ -1,5 +1,6 @@ use std::collections::BTreeMap; +use models::PackageId; use rpc_toolkit::command; use tracing::instrument; @@ -7,7 +8,6 @@ use crate::config::not_found; use crate::context::RpcContext; use crate::db::model::CurrentDependents; use crate::prelude::*; -use 
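For reference, the essence of the deleted `InstallProgressTracker` is a reader that counts bytes as they pass through, so a separate task can publish the counter on a timer. A minimal byte-counting `AsyncRead` wrapper in the same style (pin-project plus an atomic counter):

use std::pin::Pin;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, ReadBuf};

#[pin_project::pin_project]
pub struct CountingReader<R> {
    #[pin]
    inner: R,
    read: Arc<AtomicU64>,
}

impl<R> CountingReader<R> {
    pub fn new(inner: R, read: Arc<AtomicU64>) -> Self {
        Self { inner, read }
    }
}

impl<R: AsyncRead> AsyncRead for CountingReader<R> {
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        let this = self.project();
        let before = buf.filled().len();
        match this.inner.poll_read(cx, buf) {
            Poll::Ready(Ok(())) => {
                // Record only the bytes added by this call.
                let n = (buf.filled().len() - before) as u64;
                this.read.fetch_add(n, Ordering::Relaxed);
                Poll::Ready(Ok(()))
            }
            other => other,
        }
    }
}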
crate::s9pk::manifest::PackageId; use crate::util::serde::display_serializable; use crate::util::Version; use crate::Error; diff --git a/core/startos/src/lib.rs b/core/startos/src/lib.rs index 5fde6513fd..0b33367837 100644 --- a/core/startos/src/lib.rs +++ b/core/startos/src/lib.rs @@ -38,20 +38,20 @@ pub mod error; pub mod firmware; pub mod hostname; pub mod init; -pub mod inspect; +pub mod progress; +// pub mod inspect; pub mod install; pub mod logs; -pub mod manager; +pub mod lxc; pub mod middleware; -pub mod migration; pub mod net; pub mod notifications; pub mod os_install; pub mod prelude; -pub mod procedure; pub mod properties; pub mod registry; pub mod s9pk; +pub mod service; pub mod setup; pub mod shutdown; pub mod sound; @@ -59,100 +59,217 @@ pub mod ssh; pub mod status; pub mod system; pub mod update; +pub mod upload; pub mod util; pub mod version; pub mod volume; use std::time::SystemTime; +use clap::Parser; pub use config::Config; pub use error::{Error, ErrorKind, ResultExt}; -use rpc_toolkit::command; +use imbl_value::Value; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{ + command, from_fn, from_fn_async, from_fn_blocking, AnyContext, HandlerExt, ParentHandler, +}; +use serde::{Deserialize, Serialize}; -#[command(metadata(authenticated = false))] -pub fn echo(#[arg] message: String) -> Result { - Ok(message) +use crate::context::CliContext; +use crate::util::serde::HandlerExtSerde; + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct EchoParams { + message: String, } -#[command(subcommands( - version::git_info, - echo, - inspect::inspect, - server, - package, - net::net, - auth::auth, - db::db, - ssh::ssh, - net::wifi::wifi, - disk::disk, - notifications::notification, - backup::backup, - registry::marketplace::marketplace, -))] -pub fn main_api() -> Result<(), RpcError> { - Ok(()) +pub fn echo(_: AnyContext, EchoParams { message }: EchoParams) -> Result { + Ok(message) } -#[command(subcommands( - system::time, - system::experimental, - system::logs, - system::kernel_logs, - system::metrics, - shutdown::shutdown, - shutdown::restart, - shutdown::rebuild, - update::update_system, - firmware::update_firmware, -))] -pub fn server() -> Result<(), RpcError> { - Ok(()) +pub fn main_api() -> ParentHandler { + ParentHandler::new() + .subcommand("git-info", from_fn(version::git_info)) + .subcommand( + "echo", + from_fn(echo) + .with_metadata("authenticated", Value::Bool(false)) + .with_remote_cli::(), + ) + .subcommand("init", from_fn_blocking(developer::init).no_display()) + .subcommand("server", server()) + .subcommand("package", package()) + .subcommand("net", net::net()) + .subcommand("auth", auth::auth()) + .subcommand("db", db::db()) + .subcommand("ssh", ssh::ssh()) + .subcommand("wifi", net::wifi::wifi()) + .subcommand("disk", disk::disk()) + .subcommand("notification", notifications::notification()) + .subcommand("backup", backup::backup()) + .subcommand("marketplace", registry::marketplace::marketplace()) + .subcommand("lxc", lxc::lxc()) + .subcommand("s9pk", s9pk::rpc::s9pk()) } -#[command(subcommands( - action::action, - install::install, - install::sideload, - install::uninstall, - install::list, - config::config, - control::start, - control::stop, - control::restart, - logs::logs, - properties::properties, - dependencies::dependency, - backup::package_backup, -))] -pub fn package() -> Result<(), RpcError> { - Ok(()) +pub fn server() -> ParentHandler { + ParentHandler::new() + .subcommand( 
+ "time", + from_fn_async(system::time) + .with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(system::display_time(handle.params, result)) + }) + .with_remote_cli::(), + ) + .subcommand("experimental", system::experimental()) + .subcommand("logs", system::logs()) + .subcommand("kernel-logs", system::kernel_logs()) + .subcommand( + "metrics", + from_fn_async(system::metrics) + .with_display_serializable() + .with_remote_cli::(), + ) + .subcommand( + "shutdown", + from_fn_async(shutdown::shutdown) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "restart", + from_fn_async(shutdown::restart) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "rebuild", + from_fn_async(shutdown::rebuild) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "update", + from_fn_async(update::update_system) + .with_metadata("sync_db", Value::Bool(true)) + .with_custom_display_fn::(|handle, result| { + Ok(update::display_update_result(handle.params, result)) + }) + .with_remote_cli::(), + ) + .subcommand( + "update-firmware", + from_fn_async(firmware::update_firmware) + .with_custom_display_fn::(|_handle, result| { + Ok(firmware::display_firmware_update_result(result)) + }) + .with_remote_cli::(), + ) } -#[command(subcommands( - version::git_info, - s9pk::pack, - developer::verify, - developer::init, - inspect::inspect, - registry::admin::publish, -))] -pub fn portable_api() -> Result<(), RpcError> { - Ok(()) +pub fn package() -> ParentHandler { + ParentHandler::new() + .subcommand( + "action", + from_fn_async(action::action) + .with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(action::display_action_result(handle.params, result)) + }) + .with_remote_cli::(), + ) + .subcommand( + "install", + from_fn_async(install::install) + .with_metadata("sync_db", Value::Bool(true)) + .no_cli(), + ) + .subcommand("sideload", from_fn_async(install::sideload).no_cli()) + .subcommand("install", from_fn_async(install::cli_install).no_display()) + .subcommand( + "uninstall", + from_fn_async(install::uninstall) + .with_metadata("sync_db", Value::Bool(true)) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "list", + from_fn_async(install::list) + .with_display_serializable() + .with_remote_cli::(), + ) + .subcommand("config", config::config()) + .subcommand( + "start", + from_fn_async(control::start) + .with_metadata("sync_db", Value::Bool(true)) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "stop", + from_fn_async(control::stop) + .with_metadata("sync_db", Value::Bool(true)) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "restart", + from_fn_async(control::restart) + .with_metadata("sync_db", Value::Bool(true)) + .no_display() + .with_remote_cli::(), + ) + .subcommand("logs", logs::logs()) + .subcommand( + "properties", + from_fn_async(properties::properties) + .with_custom_display_fn::(|_handle, result| { + Ok(properties::display_properties(result)) + }) + .with_remote_cli::(), + ) + .subcommand("dependency", dependencies::dependency()) + .subcommand("backup", backup::package_backup()) + .subcommand("connect", from_fn_async(service::connect_rpc).no_cli()) + .subcommand( + "connect", + from_fn_async(service::connect_rpc_cli).no_display(), + ) } -#[command(subcommands(version::git_info, echo, diagnostic::diagnostic))] -pub fn diagnostic_api() -> Result<(), RpcError> { - Ok(()) +pub fn diagnostic_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "git-info", + 
from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), + ) + .subcommand("echo", from_fn(echo).with_remote_cli::()) + .subcommand("diagnostic", diagnostic::diagnostic()) } -#[command(subcommands(version::git_info, echo, setup::setup))] -pub fn setup_api() -> Result<(), RpcError> { - Ok(()) +pub fn setup_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "git-info", + from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), + ) + .subcommand("echo", from_fn(echo).with_remote_cli::()) + .subcommand("setup", setup::setup()) } -#[command(subcommands(version::git_info, echo, os_install::install))] -pub fn install_api() -> Result<(), RpcError> { - Ok(()) +pub fn install_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "git-info", + from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), + ) + .subcommand("echo", from_fn(echo).with_remote_cli::()) + .subcommand("install", os_install::install()) } diff --git a/core/startos/src/logs.rs b/core/startos/src/logs.rs index 691ae09b90..1cd84c331b 100644 --- a/core/startos/src/logs.rs +++ b/core/startos/src/logs.rs @@ -1,36 +1,28 @@ -use std::future::Future; -use std::marker::PhantomData; use std::ops::{Deref, DerefMut}; use std::process::Stdio; use std::time::{Duration, UNIX_EPOCH}; +use axum::extract::ws::{self, WebSocket}; use chrono::{DateTime, Utc}; +use clap::Parser; use color_eyre::eyre::eyre; use futures::stream::BoxStream; -use futures::{FutureExt, SinkExt, Stream, StreamExt, TryStreamExt}; -use hyper::upgrade::Upgraded; -use hyper::Error as HyperError; -use rpc_toolkit::command; +use futures::{FutureExt, Stream, StreamExt, TryStreamExt}; +use models::PackageId; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{command, from_fn_async, CallRemote, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tokio::io::{AsyncBufReadExt, BufReader}; use tokio::process::{Child, Command}; -use tokio::task::JoinError; use tokio_stream::wrappers::LinesStream; -use tokio_tungstenite::tungstenite::protocol::frame::coding::CloseCode; -use tokio_tungstenite::tungstenite::protocol::CloseFrame; use tokio_tungstenite::tungstenite::Message; -use tokio_tungstenite::WebSocketStream; use tracing::instrument; use crate::context::{CliContext, RpcContext}; use crate::core::rpc_continuations::{RequestGuid, RpcContinuation}; use crate::error::ResultExt; -use crate::procedure::docker::DockerProcedure; -use crate::s9pk::manifest::PackageId; -use crate::util::display_none; +use crate::prelude::*; use crate::util::serde::Reversible; -use crate::{Error, ErrorKind}; #[pin_project::pin_project] pub struct LogStream { @@ -65,21 +57,14 @@ impl Stream for LogStream { } #[instrument(skip_all)] -async fn ws_handler< - WSFut: Future, HyperError>, JoinError>>, ->( +async fn ws_handler( first_entry: Option, mut logs: LogStream, - ws_fut: WSFut, + mut stream: WebSocket, ) -> Result<(), Error> { - let mut stream = ws_fut - .await - .with_kind(crate::ErrorKind::Network)? 
- .with_kind(crate::ErrorKind::Unknown)?; - if let Some(first_entry) = first_entry { stream - .send(Message::Text( + .send(ws::Message::Text( serde_json::to_string(&first_entry).with_kind(ErrorKind::Serialization)?, )) .await @@ -94,7 +79,7 @@ async fn ws_handler< if let Some(entry) = entry { let (_, log_entry) = entry.log_entry()?; stream - .send(Message::Text( + .send(ws::Message::Text( serde_json::to_string(&log_entry).with_kind(ErrorKind::Serialization)?, )) .await @@ -104,26 +89,27 @@ async fn ws_handler< if !ws_closed { stream - .close(Some(CloseFrame { - code: CloseCode::Normal, + .send(ws::Message::Close(Some(ws::CloseFrame { + code: ws::close_code::NORMAL, reason: "Log Stream Finished".into(), - })) + }))) .await .with_kind(ErrorKind::Network)?; + drop(stream); } Ok(()) } #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct LogResponse { entries: Reversible, start_cursor: Option, end_cursor: Option, } #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct LogFollowResponse { start_cursor: Option, guid: RequestGuid, @@ -224,23 +210,52 @@ pub enum LogSource { pub const SYSTEM_UNIT: &str = "startd"; -#[command( - custom_cli(cli_logs(async, context(CliContext))), - subcommands(self(logs_nofollow(async)), logs_follow), - display(display_none) -)] -pub async fn logs( - #[arg] id: PackageId, - #[arg(short = 'l', long = "limit")] limit: Option, - #[arg(short = 'c', long = "cursor")] cursor: Option, - #[arg(short = 'B', long = "before", default)] before: bool, - #[arg(short = 'f', long = "follow", default)] follow: bool, -) -> Result<(PackageId, Option, Option, bool, bool), Error> { - Ok((id, limit, cursor, before, follow)) +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct LogsParam { + id: PackageId, + #[arg(short = 'l', long = "limit")] + limit: Option, + #[arg(short = 'c', long = "cursor")] + cursor: Option, + #[arg(short = 'B', long = "before")] + #[serde(default)] + before: bool, + #[arg(short = 'f', long = "follow")] + #[serde(default)] + follow: bool, +} + +pub fn logs() -> ParentHandler { + ParentHandler::::new() + .root_handler( + from_fn_async(cli_logs) + .no_display() + .with_inherited(|params, _| params), + ) + .root_handler( + from_fn_async(logs_follow) + .with_inherited(|params, _| params) + .no_cli(), + ) + .subcommand( + "follow", + from_fn_async(logs_follow) + .with_inherited(|params, _| params) + .no_cli(), + ) } pub async fn cli_logs( ctx: CliContext, - (id, limit, cursor, before, follow): (PackageId, Option, Option, bool, bool), + _: Empty, + LogsParam { + id, + limit, + cursor, + before, + follow, + }: LogsParam, ) -> Result<(), RpcError> { if follow { if cursor.is_some() { @@ -262,14 +277,21 @@ pub async fn cli_logs( } pub async fn logs_nofollow( _ctx: (), - (id, limit, cursor, before, _): (PackageId, Option, Option, bool, bool), + _: Empty, + LogsParam { + id, + limit, + cursor, + before, + .. + }: LogsParam, ) -> Result { fetch_logs(LogSource::Container(id), limit, cursor, before).await } -#[command(rpc_only, rename = "follow", display(display_none))] pub async fn logs_follow( - #[context] ctx: RpcContext, - #[parent_data] (id, limit, _, _, _): (PackageId, Option, Option, bool, bool), + ctx: RpcContext, + _: Empty, + LogsParam { id, limit, .. 
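The reworked `ws_handler` above speaks axum's `ws` types directly: push each log entry as a text frame, then send a normal close frame when the stream ends. Stripped to its core, and assuming an axum version where `Message::Text` takes a `String`:

use axum::extract::ws::{close_code, CloseFrame, Message, WebSocket};

async fn push_lines(mut socket: WebSocket, lines: Vec<String>) -> Result<(), axum::Error> {
    for line in lines {
        socket.send(Message::Text(line)).await?;
    }
    // Tell the client the stream finished cleanly rather than just dropping it.
    socket
        .send(Message::Close(Some(CloseFrame {
            code: close_code::NORMAL,
            reason: "Log Stream Finished".into(),
        })))
        .await
}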
}: LogsParam, ) -> Result { follow_logs(ctx, LogSource::Container(id), limit).await } @@ -282,19 +304,18 @@ pub async fn cli_logs_generic_nofollow( cursor: Option, before: bool, ) -> Result<(), RpcError> { - let res = rpc_toolkit::command_helpers::call_remote( - ctx.clone(), - method, - serde_json::json!({ - "id": id, - "limit": limit, - "cursor": cursor, - "before": before, - }), - PhantomData::, - ) - .await? - .result?; + let res = from_value::( + ctx.call_remote( + method, + imbl_value::json!({ + "id": id, + "limit": limit, + "cursor": cursor, + "before": before, + }), + ) + .await?, + )?; for entry in res.entries.iter() { println!("{}", entry); @@ -309,36 +330,18 @@ pub async fn cli_logs_generic_follow( id: Option, limit: Option, ) -> Result<(), RpcError> { - let res = rpc_toolkit::command_helpers::call_remote( - ctx.clone(), - method, - serde_json::json!({ - "id": id, - "limit": limit, - }), - PhantomData::, - ) - .await? - .result?; - - let mut base_url = ctx.base_url.clone(); - let ws_scheme = match base_url.scheme() { - "https" => "wss", - "http" => "ws", - _ => { - return Err(Error::new( - eyre!("Cannot parse scheme from base URL"), - crate::ErrorKind::ParseUrl, - ) - .into()) - } - }; - base_url - .set_scheme(ws_scheme) - .map_err(|_| Error::new(eyre!("Cannot set URL scheme"), crate::ErrorKind::ParseUrl))?; - let (mut stream, _) = - // base_url is "http://127.0.0.1/", with a trailing slash, so we don't put a leading slash in this path: - tokio_tungstenite::connect_async(format!("{}ws/rpc/{}", base_url, res.guid)).await?; + let res = from_value::( + ctx.call_remote( + method, + imbl_value::json!({ + "id": id, + "limit": limit, + }), + ) + .await?, + )?; + + let mut stream = ctx.ws_continuation(res.guid).await?; while let Some(log) = stream.try_next().await? 
{ if let Message::Text(log) = log { println!("{}", serde_json::from_str::(&log)?); @@ -376,15 +379,9 @@ pub async fn journalctl( } LogSource::Container(id) => { #[cfg(not(feature = "docker"))] - cmd.arg(format!( - "SYSLOG_IDENTIFIER={}", - DockerProcedure::container_name(&id, None) - )); + cmd.arg(format!("SYSLOG_IDENTIFIER={}.embassy", id)); #[cfg(feature = "docker")] - cmd.arg(format!( - "CONTAINER_NAME={}", - DockerProcedure::container_name(&id, None) - )); + cmd.arg(format!("CONTAINER_NAME={}.embassy", id)); } }; @@ -498,7 +495,16 @@ pub async fn follow_logs( ctx.add_continuation( guid.clone(), RpcContinuation::ws( - Box::new(move |ws_fut| ws_handler(first_entry, stream, ws_fut).boxed()), + Box::new(move |socket| { + ws_handler(first_entry, stream, socket) + .map(|x| match x { + Ok(_) => (), + Err(e) => { + tracing::error!("Error in log stream: {}", e); + } + }) + .boxed() + }), Duration::from_secs(30), ), ) diff --git a/core/startos/src/lxc/config.template b/core/startos/src/lxc/config.template new file mode 100644 index 0000000000..a85b700e41 --- /dev/null +++ b/core/startos/src/lxc/config.template @@ -0,0 +1,19 @@ +# Distribution configuration +lxc.include = /usr/share/lxc/config/common.conf +lxc.include = /usr/share/lxc/config/userns.conf +lxc.arch = linux64 + +# Container specific configuration +lxc.apparmor.profile = generated +lxc.apparmor.allow_nesting = 1 +lxc.idmap = u 0 100000 65536 +lxc.idmap = g 0 100000 65536 +lxc.rootfs.path = dir:/var/lib/lxc/{guid}/rootfs +lxc.uts.name = {guid} + +# Network configuration +lxc.net.0.type = veth +lxc.net.0.link = lxcbr0 +lxc.net.0.flags = up + +lxc.rootfs.options = rshared diff --git a/core/startos/src/lxc/mod.rs b/core/startos/src/lxc/mod.rs new file mode 100644 index 0000000000..f778372384 --- /dev/null +++ b/core/startos/src/lxc/mod.rs @@ -0,0 +1,563 @@ +use std::collections::BTreeSet; +use std::net::Ipv4Addr; +use std::ops::Deref; +use std::path::Path; +use std::sync::{Arc, Weak}; +use std::time::Duration; + +use clap::Parser; +use futures::{AsyncWriteExt, FutureExt, StreamExt}; +use imbl_value::{InOMap, InternedString}; +use rpc_toolkit::yajrc::{RpcError, RpcResponse}; +use rpc_toolkit::{ + from_fn_async, AnyContext, CallRemoteHandler, GenericRpcMethod, Handler, HandlerArgs, + HandlerExt, ParentHandler, RpcRequest, +}; +use rustyline_async::{ReadlineEvent, SharedWriter}; +use serde::{Deserialize, Serialize}; +use tokio::fs::File; +use tokio::io::{AsyncBufReadExt, BufReader}; +use tokio::process::Command; +use tokio::sync::Mutex; +use tokio::time::Instant; + +use crate::context::{CliContext, RpcContext}; +use crate::core::rpc_continuations::{RequestGuid, RpcContinuation}; +use crate::disk::mount::filesystem::bind::Bind; +use crate::disk::mount::filesystem::block_dev::BlockDev; +use crate::disk::mount::filesystem::idmapped::IdMapped; +use crate::disk::mount::filesystem::overlayfs::OverlayGuard; +use crate::disk::mount::filesystem::ReadWrite; +use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; +use crate::disk::mount::util::unmount; +use crate::prelude::*; +use crate::util::rpc_client::UnixRpcClient; +use crate::util::{new_guid, Invoke}; + +const LXC_CONTAINER_DIR: &str = "/var/lib/lxc"; +const RPC_DIR: &str = "media/startos/rpc"; // must not be absolute path +pub const CONTAINER_RPC_SERVER_SOCKET: &str = "service.sock"; // must not be absolute path +pub const HOST_RPC_SERVER_SOCKET: &str = "host.sock"; // must not be absolute path +const CONTAINER_DHCP_TIMEOUT: Duration = Duration::from_secs(30); + +pub struct 
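Under the hood the log sources above shell out to journalctl and stream its stdout line by line (`BufReader` + `LinesStream`). A self-contained sketch of that plumbing; selecting by `-u` here is illustrative, whereas the diff matches on `SYSLOG_IDENTIFIER`/`CONTAINER_NAME` fields:

use futures::StreamExt;
use std::process::Stdio;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;
use tokio_stream::wrappers::LinesStream;

async fn tail_unit(unit: &str, follow: bool) -> std::io::Result<()> {
    let mut cmd = Command::new("journalctl");
    cmd.arg("-u").arg(unit).arg("--output=json").kill_on_drop(true);
    if follow {
        cmd.arg("-f");
    }
    let mut child = cmd.stdout(Stdio::piped()).spawn()?;
    let stdout = child.stdout.take().expect("stdout was piped");
    // Each journal entry arrives as one JSON object per line.
    let mut lines = LinesStream::new(BufReader::new(stdout).lines());
    while let Some(line) = lines.next().await {
        println!("{}", line?);
    }
    Ok(())
}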
LxcManager { + containers: Mutex>>, +} +impl LxcManager { + pub fn new() -> Self { + Self { + containers: Default::default(), + } + } + + pub async fn create(self: &Arc, config: LxcConfig) -> Result { + let container = LxcContainer::new(self, config).await?; + let mut guard = self.containers.lock().await; + *guard = std::mem::take(&mut *guard) + .into_iter() + .filter(|g| g.strong_count() > 0) + .chain(std::iter::once(Arc::downgrade(&container.guid))) + .collect(); + Ok(container) + } + + pub async fn gc(&self) -> Result<(), Error> { + let expected = BTreeSet::from_iter( + self.containers + .lock() + .await + .iter() + .filter_map(|g| g.upgrade()) + .map(|g| (&*g).clone()), + ); + for container in String::from_utf8( + Command::new("lxc-ls") + .arg("-1") + .invoke(ErrorKind::Lxc) + .await?, + )? + .lines() + .map(|s| s.trim()) + { + if !expected.contains(container) { + let rootfs_path = Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs"); + if tokio::fs::metadata(&rootfs_path).await.is_ok() { + unmount(Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs")).await?; + if tokio_stream::wrappers::ReadDirStream::new( + tokio::fs::read_dir(&rootfs_path).await?, + ) + .count() + .await + > 0 + { + return Err(Error::new( + eyre!("rootfs is not empty, refusing to delete"), + ErrorKind::InvalidRequest, + )); + } + } + Command::new("lxc-destroy") + .arg("--force") + .arg("--name") + .arg(container) + .invoke(ErrorKind::Lxc) + .await?; + } + } + Ok(()) + } +} + +pub struct LxcContainer { + manager: Weak, + rootfs: OverlayGuard, + guid: Arc, + rpc_bind: TmpMountGuard, + config: LxcConfig, + exited: bool, +} +impl LxcContainer { + async fn new(manager: &Arc, config: LxcConfig) -> Result { + let guid = new_guid(); + let container_dir = Path::new(LXC_CONTAINER_DIR).join(&*guid); + tokio::fs::create_dir_all(&container_dir).await?; + tokio::fs::write( + container_dir.join("config"), + format!(include_str!("./config.template"), guid = &*guid), + ) + .await?; + // TODO: append config + let rootfs_dir = container_dir.join("rootfs"); + tokio::fs::create_dir_all(&rootfs_dir).await?; + Command::new("chown") + .arg("100000:100000") + .arg(&rootfs_dir) + .invoke(ErrorKind::Filesystem) + .await?; + let rootfs = OverlayGuard::mount( + &IdMapped::new( + BlockDev::new("/usr/lib/startos/container-runtime/rootfs.squashfs"), + 0, + 100000, + 65536, + ), + &rootfs_dir, + ) + .await?; + tokio::fs::write(rootfs_dir.join("etc/hostname"), format!("{guid}\n")).await?; + Command::new("sed") + .arg("-i") + .arg(format!("s/LXC_NAME/{guid}/g")) + .arg(rootfs_dir.join("etc/hosts")) + .invoke(ErrorKind::Filesystem) + .await?; + Command::new("mount") + .arg("--make-rshared") + .arg(rootfs.path()) + .invoke(ErrorKind::Filesystem) + .await?; + let rpc_dir = rootfs_dir.join(RPC_DIR); + tokio::fs::create_dir_all(&rpc_dir).await?; + let rpc_bind = TmpMountGuard::mount(&Bind::new(rpc_dir), ReadWrite).await?; + Command::new("chown") + .arg("-R") + .arg("100000:100000") + .arg(rpc_bind.path()) + .invoke(ErrorKind::Filesystem) + .await?; + Command::new("lxc-start") + .arg("-d") + .arg("--name") + .arg(&*guid) + .invoke(ErrorKind::Lxc) + .await?; + Ok(Self { + manager: Arc::downgrade(manager), + rootfs, + guid: Arc::new(guid), + rpc_bind, + config, + exited: false, + }) + } + + pub fn rootfs_dir(&self) -> &Path { + self.rootfs.path() + } + + pub async fn ip(&self) -> Result { + let start = Instant::now(); + loop { + let output = String::from_utf8( + Command::new("lxc-info") + .arg("--name") + .arg(&*self.guid) + .arg("-iH") + 
.invoke(ErrorKind::Docker) + .await?, + )?; + let out_str = output.trim(); + if !out_str.is_empty() { + return Ok(out_str.parse()?); + } + if start.elapsed() > CONTAINER_DHCP_TIMEOUT { + return Err(Error::new( + eyre!("Timed out waiting for container to acquire DHCP lease"), + ErrorKind::Timeout, + )); + } + tokio::time::sleep(Duration::from_millis(100)).await; + } + } + + pub fn rpc_dir(&self) -> &Path { + self.rpc_bind.path() + } + + #[instrument(skip_all)] + pub async fn exit(mut self) -> Result<(), Error> { + self.rpc_bind.take().unmount().await?; + self.rootfs.take().unmount(true).await?; + let rootfs_path = self.rootfs_dir(); + let err_path = rootfs_path.join("var/log/containerRuntime.err"); + if tokio::fs::metadata(&err_path).await.is_ok() { + let mut lines = BufReader::new(File::open(&err_path).await?).lines(); + while let Some(line) = lines.next_line().await? { + let container = &**self.guid; + tracing::error!(container, "{}", line); + } + } + if tokio::fs::metadata(&rootfs_path).await.is_ok() { + if tokio_stream::wrappers::ReadDirStream::new(tokio::fs::read_dir(&rootfs_path).await?) + .count() + .await + > 0 + { + return Err(Error::new( + eyre!("rootfs is not empty, refusing to delete"), + ErrorKind::InvalidRequest, + )); + } + } + Command::new("lxc-destroy") + .arg("--force") + .arg("--name") + .arg(&**self.guid) + .invoke(ErrorKind::Lxc) + .await?; + + self.exited = true; + + Ok(()) + } + + pub async fn connect_rpc(&self, timeout: Option) -> Result { + let started = Instant::now(); + let sock_path = self.rpc_dir().join(CONTAINER_RPC_SERVER_SOCKET); + while tokio::fs::metadata(&sock_path).await.is_err() { + if timeout.map_or(false, |t| started.elapsed() > t) { + return Err(Error::new( + eyre!("timed out waiting for socket"), + ErrorKind::Timeout, + )); + } + tokio::time::sleep(Duration::from_millis(100)).await; + } + Ok(UnixRpcClient::new(sock_path)) + } +} +impl Drop for LxcContainer { + fn drop(&mut self) { + if !self.exited { + tracing::warn!( + "Container {} was ungracefully dropped. Cleaning up dangling containers...", + &**self.guid + ); + let rootfs = self.rootfs.take(); + let guid = std::mem::take(&mut self.guid); + if let Some(manager) = self.manager.upgrade() { + tokio::spawn(async move { + if let Err(e) = async { + let err_path = rootfs.path().join("var/log/containerRuntime.err"); + if tokio::fs::metadata(&err_path).await.is_ok() { + let mut lines = BufReader::new(File::open(&err_path).await?).lines(); + while let Some(line) = lines.next_line().await? 
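Both `ip()` and `connect_rpc()` above poll an external condition (a DHCP lease, a unix socket appearing) until it holds or a deadline passes, sleeping 100ms between attempts. The shared shape, extracted as a hedged generic helper:

use std::time::Duration;
use tokio::time::Instant;

async fn wait_for<T, F, Fut>(timeout: Duration, mut check: F) -> Result<T, &'static str>
where
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = Option<T>>,
{
    let start = Instant::now();
    loop {
        if let Some(v) = check().await {
            return Ok(v);
        }
        if start.elapsed() > timeout {
            return Err("timed out waiting for condition");
        }
        // Back off briefly between attempts, as the container code does.
        tokio::time::sleep(Duration::from_millis(100)).await;
    }
}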
{ + let container = &**guid; + tracing::error!(container, "{}", line); + } + } + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Error reading logs from crashed container: {e}"); + tracing::debug!("{e:?}") + } + rootfs.unmount(true).await.unwrap(); + drop(guid); + if let Err(e) = manager.gc().await { + tracing::error!("Error cleaning up dangling LXC containers: {e}"); + tracing::debug!("{e:?}") + } else { + tracing::info!("Successfully cleaned up dangling LXC containers"); + } + }); + } + } + } +} + +#[derive(Default, Serialize)] +pub struct LxcConfig {} + +pub fn lxc() -> ParentHandler { + ParentHandler::new() + .subcommand( + "create", + from_fn_async(create).with_remote_cli::(), + ) + .subcommand( + "list", + from_fn_async(list) + .with_custom_display_fn::(|_, res| { + use prettytable::*; + let mut table = table!([bc => "GUID"]); + for guid in res { + table.add_row(row![&*guid]); + } + table.printstd(); + Ok(()) + }) + .with_remote_cli::(), + ) + .subcommand( + "remove", + from_fn_async(remove) + .no_display() + .with_remote_cli::(), + ) + .subcommand("connect", from_fn_async(connect_rpc).no_cli()) + .subcommand("connect", from_fn_async(connect_rpc_cli).no_display()) +} + +pub async fn create(ctx: RpcContext) -> Result { + let container = ctx.lxc_manager.create(LxcConfig::default()).await?; + let guid = container.guid.deref().clone(); + ctx.dev.lxc.lock().await.insert(guid.clone(), container); + Ok(guid) +} + +pub async fn list(ctx: RpcContext) -> Result, Error> { + Ok(ctx.dev.lxc.lock().await.keys().cloned().collect()) +} + +#[derive(Deserialize, Serialize, Parser)] +pub struct RemoveParams { + pub guid: InternedString, +} + +pub async fn remove(ctx: RpcContext, RemoveParams { guid }: RemoveParams) -> Result<(), Error> { + if let Some(container) = ctx.dev.lxc.lock().await.remove(&guid) { + container.exit().await?; + } + Ok(()) +} + +#[derive(Deserialize, Serialize, Parser)] +pub struct ConnectParams { + pub guid: InternedString, +} + +pub async fn connect_rpc( + ctx: RpcContext, + ConnectParams { guid }: ConnectParams, +) -> Result { + connect( + &ctx, + ctx.dev.lxc.lock().await.get(&guid).ok_or_else(|| { + Error::new(eyre!("No container with guid: {guid}"), ErrorKind::NotFound) + })?, + ) + .await +} + +pub async fn connect(ctx: &RpcContext, container: &LxcContainer) -> Result { + use axum::extract::ws::Message; + + let rpc = container.connect_rpc(Some(Duration::from_secs(30))).await?; + let guid = RequestGuid::new(); + ctx.add_continuation( + guid.clone(), + RpcContinuation::ws( + Box::new(|mut ws| { + async move { + if let Err(e) = async { + loop { + match ws.next().await { + None => break, + Some(Ok(Message::Text(txt))) => { + let mut id = None; + let result = async { + let req: RpcRequest = + serde_json::from_str(&txt).map_err(|e| RpcError { + data: Some(serde_json::Value::String( + e.to_string(), + )), + ..rpc_toolkit::yajrc::PARSE_ERROR + })?; + id = req.id; + rpc.request(req.method, req.params).await + } + .await; + ws.send(Message::Text( + serde_json::to_string(&RpcResponse:: { + id, + result, + }) + .with_kind(ErrorKind::Serialization)?, + )) + .await + .with_kind(ErrorKind::Network)?; + } + Some(Ok(_)) => (), + Some(Err(e)) => { + return Err(Error::new(e, ErrorKind::Network)); + } + } + } + Ok::<_, Error>(()) + } + .await + { + tracing::error!("{e}"); + tracing::debug!("{e:?}"); + } + } + .boxed() + }), + Duration::from_secs(30), + ), + ) + .await; + Ok(guid) +} + +pub async fn connect_cli(ctx: &CliContext, guid: RequestGuid) -> Result<(), Error> { + use 
futures::SinkExt; + use tokio_tungstenite::tungstenite::Message; + + let mut ws = ctx.ws_continuation(guid).await?; + let (mut input, mut output) = + rustyline_async::Readline::new("> ".into()).with_kind(ErrorKind::Filesystem)?; + + async fn handle_message( + msg: Option>, + output: &mut SharedWriter, + ) -> Result { + match msg { + None => return Ok(true), + Some(Ok(Message::Text(txt))) => match serde_json::from_str::(&txt) { + Ok(RpcResponse { result: Ok(a), .. }) => { + output + .write_all( + (serde_json::to_string(&a).with_kind(ErrorKind::Serialization)? + "\n") + .as_bytes(), + ) + .await?; + } + Ok(RpcResponse { result: Err(e), .. }) => { + let e: Error = e.into(); + tracing::error!("{e}"); + tracing::debug!("{e:?}"); + } + Err(e) => { + tracing::error!("Error Parsing RPC response: {e}"); + tracing::debug!("{e:?}"); + } + }, + Some(Ok(_)) => (), + Some(Err(e)) => { + return Err(Error::new(e, ErrorKind::Network)); + } + }; + Ok(false) + } + + loop { + tokio::select! { + line = input.readline() => { + let line = line.with_kind(ErrorKind::Filesystem)?; + if let ReadlineEvent::Line(line) = line { + input.add_history_entry(line.clone()); + if serde_json::from_str::(&line).is_ok() { + ws.send(Message::Text(line)) + .await + .with_kind(ErrorKind::Network)?; + } else { + match shell_words::split(&line) { + Ok(command) => { + if let Some((method, rest)) = command.split_first() { + let mut params = InOMap::new(); + for arg in rest { + if let Some((name, value)) = arg.split_once("=") { + params.insert(InternedString::intern(name), if value.is_empty() { + Value::Null + } else if let Ok(v) = serde_json::from_str(value) { + v + } else { + Value::String(Arc::new(value.into())) + }); + } else { + tracing::error!("argument without a value: {arg}"); + tracing::debug!("help: set the value of {arg} with `{arg}=...`"); + continue; + } + } + ws.send(Message::Text(match serde_json::to_string(&RpcRequest { + id: None, + method: GenericRpcMethod::new(method.into()), + params: Value::Object(params), + }) { + Ok(a) => a, + Err(e) => { + tracing::error!("Error Serializing Request: {e}"); + tracing::debug!("{e:?}"); + continue; + } + })).await.with_kind(ErrorKind::Network)?; + if handle_message(ws.next().await, &mut output).await? { + break + } + } + } + Err(e) => { + tracing::error!("{e}"); + tracing::debug!("{e:?}"); + } + } + } + } else { + ws.send(Message::Close(None)).await.with_kind(ErrorKind::Network)?; + } + } + msg = ws.next() => { + if handle_message(msg, &mut output).await? 
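The interactive `connect` REPL below turns `method key=value ...` input into a JSON-RPC params object: empty values become null, values that parse as JSON are taken literally, everything else falls back to a bare string. That rule in isolation, using serde_json's `Value` rather than the imbl_value types from the diff:

use serde_json::{Map, Value};

fn parse_params(args: &[String]) -> Result<Map<String, Value>, String> {
    let mut params = Map::new();
    for arg in args {
        let (name, value) = arg
            .split_once('=')
            .ok_or_else(|| format!("argument without a value: {arg}"))?;
        let value = if value.is_empty() {
            Value::Null
        } else if let Ok(v) = serde_json::from_str(value) {
            v // numbers, bools, quoted strings, arrays, objects
        } else {
            Value::String(value.to_string()) // fall back to a bare string
        };
        params.insert(name.to_string(), value);
    }
    Ok(params)
}

// e.g. ["limit=5", "follow=true", "id=hello"] -> {"limit":5,"follow":true,"id":"hello"}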
{ + break; + } + } + } + } + + Ok(()) +} + +pub async fn connect_rpc_cli( + handle_args: HandlerArgs, +) -> Result<(), Error> { + let ctx = handle_args.context.clone(); + let guid = CallRemoteHandler::::new(from_fn_async(connect_rpc)) + .handle_async(handle_args) + .await?; + + connect_cli(&ctx, guid).await +} diff --git a/core/startos/src/manager/health.rs b/core/startos/src/manager/health.rs deleted file mode 100644 index 30f18051ae..0000000000 --- a/core/startos/src/manager/health.rs +++ /dev/null @@ -1,56 +0,0 @@ -use models::OptionExt; -use tracing::instrument; - -use crate::context::RpcContext; -use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::status::MainStatus; -use crate::Error; - -/// So, this is used for a service to run a health check cycle, go out and run the health checks, and store those in the db -#[instrument(skip_all)] -pub async fn check(ctx: &RpcContext, id: &PackageId) -> Result<(), Error> { - let (manifest, started) = { - let peeked = ctx.db.peek().await; - let pde = peeked - .as_package_data() - .as_idx(id) - .or_not_found(id)? - .expect_as_installed()?; - - let manifest = pde.as_installed().as_manifest().de()?; - - let started = pde.as_installed().as_status().as_main().de()?.started(); - - (manifest, started) - }; - - let health_results = if let Some(started) = started { - tracing::debug!("Checking health of {}", id); - manifest - .health_checks - .check_all(ctx, started, id, &manifest.version, &manifest.volumes) - .await? - } else { - return Ok(()); - }; - - ctx.db - .mutate(|v| { - let pde = v - .as_package_data_mut() - .as_idx_mut(id) - .or_not_found(id)? - .expect_as_installed_mut()?; - let status = pde.as_installed_mut().as_status_mut().as_main_mut(); - - if let MainStatus::Running { health: _, started } = status.de()? { - status.ser(&MainStatus::Running { - health: health_results.clone(), - started, - })?; - } - Ok(()) - }) - .await -} diff --git a/core/startos/src/manager/manager_container.rs b/core/startos/src/manager/manager_container.rs deleted file mode 100644 index 32e11c2e52..0000000000 --- a/core/startos/src/manager/manager_container.rs +++ /dev/null @@ -1,300 +0,0 @@ -use std::sync::Arc; -use std::time::Duration; - -use models::OptionExt; -use tokio::sync::watch; -use tokio::sync::watch::Sender; -use tracing::instrument; - -use super::start_stop::StartStop; -use super::{manager_seed, run_main, ManagerPersistentContainer, RunMainResult}; -use crate::prelude::*; -use crate::procedure::NoOutput; -use crate::s9pk::manifest::Manifest; -use crate::status::MainStatus; -use crate::util::NonDetachingJoinHandle; -use crate::Error; - -pub type ManageContainerOverride = Arc>>; - -pub type Override = MainStatus; - -pub struct OverrideGuard { - override_main_status: Option, -} -impl OverrideGuard { - pub fn drop(self) {} -} -impl Drop for OverrideGuard { - fn drop(&mut self) { - if let Some(override_main_status) = self.override_main_status.take() { - override_main_status.send_modify(|x| { - *x = None; - }); - } - } -} - -/// This is the thing describing the state machine actor for a service -/// state and current running/ desired states. 
-pub struct ManageContainer { - pub(super) current_state: Arc>, - pub(super) desired_state: Arc>, - _service: NonDetachingJoinHandle<()>, - _save_state: NonDetachingJoinHandle<()>, - override_main_status: ManageContainerOverride, -} - -impl ManageContainer { - pub async fn new( - seed: Arc, - persistent_container: ManagerPersistentContainer, - ) -> Result { - let current_state = Arc::new(watch::channel(StartStop::Stop).0); - let desired_state = Arc::new( - watch::channel::( - get_status(seed.ctx.db.peek().await, &seed.manifest).into(), - ) - .0, - ); - let override_main_status: ManageContainerOverride = Arc::new(watch::channel(None).0); - let service = tokio::spawn(create_service_manager( - desired_state.clone(), - seed.clone(), - current_state.clone(), - persistent_container, - )) - .into(); - let save_state = tokio::spawn(save_state( - desired_state.clone(), - current_state.clone(), - override_main_status.clone(), - seed.clone(), - )) - .into(); - Ok(ManageContainer { - current_state, - desired_state, - _service: service, - override_main_status, - _save_state: save_state, - }) - } - - /// Set override is used during something like a restart of a service. We want to show certain statuses be different - /// from the actual status of the service. - pub fn set_override(&self, override_status: Override) -> Result { - let status = Some(override_status); - if self.override_main_status.borrow().is_some() { - return Err(Error::new( - eyre!("Already have an override"), - ErrorKind::InvalidRequest, - )); - } - self.override_main_status - .send_modify(|x| *x = status.clone()); - Ok(OverrideGuard { - override_main_status: Some(self.override_main_status.clone()), - }) - } - - /// Set the override, but don't have a guard to revert it. Used only on the mananger to do a shutdown. - pub(super) async fn lock_state_forever( - &self, - seed: &manager_seed::ManagerSeed, - ) -> Result<(), Error> { - let current_state = get_status(seed.ctx.db.peek().await, &seed.manifest); - self.override_main_status - .send_modify(|x| *x = Some(current_state)); - Ok(()) - } - - /// We want to set the state of the service, like to start or stop - pub fn to_desired(&self, new_state: StartStop) { - self.desired_state.send_modify(|x| *x = new_state); - } - - /// This is a tool to say wait for the service to be in a certain state. 
- pub async fn wait_for_desired(&self, new_state: StartStop) { - let mut current_state = self.current_state(); - self.to_desired(new_state); - while *current_state.borrow() != new_state { - current_state.changed().await.unwrap_or_default(); - } - } - - /// Getter - pub fn current_state(&self) -> watch::Receiver { - self.current_state.subscribe() - } - - /// Getter - pub fn desired_state(&self) -> watch::Receiver { - self.desired_state.subscribe() - } -} - -async fn create_service_manager( - desired_state: Arc>, - seed: Arc, - current_state: Arc>, - persistent_container: Arc>, -) { - let mut desired_state_receiver = desired_state.subscribe(); - let mut running_service: Option> = None; - let seed = seed.clone(); - loop { - let current: StartStop = *current_state.borrow(); - let desired: StartStop = *desired_state_receiver.borrow(); - match (current, desired) { - (StartStop::Start, StartStop::Start) => (), - (StartStop::Start, StartStop::Stop) => { - if persistent_container.is_none() { - if let Err(err) = seed.stop_container().await { - tracing::error!("Could not stop container"); - tracing::debug!("{:?}", err) - } - running_service = None; - } else if let Some(current_service) = running_service.take() { - tokio::select! { - _ = current_service => (), - _ = tokio::time::sleep(Duration::from_secs_f64(seed.manifest - .containers - .as_ref() - .and_then(|c| c.main.sigterm_timeout).map(|x| x.as_secs_f64()).unwrap_or_default())) => { - tracing::error!("Could not stop service"); - } - } - } - current_state.send_modify(|x| *x = StartStop::Stop); - } - (StartStop::Stop, StartStop::Start) => starting_service( - current_state.clone(), - desired_state.clone(), - seed.clone(), - persistent_container.clone(), - &mut running_service, - ), - (StartStop::Stop, StartStop::Stop) => (), - } - - if desired_state_receiver.changed().await.is_err() { - tracing::error!("Desired state error"); - break; - } - } -} - -async fn save_state( - desired_state: Arc>, - current_state: Arc>, - override_main_status: ManageContainerOverride, - seed: Arc, -) { - let mut desired_state_receiver = desired_state.subscribe(); - let mut current_state_receiver = current_state.subscribe(); - let mut override_main_status_receiver = override_main_status.subscribe(); - loop { - let current: StartStop = *current_state_receiver.borrow(); - let desired: StartStop = *desired_state_receiver.borrow(); - let override_status = override_main_status_receiver.borrow().clone(); - let status = match (override_status.clone(), current, desired) { - (Some(status), _, _) => status, - (_, StartStop::Start, StartStop::Start) => MainStatus::Running { - started: chrono::Utc::now(), - health: Default::default(), - }, - (_, StartStop::Start, StartStop::Stop) => MainStatus::Stopping, - (_, StartStop::Stop, StartStop::Start) => MainStatus::Starting, - (_, StartStop::Stop, StartStop::Stop) => MainStatus::Stopped, - }; - - let manifest = &seed.manifest; - if let Err(err) = seed - .ctx - .db - .mutate(|db| set_status(db, manifest, &status)) - .await - { - tracing::error!("Did not set status for {}", seed.container_name); - tracing::debug!("{:?}", err); - } - tokio::select! 
{ - _ = desired_state_receiver.changed() =>{}, - _ = current_state_receiver.changed() => {}, - _ = override_main_status_receiver.changed() => {} - } - } -} - -fn starting_service( - current_state: Arc>, - desired_state: Arc>, - seed: Arc, - persistent_container: ManagerPersistentContainer, - running_service: &mut Option>, -) { - let set_running = { - let current_state = current_state.clone(); - Arc::new(move || { - current_state.send_modify(|x| *x = StartStop::Start); - }) - }; - let set_stopped = { move || current_state.send_modify(|x| *x = StartStop::Stop) }; - let running_main_loop = async move { - while desired_state.borrow().is_start() { - let result = run_main( - seed.clone(), - persistent_container.clone(), - set_running.clone(), - ) - .await; - set_stopped(); - run_main_log_result(result, seed.clone()).await; - } - }; - *running_service = Some(tokio::spawn(running_main_loop).into()); -} - -async fn run_main_log_result(result: RunMainResult, seed: Arc) { - match result { - Ok(Ok(NoOutput)) => (), // restart - Ok(Err(e)) => { - tracing::error!( - "The service {} has crashed with the following exit code: {}", - seed.manifest.id.clone(), - e.0 - ); - - tokio::time::sleep(Duration::from_secs(15)).await; - } - Err(e) => { - tracing::error!("failed to start service: {}", e); - tracing::debug!("{:?}", e); - } - } -} - -/// Used only in the mod where we are doing a backup -#[instrument(skip(db, manifest))] -pub(super) fn get_status(db: Peeked, manifest: &Manifest) -> MainStatus { - db.as_package_data() - .as_idx(&manifest.id) - .and_then(|x| x.as_installed()) - .filter(|x| x.as_manifest().as_version().de().ok() == Some(manifest.version.clone())) - .and_then(|x| x.as_status().as_main().de().ok()) - .unwrap_or(MainStatus::Stopped) -} - -#[instrument(skip(db, manifest))] -fn set_status(db: &mut Peeked, manifest: &Manifest, main_status: &MainStatus) -> Result<(), Error> { - let Some(installed) = db - .as_package_data_mut() - .as_idx_mut(&manifest.id) - .or_not_found(&manifest.id)? - .as_installed_mut() - else { - return Ok(()); - }; - installed.as_status_mut().as_main_mut().ser(main_status) -} diff --git a/core/startos/src/manager/manager_map.rs b/core/startos/src/manager/manager_map.rs deleted file mode 100644 index 07f128ccd4..0000000000 --- a/core/startos/src/manager/manager_map.rs +++ /dev/null @@ -1,96 +0,0 @@ -use std::collections::BTreeMap; -use std::sync::Arc; - -use color_eyre::eyre::eyre; -use tokio::sync::RwLock; -use tracing::instrument; - -use super::Manager; -use crate::context::RpcContext; -use crate::prelude::*; -use crate::s9pk::manifest::{Manifest, PackageId}; -use crate::util::Version; -use crate::Error; - -/// This is the structure to contain all the service managers -#[derive(Default)] -pub struct ManagerMap(RwLock>>); -impl ManagerMap { - #[instrument(skip_all)] - pub async fn init(&self, ctx: RpcContext, peeked: Peeked) -> Result<(), Error> { - let mut res = BTreeMap::new(); - for package in peeked.as_package_data().keys()? { - let man: Manifest = if let Some(manifest) = peeked - .as_package_data() - .as_idx(&package) - .and_then(|x| x.as_installed()) - .map(|x| x.as_manifest().de()) - { - manifest? 
- } else { - continue; - }; - - res.insert( - (package, man.version.clone()), - Arc::new(Manager::new(ctx.clone(), man).await?), - ); - } - *self.0.write().await = res; - Ok(()) - } - - /// Used during the install process - #[instrument(skip_all)] - pub async fn add(&self, ctx: RpcContext, manifest: Manifest) -> Result, Error> { - let mut lock = self.0.write().await; - let id = (manifest.id.clone(), manifest.version.clone()); - if let Some(man) = lock.remove(&id) { - man.exit().await; - } - let manager = Arc::new(Manager::new(ctx.clone(), manifest).await?); - lock.insert(id, manager.clone()); - Ok(manager) - } - - /// This is ran during the cleanup, so when we are uninstalling the service - #[instrument(skip_all)] - pub async fn remove(&self, id: &(PackageId, Version)) { - if let Some(man) = self.0.write().await.remove(id) { - man.exit().await; - } - } - - /// Used during a shutdown - #[instrument(skip_all)] - pub async fn empty(&self) -> Result<(), Error> { - let res = - futures::future::join_all(std::mem::take(&mut *self.0.write().await).into_iter().map( - |((id, version), man)| async move { - tracing::debug!("Manager for {}@{} shutting down", id, version); - man.shutdown().await?; - tracing::debug!("Manager for {}@{} is shutdown", id, version); - if let Err(e) = Arc::try_unwrap(man) { - tracing::trace!( - "Manager for {}@{} still has {} other open references", - id, - version, - Arc::strong_count(&e) - 1 - ); - } - Ok::<_, Error>(()) - }, - )) - .await; - res.into_iter().fold(Ok(()), |res, x| match (res, x) { - (Ok(()), x) => x, - (Err(e), Ok(())) => Err(e), - (Err(e1), Err(e2)) => Err(Error::new(eyre!("{}, {}", e1.source, e2.source), e1.kind)), - }) - } - - #[instrument(skip_all)] - pub async fn get(&self, id: &(PackageId, Version)) -> Option> { - self.0.read().await.get(id).cloned() - } -} diff --git a/core/startos/src/manager/manager_seed.rs b/core/startos/src/manager/manager_seed.rs deleted file mode 100644 index f90e7739fd..0000000000 --- a/core/startos/src/manager/manager_seed.rs +++ /dev/null @@ -1,37 +0,0 @@ -use models::ErrorKind; - -use crate::context::RpcContext; -use crate::procedure::docker::DockerProcedure; -use crate::procedure::PackageProcedure; -use crate::s9pk::manifest::Manifest; -use crate::util::docker::stop_container; -use crate::Error; - -/// This is helper structure for a service, the seed of the data that is needed for the manager_container -pub struct ManagerSeed { - pub ctx: RpcContext, - pub manifest: Manifest, - pub container_name: String, -} - -impl ManagerSeed { - pub async fn stop_container(&self) -> Result<(), Error> { - match stop_container( - &self.container_name, - match &self.manifest.main { - PackageProcedure::Docker(DockerProcedure { - sigterm_timeout: Some(sigterm_timeout), - .. 
- }) => Some(**sigterm_timeout), - _ => None, - }, - None, - ) - .await - { - Err(e) if e.kind == ErrorKind::NotFound => (), // Already stopped - a => a?, - } - Ok(()) - } -} diff --git a/core/startos/src/manager/mod.rs b/core/startos/src/manager/mod.rs deleted file mode 100644 index 1151da2f4a..0000000000 --- a/core/startos/src/manager/mod.rs +++ /dev/null @@ -1,888 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; -use std::net::Ipv4Addr; -use std::sync::Arc; -use std::task::Poll; -use std::time::Duration; - -use color_eyre::eyre::eyre; -use container_init::ProcessGroupId; -use futures::future::BoxFuture; -use futures::{Future, FutureExt, TryFutureExt}; -use helpers::UnixRpcClient; -use models::{ErrorKind, OptionExt, PackageId}; -use nix::sys::signal::Signal; -use persistent_container::PersistentContainer; -use rand::SeedableRng; -use sqlx::Connection; -use start_stop::StartStop; -use tokio::sync::watch::{self, Sender}; -use tokio::sync::{oneshot, Mutex}; -use tracing::instrument; -use transition_state::TransitionState; - -use crate::backup::target::PackageBackupInfo; -use crate::backup::PackageBackupReport; -use crate::config::action::ConfigRes; -use crate::config::spec::ValueSpecPointer; -use crate::config::ConfigureContext; -use crate::context::RpcContext; -use crate::db::model::{CurrentDependencies, CurrentDependencyInfo}; -use crate::dependencies::{ - add_dependent_to_current_dependents_lists, compute_dependency_config_errs, -}; -use crate::disk::mount::backup::BackupMountGuard; -use crate::disk::mount::guard::TmpMountGuard; -use crate::install::cleanup::remove_from_current_dependents_lists; -use crate::net::net_controller::NetService; -use crate::net::vhost::AlpnInfo; -use crate::prelude::*; -use crate::procedure::docker::{DockerContainer, DockerProcedure, LongRunning}; -use crate::procedure::{NoOutput, ProcedureName}; -use crate::s9pk::manifest::Manifest; -use crate::status::MainStatus; -use crate::util::docker::{get_container_ip, kill_container}; -use crate::util::NonDetachingJoinHandle; -use crate::volume::Volume; -use crate::Error; - -pub mod health; -mod manager_container; -mod manager_map; -pub mod manager_seed; -mod persistent_container; -mod start_stop; -mod transition_state; - -pub use manager_map::ManagerMap; - -use self::manager_container::{get_status, ManageContainer}; -use self::manager_seed::ManagerSeed; - -pub const HEALTH_CHECK_COOLDOWN_SECONDS: u64 = 15; -pub const HEALTH_CHECK_GRACE_PERIOD_SECONDS: u64 = 5; - -type ManagerPersistentContainer = Arc>; -type BackupGuard = Arc>>; -pub enum BackupReturn { - Error(Error), - AlreadyRunning(PackageBackupReport), - Ran { - report: PackageBackupReport, - res: Result, - }, -} - -pub struct Gid { - next_gid: (watch::Sender, watch::Receiver), - main_gid: ( - watch::Sender, - watch::Receiver, - ), -} - -impl Default for Gid { - fn default() -> Self { - Self { - next_gid: watch::channel(1), - main_gid: watch::channel(ProcessGroupId(1)), - } - } -} -impl Gid { - pub fn new_gid(&self) -> ProcessGroupId { - let mut previous = 0; - self.next_gid.0.send_modify(|x| { - previous = *x; - *x = previous + 1; - }); - ProcessGroupId(previous) - } - - pub fn new_main_gid(&self) -> ProcessGroupId { - let gid = self.new_gid(); - self.main_gid.0.send(gid).unwrap_or_default(); - gid - } -} - -/// This is the controller of the services. Here is where we can control a service with a start, stop, restart, etc. 
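// Illustrative sketch (not part of this change): the `Gid` allocator above hands out
// monotonically increasing process-group ids by mutating a watch channel in place. An
// `AtomicU32::fetch_add` would cover the counter alone; the watch channel appears to be used
// so other tasks can also observe the latest "main" gid. Minimal standalone equivalent with
// a hypothetical name and plain `u32` ids.
use tokio::sync::watch;

struct GidAlloc {
    next: watch::Sender<u32>,
}

impl GidAlloc {
    fn new() -> Self {
        Self {
            next: watch::channel(1).0,
        }
    }

    // Returns the current value and bumps the counter, like `Gid::new_gid` above.
    fn new_gid(&self) -> u32 {
        let mut previous = 0;
        self.next.send_modify(|x| {
            previous = *x;
            *x = previous + 1;
        });
        previous
    }
}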
-#[derive(Clone)] -pub struct Manager { - seed: Arc, - - manage_container: Arc, - transition: Arc>, - persistent_container: ManagerPersistentContainer, - - pub gid: Arc, -} -impl Manager { - pub async fn new(ctx: RpcContext, manifest: Manifest) -> Result { - let seed = Arc::new(ManagerSeed { - ctx, - container_name: DockerProcedure::container_name(&manifest.id, None), - manifest, - }); - - let persistent_container = Arc::new(PersistentContainer::init(&seed).await?); - let manage_container = Arc::new( - manager_container::ManageContainer::new(seed.clone(), persistent_container.clone()) - .await?, - ); - let (transition, _) = watch::channel(Default::default()); - let transition = Arc::new(transition); - Ok(Self { - seed, - manage_container, - transition, - persistent_container, - gid: Default::default(), - }) - } - - /// awaiting this does not wait for the start to complete - pub async fn start(&self) { - if self._is_transition_restart() { - return; - } - self._transition_abort().await; - self.manage_container.to_desired(StartStop::Start); - } - - /// awaiting this does not wait for the stop to complete - pub async fn stop(&self) { - self._transition_abort().await; - self.manage_container.to_desired(StartStop::Stop); - } - /// awaiting this does not wait for the restart to complete - pub async fn restart(&self) { - if self._is_transition_restart() - && *self.manage_container.desired_state().borrow() == StartStop::Stop - { - return; - } - if self.manage_container.desired_state().borrow().is_start() { - self._transition_replace(self._transition_restart()).await; - } - } - /// awaiting this does not wait for the restart to complete - pub async fn configure( - &self, - configure_context: ConfigureContext, - ) -> Result, Error> { - if self._is_transition_restart() { - self._transition_abort().await; - } else if self._is_transition_backup() { - return Err(Error::new( - eyre!("Can't configure because service is backing up"), - ErrorKind::InvalidRequest, - )); - } - let context = self.seed.ctx.clone(); - let id = self.seed.manifest.id.clone(); - - let breakages = configure(context, id, configure_context).await?; - - self.restart().await; - - Ok(breakages) - } - - /// awaiting this does not wait for the backup to complete - pub async fn backup(&self, backup_guard: BackupGuard) -> BackupReturn { - if self._is_transition_backup() { - return BackupReturn::AlreadyRunning(PackageBackupReport { - error: Some("Can't do backup because service is already backing up".to_owned()), - }); - } - let (transition_state, done) = self._transition_backup(backup_guard); - self._transition_replace(transition_state).await; - done.await - } - pub async fn exit(&self) { - self._transition_abort().await; - self.manage_container - .wait_for_desired(StartStop::Stop) - .await; - } - - /// A special exit that is overridden the start state, should only be called in the shutdown, where we remove other containers - async fn shutdown(&self) -> Result<(), Error> { - self.manage_container.lock_state_forever(&self.seed).await?; - - self.exit().await; - Ok(()) - } - - /// Used when we want to shutdown the service - pub async fn signal(&self, signal: Signal) -> Result<(), Error> { - let gid = self.gid.clone(); - send_signal(self, gid, signal).await - } - - /// Used as a getter, but also used in procedure - pub fn rpc_client(&self) -> Option> { - (*self.persistent_container) - .as_ref() - .map(|x| x.rpc_client()) - } - - async fn _transition_abort(&self) { - self.transition - .send_replace(Default::default()) - .abort() - .await; - } - 
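// Illustrative sketch (not part of this change): `_transition_abort`/`_transition_replace`
// lean on `watch::Sender::send_replace`, which swaps in the new transition and returns the
// previous one so it can be torn down -- guaranteeing at most one restart/backup transition
// is in flight at a time. Simplified standalone shape; the `Transition` enum over a bare
// `JoinHandle` is hypothetical.
use tokio::sync::watch;
use tokio::task::JoinHandle;

#[derive(Default)]
enum Transition {
    #[default]
    None,
    Running(JoinHandle<()>),
}

async fn replace_transition(slot: &watch::Sender<Transition>, new: Transition) {
    // `send_replace` hands back whatever was there before; abort and await it.
    if let Transition::Running(handle) = slot.send_replace(new) {
        handle.abort();
        // Yields Err(e) with e.is_cancelled() == true if the task was still running.
        let _ = handle.await;
    }
}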
async fn _transition_replace(&self, transition_state: TransitionState) { - self.transition.send_replace(transition_state).abort().await; - } - - pub(super) fn perform_restart(&self) -> impl Future> + 'static { - let manage_container = self.manage_container.clone(); - async move { - let restart_override = manage_container.set_override(MainStatus::Restarting)?; - manage_container.wait_for_desired(StartStop::Stop).await; - manage_container.wait_for_desired(StartStop::Start).await; - restart_override.drop(); - Ok(()) - } - } - fn _transition_restart(&self) -> TransitionState { - let transition = self.transition.clone(); - let restart = self.perform_restart(); - TransitionState::Restarting( - tokio::spawn(async move { - if let Err(err) = restart.await { - tracing::error!("Error restarting service: {}", err); - } - transition.send_replace(Default::default()); - }) - .into(), - ) - } - fn perform_backup( - &self, - backup_guard: BackupGuard, - ) -> impl Future, Error>> { - let manage_container = self.manage_container.clone(); - let seed = self.seed.clone(); - async move { - let peek = seed.ctx.db.peek().await; - let state_reverter = DesiredStateReverter::new(manage_container.clone()); - let override_guard = - manage_container.set_override(get_status(peek, &seed.manifest).backing_up())?; - manage_container.wait_for_desired(StartStop::Stop).await; - let backup_guard = backup_guard.lock().await; - let guard = backup_guard.mount_package_backup(&seed.manifest.id).await?; - - let return_value = seed.manifest.backup.create(seed.clone()).await; - guard.unmount().await?; - drop(backup_guard); - - let manifest_id = seed.manifest.id.clone(); - seed.ctx - .db - .mutate(|db| { - if let Some(progress) = db - .as_server_info_mut() - .as_status_info_mut() - .as_backup_progress_mut() - .transpose_mut() - .and_then(|p| p.as_idx_mut(&manifest_id)) - { - progress.as_complete_mut().ser(&true)?; - } - Ok(()) - }) - .await?; - - state_reverter.revert().await; - - override_guard.drop(); - Ok::<_, Error>(return_value) - } - } - fn _transition_backup( - &self, - backup_guard: BackupGuard, - ) -> (TransitionState, BoxFuture) { - let (send, done) = oneshot::channel(); - - let transition_state = self.transition.clone(); - ( - TransitionState::BackingUp( - tokio::spawn( - self.perform_backup(backup_guard) - .then(finish_up_backup_task(transition_state, send)), - ) - .into(), - ), - done.map_err(|err| Error::new(eyre!("Oneshot error: {err:?}"), ErrorKind::Unknown)) - .map(flatten_backup_error) - .boxed(), - ) - } - fn _is_transition_restart(&self) -> bool { - let transition = self.transition.borrow(); - matches!(*transition, TransitionState::Restarting(_)) - } - fn _is_transition_backup(&self) -> bool { - let transition = self.transition.borrow(); - matches!(*transition, TransitionState::BackingUp(_)) - } -} - -#[instrument(skip_all)] -async fn configure( - ctx: RpcContext, - id: PackageId, - mut configure_context: ConfigureContext, -) -> Result, Error> { - let db = ctx.db.peek().await; - let id = &id; - let ctx = &ctx; - let overrides = &mut configure_context.overrides; - // fetch data from db - let manifest = db - .as_package_data() - .as_idx(id) - .or_not_found(id)? - .as_manifest() - .de()?; - - // get current config and current spec - let ConfigRes { - config: old_config, - spec, - } = manifest - .config - .as_ref() - .or_not_found("Manifest config")? 
- .get(ctx, id, &manifest.version, &manifest.volumes) - .await?; - - // determine new config to use - let mut config = if let Some(config) = configure_context.config.or_else(|| old_config.clone()) { - config - } else { - spec.gen( - &mut rand::rngs::StdRng::from_entropy(), - &configure_context.timeout, - )? - }; - - spec.validate(&manifest)?; - spec.matches(&config)?; // check that new config matches spec - - // TODO Commit or not? - spec.update(ctx, &manifest, overrides, &mut config).await?; // dereference pointers in the new config - - let manifest = db - .as_package_data() - .as_idx(id) - .or_not_found(id)? - .as_installed() - .or_not_found(id)? - .as_manifest() - .de()?; - - let dependencies = &manifest.dependencies; - let mut current_dependencies: CurrentDependencies = CurrentDependencies( - dependencies - .0 - .iter() - .filter_map(|(id, info)| { - if info.requirement.required() { - Some((id.clone(), CurrentDependencyInfo::default())) - } else { - None - } - }) - .collect(), - ); - for ptr in spec.pointers(&config)? { - match ptr { - ValueSpecPointer::Package(pkg_ptr) => { - if let Some(info) = current_dependencies.0.get_mut(pkg_ptr.package_id()) { - info.pointers.insert(pkg_ptr); - } else { - let id = pkg_ptr.package_id().to_owned(); - let mut pointers = BTreeSet::new(); - pointers.insert(pkg_ptr); - current_dependencies.0.insert( - id, - CurrentDependencyInfo { - pointers, - health_checks: BTreeSet::new(), - }, - ); - } - } - ValueSpecPointer::System(_) => (), - } - } - - let action = manifest.config.as_ref().or_not_found(id)?; - let version = &manifest.version; - let volumes = &manifest.volumes; - if !configure_context.dry_run { - // run config action - let res = action - .set(ctx, id, version, &dependencies, volumes, &config) - .await?; - - // track dependencies with no pointers - for (package_id, health_checks) in res.depends_on.into_iter() { - if let Some(current_dependency) = current_dependencies.0.get_mut(&package_id) { - current_dependency.health_checks.extend(health_checks); - } else { - current_dependencies.0.insert( - package_id, - CurrentDependencyInfo { - pointers: BTreeSet::new(), - health_checks, - }, - ); - } - } - - // track dependency health checks - current_dependencies = current_dependencies.map(|x| { - x.into_iter() - .filter(|(dep_id, _)| { - if dep_id != id && !manifest.dependencies.0.contains_key(dep_id) { - tracing::warn!("Illegal dependency specified: {}", dep_id); - false - } else { - true - } - }) - .collect() - }); - } - - let dependency_config_errs = - compute_dependency_config_errs(&ctx, &db, &manifest, ¤t_dependencies, overrides) - .await?; - - // cache current config for dependents - configure_context - .overrides - .insert(id.clone(), config.clone()); - - // handle dependents - - let dependents = db - .as_package_data() - .as_idx(id) - .or_not_found(id)? - .as_installed() - .or_not_found(id)? - .as_current_dependents() - .de()?; - for (dependent, _dep_info) in dependents.0.iter().filter(|(dep_id, _)| dep_id != &id) { - // check if config passes dependent check - if let Some(cfg) = db - .as_package_data() - .as_idx(dependent) - .or_not_found(dependent)? - .as_installed() - .or_not_found(dependent)? - .as_manifest() - .as_dependencies() - .as_idx(id) - .or_not_found(id)? - .as_config() - .de()? - { - let manifest = db - .as_package_data() - .as_idx(dependent) - .or_not_found(dependent)? - .as_installed() - .or_not_found(dependent)? 
- .as_manifest() - .de()?; - if let Err(error) = cfg - .check( - ctx, - dependent, - &manifest.version, - &manifest.volumes, - id, - &config, - ) - .await? - { - configure_context.breakages.insert(dependent.clone(), error); - } - } - } - - if !configure_context.dry_run { - return ctx - .db - .mutate(move |db| { - remove_from_current_dependents_lists(db, id, ¤t_dependencies)?; - add_dependent_to_current_dependents_lists(db, id, ¤t_dependencies)?; - current_dependencies.0.remove(id); - for (dep, errs) in db - .as_package_data_mut() - .as_entries_mut()? - .into_iter() - .filter_map(|(id, pde)| { - pde.as_installed_mut() - .map(|i| (id, i.as_status_mut().as_dependency_config_errors_mut())) - }) - { - errs.remove(id)?; - if let Some(err) = configure_context.breakages.get(&dep) { - errs.insert(id, err)?; - } - } - let installed = db - .as_package_data_mut() - .as_idx_mut(id) - .or_not_found(id)? - .as_installed_mut() - .or_not_found(id)?; - installed - .as_current_dependencies_mut() - .ser(¤t_dependencies)?; - let status = installed.as_status_mut(); - status.as_configured_mut().ser(&true)?; - status - .as_dependency_config_errors_mut() - .ser(&dependency_config_errs)?; - Ok(configure_context.breakages) - }) - .await; // add new - } - - Ok(configure_context.breakages) -} - -struct DesiredStateReverter { - manage_container: Option>, - starting_state: StartStop, -} -impl DesiredStateReverter { - fn new(manage_container: Arc) -> Self { - let starting_state = *manage_container.desired_state().borrow(); - let manage_container = Some(manage_container); - Self { - starting_state, - manage_container, - } - } - async fn revert(mut self) { - if let Some(mut current_state) = self._revert() { - while *current_state.borrow() != self.starting_state { - current_state.changed().await.unwrap(); - } - } - } - fn _revert(&mut self) -> Option> { - if let Some(manage_container) = self.manage_container.take() { - manage_container.to_desired(self.starting_state); - - return Some(manage_container.desired_state()); - } - None - } -} -impl Drop for DesiredStateReverter { - fn drop(&mut self) { - self._revert(); - } -} - -type BackupDoneSender = oneshot::Sender>; - -fn finish_up_backup_task( - transition: Arc>, - send: BackupDoneSender, -) -> impl FnOnce(Result, Error>) -> BoxFuture<'static, ()> { - move |result| { - async move { - transition.send_replace(Default::default()); - send.send(match result { - Ok(a) => a, - Err(e) => Err(e), - }) - .unwrap_or_default(); - } - .boxed() - } -} - -fn response_to_report(response: &Result) -> PackageBackupReport { - PackageBackupReport { - error: response.as_ref().err().map(|e| e.to_string()), - } -} -fn flatten_backup_error(input: Result, Error>) -> BackupReturn { - match input { - Ok(a) => BackupReturn::Ran { - report: response_to_report(&a), - res: a, - }, - Err(err) => BackupReturn::Error(err), - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum Status { - Starting, - Running, - Stopped, - Paused, - Shutdown, -} - -#[derive(Debug, Clone, Copy)] -pub enum OnStop { - Restart, - Sleep, - Exit, -} - -type RunMainResult = Result, Error>; - -#[instrument(skip_all)] -async fn run_main( - seed: Arc, - persistent_container: ManagerPersistentContainer, - started: Arc, -) -> RunMainResult { - let mut runtime = NonDetachingJoinHandle::from(tokio::spawn(start_up_image(seed.clone()))); - let ip = match persistent_container.is_some() { - false => Some(match get_running_ip(&seed, &mut runtime).await { - GetRunningIp::Ip(x) => x, - GetRunningIp::Error(e) => 
return Err(e), - GetRunningIp::EarlyExit(x) => return Ok(x), - }), - true => None, - }; - - let svc = if let Some(ip) = ip { - let net = add_network_for_main(&seed, ip).await?; - started(); - Some(net) - } else { - None - }; - - let health = main_health_check_daemon(seed.clone()); - let res = tokio::select! { - a = runtime => a.map_err(|_| Error::new(eyre!("Manager runtime panicked!"), crate::ErrorKind::Docker)).and_then(|a| a), - _ = health => Err(Error::new(eyre!("Health check daemon exited!"), crate::ErrorKind::Unknown)) - }; - if let Some(svc) = svc { - remove_network_for_main(svc).await?; - } - res -} - -/// We want to start up the manifest, but in this case we want to know that we have generated the certificates. -/// Note for _generated_certificate: Needed to know that before we start the state we have generated the certificate -async fn start_up_image(seed: Arc) -> Result, Error> { - seed.manifest - .main - .execute::<(), NoOutput>( - &seed.ctx, - &seed.manifest.id, - &seed.manifest.version, - ProcedureName::Main, - &seed.manifest.volumes, - None, - None, - ) - .await -} - -async fn long_running_docker( - seed: &ManagerSeed, - container: &DockerContainer, -) -> Result<(LongRunning, UnixRpcClient), Error> { - container - .long_running_execute( - &seed.ctx, - &seed.manifest.id, - &seed.manifest.version, - &seed.manifest.volumes, - ) - .await -} - -enum GetRunningIp { - Ip(Ipv4Addr), - Error(Error), - EarlyExit(Result), -} - -async fn get_long_running_ip(seed: &ManagerSeed, runtime: &mut LongRunning) -> GetRunningIp { - loop { - match get_container_ip(&seed.container_name).await { - Ok(Some(ip_addr)) => return GetRunningIp::Ip(ip_addr), - Ok(None) => (), - Err(e) if e.kind == ErrorKind::NotFound => (), - Err(e) => return GetRunningIp::Error(e), - } - if let Poll::Ready(res) = futures::poll!(&mut runtime.running_output) { - match res { - Ok(_) => return GetRunningIp::EarlyExit(Ok(NoOutput)), - Err(_e) => { - return GetRunningIp::Error(Error::new( - eyre!("Manager runtime panicked!"), - crate::ErrorKind::Docker, - )) - } - } - } - } -} - -#[instrument(skip(seed))] -async fn add_network_for_main( - seed: &ManagerSeed, - ip: std::net::Ipv4Addr, -) -> Result { - let mut svc = seed - .ctx - .net_controller - .create_service(seed.manifest.id.clone(), ip) - .await?; - // DEPRECATED - let mut secrets = seed.ctx.secret_store.acquire().await?; - let mut tx = secrets.begin().await?; - for (id, interface) in &seed.manifest.interfaces.0 { - for (external, internal) in interface.lan_config.iter().flatten() { - svc.add_lan( - tx.as_mut(), - id.clone(), - external.0, - internal.internal, - Err(AlpnInfo::Specified(vec![])), - ) - .await?; - } - for (external, internal) in interface.tor_config.iter().flat_map(|t| &t.port_mapping) { - svc.add_tor(tx.as_mut(), id.clone(), external.0, internal.0) - .await?; - } - } - for volume in seed.manifest.volumes.values() { - if let Volume::Certificate { interface_id } = volume { - svc.export_cert(tx.as_mut(), interface_id, ip.into()) - .await?; - } - } - tx.commit().await?; - Ok(svc) -} - -#[instrument(skip(svc))] -async fn remove_network_for_main(svc: NetService) -> Result<(), Error> { - svc.remove_all().await -} - -async fn main_health_check_daemon(seed: Arc) { - tokio::time::sleep(Duration::from_secs(HEALTH_CHECK_GRACE_PERIOD_SECONDS)).await; - loop { - if let Err(e) = health::check(&seed.ctx, &seed.manifest.id).await { - tracing::error!( - "Failed to run health check for {}: {}", - &seed.manifest.id, - e - ); - tracing::debug!("{:?}", e); - } - 
tokio::time::sleep(Duration::from_secs(HEALTH_CHECK_COOLDOWN_SECONDS)).await; - } -} - -type RuntimeOfCommand = NonDetachingJoinHandle, Error>>; - -#[instrument(skip(seed, runtime))] -async fn get_running_ip(seed: &ManagerSeed, mut runtime: &mut RuntimeOfCommand) -> GetRunningIp { - loop { - match get_container_ip(&seed.container_name).await { - Ok(Some(ip_addr)) => return GetRunningIp::Ip(ip_addr), - Ok(None) => (), - Err(e) if e.kind == ErrorKind::NotFound => (), - Err(e) => return GetRunningIp::Error(e), - } - if let Poll::Ready(res) = futures::poll!(&mut runtime) { - match res { - Ok(Ok(response)) => return GetRunningIp::EarlyExit(response), - Err(e) => { - return GetRunningIp::Error(Error::new( - match e.try_into_panic() { - Ok(e) => { - eyre!( - "Manager runtime panicked: {}", - e.downcast_ref::<&'static str>().unwrap_or(&"UNKNOWN") - ) - } - _ => eyre!("Manager runtime cancelled!"), - }, - crate::ErrorKind::Docker, - )) - } - Ok(Err(e)) => { - return GetRunningIp::Error(Error::new( - eyre!("Manager runtime returned error: {}", e), - crate::ErrorKind::Docker, - )) - } - } - } - } -} - -async fn send_signal(manager: &Manager, gid: Arc, signal: Signal) -> Result<(), Error> { - // stop health checks from committing their results - // shared - // .commit_health_check_results - // .store(false, Ordering::SeqCst); - - if let Some(rpc_client) = manager.rpc_client() { - let main_gid = *gid.main_gid.0.borrow(); - let next_gid = gid.new_gid(); - #[cfg(feature = "js-engine")] - if let Err(e) = crate::procedure::js_scripts::JsProcedure::default() - .execute::<_, NoOutput>( - &manager.seed.ctx.datadir, - &manager.seed.manifest.id, - &manager.seed.manifest.version, - ProcedureName::Signal, - &manager.seed.manifest.volumes, - Some(container_init::SignalGroupParams { - gid: main_gid, - signal: signal as u32, - }), - None, // TODO - next_gid, - Some(rpc_client), - ) - .await? - { - tracing::error!("Failed to send js signal: {}", e.1); - tracing::debug!("{:?}", e); - } - } else { - // send signal to container - kill_container(&manager.seed.container_name, Some(signal)) - .await - .or_else(|e| { - if e.kind == ErrorKind::NotFound { - Ok(()) - } else { - Err(e) - } - })?; - } - - Ok(()) -} diff --git a/core/startos/src/manager/persistent_container.rs b/core/startos/src/manager/persistent_container.rs deleted file mode 100644 index d9868a622d..0000000000 --- a/core/startos/src/manager/persistent_container.rs +++ /dev/null @@ -1,101 +0,0 @@ -use std::sync::Arc; -use std::time::Duration; - -use color_eyre::eyre::eyre; -use helpers::UnixRpcClient; -use tokio::sync::oneshot; -use tokio::sync::watch::{self, Receiver}; -use tracing::instrument; - -use super::manager_seed::ManagerSeed; -use super::{ - add_network_for_main, get_long_running_ip, long_running_docker, remove_network_for_main, - GetRunningIp, -}; -use crate::procedure::docker::DockerContainer; -use crate::util::NonDetachingJoinHandle; -use crate::Error; - -/// Persistant container are the old containers that need to run all the time -/// The goal is that all services will be persistent containers, waiting to run the main system. 
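// Illustrative sketch (not part of this change): the persistent-container task deleted below
// is essentially a small supervision loop -- run the container, and on failure log the error,
// back off briefly, and try again; a clean exit ends the loop. The generic shape, using a
// hypothetical `supervise` helper and the same 200ms backoff as the deleted code.
use std::future::Future;
use std::time::Duration;

async fn supervise<F, Fut, E>(mut run: F)
where
    F: FnMut() -> Fut,
    Fut: Future<Output = Result<(), E>>,
    E: std::fmt::Debug,
{
    loop {
        match run().await {
            Ok(()) => break, // clean exit: stop supervising
            Err(e) => {
                tracing::error!("Error in persistent container: {:?}", e);
                tokio::time::sleep(Duration::from_millis(200)).await;
            }
        }
    }
}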
-pub struct PersistentContainer { - _running_docker: NonDetachingJoinHandle<()>, - pub rpc_client: Receiver>, -} - -impl PersistentContainer { - #[instrument(skip_all)] - pub async fn init(seed: &Arc) -> Result, Error> { - Ok(if let Some(containers) = &seed.manifest.containers { - let (running_docker, rpc_client) = - spawn_persistent_container(seed.clone(), containers.main.clone()).await?; - Some(Self { - _running_docker: running_docker, - rpc_client, - }) - } else { - None - }) - } - - pub fn rpc_client(&self) -> Arc { - self.rpc_client.borrow().clone() - } -} - -pub async fn spawn_persistent_container( - seed: Arc, - container: DockerContainer, -) -> Result<(NonDetachingJoinHandle<()>, Receiver>), Error> { - let (send_inserter, inserter) = oneshot::channel(); - Ok(( - tokio::task::spawn(async move { - let mut inserter_send: Option>> = None; - let mut send_inserter: Option>>> = Some(send_inserter); - loop { - if let Err(e) = async { - let (mut runtime, inserter) = - long_running_docker(&seed, &container).await?; - - - let ip = match get_long_running_ip(&seed, &mut runtime).await { - GetRunningIp::Ip(x) => x, - GetRunningIp::Error(e) => return Err(e), - GetRunningIp::EarlyExit(e) => { - tracing::error!("Early Exit"); - tracing::debug!("{:?}", e); - return Ok(()); - } - }; - let svc = add_network_for_main(&seed, ip).await?; - - if let Some(inserter_send) = inserter_send.as_mut() { - let _ = inserter_send.send(Arc::new(inserter)); - } else { - let (s, r) = watch::channel(Arc::new(inserter)); - inserter_send = Some(s); - if let Some(send_inserter) = send_inserter.take() { - let _ = send_inserter.send(r); - } - } - - let res = tokio::select! { - a = runtime.running_output => a.map_err(|_| Error::new(eyre!("Manager runtime panicked!"), crate::ErrorKind::Docker)).map(|_| ()), - }; - - remove_network_for_main(svc).await?; - - res - }.await { - tracing::error!("Error in persistent container: {}", e); - tracing::debug!("{:?}", e); - } else { - break; - } - tokio::time::sleep(Duration::from_millis(200)).await; - } - }) - .into(), - inserter.await.map_err(|_| Error::new(eyre!("Container handle dropped before inserter sent"), crate::ErrorKind::Unknown))?, - )) -} diff --git a/core/startos/src/manager/transition_state.rs b/core/startos/src/manager/transition_state.rs deleted file mode 100644 index 122c0f7035..0000000000 --- a/core/startos/src/manager/transition_state.rs +++ /dev/null @@ -1,35 +0,0 @@ -use helpers::NonDetachingJoinHandle; - -/// Used only in the manager/mod and is used to keep track of the state of the manager during the -/// transitional states -pub(super) enum TransitionState { - BackingUp(NonDetachingJoinHandle<()>), - Restarting(NonDetachingJoinHandle<()>), - None, -} - -impl TransitionState { - pub(super) fn take(&mut self) -> Self { - std::mem::take(self) - } - pub(super) fn into_join_handle(self) -> Option> { - Some(match self { - TransitionState::BackingUp(a) => a, - TransitionState::Restarting(a) => a, - TransitionState::None => return None, - }) - } - pub(super) async fn abort(&mut self) { - if let Some(s) = self.take().into_join_handle() { - if s.wait_for_abort().await.is_ok() { - tracing::trace!("transition completed before abort"); - } - } - } -} - -impl Default for TransitionState { - fn default() -> Self { - TransitionState::None - } -} diff --git a/core/startos/src/middleware/auth.rs b/core/startos/src/middleware/auth.rs index 611923ad60..30ae56744c 100644 --- a/core/startos/src/middleware/auth.rs +++ b/core/startos/src/middleware/auth.rs @@ -1,33 +1,38 @@ use 
std::borrow::Borrow; +use std::collections::BTreeSet; +use std::ops::Deref; use std::sync::Arc; use std::time::{Duration, Instant}; +use axum::extract::Request; +use axum::response::Response; use basic_cookies::Cookie; +use chrono::Utc; use color_eyre::eyre::eyre; use digest::Digest; -use futures::future::BoxFuture; -use futures::FutureExt; -use http::StatusCode; -use rpc_toolkit::command_helpers::prelude::RequestParts; -use rpc_toolkit::hyper::header::COOKIE; -use rpc_toolkit::hyper::http::Error as HttpError; -use rpc_toolkit::hyper::{Body, Request, Response}; -use rpc_toolkit::rpc_server_helpers::{ - noop4, to_response, DynMiddleware, DynMiddlewareStage2, DynMiddlewareStage3, -}; -use rpc_toolkit::yajrc::RpcMethod; -use rpc_toolkit::Metadata; +use helpers::const_true; +use http::header::{COOKIE, USER_AGENT}; +use http::HeaderValue; +use imbl_value::InternedString; +use rpc_toolkit::yajrc::INTERNAL_ERROR; +use rpc_toolkit::{Middleware, RpcRequest, RpcResponse}; use serde::{Deserialize, Serialize}; use sha2::Sha256; use tokio::sync::Mutex; use crate::context::RpcContext; -use crate::{Error, ResultExt}; +use crate::prelude::*; pub const LOCAL_AUTH_COOKIE_PATH: &str = "/run/embassy/rpc.authcookie"; +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct LoginRes { + pub session: InternedString, +} + pub trait AsLogoutSessionId { - fn as_logout_session_id(self) -> String; + fn as_logout_session_id(self) -> InternedString; } /// Will need to know when we have logged out from a route @@ -36,37 +41,56 @@ pub struct HasLoggedOutSessions(()); impl HasLoggedOutSessions { pub async fn new( - logged_out_sessions: impl IntoIterator, + sessions: impl IntoIterator, ctx: &RpcContext, ) -> Result { - let mut open_authed_websockets = ctx.open_authed_websockets.lock().await; - let mut sqlx_conn = ctx.secret_store.acquire().await?; - for session in logged_out_sessions { - let session = session.as_logout_session_id(); - sqlx::query!( - "UPDATE session SET logged_out = CURRENT_TIMESTAMP WHERE id = $1", - session - ) - .execute(sqlx_conn.as_mut()) + let to_log_out: BTreeSet<_> = sessions + .into_iter() + .map(|s| s.as_logout_session_id()) + .collect(); + ctx.open_authed_websockets + .lock() + .await + .retain(|session, sockets| { + if to_log_out.contains(session.hashed()) { + for socket in std::mem::take(sockets) { + let _ = socket.send(()); + } + false + } else { + true + } + }); + ctx.db + .mutate(|db| { + let sessions = db.as_private_mut().as_sessions_mut(); + for sid in &to_log_out { + sessions.remove(sid)?; + } + + Ok(()) + }) .await?; - for socket in open_authed_websockets.remove(&session).unwrap_or_default() { - let _ = socket.send(()); - } - } Ok(HasLoggedOutSessions(())) } } /// Used when we need to know that we have logged in with a valid user -#[derive(Clone, Copy)] -pub struct HasValidSession(()); +#[derive(Clone)] +pub struct HasValidSession(SessionType); + +#[derive(Clone)] +enum SessionType { + Local, + Session(HashSessionToken), +} impl HasValidSession { - pub async fn from_request_parts( - request_parts: &RequestParts, + pub async fn from_header( + header: Option<&HeaderValue>, ctx: &RpcContext, ) -> Result { - if let Some(cookie_header) = request_parts.headers.get(COOKIE) { + if let Some(cookie_header) = header { let cookies = Cookie::parse( cookie_header .to_str() @@ -79,7 +103,7 @@ impl HasValidSession { } } if let Some(cookie) = cookies.iter().find(|c| c.get_name() == "session") { - if let Ok(s) = Self::from_session(&HashSessionToken::from_cookie(cookie), 
ctx).await + if let Ok(s) = Self::from_session(HashSessionToken::from_cookie(cookie), ctx).await { return Ok(s); } @@ -91,24 +115,32 @@ impl HasValidSession { )) } - pub async fn from_session(session: &HashSessionToken, ctx: &RpcContext) -> Result { - let session_hash = session.hashed(); - let session = sqlx::query!("UPDATE session SET last_active = CURRENT_TIMESTAMP WHERE id = $1 AND logged_out IS NULL OR logged_out > CURRENT_TIMESTAMP", session_hash) - .execute(ctx.secret_store.acquire().await?.as_mut()) + pub async fn from_session( + session_token: HashSessionToken, + ctx: &RpcContext, + ) -> Result { + let session_hash = session_token.hashed(); + ctx.db + .mutate(|db| { + db.as_private_mut() + .as_sessions_mut() + .as_idx_mut(session_hash) + .ok_or_else(|| { + Error::new(eyre!("UNAUTHORIZED"), crate::ErrorKind::Authorization) + })? + .mutate(|s| { + s.last_active = Utc::now(); + Ok(()) + }) + }) .await?; - if session.rows_affected() == 0 { - return Err(Error::new( - eyre!("UNAUTHORIZED"), - crate::ErrorKind::Authorization, - )); - } - Ok(Self(())) + Ok(Self(SessionType::Session(session_token))) } pub async fn from_local(local: &Cookie<'_>) -> Result { let token = tokio::fs::read_to_string(LOCAL_AUTH_COOKIE_PATH).await?; if local.get_value() == &*token { - Ok(Self(())) + Ok(Self(SessionType::Local)) } else { Err(Error::new( eyre!("UNAUTHORIZED"), @@ -122,27 +154,31 @@ impl HasValidSession { /// Or when we are using internal valid authenticated service. #[derive(Debug, Clone)] pub struct HashSessionToken { - hashed: String, - token: String, + hashed: InternedString, + token: InternedString, } impl HashSessionToken { pub fn new() -> Self { - let token = base32::encode( - base32::Alphabet::RFC4648 { padding: false }, - &rand::random::<[u8; 16]>(), - ) - .to_lowercase(); - let hashed = Self::hash(&token); + Self::from_token(InternedString::intern( + base32::encode( + base32::Alphabet::RFC4648 { padding: false }, + &rand::random::<[u8; 16]>(), + ) + .to_lowercase(), + )) + } + + pub fn from_token(token: InternedString) -> Self { + let hashed = Self::hash(&*token); Self { hashed, token } } + pub fn from_cookie(cookie: &Cookie) -> Self { - let token = cookie.get_value().to_owned(); - let hashed = Self::hash(&token); - Self { hashed, token } + Self::from_token(InternedString::intern(cookie.get_value())) } - pub fn from_request_parts(request_parts: &RequestParts) -> Result { - if let Some(cookie_header) = request_parts.headers.get(COOKIE) { + pub fn from_header(header: Option<&HeaderValue>) -> Result { + if let Some(cookie_header) = header { let cookies = Cookie::parse( cookie_header .to_str() @@ -159,33 +195,30 @@ impl HashSessionToken { )) } - pub fn header_value(&self) -> Result { - http::HeaderValue::from_str(&format!( - "session={}; Path=/; SameSite=Lax; Expires=Fri, 31 Dec 9999 23:59:59 GMT;", - self.token - )) - .with_kind(crate::ErrorKind::Unknown) + pub fn to_login_res(&self) -> LoginRes { + LoginRes { + session: self.token.clone(), + } } - pub fn hashed(&self) -> &str { - self.hashed.as_str() + pub fn hashed(&self) -> &InternedString { + &self.hashed } - pub fn as_hash(self) -> String { - self.hashed - } - fn hash(token: &str) -> String { + fn hash(token: &str) -> InternedString { let mut hasher = Sha256::new(); hasher.update(token.as_bytes()); - base32::encode( - base32::Alphabet::RFC4648 { padding: false }, - hasher.finalize().as_slice(), + InternedString::intern( + base32::encode( + base32::Alphabet::RFC4648 { padding: false }, + hasher.finalize().as_slice(), + ) + .to_lowercase(), 
) - .to_lowercase() } } impl AsLogoutSessionId for HashSessionToken { - fn as_logout_session_id(self) -> String { + fn as_logout_session_id(self) -> InternedString { self.hashed } } @@ -205,80 +238,128 @@ impl Ord for HashSessionToken { self.hashed.cmp(&other.hashed) } } -impl Borrow for HashSessionToken { - fn borrow(&self) -> &String { - &self.hashed +impl Borrow for HashSessionToken { + fn borrow(&self) -> &str { + &*self.hashed } } -pub fn auth(ctx: RpcContext) -> DynMiddleware { - let rate_limiter = Arc::new(Mutex::new((0_usize, Instant::now()))); - Box::new( - move |req: &mut Request, - metadata: M| - -> BoxFuture>, HttpError>> { - let ctx = ctx.clone(); - let rate_limiter = rate_limiter.clone(); - async move { - let mut header_stub = Request::new(Body::empty()); - *header_stub.headers_mut() = req.headers().clone(); - let m2: DynMiddlewareStage2 = Box::new(move |req, rpc_req| { - async move { - if let Err(e) = HasValidSession::from_request_parts(req, &ctx).await { - if metadata - .get(rpc_req.method.as_str(), "authenticated") - .unwrap_or(true) - { - let (res_parts, _) = Response::new(()).into_parts(); - return Ok(Err(to_response( - &req.headers, - res_parts, - Err(e.into()), - |_| StatusCode::OK, - )?)); - } else if rpc_req.method.as_str() == "auth.login" { - let guard = rate_limiter.lock().await; - if guard.1.elapsed() < Duration::from_secs(20) { - if guard.0 >= 3 { - let (res_parts, _) = Response::new(()).into_parts(); - return Ok(Err(to_response( - &req.headers, - res_parts, - Err(Error::new( - eyre!( - "Please limit login attempts to 3 per 20 seconds." - ), - crate::ErrorKind::RateLimited, - ) - .into()), - |_| StatusCode::OK, - )?)); - } - } - } - } - let m3: DynMiddlewareStage3 = Box::new(move |_, res| { - async move { - let mut guard = rate_limiter.lock().await; - if guard.1.elapsed() < Duration::from_secs(20) { - if res.is_err() { - guard.0 += 1; - } - } else { - guard.0 = 0; - } - guard.1 = Instant::now(); - Ok(Ok(noop4())) - } - .boxed() - }); - Ok(Ok(m3)) - } - .boxed() +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Metadata { + #[serde(default = "const_true")] + authenticated: bool, + #[serde(default)] + login: bool, + #[serde(default)] + get_session: bool, +} + +#[derive(Clone)] +pub struct Auth { + rate_limiter: Arc>, + cookie: Option, + is_login: bool, + set_cookie: Option, + user_agent: Option, +} +impl Auth { + pub fn new() -> Self { + Self { + rate_limiter: Arc::new(Mutex::new((0, Instant::now()))), + cookie: None, + is_login: false, + set_cookie: None, + user_agent: None, + } + } +} +#[async_trait::async_trait] +impl Middleware for Auth { + type Metadata = Metadata; + async fn process_http_request( + &mut self, + _: &RpcContext, + request: &mut Request, + ) -> Result<(), Response> { + self.cookie = request.headers_mut().remove(COOKIE); + self.user_agent = request.headers_mut().remove(USER_AGENT); + Ok(()) + } + async fn process_rpc_request( + &mut self, + context: &RpcContext, + metadata: Self::Metadata, + request: &mut RpcRequest, + ) -> Result<(), RpcResponse> { + if metadata.login { + self.is_login = true; + let guard = self.rate_limiter.lock().await; + if guard.1.elapsed() < Duration::from_secs(20) && guard.0 >= 3 { + return Err(RpcResponse { + id: request.id.take(), + result: Err(Error::new( + eyre!("Please limit login attempts to 3 per 20 seconds."), + crate::ErrorKind::RateLimited, + ) + .into()), }); - Ok(Ok(m2)) } - .boxed() - }, - ) + if let Some(user_agent) = self.user_agent.as_ref().and_then(|h| h.to_str().ok()) { + 
request.params["user-agent"] = Value::String(Arc::new(user_agent.to_owned())) + // TODO: will this panic? + } + } else if metadata.authenticated { + match HasValidSession::from_header(self.cookie.as_ref(), &context).await { + Err(e) => { + return Err(RpcResponse { + id: request.id.take(), + result: Err(e.into()), + }) + } + Ok(HasValidSession(SessionType::Session(s))) if metadata.get_session => { + request.params["session"] = + Value::String(Arc::new(s.hashed().deref().to_owned())); + // TODO: will this panic? + } + _ => (), + } + } + Ok(()) + } + async fn process_rpc_response(&mut self, _: &RpcContext, response: &mut RpcResponse) { + if self.is_login { + let mut guard = self.rate_limiter.lock().await; + if guard.1.elapsed() < Duration::from_secs(20) { + if response.result.is_err() { + guard.0 += 1; + } + } else { + guard.0 = 0; + } + guard.1 = Instant::now(); + if response.result.is_ok() { + let res = std::mem::replace(&mut response.result, Err(INTERNAL_ERROR)); + response.result = async { + let res = res?; + let login_res = from_value::(res.clone())?; + self.set_cookie = Some( + HeaderValue::from_str(&format!( + "session={}; Path=/; SameSite=Lax; Expires=Fri, 31 Dec 9999 23:59:59 GMT;", + login_res.session + )) + .with_kind(crate::ErrorKind::Network)?, + ); + + Ok(res) + } + .await; + } + } + } + async fn process_http_response(&mut self, _: &RpcContext, response: &mut Response) { + if let Some(set_cookie) = self.set_cookie.take() { + response.headers_mut().insert("set-cookie", set_cookie); + } + } } diff --git a/core/startos/src/middleware/cors.rs b/core/startos/src/middleware/cors.rs index 5f33bc08d5..60a472cdd7 100644 --- a/core/startos/src/middleware/cors.rs +++ b/core/startos/src/middleware/cors.rs @@ -1,61 +1,63 @@ -use futures::FutureExt; -use http::HeaderValue; -use hyper::header::HeaderMap; -use rpc_toolkit::hyper::http::Error as HttpError; -use rpc_toolkit::hyper::{Body, Method, Request, Response}; -use rpc_toolkit::rpc_server_helpers::{ - DynMiddlewareStage2, DynMiddlewareStage3, DynMiddlewareStage4, -}; -use rpc_toolkit::Metadata; +use axum::extract::Request; +use axum::response::Response; +use http::{HeaderMap, HeaderValue}; +use rpc_toolkit::{Empty, Middleware}; -fn get_cors_headers(req: &Request) -> HeaderMap { - let mut res = HeaderMap::new(); - if let Some(origin) = req.headers().get("Origin") { - res.insert("Access-Control-Allow-Origin", origin.clone()); - } - if let Some(method) = req.headers().get("Access-Control-Request-Method") { - res.insert("Access-Control-Allow-Methods", method.clone()); +#[derive(Clone)] +pub struct Cors { + headers: HeaderMap, +} +impl Cors { + pub fn new() -> Self { + let mut headers = HeaderMap::new(); + headers.insert( + "Access-Control-Allow-Credentials", + HeaderValue::from_static("true"), + ); + Self { headers } } - if let Some(headers) = req.headers().get("Access-Control-Request-Headers") { - res.insert("Access-Control-Allow-Headers", headers.clone()); + fn get_cors_headers(&mut self, req: &Request) { + if let Some(origin) = req.headers().get("Origin") { + self.headers + .insert("Access-Control-Allow-Origin", origin.clone()); + } else { + self.headers + .insert("Access-Control-Allow-Origin", HeaderValue::from_static("*")); + } + if let Some(method) = req.headers().get("Access-Control-Request-Method") { + self.headers + .insert("Access-Control-Allow-Methods", method.clone()); + } else { + self.headers.insert( + "Access-Control-Allow-Methods", + HeaderValue::from_static("*"), + ); + } + if let Some(headers) = 
req.headers().get("Access-Control-Request-Headers") { + self.headers + .insert("Access-Control-Allow-Headers", headers.clone()); + } else { + self.headers.insert( + "Access-Control-Allow-Headers", + HeaderValue::from_static("*"), + ); + } } - res.insert( - "Access-Control-Allow-Credentials", - HeaderValue::from_static("true"), - ); - res } - -pub async fn cors( - req: &mut Request, - _metadata: M, -) -> Result>, HttpError> { - let headers = get_cors_headers(req); - if req.method() == Method::OPTIONS { - Ok(Err({ - let mut res = Response::new(Body::empty()); - res.headers_mut().extend(headers.into_iter()); - res - })) - } else { - Ok(Ok(Box::new(|_, _| { - async move { - let res: DynMiddlewareStage3 = Box::new(|_, _| { - async move { - let res: DynMiddlewareStage4 = Box::new(|res| { - async move { - res.headers_mut().extend(headers.into_iter()); - Ok::<_, HttpError>(()) - } - .boxed() - }); - Ok::<_, HttpError>(Ok(res)) - } - .boxed() - }); - Ok::<_, HttpError>(Ok(res)) - } - .boxed() - }))) +#[async_trait::async_trait] +impl Middleware for Cors { + type Metadata = Empty; + async fn process_http_request( + &mut self, + _: &Context, + request: &mut Request, + ) -> Result<(), Response> { + self.get_cors_headers(request); + Ok(()) + } + async fn process_http_response(&mut self, _: &Context, response: &mut Response) { + response + .headers_mut() + .extend(std::mem::take(&mut self.headers)) } } diff --git a/core/startos/src/middleware/db.rs b/core/startos/src/middleware/db.rs index c3ceadda6a..b8cdaa2314 100644 --- a/core/startos/src/middleware/db.rs +++ b/core/startos/src/middleware/db.rs @@ -1,50 +1,54 @@ -use futures::future::BoxFuture; -use futures::FutureExt; +use axum::response::Response; +use http::header::InvalidHeaderValue; use http::HeaderValue; -use rpc_toolkit::hyper::http::Error as HttpError; -use rpc_toolkit::hyper::{Body, Request, Response}; -use rpc_toolkit::rpc_server_helpers::{ - noop4, DynMiddleware, DynMiddlewareStage2, DynMiddlewareStage3, -}; -use rpc_toolkit::yajrc::RpcMethod; -use rpc_toolkit::Metadata; +use rpc_toolkit::{Middleware, RpcRequest, RpcResponse}; +use serde::Deserialize; use crate::context::RpcContext; -pub fn db(ctx: RpcContext) -> DynMiddleware { - Box::new( - move |_: &mut Request, - metadata: M| - -> BoxFuture>, HttpError>> { - let ctx = ctx.clone(); - async move { - let m2: DynMiddlewareStage2 = Box::new(move |_req, rpc_req| { - async move { - let sync_db = metadata - .get(rpc_req.method.as_str(), "sync_db") - .unwrap_or(false); +#[derive(Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Metadata { + #[serde(default)] + sync_db: bool, +} + +#[derive(Clone)] +pub struct SyncDb { + sync_db: bool, +} +impl SyncDb { + pub fn new() -> Self { + SyncDb { sync_db: false } + } +} - let m3: DynMiddlewareStage3 = Box::new(move |res, _| { - async move { - if sync_db { - res.headers.append( - "X-Patch-Sequence", - HeaderValue::from_str( - &ctx.db.sequence().await.to_string(), - )?, - ); - } - Ok(Ok(noop4())) - } - .boxed() - }); - Ok(Ok(m3)) - } - .boxed() - }); - Ok(Ok(m2)) +#[async_trait::async_trait] +impl Middleware for SyncDb { + type Metadata = Metadata; + async fn process_rpc_request( + &mut self, + _: &RpcContext, + metadata: Self::Metadata, + _: &mut RpcRequest, + ) -> Result<(), RpcResponse> { + self.sync_db = metadata.sync_db; + Ok(()) + } + async fn process_http_response(&mut self, context: &RpcContext, response: &mut Response) { + if let Err(e) = async { + if self.sync_db { + response.headers_mut().append( + "X-Patch-Sequence", + 
HeaderValue::from_str(&context.db.sequence().await.to_string())?, + ); } - .boxed() - }, - ) + Ok::<_, InvalidHeaderValue>(()) + } + .await + { + tracing::error!("error writing X-Patch-Sequence header: {e}"); + tracing::debug!("{e:?}"); + } + } } diff --git a/core/startos/src/middleware/diagnostic.rs b/core/startos/src/middleware/diagnostic.rs index 959b8ea2da..f779d632f6 100644 --- a/core/startos/src/middleware/diagnostic.rs +++ b/core/startos/src/middleware/diagnostic.rs @@ -1,39 +1,43 @@ -use futures::FutureExt; -use rpc_toolkit::hyper::http::Error as HttpError; -use rpc_toolkit::hyper::{Body, Request, Response}; -use rpc_toolkit::rpc_server_helpers::{noop4, DynMiddlewareStage2, DynMiddlewareStage3}; use rpc_toolkit::yajrc::RpcMethod; -use rpc_toolkit::Metadata; +use rpc_toolkit::{Empty, Middleware, RpcRequest, RpcResponse}; -use crate::Error; +use crate::context::DiagnosticContext; +use crate::prelude::*; -pub async fn diagnostic( - _req: &mut Request, - _metadata: M, -) -> Result>, HttpError> { - Ok(Ok(Box::new(|_, rpc_req| { - let method = rpc_req.method.as_str().to_owned(); - async move { - let res: DynMiddlewareStage3 = Box::new(|_, rpc_res| { - async move { - if let Err(e) = rpc_res { - if e.code == -32601 { - *e = Error::new( - color_eyre::eyre::eyre!( - "{} is not available on the Diagnostic API", - method - ), - crate::ErrorKind::DiagnosticMode, - ) - .into(); - } - } - Ok(Ok(noop4())) - } - .boxed() - }); - Ok::<_, HttpError>(Ok(res)) +#[derive(Clone)] +pub struct DiagnosticMode { + method: Option, +} +impl DiagnosticMode { + pub fn new() -> Self { + Self { method: None } + } +} + +#[async_trait::async_trait] +impl Middleware for DiagnosticMode { + type Metadata = Empty; + async fn process_rpc_request( + &mut self, + _: &DiagnosticContext, + _: Self::Metadata, + request: &mut RpcRequest, + ) -> Result<(), RpcResponse> { + self.method = Some(request.method.as_str().to_owned()); + Ok(()) + } + async fn process_rpc_response(&mut self, _: &DiagnosticContext, response: &mut RpcResponse) { + if let Err(e) = &mut response.result { + if e.code == -32601 { + *e = Error::new( + eyre!( + "{} is not available on the Diagnostic API", + self.method.as_ref().map(|s| s.as_str()).unwrap_or_default() + ), + crate::ErrorKind::DiagnosticMode, + ) + .into(); + } } - .boxed() - }))) + } } diff --git a/core/startos/src/middleware/encrypt.rs b/core/startos/src/middleware/encrypt.rs deleted file mode 100644 index 94167b7e29..0000000000 --- a/core/startos/src/middleware/encrypt.rs +++ /dev/null @@ -1,115 +0,0 @@ -use aes::cipher::{CipherKey, NewCipher, Nonce, StreamCipher}; -use aes::Aes256Ctr; -use hmac::Hmac; -use josekit::jwk::Jwk; -use serde::{Deserialize, Serialize}; -use sha2::Sha256; -use tracing::instrument; - -pub fn pbkdf2(password: impl AsRef<[u8]>, salt: impl AsRef<[u8]>) -> CipherKey { - let mut aeskey = CipherKey::::default(); - pbkdf2::pbkdf2::>( - password.as_ref(), - salt.as_ref(), - 1000, - aeskey.as_mut_slice(), - ) - .unwrap(); - aeskey -} - -pub fn encrypt_slice(input: impl AsRef<[u8]>, password: impl AsRef<[u8]>) -> Vec { - let prefix: [u8; 32] = rand::random(); - let aeskey = pbkdf2(password.as_ref(), &prefix[16..]); - let ctr = Nonce::::from_slice(&prefix[..16]); - let mut aes = Aes256Ctr::new(&aeskey, ctr); - let mut res = Vec::with_capacity(32 + input.as_ref().len()); - res.extend_from_slice(&prefix[..]); - res.extend_from_slice(input.as_ref()); - aes.apply_keystream(&mut res[32..]); - res -} - -pub fn decrypt_slice(input: impl AsRef<[u8]>, password: impl AsRef<[u8]>) -> Vec { 
- if input.as_ref().len() < 32 { - return Vec::new(); - } - let (prefix, rest) = input.as_ref().split_at(32); - let aeskey = pbkdf2(password.as_ref(), &prefix[16..]); - let ctr = Nonce::::from_slice(&prefix[..16]); - let mut aes = Aes256Ctr::new(&aeskey, ctr); - let mut res = rest.to_vec(); - aes.apply_keystream(&mut res); - res -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct EncryptedWire { - encrypted: serde_json::Value, -} -impl EncryptedWire { - #[instrument(skip_all)] - pub fn decrypt(self, current_secret: impl AsRef) -> Option { - let current_secret = current_secret.as_ref(); - - let decrypter = match josekit::jwe::alg::ecdh_es::EcdhEsJweAlgorithm::EcdhEs - .decrypter_from_jwk(current_secret) - { - Ok(a) => a, - Err(e) => { - tracing::warn!("Could not setup awk"); - tracing::debug!("{:?}", e); - return None; - } - }; - let encrypted = match serde_json::to_string(&self.encrypted) { - Ok(a) => a, - Err(e) => { - tracing::warn!("Could not deserialize"); - tracing::debug!("{:?}", e); - - return None; - } - }; - let (decoded, _) = match josekit::jwe::deserialize_json(&encrypted, &decrypter) { - Ok(a) => a, - Err(e) => { - tracing::warn!("Could not decrypt"); - tracing::debug!("{:?}", e); - return None; - } - }; - match String::from_utf8(decoded) { - Ok(a) => Some(a), - Err(e) => { - tracing::warn!("Could not decrypt into utf8"); - tracing::debug!("{:?}", e); - return None; - } - } - } -} - -/// We created this test by first making the private key, then restoring from this private key for recreatability. -/// After this the frontend then encoded an password, then we are testing that the output that we got (hand coded) -/// will be the shape we want. -#[test] -fn test_gen_awk() { - let private_key: Jwk = serde_json::from_str( - r#"{ - "kty": "EC", - "crv": "P-256", - "d": "3P-MxbUJtEhdGGpBCRFXkUneGgdyz_DGZWfIAGSCHOU", - "x": "yHTDYSfjU809fkSv9MmN4wuojf5c3cnD7ZDN13n-jz4", - "y": "8Mpkn744A5KDag0DmX2YivB63srjbugYZzWc3JOpQXI" - }"#, - ) - .unwrap(); - let encrypted: EncryptedWire = serde_json::from_str(r#"{ - "encrypted": { "protected": "eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiRUNESC1FUyIsImtpZCI6ImgtZnNXUVh2Tm95dmJEazM5dUNsQ0NUdWc5N3MyZnJockJnWUVBUWVtclUiLCJlcGsiOnsia3R5IjoiRUMiLCJjcnYiOiJQLTI1NiIsIngiOiJmRkF0LXNWYWU2aGNkdWZJeUlmVVdUd3ZvWExaTkdKRHZIWVhIckxwOXNNIiwieSI6IjFvVFN6b00teHlFZC1SLUlBaUFHdXgzS1dJZmNYZHRMQ0JHLUh6MVkzY2sifX0", "iv": "NbwvfvWOdLpZfYRIZUrkcw", "ciphertext": "Zc5Br5kYOlhPkIjQKOLMJw", "tag": "EPoch52lDuCsbUUulzZGfg" } - }"#).unwrap(); - assert_eq!( - "testing12345", - &encrypted.decrypt(std::sync::Arc::new(private_key)).unwrap() - ); -} diff --git a/core/startos/src/middleware/mod.rs b/core/startos/src/middleware/mod.rs index 5af2b8121c..3af0cb5a45 100644 --- a/core/startos/src/middleware/mod.rs +++ b/core/startos/src/middleware/mod.rs @@ -2,4 +2,3 @@ pub mod auth; pub mod cors; pub mod db; pub mod diagnostic; -pub mod encrypt; diff --git a/core/startos/src/migration.rs b/core/startos/src/migration.rs deleted file mode 100644 index 13f14c7c3c..0000000000 --- a/core/startos/src/migration.rs +++ /dev/null @@ -1,141 +0,0 @@ -use std::collections::BTreeSet; - -use color_eyre::eyre::eyre; -use emver::VersionRange; -use futures::{Future, FutureExt}; -use indexmap::IndexMap; -use models::ImageId; -use patch_db::HasModel; -use serde::{Deserialize, Serialize}; -use tracing::instrument; - -use crate::context::RpcContext; -use crate::prelude::*; -use crate::procedure::docker::DockerContainers; -use crate::procedure::{PackageProcedure, ProcedureName}; -use 
crate::s9pk::manifest::PackageId; -use crate::util::Version; -use crate::volume::Volumes; -use crate::{Error, ResultExt}; - -#[derive(Clone, Debug, Default, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct Migrations { - pub from: IndexMap, - pub to: IndexMap, -} -impl Migrations { - #[instrument(skip_all)] - pub fn validate( - &self, - _container: &Option, - eos_version: &Version, - volumes: &Volumes, - image_ids: &BTreeSet, - ) -> Result<(), Error> { - for (version, migration) in &self.from { - migration - .validate(eos_version, volumes, image_ids, true) - .with_ctx(|_| { - ( - crate::ErrorKind::ValidateS9pk, - format!("Migration from {}", version), - ) - })?; - } - for (version, migration) in &self.to { - migration - .validate(eos_version, volumes, image_ids, true) - .with_ctx(|_| { - ( - crate::ErrorKind::ValidateS9pk, - format!("Migration to {}", version), - ) - })?; - } - Ok(()) - } - - #[instrument(skip_all)] - pub fn from<'a>( - &'a self, - _container: &'a Option, - ctx: &'a RpcContext, - version: &'a Version, - pkg_id: &'a PackageId, - pkg_version: &'a Version, - volumes: &'a Volumes, - ) -> Option> + 'a> { - if let Some((_, migration)) = self - .from - .iter() - .find(|(range, _)| version.satisfies(*range)) - { - Some(async move { - migration - .execute( - ctx, - pkg_id, - pkg_version, - ProcedureName::Migration, // Migrations cannot be executed concurrently - volumes, - Some(version), - None, - ) - .map(|r| { - r.and_then(|r| { - r.map_err(|e| { - Error::new(eyre!("{}", e.1), crate::ErrorKind::MigrationFailed) - }) - }) - }) - .await - }) - } else { - None - } - } - - #[instrument(skip_all)] - pub fn to<'a>( - &'a self, - ctx: &'a RpcContext, - version: &'a Version, - pkg_id: &'a PackageId, - pkg_version: &'a Version, - volumes: &'a Volumes, - ) -> Option> + 'a> { - if let Some((_, migration)) = self.to.iter().find(|(range, _)| version.satisfies(*range)) { - Some(async move { - migration - .execute( - ctx, - pkg_id, - pkg_version, - ProcedureName::Migration, - volumes, - Some(version), - None, - ) - .map(|r| { - r.and_then(|r| { - r.map_err(|e| { - Error::new(eyre!("{}", e.1), crate::ErrorKind::MigrationFailed) - }) - }) - }) - .await - }) - } else { - None - } - } -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct MigrationRes { - pub configured: bool, -} diff --git a/core/startos/src/net/dhcp.rs b/core/startos/src/net/dhcp.rs index cbe7ff19d7..f36df0e289 100644 --- a/core/startos/src/net/dhcp.rs +++ b/core/startos/src/net/dhcp.rs @@ -1,15 +1,16 @@ use std::collections::{BTreeMap, BTreeSet}; use std::net::IpAddr; +use clap::Parser; use futures::TryStreamExt; -use rpc_toolkit::command; +use rpc_toolkit::{from_fn_async, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; use tokio::sync::RwLock; -use crate::context::RpcContext; -use crate::db::model::IpInfo; +use crate::context::{CliContext, RpcContext}; +use crate::db::model::public::IpInfo; use crate::net::utils::{iface_is_physical, list_interfaces}; use crate::prelude::*; -use crate::util::display_none; use crate::Error; lazy_static::lazy_static! 
{ @@ -50,18 +51,32 @@ pub async fn init_ips() -> Result, Error> { Ok(res) } -#[command(subcommands(update))] -pub async fn dhcp() -> Result<(), Error> { - Ok(()) +// #[command(subcommands(update))] +pub fn dhcp() -> ParentHandler { + ParentHandler::new().subcommand( + "update", + from_fn_async::<_, _, (), Error, (RpcContext, UpdateParams)>(update) + .no_display() + .with_remote_cli::(), + ) +} +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct UpdateParams { + interface: String, } -#[command(display(display_none))] -pub async fn update(#[context] ctx: RpcContext, #[arg] interface: String) -> Result<(), Error> { +pub async fn update( + ctx: RpcContext, + UpdateParams { interface }: UpdateParams, +) -> Result<(), Error> { if iface_is_physical(&interface).await { let ip_info = IpInfo::for_interface(&interface).await?; ctx.db .mutate(|db| { - db.as_server_info_mut() + db.as_public_mut() + .as_server_info_mut() .as_ip_info_mut() .insert(&interface, &ip_info) }) diff --git a/core/startos/src/net/dns.rs b/core/startos/src/net/dns.rs index 7b2784a502..ba69b6c167 100644 --- a/core/startos/src/net/dns.rs +++ b/core/startos/src/net/dns.rs @@ -18,6 +18,7 @@ use trust_dns_server::proto::rr::{Name, Record, RecordType}; use trust_dns_server::server::{Request, RequestHandler, ResponseHandler, ResponseInfo}; use trust_dns_server::ServerFuture; +use crate::net::forward::START9_BRIDGE_IFACE; use crate::util::Invoke; use crate::{Error, ErrorKind, ResultExt}; @@ -163,13 +164,13 @@ impl DnsController { Command::new("resolvectl") .arg("dns") - .arg("br-start9") + .arg(START9_BRIDGE_IFACE) .arg("127.0.0.1") .invoke(ErrorKind::Network) .await?; Command::new("resolvectl") .arg("domain") - .arg("br-start9") + .arg(START9_BRIDGE_IFACE) .arg("embassy") .invoke(ErrorKind::Network) .await?; diff --git a/core/startos/src/net/forward.rs b/core/startos/src/net/forward.rs new file mode 100644 index 0000000000..e954bc36a5 --- /dev/null +++ b/core/startos/src/net/forward.rs @@ -0,0 +1,177 @@ +use std::collections::BTreeMap; +use std::net::SocketAddr; +use std::sync::{Arc, Weak}; + +use id_pool::IdPool; +use serde::{Deserialize, Serialize}; +use tokio::process::Command; +use tokio::sync::Mutex; + +use crate::prelude::*; +use crate::util::Invoke; + +pub const START9_BRIDGE_IFACE: &str = "lxcbr0"; +pub const FIRST_DYNAMIC_PRIVATE_PORT: u16 = 49152; + +#[derive(Debug, Deserialize, Serialize)] +pub struct AvailablePorts(IdPool); +impl AvailablePorts { + pub fn new() -> Self { + Self(IdPool::new_ranged(FIRST_DYNAMIC_PRIVATE_PORT..u16::MAX)) + } + pub fn alloc(&mut self) -> Result { + self.0.request_id().ok_or_else(|| { + Error::new( + eyre!("No more dynamic ports available!"), + ErrorKind::Network, + ) + }) + } + pub fn free(&mut self, ports: impl IntoIterator) { + for port in ports { + self.0.return_id(port).unwrap_or_default(); + } + } +} + +pub struct LanPortForwardController { + forwards: Mutex>>>, +} +impl LanPortForwardController { + pub fn new() -> Self { + Self { + forwards: Mutex::new(BTreeMap::new()), + } + } + pub async fn add(&self, port: u16, addr: SocketAddr) -> Result, Error> { + let mut writable = self.forwards.lock().await; + let (prev, mut forward) = if let Some(forward) = writable.remove(&port) { + ( + forward.keys().next().cloned(), + forward + .into_iter() + .filter(|(_, rc)| rc.strong_count() > 0) + .collect(), + ) + } else { + (None, BTreeMap::new()) + }; + let rc = Arc::new(()); + forward.insert(addr, Arc::downgrade(&rc)); + let 
next = forward.keys().next().cloned(); + if !forward.is_empty() { + writable.insert(port, forward); + } + + update_forward(port, prev, next).await?; + Ok(rc) + } + pub async fn gc(&self, external: u16) -> Result<(), Error> { + let mut writable = self.forwards.lock().await; + let (prev, forward) = if let Some(forward) = writable.remove(&external) { + ( + forward.keys().next().cloned(), + forward + .into_iter() + .filter(|(_, rc)| rc.strong_count() > 0) + .collect(), + ) + } else { + (None, BTreeMap::new()) + }; + let next = forward.keys().next().cloned(); + if !forward.is_empty() { + writable.insert(external, forward); + } + + update_forward(external, prev, next).await + } +} + +async fn update_forward( + external: u16, + prev: Option, + next: Option, +) -> Result<(), Error> { + if prev != next { + if let Some(prev) = prev { + unforward(START9_BRIDGE_IFACE, external, prev).await?; + } + if let Some(next) = next { + forward(START9_BRIDGE_IFACE, external, next).await?; + } + } + Ok(()) +} + +// iptables -I FORWARD -o br-start9 -p tcp -d 172.18.0.2 --dport 8333 -j ACCEPT +// iptables -t nat -I PREROUTING -p tcp --dport 32768 -j DNAT --to 172.18.0.2:8333 +async fn forward(iface: &str, external: u16, addr: SocketAddr) -> Result<(), Error> { + Command::new("iptables") + .arg("-I") + .arg("FORWARD") + .arg("-o") + .arg(iface) + .arg("-p") + .arg("tcp") + .arg("-d") + .arg(addr.ip().to_string()) + .arg("--dport") + .arg(addr.port().to_string()) + .arg("-j") + .arg("ACCEPT") + .invoke(crate::ErrorKind::Network) + .await?; + Command::new("iptables") + .arg("-t") + .arg("nat") + .arg("-I") + .arg("PREROUTING") + .arg("-p") + .arg("tcp") + .arg("--dport") + .arg(external.to_string()) + .arg("-j") + .arg("DNAT") + .arg("--to") + .arg(addr.to_string()) + .invoke(crate::ErrorKind::Network) + .await?; + Ok(()) +} + +// iptables -D FORWARD -o br-start9 -p tcp -d 172.18.0.2 --dport 8333 -j ACCEPT +// iptables -t nat -D PREROUTING -p tcp --dport 32768 -j DNAT --to 172.18.0.2:8333 +async fn unforward(iface: &str, external: u16, addr: SocketAddr) -> Result<(), Error> { + Command::new("iptables") + .arg("-D") + .arg("FORWARD") + .arg("-o") + .arg(iface) + .arg("-p") + .arg("tcp") + .arg("-d") + .arg(addr.ip().to_string()) + .arg("--dport") + .arg(addr.port().to_string()) + .arg("-j") + .arg("ACCEPT") + .invoke(crate::ErrorKind::Network) + .await?; + Command::new("iptables") + .arg("-t") + .arg("nat") + .arg("-D") + .arg("PREROUTING") + .arg("-p") + .arg("tcp") + .arg("--dport") + .arg(external.to_string()) + .arg("-j") + .arg("DNAT") + .arg("--to") + .arg(addr.to_string()) + .invoke(crate::ErrorKind::Network) + .await?; + Ok(()) +} diff --git a/core/startos/src/net/host/address.rs b/core/startos/src/net/host/address.rs new file mode 100644 index 0000000000..cb3b485f68 --- /dev/null +++ b/core/startos/src/net/host/address.rs @@ -0,0 +1,14 @@ +use serde::{Deserialize, Serialize}; +use torut::onion::OnionAddressV3; +use ts_rs::TS; + +#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, TS)] +#[serde(rename_all = "camelCase")] +#[serde(tag = "kind")] +#[ts(export)] +pub enum HostAddress { + Onion { + #[ts(type = "string")] + address: OnionAddressV3, + }, +} diff --git a/core/startos/src/net/host/binding.rs b/core/startos/src/net/host/binding.rs new file mode 100644 index 0000000000..8301821f52 --- /dev/null +++ b/core/startos/src/net/host/binding.rs @@ -0,0 +1,83 @@ +use imbl_value::InternedString; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::net::forward::AvailablePorts; 
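// Editor's note (not part of the diff): a minimal usage sketch for the `AvailablePorts` /
// `LanPortForwardController` pair introduced in core/startos/src/net/forward.rs above.
// The function and variable names here are hypothetical, and the `Error` type is assumed
// to come from `crate::prelude`. `alloc()` hands out a port from the dynamic range
// (>= 49152), `add()` installs the iptables FORWARD/DNAT rules shown in the comments
// above and returns an `Arc<()>` guard, and `gc()` prunes the rules once every guard
// for that port has been dropped.
use std::net::SocketAddr;

use crate::net::forward::{AvailablePorts, LanPortForwardController};
use crate::prelude::*;

async fn expose_briefly(target: SocketAddr) -> Result<(), Error> {
    let mut ports = AvailablePorts::new();
    let external = ports.alloc()?; // first free port in the dynamic range
    let forwards = LanPortForwardController::new();
    let guard = forwards.add(external, target).await?; // FORWARD + DNAT rules are now live
    // ... traffic hitting `external` on the bridge is forwarded to `target` while `guard` lives ...
    drop(guard);
    forwards.gc(external).await?; // no live guards remain, so the rules are torn down
    ports.free([external]); // return the external port to the pool
    Ok(())
}
// In StartOS itself the `AvailablePorts` pool is persisted in the private section of the
// patch-db rather than constructed ad hoc as above (see the `bind` logic in
// net_controller.rs later in this diff).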
+use crate::net::vhost::AlpnInfo; +use crate::prelude::*; + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct BindInfo { + pub options: BindOptions, + pub assigned_lan_port: Option, +} +impl BindInfo { + pub fn new(available_ports: &mut AvailablePorts, options: BindOptions) -> Result { + let mut assigned_lan_port = None; + if options.add_ssl.is_some() || options.secure.is_some() { + assigned_lan_port = Some(available_ports.alloc()?); + } + Ok(Self { + options, + assigned_lan_port, + }) + } + pub fn update( + self, + available_ports: &mut AvailablePorts, + options: BindOptions, + ) -> Result { + let Self { + mut assigned_lan_port, + .. + } = self; + if options.add_ssl.is_some() || options.secure.is_some() { + assigned_lan_port = if let Some(port) = assigned_lan_port.take() { + Some(port) + } else { + Some(available_ports.alloc()?) + }; + } else { + if let Some(port) = assigned_lan_port.take() { + available_ports.free([port]); + } + } + Ok(Self { + options, + assigned_lan_port, + }) + } +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct Security { + pub ssl: bool, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct BindOptions { + #[ts(type = "string | null")] + pub scheme: Option, + pub preferred_external_port: u16, + pub add_ssl: Option, + pub secure: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct AddSslOptions { + #[ts(type = "string | null")] + pub scheme: Option, + pub preferred_external_port: u16, + // #[serde(default)] + // pub add_x_forwarded_headers: bool, // TODO + #[serde(default)] + pub alpn: AlpnInfo, +} diff --git a/core/startos/src/net/host/mod.rs b/core/startos/src/net/host/mod.rs new file mode 100644 index 0000000000..2d50df15ac --- /dev/null +++ b/core/startos/src/net/host/mod.rs @@ -0,0 +1,88 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use imbl_value::InternedString; +use models::HostId; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::net::forward::AvailablePorts; +use crate::net::host::address::HostAddress; +use crate::net::host::binding::{BindInfo, BindOptions}; +use crate::prelude::*; + +pub mod address; +pub mod binding; + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct Host { + pub kind: HostKind, + pub bindings: BTreeMap, + pub addresses: BTreeSet, + pub primary: Option, +} +impl AsRef for Host { + fn as_ref(&self) -> &Host { + self + } +} +impl Host { + pub fn new(kind: HostKind) -> Self { + Self { + kind, + bindings: BTreeMap::new(), + addresses: BTreeSet::new(), + primary: None, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub enum HostKind { + Multi, + // Single, + // Static, +} + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[model = "Model"] +#[ts(export)] +pub struct HostInfo(BTreeMap); + +impl Map for HostInfo { + type Key = HostId; + type Value = Host; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(key.clone().into()) + } +} + +impl Model { + pub fn add_binding( + &mut self, + available_ports: &mut AvailablePorts, + kind: HostKind, + id: 
&HostId, + internal_port: u16, + options: BindOptions, + ) -> Result<(), Error> { + self.upsert(id, || Host::new(kind))? + .as_bindings_mut() + .mutate(|b| { + let info = if let Some(info) = b.remove(&internal_port) { + info.update(available_ports, options)? + } else { + BindInfo::new(available_ports, options)? + }; + b.insert(internal_port, info); + Ok(()) + }) // TODO: handle host kind change + } +} diff --git a/core/startos/src/net/interface.rs b/core/startos/src/net/interface.rs deleted file mode 100644 index a055bb277f..0000000000 --- a/core/startos/src/net/interface.rs +++ /dev/null @@ -1,122 +0,0 @@ -use std::collections::BTreeMap; - -use indexmap::IndexSet; -pub use models::InterfaceId; -use serde::{Deserialize, Deserializer, Serialize}; -use sqlx::{Executor, Postgres}; -use tracing::instrument; - -use crate::db::model::{InterfaceAddressMap, InterfaceAddresses}; -use crate::net::keys::Key; -use crate::s9pk::manifest::PackageId; -use crate::util::serde::Port; -use crate::{Error, ResultExt}; - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct Interfaces(pub BTreeMap); // TODO -impl Interfaces { - #[instrument(skip_all)] - pub fn validate(&self) -> Result<(), Error> { - for (_, interface) in &self.0 { - interface.validate().with_ctx(|_| { - ( - crate::ErrorKind::ValidateS9pk, - format!("Interface {}", interface.name), - ) - })?; - } - Ok(()) - } - #[instrument(skip_all)] - pub async fn install( - &self, - secrets: &mut Ex, - package_id: &PackageId, - ) -> Result - where - for<'a> &'a mut Ex: Executor<'a, Database = Postgres>, - { - let mut interface_addresses = InterfaceAddressMap(BTreeMap::new()); - for (id, iface) in &self.0 { - let mut addrs = InterfaceAddresses { - tor_address: None, - lan_address: None, - }; - if iface.tor_config.is_some() || iface.lan_config.is_some() { - let key = - Key::for_interface(secrets, Some((package_id.clone(), id.clone()))).await?; - if iface.tor_config.is_some() { - addrs.tor_address = Some(key.tor_address().to_string()); - } - if iface.lan_config.is_some() { - addrs.lan_address = Some(key.local_address()); - } - } - interface_addresses.0.insert(id.clone(), addrs); - } - Ok(interface_addresses) - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct Interface { - pub name: String, - pub description: String, - pub tor_config: Option, - pub lan_config: Option>, - pub ui: bool, - pub protocols: IndexSet, -} -impl Interface { - #[instrument(skip_all)] - pub fn validate(&self) -> Result<(), color_eyre::eyre::Report> { - if self.tor_config.is_some() && !self.protocols.contains("tcp") { - color_eyre::eyre::bail!("must support tcp to set up a tor hidden service"); - } - if self.lan_config.is_some() && !self.protocols.contains("http") { - color_eyre::eyre::bail!("must support http to set up a lan service"); - } - if self.ui && !(self.protocols.contains("http") || self.protocols.contains("https")) { - color_eyre::eyre::bail!("must support http or https to serve a ui"); - } - Ok(()) - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct TorConfig { - pub port_mapping: BTreeMap, -} - -#[derive(Clone, Debug, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct LanPortConfig { - pub ssl: bool, - pub internal: u16, -} -impl<'de> Deserialize<'de> for LanPortConfig { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - #[derive(Deserialize)] - #[serde(rename_all = 
"kebab-case")] - struct PermissiveLanPortConfig { - ssl: bool, - internal: Option, - mapping: Option, - } - - let config = PermissiveLanPortConfig::deserialize(deserializer)?; - Ok(LanPortConfig { - ssl: config.ssl, - internal: config - .internal - .or(config.mapping) - .ok_or_else(|| serde::de::Error::missing_field("internal"))?, - }) - } -} diff --git a/core/startos/src/net/keys.rs b/core/startos/src/net/keys.rs index 504bd276d3..02ec173293 100644 --- a/core/startos/src/net/keys.rs +++ b/core/startos/src/net/keys.rs @@ -1,385 +1,24 @@ -use std::collections::BTreeMap; +use serde::{Deserialize, Serialize}; -use clap::ArgMatches; -use color_eyre::eyre::eyre; -use models::{Id, InterfaceId, PackageId}; -use openssl::pkey::{PKey, Private}; -use openssl::sha::Sha256; -use openssl::x509::X509; -use p256::elliptic_curve::pkcs8::EncodePrivateKey; -use rpc_toolkit::command; -use sqlx::{Acquire, PgExecutor}; -use ssh_key::private::Ed25519PrivateKey; -use torut::onion::{OnionAddressV3, TorSecretKeyV3}; -use zeroize::Zeroize; - -use crate::config::{configure, ConfigureContext}; -use crate::context::RpcContext; -use crate::control::restart; -use crate::disk::fsck::RequiresReboot; -use crate::net::ssl::CertPair; +use crate::account::AccountInfo; +use crate::net::ssl::CertStore; +use crate::net::tor::OnionStore; use crate::prelude::*; -use crate::util::crypto::ed25519_expand_key; - -// TODO: delete once we may change tor addresses -async fn compat( - secrets: impl PgExecutor<'_>, - interface: &Option<(PackageId, InterfaceId)>, -) -> Result, Error> { - if let Some((package, interface)) = interface { - if let Some(r) = sqlx::query!( - "SELECT key FROM tor WHERE package = $1 AND interface = $2", - package, - interface - ) - .fetch_optional(secrets) - .await? - { - Ok(Some(<[u8; 64]>::try_from(r.key).map_err(|e| { - Error::new( - eyre!("expected vec of len 64, got len {}", e.len()), - ErrorKind::ParseDbField, - ) - })?)) - } else { - Ok(None) - } - } else if let Some(key) = sqlx::query!("SELECT tor_key FROM account WHERE id = 0") - .fetch_one(secrets) - .await? 
- .tor_key - { - Ok(Some(<[u8; 64]>::try_from(key).map_err(|e| { - Error::new( - eyre!("expected vec of len 64, got len {}", e.len()), - ErrorKind::ParseDbField, - ) - })?)) - } else { - Ok(None) - } -} -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Key { - interface: Option<(PackageId, InterfaceId)>, - base: [u8; 32], - tor_key: [u8; 64], // Does NOT necessarily match base -} -impl Key { - pub fn interface(&self) -> Option<(PackageId, InterfaceId)> { - self.interface.clone() - } - pub fn as_bytes(&self) -> [u8; 32] { - self.base - } - pub fn internal_address(&self) -> String { - self.interface - .as_ref() - .map(|(pkg_id, _)| format!("{}.embassy", pkg_id)) - .unwrap_or_else(|| "embassy".to_owned()) - } - pub fn tor_key(&self) -> TorSecretKeyV3 { - self.tor_key.into() - } - pub fn tor_address(&self) -> OnionAddressV3 { - self.tor_key().public().get_onion_address() - } - pub fn base_address(&self) -> String { - self.tor_key() - .public() - .get_onion_address() - .get_address_without_dot_onion() - } - pub fn local_address(&self) -> String { - self.base_address() + ".local" - } - pub fn openssl_key_ed25519(&self) -> PKey { - PKey::private_key_from_raw_bytes(&self.base, openssl::pkey::Id::ED25519).unwrap() - } - pub fn openssl_key_nistp256(&self) -> PKey { - let mut buf = self.base; - loop { - if let Ok(k) = p256::SecretKey::from_slice(&buf) { - return PKey::private_key_from_pkcs8(&*k.to_pkcs8_der().unwrap().as_bytes()) - .unwrap(); - } - let mut sha = Sha256::new(); - sha.update(&buf); - buf = sha.finish(); - } - } - pub fn ssh_key(&self) -> Ed25519PrivateKey { - Ed25519PrivateKey::from_bytes(&self.base) - } - pub(crate) fn from_pair( - interface: Option<(PackageId, InterfaceId)>, - bytes: [u8; 32], - tor_key: [u8; 64], - ) -> Self { - Self { - interface, - tor_key, - base: bytes, - } - } - pub fn from_bytes(interface: Option<(PackageId, InterfaceId)>, bytes: [u8; 32]) -> Self { - Self::from_pair(interface, bytes, ed25519_expand_key(&bytes)) - } - pub fn new(interface: Option<(PackageId, InterfaceId)>) -> Self { - Self::from_bytes(interface, rand::random()) - } - pub(super) fn with_certs(self, certs: CertPair, int: X509, root: X509) -> KeyInfo { - KeyInfo { - key: self, - certs, - int, - root, - } - } - pub async fn for_package( - secrets: impl PgExecutor<'_>, - package: &PackageId, - ) -> Result, Error> { - sqlx::query!( - r#" - SELECT - network_keys.package, - network_keys.interface, - network_keys.key, - tor.key AS "tor_key?" - FROM - network_keys - LEFT JOIN - tor - ON - network_keys.package = tor.package - AND - network_keys.interface = tor.interface - WHERE - network_keys.package = $1 - "#, - package - ) - .fetch_all(secrets) - .await? 
- .into_iter() - .map(|row| { - let interface = Some(( - package.clone(), - InterfaceId::from(Id::try_from(row.interface)?), - )); - let bytes = row.key.try_into().map_err(|e: Vec| { - Error::new( - eyre!("Invalid length for network key {} expected 32", e.len()), - crate::ErrorKind::Database, - ) - })?; - Ok(match row.tor_key { - Some(tor_key) => Key::from_pair( - interface, - bytes, - tor_key.try_into().map_err(|e: Vec| { - Error::new( - eyre!("Invalid length for tor key {} expected 64", e.len()), - crate::ErrorKind::Database, - ) - })?, - ), - None => Key::from_bytes(interface, bytes), - }) - }) - .collect() - } - pub async fn for_interface( - secrets: &mut Ex, - interface: Option<(PackageId, InterfaceId)>, - ) -> Result - where - for<'a> &'a mut Ex: PgExecutor<'a>, - { - let tentative = rand::random::<[u8; 32]>(); - let actual = if let Some((pkg, iface)) = &interface { - let k = tentative.as_slice(); - let actual = sqlx::query!( - "INSERT INTO network_keys (package, interface, key) VALUES ($1, $2, $3) ON CONFLICT (package, interface) DO UPDATE SET package = EXCLUDED.package RETURNING key", - pkg, - iface, - k, - ) - .fetch_one(&mut *secrets) - .await?.key; - let mut bytes = tentative; - bytes.clone_from_slice(actual.get(0..32).ok_or_else(|| { - Error::new( - eyre!("Invalid key size returned from DB"), - crate::ErrorKind::Database, - ) - })?); - bytes - } else { - let actual = sqlx::query!("SELECT network_key FROM account WHERE id = 0") - .fetch_one(&mut *secrets) - .await? - .network_key; - let mut bytes = tentative; - bytes.clone_from_slice(actual.get(0..32).ok_or_else(|| { - Error::new( - eyre!("Invalid key size returned from DB"), - crate::ErrorKind::Database, - ) - })?); - bytes +#[derive(Debug, Deserialize, Serialize, HasModel)] +#[model = "Model"] +pub struct KeyStore { + pub onion: OnionStore, + pub local_certs: CertStore, + // pub letsencrypt_certs: BTreeMap, CertData> +} +impl KeyStore { + pub fn new(account: &AccountInfo) -> Result { + let mut res = Self { + onion: OnionStore::new(), + local_certs: CertStore::new(account)?, }; - let mut res = Self::from_bytes(interface, actual); - if let Some(tor_key) = compat(secrets, &res.interface).await? 
{ - res.tor_key = tor_key; - } + res.onion.insert(account.tor_key.clone()); Ok(res) } } -impl Drop for Key { - fn drop(&mut self) { - self.base.zeroize(); - self.tor_key.zeroize(); - } -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct KeyInfo { - key: Key, - certs: CertPair, - int: X509, - root: X509, -} -impl KeyInfo { - pub fn key(&self) -> &Key { - &self.key - } - pub fn certs(&self) -> &CertPair { - &self.certs - } - pub fn int_ca(&self) -> &X509 { - &self.int - } - pub fn root_ca(&self) -> &X509 { - &self.root - } - pub fn fullchain_ed25519(&self) -> Vec<&X509> { - vec![&self.certs.ed25519, &self.int, &self.root] - } - pub fn fullchain_nistp256(&self) -> Vec<&X509> { - vec![&self.certs.nistp256, &self.int, &self.root] - } -} - -#[test] -pub fn test_keygen() { - let key = Key::new(None); - key.tor_key(); - key.openssl_key_nistp256(); -} - -fn display_requires_reboot(arg: RequiresReboot, _matches: &ArgMatches) { - if arg.0 { - println!("Server must be restarted for changes to take effect"); - } -} - -#[command(rename = "rotate-key", display(display_requires_reboot))] -pub async fn rotate_key( - #[context] ctx: RpcContext, - #[arg] package: Option, - #[arg] interface: Option, -) -> Result { - let mut pgcon = ctx.secret_store.acquire().await?; - let mut tx = pgcon.begin().await?; - if let Some(package) = package { - let Some(interface) = interface else { - return Err(Error::new( - eyre!("Must specify interface"), - ErrorKind::InvalidRequest, - )); - }; - sqlx::query!( - "DELETE FROM tor WHERE package = $1 AND interface = $2", - &package, - &interface, - ) - .execute(&mut *tx) - .await?; - sqlx::query!( - "DELETE FROM network_keys WHERE package = $1 AND interface = $2", - &package, - &interface, - ) - .execute(&mut *tx) - .await?; - let new_key = - Key::for_interface(&mut *tx, Some((package.clone(), interface.clone()))).await?; - let needs_config = ctx - .db - .mutate(|v| { - let installed = v - .as_package_data_mut() - .as_idx_mut(&package) - .or_not_found(&package)? 
- .as_installed_mut() - .or_not_found("installed")?; - let addrs = installed - .as_interface_addresses_mut() - .as_idx_mut(&interface) - .or_not_found(&interface)?; - if let Some(lan) = addrs.as_lan_address_mut().transpose_mut() { - lan.ser(&new_key.local_address())?; - } - if let Some(lan) = addrs.as_tor_address_mut().transpose_mut() { - lan.ser(&new_key.tor_address().to_string())?; - } - - if installed - .as_manifest() - .as_config() - .transpose_ref() - .is_some() - { - installed - .as_status_mut() - .as_configured_mut() - .replace(&false) - } else { - Ok(false) - } - }) - .await?; - tx.commit().await?; - if needs_config { - configure( - &ctx, - &package, - ConfigureContext { - breakages: BTreeMap::new(), - timeout: None, - config: None, - overrides: BTreeMap::new(), - dry_run: false, - }, - ) - .await?; - } else { - restart(ctx, package).await?; - } - Ok(RequiresReboot(false)) - } else { - sqlx::query!("UPDATE account SET tor_key = NULL, network_key = gen_random_bytes(32)") - .execute(&mut *tx) - .await?; - let new_key = Key::for_interface(&mut *tx, None).await?; - let url = format!("https://{}", new_key.tor_address()).parse()?; - ctx.db - .mutate(|v| v.as_server_info_mut().as_tor_address_mut().ser(&url)) - .await?; - tx.commit().await?; - Ok(RequiresReboot(true)) - } -} diff --git a/core/startos/src/net/mdns.rs b/core/startos/src/net/mdns.rs index 21054241d1..af5d128a8b 100644 --- a/core/startos/src/net/mdns.rs +++ b/core/startos/src/net/mdns.rs @@ -1,14 +1,10 @@ -use std::collections::BTreeMap; use std::net::Ipv4Addr; -use std::sync::{Arc, Weak}; use color_eyre::eyre::eyre; -use tokio::process::{Child, Command}; -use tokio::sync::Mutex; -use tracing::instrument; +use tokio::process::Command; +use crate::prelude::*; use crate::util::Invoke; -use crate::{Error, ResultExt}; pub async fn resolve_mdns(hostname: &str) -> Result { Ok(String::from_utf8( @@ -30,71 +26,3 @@ pub async fn resolve_mdns(hostname: &str) -> Result { .trim() .parse()?) 
} - -pub struct MdnsController(Mutex); -impl MdnsController { - pub async fn init() -> Result { - Ok(MdnsController(Mutex::new( - MdnsControllerInner::init().await?, - ))) - } - pub async fn add(&self, alias: String) -> Result, Error> { - self.0.lock().await.add(alias).await - } - pub async fn gc(&self, alias: String) -> Result<(), Error> { - self.0.lock().await.gc(alias).await - } -} - -pub struct MdnsControllerInner { - alias_cmd: Option, - services: BTreeMap>, -} - -impl MdnsControllerInner { - #[instrument(skip_all)] - async fn init() -> Result { - let mut res = MdnsControllerInner { - alias_cmd: None, - services: BTreeMap::new(), - }; - res.sync().await?; - Ok(res) - } - #[instrument(skip_all)] - async fn sync(&mut self) -> Result<(), Error> { - if let Some(mut cmd) = self.alias_cmd.take() { - cmd.kill().await.with_kind(crate::ErrorKind::Network)?; - } - self.alias_cmd = Some( - Command::new("avahi-alias") - .kill_on_drop(true) - .args( - self.services - .iter() - .filter(|(_, rc)| rc.strong_count() > 0) - .map(|(s, _)| s), - ) - .spawn()?, - ); - Ok(()) - } - async fn add(&mut self, alias: String) -> Result, Error> { - let rc = if let Some(rc) = Weak::upgrade(&self.services.remove(&alias).unwrap_or_default()) - { - rc - } else { - Arc::new(()) - }; - self.services.insert(alias, Arc::downgrade(&rc)); - self.sync().await?; - Ok(rc) - } - async fn gc(&mut self, alias: String) -> Result<(), Error> { - if let Some(rc) = Weak::upgrade(&self.services.remove(&alias).unwrap_or_default()) { - self.services.insert(alias, Arc::downgrade(&rc)); - } - self.sync().await?; - Ok(()) - } -} diff --git a/core/startos/src/net/mod.rs b/core/startos/src/net/mod.rs index 50935fb186..aaf019e66b 100644 --- a/core/startos/src/net/mod.rs +++ b/core/startos/src/net/mod.rs @@ -1,17 +1,13 @@ -use std::sync::Arc; - -use futures::future::BoxFuture; -use hyper::{Body, Error as HyperError, Request, Response}; -use rpc_toolkit::command; - -use crate::Error; +use rpc_toolkit::ParentHandler; pub mod dhcp; pub mod dns; -pub mod interface; +pub mod forward; +pub mod host; pub mod keys; pub mod mdns; pub mod net_controller; +pub mod service_interface; pub mod ssl; pub mod static_server; pub mod tor; @@ -22,11 +18,8 @@ pub mod wifi; pub const PACKAGE_CERT_PATH: &str = "/var/lib/embassy/ssl"; -#[command(subcommands(tor::tor, dhcp::dhcp, ssl::ssl, keys::rotate_key))] -pub fn net() -> Result<(), Error> { - Ok(()) +pub fn net() -> ParentHandler { + ParentHandler::new() + .subcommand("tor", tor::tor()) + .subcommand("dhcp", dhcp::dhcp()) } - -pub type HttpHandler = Arc< - dyn Fn(Request) -> BoxFuture<'static, Result, HyperError>> + Send + Sync, ->; diff --git a/core/startos/src/net/net_controller.rs b/core/startos/src/net/net_controller.rs index e2e77ed68a..a4c9ea507b 100644 --- a/core/startos/src/net/net_controller.rs +++ b/core/startos/src/net/net_controller.rs @@ -1,63 +1,72 @@ -use std::collections::BTreeMap; -use std::net::{IpAddr, Ipv4Addr, SocketAddr}; +use std::collections::{BTreeMap, BTreeSet}; +use std::net::{Ipv4Addr, SocketAddr}; use std::sync::{Arc, Weak}; use color_eyre::eyre::eyre; -use models::InterfaceId; -use sqlx::PgExecutor; +use imbl::OrdMap; +use lazy_format::lazy_format; +use models::{HostId, OptionExt, PackageId}; +use patch_db::PatchDb; +use torut::onion::{OnionAddressV3, TorSecretKeyV3}; use tracing::instrument; +use crate::db::prelude::PatchDbExt; use crate::error::ErrorCollection; use crate::hostname::Hostname; use crate::net::dns::DnsController; -use crate::net::keys::Key; -use 
crate::net::mdns::MdnsController; -use crate::net::ssl::{export_cert, export_key, SslManager}; +use crate::net::forward::LanPortForwardController; +use crate::net::host::address::HostAddress; +use crate::net::host::binding::{AddSslOptions, BindOptions}; +use crate::net::host::{Host, HostKind}; use crate::net::tor::TorController; use crate::net::vhost::{AlpnInfo, VHostController}; -use crate::s9pk::manifest::PackageId; -use crate::volume::cert_dir; +use crate::util::serde::MaybeUtf8String; use crate::{Error, HOST_IP}; pub struct NetController { + db: PatchDb, pub(super) tor: TorController, - pub(super) mdns: MdnsController, pub(super) vhost: VHostController, pub(super) dns: DnsController, - pub(super) ssl: Arc, + pub(super) forward: LanPortForwardController, pub(super) os_bindings: Vec>, } impl NetController { #[instrument(skip_all)] pub async fn init( + db: PatchDb, tor_control: SocketAddr, tor_socks: SocketAddr, dns_bind: &[SocketAddr], - ssl: SslManager, hostname: &Hostname, - os_key: &Key, + os_tor_key: TorSecretKeyV3, ) -> Result { - let ssl = Arc::new(ssl); let mut res = Self { + db: db.clone(), tor: TorController::new(tor_control, tor_socks), - mdns: MdnsController::init().await?, - vhost: VHostController::new(ssl.clone()), + vhost: VHostController::new(db), dns: DnsController::init(dns_bind).await?, - ssl, + forward: LanPortForwardController::new(), os_bindings: Vec::new(), }; - res.add_os_bindings(hostname, os_key).await?; + res.add_os_bindings(hostname, os_tor_key).await?; Ok(res) } - async fn add_os_bindings(&mut self, hostname: &Hostname, key: &Key) -> Result<(), Error> { - let alpn = Err(AlpnInfo::Specified(vec!["http/1.1".into(), "h2".into()])); + async fn add_os_bindings( + &mut self, + hostname: &Hostname, + tor_key: TorSecretKeyV3, + ) -> Result<(), Error> { + let alpn = Err(AlpnInfo::Specified(vec![ + MaybeUtf8String("http/1.1".into()), + MaybeUtf8String("h2".into()), + ])); // Internal DNS self.vhost .add( - key.clone(), Some("embassy".into()), 443, ([127, 0, 0, 1], 80).into(), @@ -70,13 +79,7 @@ impl NetController { // LAN IP self.os_bindings.push( self.vhost - .add( - key.clone(), - None, - 443, - ([127, 0, 0, 1], 80).into(), - alpn.clone(), - ) + .add(None, 443, ([127, 0, 0, 1], 80).into(), alpn.clone()) .await?, ); @@ -84,7 +87,6 @@ impl NetController { self.os_bindings.push( self.vhost .add( - key.clone(), Some("localhost".into()), 443, ([127, 0, 0, 1], 80).into(), @@ -95,7 +97,6 @@ impl NetController { self.os_bindings.push( self.vhost .add( - key.clone(), Some(hostname.no_dot_host_name()), 443, ([127, 0, 0, 1], 80).into(), @@ -108,7 +109,6 @@ impl NetController { self.os_bindings.push( self.vhost .add( - key.clone(), Some(hostname.local_domain_name()), 443, ([127, 0, 0, 1], 80).into(), @@ -117,28 +117,26 @@ impl NetController { .await?, ); - // Tor (http) - self.os_bindings.push( - self.tor - .add(key.tor_key(), 80, ([127, 0, 0, 1], 80).into()) - .await?, - ); - - // Tor (https) + // Tor self.os_bindings.push( self.vhost .add( - key.clone(), - Some(key.tor_address().to_string()), + Some(tor_key.public().get_onion_address().to_string()), 443, ([127, 0, 0, 1], 80).into(), alpn.clone(), ) .await?, ); - self.os_bindings.push( + self.os_bindings.extend( self.tor - .add(key.tor_key(), 443, ([127, 0, 0, 1], 443).into()) + .add( + tor_key, + vec![ + (80, ([127, 0, 0, 1], 80).into()), // http + (443, ([127, 0, 0, 1], 443).into()), // https + ], + ) .await?, ); @@ -159,55 +157,15 @@ impl NetController { ip, dns, controller: Arc::downgrade(self), - tor: BTreeMap::new(), - 
lan: BTreeMap::new(), + binds: BTreeMap::new(), }) } +} - async fn add_tor( - &self, - key: &Key, - external: u16, - target: SocketAddr, - ) -> Result>, Error> { - let mut rcs = Vec::with_capacity(1); - rcs.push(self.tor.add(key.tor_key(), external, target).await?); - Ok(rcs) - } - - async fn remove_tor(&self, key: &Key, external: u16, rcs: Vec>) -> Result<(), Error> { - drop(rcs); - self.tor.gc(Some(key.tor_key()), Some(external)).await - } - - async fn add_lan( - &self, - key: Key, - external: u16, - target: SocketAddr, - connect_ssl: Result<(), AlpnInfo>, - ) -> Result>, Error> { - let mut rcs = Vec::with_capacity(2); - rcs.push( - self.vhost - .add( - key.clone(), - Some(key.local_address()), - external, - target.into(), - connect_ssl, - ) - .await?, - ); - rcs.push(self.mdns.add(key.base_address()).await?); - Ok(rcs) - } - - async fn remove_lan(&self, key: &Key, external: u16, rcs: Vec>) -> Result<(), Error> { - drop(rcs); - self.mdns.gc(key.base_address()).await?; - self.vhost.gc(Some(key.local_address()), external).await - } +#[derive(Default)] +struct HostBinds { + lan: BTreeMap, Arc<()>)>, + tor: BTreeMap, Vec>)>, } pub struct NetService { @@ -216,8 +174,7 @@ pub struct NetService { ip: Ipv4Addr, dns: Arc<()>, controller: Weak, - tor: BTreeMap<(InterfaceId, u16), (Key, Vec>)>, - lan: BTreeMap<(InterfaceId, u16), (Key, Vec>)>, + binds: BTreeMap, } impl NetService { fn net_controller(&self) -> Result, Error> { @@ -228,111 +185,194 @@ impl NetService { ) }) } - pub async fn add_tor( + + pub async fn bind( &mut self, - secrets: &mut Ex, - id: InterfaceId, - external: u16, - internal: u16, - ) -> Result<(), Error> - where - for<'a> &'a mut Ex: PgExecutor<'a>, - { - let key = Key::for_interface(secrets, Some((self.id.clone(), id.clone()))).await?; - let ctrl = self.net_controller()?; - let tor_idx = (id, external); - let mut tor = self - .tor - .remove(&tor_idx) - .unwrap_or_else(|| (key.clone(), Vec::new())); - tor.1.append( - &mut ctrl - .add_tor(&key, external, SocketAddr::new(self.ip.into(), internal)) - .await?, - ); - self.tor.insert(tor_idx, tor); - Ok(()) + kind: HostKind, + id: HostId, + internal_port: u16, + options: BindOptions, + ) -> Result<(), Error> { + let id_ref = &id; + let pkg_id = &self.id; + let host = self + .net_controller()? + .db + .mutate(|d| { + let mut ports = d.as_private().as_available_ports().de()?; + let hosts = d + .as_public_mut() + .as_package_data_mut() + .as_idx_mut(pkg_id) + .or_not_found(pkg_id)? + .as_hosts_mut(); + hosts.add_binding(&mut ports, kind, &id, internal_port, options)?; + let host = hosts + .as_idx(&id) + .or_not_found(lazy_format!("Host {id_ref} for {pkg_id}"))? 
+ .de()?; + d.as_private_mut().as_available_ports_mut().ser(&ports)?; + Ok(host) + }) + .await?; + self.update(id, host).await } - pub async fn remove_tor(&mut self, id: InterfaceId, external: u16) -> Result<(), Error> { + + async fn update(&mut self, id: HostId, host: Host) -> Result<(), Error> { let ctrl = self.net_controller()?; - if let Some((key, rcs)) = self.tor.remove(&(id, external)) { - ctrl.remove_tor(&key, external, rcs).await?; + let binds = { + if !self.binds.contains_key(&id) { + self.binds.insert(id.clone(), Default::default()); + } + self.binds.get_mut(&id).unwrap() + }; + if true + // TODO: if should listen lan + { + for (port, bind) in &host.bindings { + let old_lan_bind = binds.lan.remove(port); + let old_lan_port = old_lan_bind.as_ref().map(|(external, _, _)| *external); + let lan_bind = old_lan_bind.filter(|(external, ssl, _)| { + ssl == &bind.options.add_ssl + && bind.assigned_lan_port.as_ref() == Some(external) + }); // only keep existing binding if relevant details match + if let Some(external) = bind.assigned_lan_port { + let new_lan_bind = if let Some(b) = lan_bind { + b + } else { + if let Some(ssl) = &bind.options.add_ssl { + let rc = ctrl + .vhost + .add( + None, + external, + (self.ip, *port).into(), + if bind.options.secure.as_ref().map_or(false, |s| s.ssl) { + Ok(()) + } else { + Err(ssl.alpn.clone()) + }, + ) + .await?; + (*port, Some(ssl.clone()), rc) + } else { + let rc = ctrl.forward.add(external, (self.ip, *port).into()).await?; + (*port, None, rc) + } + }; + binds.lan.insert(*port, new_lan_bind); + } + if let Some(external) = old_lan_port { + ctrl.vhost.gc(None, external).await?; + ctrl.forward.gc(external).await?; + } + } + let mut removed = BTreeSet::new(); + let mut removed_ssl = BTreeSet::new(); + binds.lan.retain(|internal, (external, ssl, _)| { + if host.bindings.contains_key(internal) { + true + } else { + if ssl.is_some() { + removed_ssl.insert(*external); + } else { + removed.insert(*external); + } + false + } + }); + for external in removed { + ctrl.forward.gc(external).await?; + } + for external in removed_ssl { + ctrl.vhost.gc(None, external).await?; + } } - Ok(()) - } - pub async fn add_lan( - &mut self, - secrets: &mut Ex, - id: InterfaceId, - external: u16, - internal: u16, - connect_ssl: Result<(), AlpnInfo>, - ) -> Result<(), Error> - where - for<'a> &'a mut Ex: PgExecutor<'a>, - { - let key = Key::for_interface(secrets, Some((self.id.clone(), id.clone()))).await?; - let ctrl = self.net_controller()?; - let lan_idx = (id, external); - let mut lan = self - .lan - .remove(&lan_idx) - .unwrap_or_else(|| (key.clone(), Vec::new())); - lan.1.append( - &mut ctrl - .add_lan( - key, - external, - SocketAddr::new(self.ip.into(), internal), - connect_ssl, - ) - .await?, - ); - self.lan.insert(lan_idx, lan); - Ok(()) - } - pub async fn remove_lan(&mut self, id: InterfaceId, external: u16) -> Result<(), Error> { - let ctrl = self.net_controller()?; - if let Some((key, rcs)) = self.lan.remove(&(id, external)) { - ctrl.remove_lan(&key, external, rcs).await?; + let tor_binds: OrdMap = host + .bindings + .iter() + .flat_map(|(internal, info)| { + let non_ssl = ( + info.options.preferred_external_port, + SocketAddr::from((self.ip, *internal)), + ); + if let (Some(ssl), Some(ssl_internal)) = + (&info.options.add_ssl, info.assigned_lan_port) + { + itertools::Either::Left( + [ + ( + ssl.preferred_external_port, + SocketAddr::from(([127, 0, 0, 1], ssl_internal)), + ), + non_ssl, + ] + .into_iter(), + ) + } else { + 
itertools::Either::Right([non_ssl].into_iter()) + } + }) + .collect(); + let mut keep_tor_addrs = BTreeSet::new(); + for addr in match host.kind { + HostKind::Multi => { + // itertools::Either::Left( + host.addresses.iter() + // ) + } // HostKind::Single | HostKind::Static => itertools::Either::Right(&host.primary), + } { + match addr { + HostAddress::Onion { address } => { + keep_tor_addrs.insert(address); + let old_tor_bind = binds.tor.remove(address); + let tor_bind = old_tor_bind.filter(|(ports, _)| ports == &tor_binds); + let new_tor_bind = if let Some(tor_bind) = tor_bind { + tor_bind + } else { + let key = ctrl + .db + .peek() + .await + .into_private() + .into_key_store() + .into_onion() + .get_key(address)?; + let rcs = ctrl + .tor + .add(key, tor_binds.clone().into_iter().collect()) + .await?; + (tor_binds.clone(), rcs) + }; + binds.tor.insert(address.clone(), new_tor_bind); + } + } + } + for addr in binds.tor.keys() { + if !keep_tor_addrs.contains(addr) { + ctrl.tor.gc(Some(addr.clone()), None).await?; + } } Ok(()) } - pub async fn export_cert( - &self, - secrets: &mut Ex, - id: &InterfaceId, - ip: IpAddr, - ) -> Result<(), Error> - where - for<'a> &'a mut Ex: PgExecutor<'a>, - { - let key = Key::for_interface(secrets, Some((self.id.clone(), id.clone()))).await?; - let ctrl = self.net_controller()?; - let cert = ctrl.ssl.with_certs(key, ip).await?; - let cert_dir = cert_dir(&self.id, id); - tokio::fs::create_dir_all(&cert_dir).await?; - export_key( - &cert.key().openssl_key_nistp256(), - &cert_dir.join(format!("{id}.key.pem")), - ) - .await?; - export_cert( - &cert.fullchain_nistp256(), - &cert_dir.join(format!("{id}.cert.pem")), - ) - .await?; // TODO: can upgrade to ed25519? - Ok(()) - } + pub async fn remove_all(mut self) -> Result<(), Error> { self.shutdown = true; let mut errors = ErrorCollection::new(); if let Some(ctrl) = Weak::upgrade(&self.controller) { - for ((_, external), (key, rcs)) in std::mem::take(&mut self.lan) { - errors.handle(ctrl.remove_lan(&key, external, rcs).await); - } - for ((_, external), (key, rcs)) in std::mem::take(&mut self.tor) { - errors.handle(ctrl.remove_tor(&key, external, rcs).await); + for (_, binds) in std::mem::take(&mut self.binds) { + for (_, (external, ssl, rc)) in binds.lan { + drop(rc); + if ssl.is_some() { + errors.handle(ctrl.vhost.gc(None, external).await); + } else { + errors.handle(ctrl.forward.gc(external).await); + } + } + for (addr, (_, rcs)) in binds.tor { + drop(rcs); + errors.handle(ctrl.tor.gc(Some(addr), None).await); + } } std::mem::take(&mut self.dns); errors.handle(ctrl.dns.gc(Some(self.id.clone()), self.ip).await); @@ -345,6 +385,10 @@ impl NetService { )) } } + + pub fn get_ip(&self) -> Ipv4Addr { + self.ip.to_owned() + } } impl Drop for NetService { @@ -359,8 +403,7 @@ impl Drop for NetService { ip: Ipv4Addr::new(0, 0, 0, 0), dns: Default::default(), controller: Default::default(), - tor: Default::default(), - lan: Default::default(), + binds: BTreeMap::new(), }, ); tokio::spawn(async move { svc.remove_all().await.unwrap() }); diff --git a/core/startos/src/net/service_interface.rs b/core/startos/src/net/service_interface.rs new file mode 100644 index 0000000000..9a4659cfd6 --- /dev/null +++ b/core/startos/src/net/service_interface.rs @@ -0,0 +1,115 @@ +use std::net::{Ipv4Addr, Ipv6Addr}; + +use models::{HostId, ServiceInterfaceId}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::net::host::binding::BindOptions; +use crate::net::host::HostKind; +use crate::prelude::*; + +#[derive(Clone, Debug, 
Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct ServiceInterfaceWithHostInfo { + #[serde(flatten)] + pub service_interface: ServiceInterface, + pub host_info: ExportedHostInfo, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct ExportedHostInfo { + pub id: HostId, + pub kind: HostKind, + pub hostnames: Vec, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +#[serde(rename_all_fields = "camelCase")] +#[serde(tag = "kind")] +pub enum ExportedHostnameInfo { + Ip { + network_interface_id: String, + public: bool, + hostname: ExportedIpHostname, + }, + Onion { + hostname: ExportedOnionHostname, + }, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct ExportedOnionHostname { + pub value: String, + pub port: Option, + pub ssl_port: Option, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +#[serde(rename_all_fields = "camelCase")] +#[serde(tag = "kind")] +pub enum ExportedIpHostname { + Ipv4 { + value: Ipv4Addr, + port: Option, + ssl_port: Option, + }, + Ipv6 { + value: Ipv6Addr, + port: Option, + ssl_port: Option, + }, + Local { + value: String, + port: Option, + ssl_port: Option, + }, + Domain { + domain: String, + subdomain: Option, + port: Option, + ssl_port: Option, + }, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct ServiceInterface { + pub id: ServiceInterfaceId, + pub name: String, + pub description: String, + pub has_primary: bool, + pub disabled: bool, + pub masked: bool, + pub address_info: AddressInfo, + #[serde(rename = "type")] + pub interface_type: ServiceInterfaceType, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub enum ServiceInterfaceType { + Ui, + P2p, + Api, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct AddressInfo { + pub username: Option, + pub host_id: HostId, + pub bind_options: BindOptions, + pub suffix: String, +} diff --git a/core/startos/src/net/ssl.rs b/core/startos/src/net/ssl.rs index 1f9397add8..245881c55f 100644 --- a/core/startos/src/net/ssl.rs +++ b/core/startos/src/net/ssl.rs @@ -5,6 +5,7 @@ use std::path::Path; use std::time::{SystemTime, UNIX_EPOCH}; use futures::FutureExt; +use imbl_value::InternedString; use libc::time_t; use openssl::asn1::{Asn1Integer, Asn1Time}; use openssl::bn::{BigNum, MsbOption}; @@ -14,17 +15,137 @@ use openssl::nid::Nid; use openssl::pkey::{PKey, Private}; use openssl::x509::{X509Builder, X509Extension, X509NameBuilder, X509}; use openssl::*; -use rpc_toolkit::command; -use tokio::sync::{Mutex, RwLock}; +use patch_db::HasModel; +use serde::{Deserialize, Serialize}; +use tokio::sync::Mutex; use tracing::instrument; use crate::account::AccountInfo; -use crate::context::RpcContext; use crate::hostname::Hostname; use crate::init::check_time_is_synchronized; -use crate::net::dhcp::ips; -use crate::net::keys::{Key, KeyInfo}; -use crate::{Error, ErrorKind, ResultExt, SOURCE_DATE}; +use crate::prelude::*; +use crate::util::serde::Pem; +use crate::SOURCE_DATE; + +#[derive(Debug, Deserialize, Serialize, HasModel)] +#[model = "Model"] +#[serde(rename_all = "camelCase")] +pub struct CertStore { + pub root_key: Pem>, + pub root_cert: Pem, 
+ pub int_key: Pem>, + pub int_cert: Pem, + pub leaves: BTreeMap>, CertData>, +} +impl CertStore { + pub fn new(account: &AccountInfo) -> Result { + let int_key = generate_key()?; + let int_cert = make_int_cert((&account.root_ca_key, &account.root_ca_cert), &int_key)?; + Ok(Self { + root_key: Pem::new(account.root_ca_key.clone()), + root_cert: Pem::new(account.root_ca_cert.clone()), + int_key: Pem::new(int_key), + int_cert: Pem::new(int_cert), + leaves: BTreeMap::new(), + }) + } +} +impl Model { + /// This function will grant any cert for any domain. It is up to the *caller* to ensure that the calling service has permission to sign a cert for the requested domain + pub fn cert_for( + &mut self, + hostnames: &BTreeSet, + ) -> Result { + let keys = if let Some(cert_data) = self + .as_leaves() + .as_idx(JsonKey::new_ref(hostnames)) + .map(|m| m.de()) + .transpose()? + { + if cert_data + .certs + .ed25519 + .not_before() + .compare(Asn1Time::days_from_now(0)?.as_ref())? + == Ordering::Less + && cert_data + .certs + .ed25519 + .not_after() + .compare(Asn1Time::days_from_now(30)?.as_ref())? + == Ordering::Greater + && cert_data + .certs + .nistp256 + .not_before() + .compare(Asn1Time::days_from_now(0)?.as_ref())? + == Ordering::Less + && cert_data + .certs + .nistp256 + .not_after() + .compare(Asn1Time::days_from_now(30)?.as_ref())? + == Ordering::Greater + { + return Ok(FullchainCertData { + root: self.as_root_cert().de()?.0, + int: self.as_int_cert().de()?.0, + leaf: cert_data, + }); + } + cert_data.keys + } else { + PKeyPair { + ed25519: PKey::generate_ed25519()?, + nistp256: PKey::from_ec_key(EcKey::generate(&*EcGroup::from_curve_name( + Nid::X9_62_PRIME256V1, + )?)?)?, + } + }; + let int_key = self.as_int_key().de()?.0; + let int_cert = self.as_int_cert().de()?.0; + let cert_data = CertData { + certs: CertPair { + ed25519: make_leaf_cert( + (&int_key, &int_cert), + (&keys.ed25519, &SANInfo::new(hostnames)), + )?, + nistp256: make_leaf_cert( + (&int_key, &int_cert), + (&keys.nistp256, &SANInfo::new(hostnames)), + )?, + }, + keys, + }; + self.as_leaves_mut() + .insert(JsonKey::new_ref(hostnames), &cert_data)?; + Ok(FullchainCertData { + root: self.as_root_cert().de()?.0, + int: self.as_int_cert().de()?.0, + leaf: cert_data, + }) + } +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct CertData { + pub keys: PKeyPair, + pub certs: CertPair, +} + +pub struct FullchainCertData { + pub root: X509, + pub int: X509, + pub leaf: CertData, +} +impl FullchainCertData { + pub fn fullchain_ed25519(&self) -> Vec<&X509> { + vec![&self.leaf.certs.ed25519, &self.int, &self.root] + } + pub fn fullchain_nistp256(&self) -> Vec<&X509> { + vec![&self.leaf.certs.nistp256, &self.int, &self.root] + } +} static CERTIFICATE_VERSION: i32 = 2; // X509 version 3 is actually encoded as '2' in the cert because fuck you.
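// Editor's note (not part of the diff): a hypothetical sketch of driving the new
// `CertStore::cert_for` added above through the patch-db. The accessor chain
// (`as_private_mut()`, `as_key_store_mut()`, `as_local_certs_mut()`) is inferred from
// the HasModel naming convention used elsewhere in this diff and is an assumption, as
// are the function name and the exact imports. `cert_for` reuses a cached leaf while it
// remains valid for more than 30 days and otherwise re-signs one with the stored
// intermediate key.
use std::collections::BTreeSet;

use imbl_value::InternedString;
use patch_db::PatchDb;

use crate::db::prelude::PatchDbExt;
use crate::net::ssl::FullchainCertData;
use crate::prelude::*;

async fn tls_chain_for(
    db: &PatchDb,
    hostnames: BTreeSet<InternedString>,
) -> Result<FullchainCertData, Error> {
    db.mutate(|d| {
        d.as_private_mut()
            .as_key_store_mut()
            .as_local_certs_mut()
            .cert_for(&hostnames)
    })
    .await
}
// The returned chain can then be exported leaf -> intermediate -> root via
// `fullchain_ed25519()` / `fullchain_nistp256()` together with the `export_cert` /
// `export_key` helpers that this file keeps.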
@@ -35,63 +156,21 @@ fn unix_time(time: SystemTime) -> time_t { .unwrap_or_default() } -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct PKeyPair { + #[serde(with = "crate::util::serde::pem")] + pub ed25519: PKey, + #[serde(with = "crate::util::serde::pem")] + pub nistp256: PKey, +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] pub struct CertPair { + #[serde(with = "crate::util::serde::pem")] pub ed25519: X509, + #[serde(with = "crate::util::serde::pem")] pub nistp256: X509, } -impl CertPair { - fn updated( - pair: Option<&Self>, - hostname: &Hostname, - signer: (&PKey, &X509), - applicant: &Key, - ip: BTreeSet, - ) -> Result<(Self, bool), Error> { - let mut updated = false; - let mut updated_cert = |cert: Option<&X509>, osk: PKey| -> Result { - let mut ips = BTreeSet::new(); - if let Some(cert) = cert { - ips.extend( - cert.subject_alt_names() - .iter() - .flatten() - .filter_map(|a| a.ipaddress()) - .filter_map(|a| match a.len() { - 4 => Some::(<[u8; 4]>::try_from(a).unwrap().into()), - 16 => Some::(<[u8; 16]>::try_from(a).unwrap().into()), - _ => None, - }), - ); - if cert - .not_before() - .compare(Asn1Time::days_from_now(0)?.as_ref())? - == Ordering::Less - && cert - .not_after() - .compare(Asn1Time::days_from_now(30)?.as_ref())? - == Ordering::Greater - && ips.is_superset(&ip) - { - return Ok(cert.clone()); - } - } - ips.extend(ip.iter().copied()); - updated = true; - make_leaf_cert(signer, (&osk, &SANInfo::new(&applicant, hostname, ips))) - }; - Ok(( - Self { - ed25519: updated_cert(pair.map(|c| &c.ed25519), applicant.openssl_key_ed25519())?, - nistp256: updated_cert( - pair.map(|c| &c.nistp256), - applicant.openssl_key_nistp256(), - )?, - }, - updated, - )) - } -} pub async fn root_ca_start_time() -> Result { Ok(if check_time_is_synchronized().await? { @@ -101,55 +180,9 @@ pub async fn root_ca_start_time() -> Result { }) } -#[derive(Debug)] -pub struct SslManager { - hostname: Hostname, - root_cert: X509, - int_key: PKey, - int_cert: X509, - cert_cache: RwLock>, -} -impl SslManager { - pub fn new(account: &AccountInfo, start_time: SystemTime) -> Result { - let int_key = generate_key()?; - let int_cert = make_int_cert( - (&account.root_ca_key, &account.root_ca_cert), - &int_key, - start_time, - )?; - Ok(Self { - hostname: account.hostname.clone(), - root_cert: account.root_ca_cert.clone(), - int_key, - int_cert, - cert_cache: RwLock::new(BTreeMap::new()), - }) - } - pub async fn with_certs(&self, key: Key, ip: IpAddr) -> Result { - let mut ips = ips().await?; - ips.insert(ip); - let (pair, updated) = CertPair::updated( - self.cert_cache.read().await.get(&key), - &self.hostname, - (&self.int_key, &self.int_cert), - &key, - ips, - )?; - if updated { - self.cert_cache - .write() - .await - .insert(key.clone(), pair.clone()); - } - - Ok(key.with_certs(pair, self.int_cert.clone(), self.root_cert.clone())) - } -} - const EC_CURVE_NAME: nid::Nid = nid::Nid::X9_62_PRIME256V1; lazy_static::lazy_static! 
{ static ref EC_GROUP: EcGroup = EcGroup::from_curve_name(EC_CURVE_NAME).unwrap(); - static ref SSL_MUTEX: Mutex<()> = Mutex::new(()); // TODO: make thread safe } pub async fn export_key(key: &PKey, target: &Path) -> Result<(), Error> { @@ -245,18 +278,13 @@ pub fn make_root_cert( pub fn make_int_cert( signer: (&PKey, &X509), applicant: &PKey, - start_time: SystemTime, ) -> Result { let mut builder = X509Builder::new()?; builder.set_version(CERTIFICATE_VERSION)?; - let unix_start_time = unix_time(start_time); - - let embargo = Asn1Time::from_unix(unix_start_time - 86400)?; - builder.set_not_before(&embargo)?; + builder.set_not_before(signer.1.not_before())?; - let expiration = Asn1Time::from_unix(unix_start_time + (10 * 364 * 86400))?; - builder.set_not_after(&expiration)?; + builder.set_not_after(signer.1.not_after())?; builder.set_serial_number(&*rand_serial()?)?; @@ -309,13 +337,13 @@ pub fn make_int_cert( #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum MaybeWildcard { WithWildcard(String), - WithoutWildcard(String), + WithoutWildcard(InternedString), } impl MaybeWildcard { pub fn as_str(&self) -> &str { match self { MaybeWildcard::WithWildcard(s) => s.as_str(), - MaybeWildcard::WithoutWildcard(s) => s.as_str(), + MaybeWildcard::WithoutWildcard(s) => &**s, } } } @@ -334,18 +362,16 @@ pub struct SANInfo { pub ips: BTreeSet, } impl SANInfo { - pub fn new(key: &Key, hostname: &Hostname, ips: BTreeSet) -> Self { + pub fn new(hostnames: &BTreeSet) -> Self { let mut dns = BTreeSet::new(); - if let Some((id, _)) = key.interface() { - dns.insert(MaybeWildcard::WithWildcard(format!("{id}.embassy"))); - dns.insert(MaybeWildcard::WithWildcard(key.local_address().to_string())); - } else { - dns.insert(MaybeWildcard::WithoutWildcard("embassy".to_owned())); - dns.insert(MaybeWildcard::WithWildcard(hostname.local_domain_name())); - dns.insert(MaybeWildcard::WithoutWildcard(hostname.no_dot_host_name())); - dns.insert(MaybeWildcard::WithoutWildcard("localhost".to_owned())); + let mut ips = BTreeSet::new(); + for hostname in hostnames { + if let Ok(ip) = hostname.parse::() { + ips.insert(ip); + } else { + dns.insert(MaybeWildcard::WithoutWildcard(hostname.clone())); // TODO: wildcards? 
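The rewritten `SANInfo::new` above no longer derives names from a `Key`/`Hostname` pair; it takes the requested hostname set and splits entries that parse as IP addresses from DNS names (wildcard handling is still a TODO). A small self-contained sketch of that partition, assuming plain `String`s in place of `InternedString`/`MaybeWildcard`:

```rust
use std::collections::BTreeSet;
use std::net::IpAddr;

/// Split a hostname set into DNS SANs and IP SANs, the way SANInfo::new does.
fn partition_sans(hostnames: &BTreeSet<String>) -> (BTreeSet<String>, BTreeSet<IpAddr>) {
    let mut dns = BTreeSet::new();
    let mut ips = BTreeSet::new();
    for hostname in hostnames {
        if let Ok(ip) = hostname.parse::<IpAddr>() {
            ips.insert(ip); // numeric entries become IP subject alternative names
        } else {
            dns.insert(hostname.clone()); // everything else is treated as a DNS name
        }
    }
    (dns, ips)
}

fn main() {
    let hostnames: BTreeSet<String> = ["adjective-noun.local", "192.168.1.2", "example.onion"]
        .into_iter()
        .map(String::from)
        .collect();
    let (dns, ips) = partition_sans(&hostnames);
    assert_eq!(dns.len(), 2);
    assert_eq!(ips.len(), 1);
}
```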
+ } } - dns.insert(MaybeWildcard::WithWildcard(key.tor_address().to_string())); Self { dns, ips } } } @@ -443,16 +469,3 @@ pub fn make_leaf_cert( let cert = builder.build(); Ok(cert) } - -#[command(subcommands(size))] -pub async fn ssl() -> Result<(), Error> { - Ok(()) -} - -#[command] -pub async fn size(#[context] ctx: RpcContext) -> Result { - Ok(format!( - "Cert Catch size: {}", - ctx.net_controller.ssl.cert_cache.read().await.len() - )) -} diff --git a/core/startos/src/net/static_server.rs b/core/startos/src/net/static_server.rs index 761566a2c4..fa71672b32 100644 --- a/core/startos/src/net/static_server.rs +++ b/core/startos/src/net/static_server.rs @@ -1,21 +1,24 @@ use std::fs::Metadata; use std::future::Future; use std::path::{Path, PathBuf}; -use std::sync::Arc; use std::time::UNIX_EPOCH; use async_compression::tokio::bufread::GzipEncoder; -use color_eyre::eyre::eyre; +use axum::body::Body; +use axum::extract::{self as x, Request}; +use axum::response::Response; +use axum::routing::{any, get, post}; +use axum::Router; use digest::Digest; -use futures::FutureExt; +use futures::future::ready; use http::header::ACCEPT_ENCODING; use http::request::Parts as RequestParts; -use hyper::{Body, Method, Request, Response, StatusCode}; -use include_dir::{include_dir, Dir}; +use http::{HeaderMap, Method, StatusCode}; +use include_dir::Dir; use new_mime_guess::MimeGuess; use openssl::hash::MessageDigest; use openssl::x509::X509; -use rpc_toolkit::rpc_handler; +use rpc_toolkit::Server; use tokio::fs::File; use tokio::io::BufReader; use tokio_util::io::ReaderStream; @@ -24,26 +27,24 @@ use crate::context::{DiagnosticContext, InstallContext, RpcContext, SetupContext use crate::core::rpc_continuations::RequestGuid; use crate::db::subscribe; use crate::hostname::Hostname; -use crate::install::PKG_PUBLIC_DIR; -use crate::middleware::auth::{auth as auth_middleware, HasValidSession}; -use crate::middleware::cors::cors; -use crate::middleware::db::db as db_middleware; -use crate::middleware::diagnostic::diagnostic as diagnostic_middleware; -use crate::net::HttpHandler; +use crate::middleware::auth::{Auth, HasValidSession}; +use crate::middleware::cors::Cors; +use crate::middleware::db::SyncDb; +use crate::middleware::diagnostic::DiagnosticMode; use crate::{diagnostic_api, install_api, main_api, setup_api, Error, ErrorKind, ResultExt}; -static NOT_FOUND: &[u8] = b"Not Found"; -static METHOD_NOT_ALLOWED: &[u8] = b"Method Not Allowed"; -static NOT_AUTHORIZED: &[u8] = b"Not Authorized"; +const NOT_FOUND: &[u8] = b"Not Found"; +const METHOD_NOT_ALLOWED: &[u8] = b"Method Not Allowed"; +const NOT_AUTHORIZED: &[u8] = b"Not Authorized"; -static EMBEDDED_UIS: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/../../web/dist/static"); +#[cfg(all(feature = "daemon", not(feature = "test")))] +const EMBEDDED_UIS: Dir<'_> = + include_dir::include_dir!("$CARGO_MANIFEST_DIR/../../web/dist/static"); +#[cfg(not(all(feature = "daemon", not(feature = "test"))))] +const EMBEDDED_UIS: Dir<'_> = Dir::new("", &[]); const PROXY_STRIP_HEADERS: &[&str] = &["cookie", "host", "origin", "referer", "user-agent"]; -fn status_fn(_: i32) -> StatusCode { - StatusCode::OK -} - #[derive(Clone)] pub enum UiMode { Setup, @@ -63,180 +64,121 @@ impl UiMode { } } -pub async fn setup_ui_file_router(ctx: SetupContext) -> Result { - let handler: HttpHandler = Arc::new(move |req| { - let ctx = ctx.clone(); - - let ui_mode = UiMode::Setup; - async move { - let res = match req.uri().path() { - path if path.starts_with("/rpc/") => { - let rpc_handler = 
rpc_handler!({ - command: setup_api, - context: ctx, - status: status_fn, - middleware: [ - cors, - ] - }); - - rpc_handler(req) - .await - .map_err(|err| Error::new(eyre!("{}", err), crate::ErrorKind::Network)) - } - _ => alt_ui(req, ui_mode).await, - }; - - match res { - Ok(data) => Ok(data), - Err(err) => Ok(server_error(err)), - } - } - .boxed() - }); - - Ok(handler) +pub fn setup_ui_file_router(ctx: SetupContext) -> Router { + Router::new() + .route_service( + "/rpc/*path", + post(Server::new(move || ready(Ok(ctx.clone())), setup_api()).middleware(Cors::new())), + ) + .fallback(any(|request: Request| async move { + alt_ui(request, UiMode::Setup) + .await + .unwrap_or_else(server_error) + })) } -pub async fn diag_ui_file_router(ctx: DiagnosticContext) -> Result { - let handler: HttpHandler = Arc::new(move |req| { - let ctx = ctx.clone(); - let ui_mode = UiMode::Diag; - async move { - let res = match req.uri().path() { - path if path.starts_with("/rpc/") => { - let rpc_handler = rpc_handler!({ - command: diagnostic_api, - context: ctx, - status: status_fn, - middleware: [ - cors, - diagnostic_middleware, - ] - }); - - rpc_handler(req) - .await - .map_err(|err| Error::new(eyre!("{}", err), crate::ErrorKind::Network)) - } - _ => alt_ui(req, ui_mode).await, - }; - - match res { - Ok(data) => Ok(data), - Err(err) => Ok(server_error(err)), - } - } - .boxed() - }); - - Ok(handler) +pub fn diag_ui_file_router(ctx: DiagnosticContext) -> Router { + Router::new() + .route( + "/rpc/*path", + post( + Server::new(move || ready(Ok(ctx.clone())), diagnostic_api()) + .middleware(Cors::new()) + .middleware(DiagnosticMode::new()), + ), + ) + .fallback(any(|request: Request| async move { + alt_ui(request, UiMode::Diag) + .await + .unwrap_or_else(server_error) + })) } -pub async fn install_ui_file_router(ctx: InstallContext) -> Result { - let handler: HttpHandler = Arc::new(move |req| { - let ctx = ctx.clone(); - let ui_mode = UiMode::Install; - async move { - let res = match req.uri().path() { - path if path.starts_with("/rpc/") => { - let rpc_handler = rpc_handler!({ - command: install_api, - context: ctx, - status: status_fn, - middleware: [ - cors, - ] - }); - - rpc_handler(req) - .await - .map_err(|err| Error::new(eyre!("{}", err), crate::ErrorKind::Network)) - } - _ => alt_ui(req, ui_mode).await, - }; - - match res { - Ok(data) => Ok(data), - Err(err) => Ok(server_error(err)), - } - } - .boxed() - }); - - Ok(handler) +pub fn install_ui_file_router(ctx: InstallContext) -> Router { + Router::new() + .route("/rpc/*path", { + let ctx = ctx.clone(); + post(Server::new(move || ready(Ok(ctx.clone())), install_api()).middleware(Cors::new())) + }) + .fallback(any(|request: Request| async move { + alt_ui(request, UiMode::Install) + .await + .unwrap_or_else(server_error) + })) } -pub async fn main_ui_server_router(ctx: RpcContext) -> Result { - let handler: HttpHandler = Arc::new(move |req| { - let ctx = ctx.clone(); - - async move { - let res = match req.uri().path() { - path if path.starts_with("/rpc/") => { - let auth_middleware = auth_middleware(ctx.clone()); - let db_middleware = db_middleware(ctx.clone()); - let rpc_handler = rpc_handler!({ - command: main_api, - context: ctx, - status: status_fn, - middleware: [ - cors, - auth_middleware, - db_middleware, - ] - }); - - rpc_handler(req) +pub fn main_ui_server_router(ctx: RpcContext) -> Router { + Router::new() + .route("/rpc/*path", { + let ctx = ctx.clone(); + post( + Server::new(move || ready(Ok(ctx.clone())), main_api()) + .middleware(Cors::new()) + 
.middleware(Auth::new()) + .middleware(SyncDb::new()), + ) + }) + .route( + "/ws/db", + any({ + let ctx = ctx.clone(); + move |headers: HeaderMap, ws: x::WebSocketUpgrade| async move { + subscribe(ctx, headers, ws) .await - .map_err(|err| Error::new(eyre!("{}", err), crate::ErrorKind::Network)) + .unwrap_or_else(server_error) } - "/ws/db" => subscribe(ctx, req).await, - path if path.starts_with("/ws/rpc/") => { - match RequestGuid::from(path.strip_prefix("/ws/rpc/").unwrap()) { + }), + ) + .route( + "/ws/rpc/*path", + get({ + let ctx = ctx.clone(); + move |x::Path(path): x::Path, + ws: axum::extract::ws::WebSocketUpgrade| async move { + match RequestGuid::from(&path) { None => { tracing::debug!("No Guid Path"); - Ok::<_, Error>(bad_request()) + bad_request() } Some(guid) => match ctx.get_ws_continuation_handler(&guid).await { - Some(cont) => match cont(req).await { - Ok::<_, Error>(r) => Ok::<_, Error>(r), - Err(err) => Ok::<_, Error>(server_error(err)), - }, - _ => Ok::<_, Error>(not_found()), + Some(cont) => ws.on_upgrade(cont), + _ => not_found(), }, } } - path if path.starts_with("/rest/rpc/") => { - match RequestGuid::from(path.strip_prefix("/rest/rpc/").unwrap()) { + }), + ) + .route( + "/rest/rpc/*path", + any({ + let ctx = ctx.clone(); + move |request: x::Request| async move { + let path = request + .uri() + .path() + .strip_prefix("/rest/rpc/") + .unwrap_or_default(); + match RequestGuid::from(&path) { None => { tracing::debug!("No Guid Path"); - Ok::<_, Error>(bad_request()) + bad_request() } Some(guid) => match ctx.get_rest_continuation_handler(&guid).await { - None => Ok::<_, Error>(not_found()), - Some(cont) => match cont(req).await { - Ok::<_, Error>(r) => Ok::<_, Error>(r), - Err(e) => Ok::<_, Error>(server_error(e)), - }, + None => not_found(), + Some(cont) => cont(request).await.unwrap_or_else(server_error), }, } } - _ => main_embassy_ui(req, ctx).await, - }; - - match res { - Ok(data) => Ok(data), - Err(err) => Ok(server_error(err)), - } - } - .boxed() - }); - - Ok(handler) + }), + ) + .fallback(any(move |request: Request| async move { + main_start_os_ui(request, ctx) + .await + .unwrap_or_else(server_error) + })) } -async fn alt_ui(req: Request, ui_mode: UiMode) -> Result, Error> { +async fn alt_ui(req: Request, ui_mode: UiMode) -> Result { let (request_parts, _body) = req.into_parts(); match &request_parts.method { &Method::GET => { @@ -266,20 +208,21 @@ async fn alt_ui(req: Request, ui_mode: UiMode) -> Result, E async fn if_authorized< F: FnOnce() -> Fut, - Fut: Future, Error>> + Send + Sync, + Fut: Future> + Send + Sync, >( ctx: &RpcContext, parts: &RequestParts, f: F, -) -> Result, Error> { - if let Err(e) = HasValidSession::from_request_parts(parts, ctx).await { +) -> Result { + if let Err(e) = HasValidSession::from_header(parts.headers.get(http::header::COOKIE), ctx).await + { un_authorized(e, parts.uri.path()) } else { f().await } } -async fn main_embassy_ui(req: Request, ctx: RpcContext) -> Result, Error> { +async fn main_start_os_ui(req: Request, ctx: RpcContext) -> Result { let (request_parts, _body) = req.into_parts(); match ( &request_parts.method, @@ -291,21 +234,7 @@ async fn main_embassy_ui(req: Request, ctx: RpcContext) -> Result { - if_authorized(&ctx, &request_parts, || async { - let sub_path = Path::new(path); - if let Ok(rest) = sub_path.strip_prefix("package-data") { - FileData::from_path( - &request_parts, - &ctx.datadir.join(PKG_PUBLIC_DIR).join(rest), - ) - .await? 
- .into_response(&request_parts) - .await - } else { - Ok(not_found()) - } - }) - .await + todo!("pull directly from s9pk") } (&Method::GET, Some(("proxy", target))) => { if_authorized(&ctx, &request_parts, || async { @@ -322,19 +251,27 @@ async fn main_embassy_ui(req: Request, ctx: RpcContext) -> Result, ctx: RpcContext) -> Result Result, Error> { +fn un_authorized(err: Error, path: &str) -> Result { tracing::warn!("unauthorized for {} @{:?}", err, path); tracing::debug!("{:?}", err); Ok(Response::builder() @@ -378,7 +315,7 @@ fn un_authorized(err: Error, path: &str) -> Result, Error> { } /// HTTP status code 404 -fn not_found() -> Response { +fn not_found() -> Response { Response::builder() .status(StatusCode::NOT_FOUND) .body(NOT_FOUND.into()) @@ -386,28 +323,28 @@ fn not_found() -> Response { } /// HTTP status code 405 -fn method_not_allowed() -> Response { +fn method_not_allowed() -> Response { Response::builder() .status(StatusCode::METHOD_NOT_ALLOWED) .body(METHOD_NOT_ALLOWED.into()) .unwrap() } -fn server_error(err: Error) -> Response { +fn server_error(err: Error) -> Response { Response::builder() .status(StatusCode::INTERNAL_SERVER_ERROR) .body(err.to_string().into()) .unwrap() } -fn bad_request() -> Response { +fn bad_request() -> Response { Response::builder() .status(StatusCode::BAD_REQUEST) .body(Body::empty()) .unwrap() } -fn cert_send(cert: &X509, hostname: &Hostname) -> Result, Error> { +fn cert_send(cert: &X509, hostname: &Hostname) -> Result { let pem = cert.to_pem()?; Response::builder() .status(StatusCode::OK) @@ -499,12 +436,12 @@ impl FileData { let (len, data) = if encoding == Some("gzip") { ( None, - Body::wrap_stream(ReaderStream::new(GzipEncoder::new(BufReader::new(file)))), + Body::from_stream(ReaderStream::new(GzipEncoder::new(BufReader::new(file)))), ) } else { ( Some(metadata.len()), - Body::wrap_stream(ReaderStream::new(file)), + Body::from_stream(ReaderStream::new(file)), ) }; @@ -519,7 +456,7 @@ impl FileData { }) } - async fn into_response(self, req: &RequestParts) -> Result, Error> { + async fn into_response(self, req: &RequestParts) -> Result { let mut builder = Response::builder(); if let Some(mime) = self.mime { builder = builder.header(http::header::CONTENT_TYPE, &*mime); diff --git a/core/startos/src/net/tor.rs b/core/startos/src/net/tor.rs index 1bf4c5f446..c87f506dcc 100644 --- a/core/startos/src/net/tor.rs +++ b/core/startos/src/net/tor.rs @@ -4,7 +4,7 @@ use std::sync::atomic::AtomicBool; use std::sync::{Arc, Weak}; use std::time::Duration; -use clap::ArgMatches; +use clap::Parser; use color_eyre::eyre::eyre; use futures::future::BoxFuture; use futures::{FutureExt, TryStreamExt}; @@ -12,8 +12,9 @@ use helpers::NonDetachingJoinHandle; use itertools::Itertools; use lazy_static::lazy_static; use regex::Regex; -use rpc_toolkit::command; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{command, from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; use tokio::net::TcpStream; use tokio::process::Command; use tokio::sync::{mpsc, oneshot}; @@ -27,13 +28,44 @@ use crate::logs::{ cli_logs_generic_follow, cli_logs_generic_nofollow, fetch_logs, follow_logs, journalctl, LogFollowResponse, LogResponse, LogSource, }; -use crate::util::serde::{display_serializable, IoFormat}; -use crate::util::{display_none, Invoke}; -use crate::{Error, ErrorKind, ResultExt as _}; +use crate::prelude::*; +use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; +use crate::util::Invoke; pub const 
SYSTEMD_UNIT: &str = "tor@default"; const STARTING_HEALTH_TIMEOUT: u64 = 120; // 2min +#[derive(Debug, Default, Deserialize, Serialize)] +pub struct OnionStore(BTreeMap); +impl Map for OnionStore { + type Key = OnionAddressV3; + type Value = TorSecretKeyV3; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key.get_address_without_dot_onion()) + } +} +impl OnionStore { + pub fn new() -> Self { + Self::default() + } + pub fn insert(&mut self, key: TorSecretKeyV3) { + self.0.insert(key.public().get_onion_address(), key); + } +} +impl Model { + pub fn new_key(&mut self) -> Result { + let key = TorSecretKeyV3::generate(); + self.insert(&key.public().get_onion_address(), &key)?; + Ok(key) + } + pub fn insert_key(&mut self, key: &TorSecretKeyV3) -> Result<(), Error> { + self.insert(&key.public().get_onion_address(), &key) + } + pub fn get_key(&self, address: &OnionAddressV3) -> Result { + self.as_idx(address).or_not_found(address)?.de() + } +} + enum ErrorLogSeverity { Fatal { wipe_state: bool }, Unknown { wipe_state: bool }, @@ -53,16 +85,37 @@ lazy_static! { static ref PROGRESS_REGEX: Regex = Regex::new("PROGRESS=([0-9]+)").unwrap(); } -#[command(subcommands(list_services, logs, reset))] -pub fn tor() -> Result<(), Error> { - Ok(()) +pub fn tor() -> ParentHandler { + ParentHandler::new() + .subcommand( + "list-services", + from_fn_async(list_services) + .with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(display_services(handle.params, result)) + }) + .with_remote_cli::(), + ) + .subcommand("logs", logs()) + .subcommand( + "reset", + from_fn_async(reset) + .no_display() + .with_remote_cli::(), + ) +} +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ResetParams { + #[arg(name = "wipe-state", short = 'w', long = "wipe-state")] + wipe_state: bool, + reason: String, } -#[command(display(display_none))] pub async fn reset( - #[context] ctx: RpcContext, - #[arg(rename = "wipe-state", short = 'w', long = "wipe-state")] wipe_state: bool, - #[arg] reason: String, + ctx: RpcContext, + ResetParams { reason, wipe_state }: ResetParams, ) -> Result<(), Error> { ctx.net_controller .tor @@ -70,11 +123,11 @@ pub async fn reset( .await } -fn display_services(services: Vec, matches: &ArgMatches) { +pub fn display_services(params: WithIoFormat, services: Vec) { use prettytable::*; - if matches.is_present("format") { - return display_serializable(services, matches); + if let Some(format) = params.format { + return display_serializable(format, services); } let mut table = Table::new(); @@ -85,32 +138,54 @@ fn display_services(services: Vec, matches: &ArgMatches) { table.print_tty(false).unwrap(); } -#[command(rename = "list-services", display(display_services))] -pub async fn list_services( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result, Error> { +pub async fn list_services(ctx: RpcContext, _: Empty) -> Result, Error> { ctx.net_controller.tor.list_services().await } -#[command( - custom_cli(cli_logs(async, context(CliContext))), - subcommands(self(logs_nofollow(async)), logs_follow), - display(display_none) -)] -pub async fn logs( - #[arg(short = 'l', long = "limit")] limit: Option, - #[arg(short = 'c', long = "cursor")] cursor: Option, - #[arg(short = 'B', long = "before", default)] before: bool, - #[arg(short = 'f', long = "follow", default)] follow: bool, -) -> Result<(Option, Option, bool, bool), Error> { - Ok((limit, cursor, 
before, follow)) +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct LogsParams { + #[arg(short = 'l', long = "limit")] + limit: Option, + #[arg(short = 'c', long = "cursor")] + cursor: Option, + #[arg(short = 'B', long = "before")] + #[serde(default)] + before: bool, + #[arg(short = 'f', long = "follow")] + #[serde(default)] + follow: bool, +} + +pub fn logs() -> ParentHandler { + ParentHandler::new() + .root_handler( + from_fn_async(cli_logs) + .no_display() + .with_inherited(|params, _| params), + ) + .root_handler( + from_fn_async(logs_nofollow) + .with_inherited(|params, _| params) + .no_cli(), + ) + .subcommand( + "follow", + from_fn_async(logs_follow) + .with_inherited(|params, _| params) + .no_cli(), + ) } pub async fn cli_logs( ctx: CliContext, - (limit, cursor, before, follow): (Option, Option, bool, bool), + _: Empty, + LogsParams { + limit, + cursor, + before, + follow, + }: LogsParams, ) -> Result<(), RpcError> { if follow { if cursor.is_some() { @@ -131,16 +206,22 @@ pub async fn cli_logs( } } pub async fn logs_nofollow( - _ctx: (), - (limit, cursor, before, _): (Option, Option, bool, bool), + _: AnyContext, + _: Empty, + LogsParams { + limit, + cursor, + before, + .. + }: LogsParams, ) -> Result { fetch_logs(LogSource::Unit(SYSTEMD_UNIT), limit, cursor, before).await } -#[command(rpc_only, rename = "follow", display(display_none))] pub async fn logs_follow( - #[context] ctx: RpcContext, - #[parent_data] (limit, _, _, _): (Option, Option, bool, bool), + ctx: RpcContext, + _: Empty, + LogsParams { limit, .. }: LogsParams, ) -> Result { follow_logs(ctx, LogSource::Unit(SYSTEMD_UNIT), limit).await } @@ -158,33 +239,29 @@ impl TorController { pub async fn add( &self, key: TorSecretKeyV3, - external: u16, - target: SocketAddr, - ) -> Result, Error> { + bindings: Vec<(u16, SocketAddr)>, + ) -> Result>, Error> { let (reply, res) = oneshot::channel(); self.0 .send .send(TorCommand::AddOnion { key, - external, - target, + bindings, reply, }) - .ok() - .ok_or_else(|| Error::new(eyre!("TorControl died"), ErrorKind::Tor))?; + .map_err(|_| Error::new(eyre!("TorControl died"), ErrorKind::Tor))?; res.await - .ok() - .ok_or_else(|| Error::new(eyre!("TorControl died"), ErrorKind::Tor)) + .map_err(|_| Error::new(eyre!("TorControl died"), ErrorKind::Tor)) } pub async fn gc( &self, - key: Option, + addr: Option, external: Option, ) -> Result<(), Error> { self.0 .send - .send(TorCommand::GC { key, external }) + .send(TorCommand::GC { addr, external }) .ok() .ok_or_else(|| Error::new(eyre!("TorControl died"), ErrorKind::Tor)) } @@ -216,7 +293,7 @@ impl TorController { .lines() .map(|l| l.trim()) .filter(|l| !l.is_empty()) - .map(|l| l.parse().with_kind(ErrorKind::Tor)) + .map(|l| l.parse::().with_kind(ErrorKind::Tor)) .collect() } } @@ -229,12 +306,11 @@ type AuthenticatedConnection = AuthenticatedConn< enum TorCommand { AddOnion { key: TorSecretKeyV3, - external: u16, - target: SocketAddr, - reply: oneshot::Sender>, + bindings: Vec<(u16, SocketAddr)>, + reply: oneshot::Sender>>, }, GC { - key: Option, + addr: Option, external: Option, }, GetInfo { @@ -252,7 +328,13 @@ async fn torctl( tor_control: SocketAddr, tor_socks: SocketAddr, recv: &mut mpsc::UnboundedReceiver, - services: &mut BTreeMap<[u8; 64], BTreeMap>>>, + services: &mut BTreeMap< + OnionAddressV3, + ( + TorSecretKeyV3, + BTreeMap>>, + ), + >, wipe_state: &AtomicBool, health_timeout: &mut Duration, ) -> Result<(), Error> { @@ -370,27 +452,32 @@ async fn 
torctl( match command { TorCommand::AddOnion { key, - external, - target, + bindings, reply, } => { - let mut service = if let Some(service) = services.remove(&key.as_bytes()) { + let addr = key.public().get_onion_address(); + let mut service = if let Some((_key, service)) = services.remove(&addr) { + debug_assert_eq!(key, _key); service } else { BTreeMap::new() }; - let mut binding = service.remove(&external).unwrap_or_default(); - let rc = if let Some(rc) = - Weak::upgrade(&binding.remove(&target).unwrap_or_default()) - { - rc - } else { - Arc::new(()) - }; - binding.insert(target, Arc::downgrade(&rc)); - service.insert(external, binding); - services.insert(key.as_bytes(), service); - reply.send(rc).unwrap_or_default(); + let mut rcs = Vec::with_capacity(bindings.len()); + for (external, target) in bindings { + let mut binding = service.remove(&external).unwrap_or_default(); + let rc = if let Some(rc) = + Weak::upgrade(&binding.remove(&target).unwrap_or_default()) + { + rc + } else { + Arc::new(()) + }; + binding.insert(target, Arc::downgrade(&rc)); + service.insert(external, binding); + rcs.push(rc); + } + services.insert(addr, (key, service)); + reply.send(rcs).unwrap_or_default(); } TorCommand::GetInfo { reply, .. } => { reply @@ -430,8 +517,7 @@ async fn torctl( ) .await?; - for (key, service) in std::mem::take(services) { - let key = TorSecretKeyV3::from(key); + for (addr, (key, service)) in std::mem::take(services) { let bindings = service .iter() .flat_map(|(ext, int)| { @@ -441,7 +527,7 @@ async fn torctl( }) .collect::>(); if !bindings.is_empty() { - services.insert(key.as_bytes(), service); + services.insert(addr, (key.clone(), service)); connection .add_onion_v3(&key, false, false, false, None, &mut bindings.iter()) .await?; @@ -453,31 +539,33 @@ async fn torctl( match command { TorCommand::AddOnion { key, - external, - target, + bindings, reply, } => { let mut rm_res = Ok(()); - let onion_base = key - .public() - .get_onion_address() - .get_address_without_dot_onion(); - let mut service = if let Some(service) = services.remove(&key.as_bytes()) { + let addr = key.public().get_onion_address(); + let onion_base = addr.get_address_without_dot_onion(); + let mut service = if let Some((_key, service)) = services.remove(&addr) { + debug_assert_eq!(_key, key); rm_res = connection.del_onion(&onion_base).await; service } else { BTreeMap::new() }; - let mut binding = service.remove(&external).unwrap_or_default(); - let rc = if let Some(rc) = - Weak::upgrade(&binding.remove(&target).unwrap_or_default()) - { - rc - } else { - Arc::new(()) - }; - binding.insert(target, Arc::downgrade(&rc)); - service.insert(external, binding); + let mut rcs = Vec::with_capacity(bindings.len()); + for (external, target) in bindings { + let mut binding = service.remove(&external).unwrap_or_default(); + let rc = if let Some(rc) = + Weak::upgrade(&binding.remove(&target).unwrap_or_default()) + { + rc + } else { + Arc::new(()) + }; + binding.insert(target, Arc::downgrade(&rc)); + service.insert(external, binding); + rcs.push(rc); + } let bindings = service .iter() .flat_map(|(ext, int)| { @@ -486,25 +574,21 @@ async fn torctl( .map(|(addr, _)| (*ext, SocketAddr::from(*addr))) }) .collect::>(); - services.insert(key.as_bytes(), service); - reply.send(rc).unwrap_or_default(); + services.insert(addr, (key.clone(), service)); + reply.send(rcs).unwrap_or_default(); rm_res?; connection .add_onion_v3(&key, false, false, false, None, &mut bindings.iter()) .await?; } - TorCommand::GC { key, external } => { - for key in 
if key.is_some() { - itertools::Either::Left(key.into_iter().map(|k| k.as_bytes())) + TorCommand::GC { addr, external } => { + for addr in if addr.is_some() { + itertools::Either::Left(addr.into_iter()) } else { itertools::Either::Right(services.keys().cloned().collect_vec().into_iter()) } { - let key = TorSecretKeyV3::from(key); - let onion_base = key - .public() - .get_onion_address() - .get_address_without_dot_onion(); - if let Some(mut service) = services.remove(&key.as_bytes()) { + if let Some((key, mut service)) = services.remove(&addr) { + let onion_base: String = addr.get_address_without_dot_onion(); for external in if external.is_some() { itertools::Either::Left(external.into_iter()) } else { @@ -533,7 +617,7 @@ async fn torctl( }) .collect::>(); if !bindings.is_empty() { - services.insert(key.as_bytes(), service); + services.insert(addr, (key.clone(), service)); } rm_res?; if !bindings.is_empty() { @@ -684,7 +768,7 @@ async fn test() { let mut conn = torut::control::UnauthenticatedConn::new( TcpStream::connect(SocketAddr::from(([127, 0, 0, 1], 9051))) .await - .unwrap(), // TODO + .unwrap(), ); let auth = conn .load_protocol_info() diff --git a/core/startos/src/net/utils.rs b/core/startos/src/net/utils.rs index e496bd1f7c..6de319a5ee 100644 --- a/core/startos/src/net/utils.rs +++ b/core/startos/src/net/utils.rs @@ -1,4 +1,3 @@ -use std::convert::Infallible; use std::net::{Ipv4Addr, Ipv6Addr, SocketAddr}; use std::path::Path; @@ -120,16 +119,16 @@ impl SingleAccept { Self(Some(conn)) } } -impl hyper::server::accept::Accept for SingleAccept { - type Conn = T; - type Error = Infallible; - fn poll_accept( - self: std::pin::Pin<&mut Self>, - _cx: &mut std::task::Context<'_>, - ) -> std::task::Poll>> { - std::task::Poll::Ready(self.project().0.take().map(Ok)) - } -} +// impl axum_server::accept::Accept for SingleAccept { +// type Conn = T; +// type Error = Infallible; +// fn poll_accept( +// self: std::pin::Pin<&mut Self>, +// _cx: &mut std::task::Context<'_>, +// ) -> std::task::Poll>> { +// std::task::Poll::Ready(self.project().0.take().map(Ok)) +// } +// } pub struct TcpListeners { listeners: Vec, @@ -147,20 +146,21 @@ impl TcpListeners { .0 } } -impl hyper::server::accept::Accept for TcpListeners { - type Conn = TcpStream; - type Error = std::io::Error; +// impl hyper::server::accept::Accept for TcpListeners { +// type Conn = TcpStream; +// type Error = std::io::Error; - fn poll_accept( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll>> { - for listener in self.listeners.iter() { - let poll = listener.poll_accept(cx); - if poll.is_ready() { - return poll.map(|a| a.map(|a| a.0)).map(Some); - } - } - std::task::Poll::Pending - } -} +// fn poll_accept( +// self: std::pin::Pin<&mut Self>, +// cx: &mut std::task::Context<'_>, +// ) -> std::task::Poll>> { +// for listener in self.listeners.iter() { +// let poll = listener.poll_accept(cx); +// if poll.is_ready() { +// return poll.map(|a| a.map(|a| a.0)).map(Some); +// } +// } +// std::task::Poll::Pending +// } +// } +// TODO diff --git a/core/startos/src/net/vhost.rs b/core/startos/src/net/vhost.rs index bfbba05728..46838ed516 100644 --- a/core/startos/src/net/vhost.rs +++ b/core/startos/src/net/vhost.rs @@ -1,56 +1,54 @@ use std::collections::BTreeMap; -use std::convert::Infallible; use std::net::{IpAddr, Ipv6Addr, SocketAddr}; -use std::str::FromStr; use std::sync::{Arc, Weak}; use std::time::Duration; use color_eyre::eyre::eyre; use helpers::NonDetachingJoinHandle; -use http::{Response, Uri}; 
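Stepping back to the `TorCommand::AddOnion` handling above: each (external port, target) binding is tracked through an `Arc`/`Weak` pair, so a binding stays alive only while some caller still holds the returned `Arc`, and `GC` can drop the rest. A self-contained sketch of that refcount pattern with the surrounding Tor state stripped away (the map type here is simplified, not the actual `services` structure):

```rust
use std::collections::BTreeMap;
use std::net::SocketAddr;
use std::sync::{Arc, Weak};

/// Per-target refcounts: callers hold an `Arc<()>`; the registry only keeps
/// a `Weak`, so a binding counts as dead once every caller drops its `Arc`.
fn claim(bindings: &mut BTreeMap<SocketAddr, Weak<()>>, target: SocketAddr) -> Arc<()> {
    let rc = bindings
        .get(&target)
        .and_then(Weak::upgrade) // reuse the live refcount if any caller still holds it
        .unwrap_or_else(|| Arc::new(()));
    bindings.insert(target, Arc::downgrade(&rc));
    rc
}

fn main() {
    let mut bindings = BTreeMap::new();
    let target: SocketAddr = "127.0.0.1:8080".parse().unwrap();
    let a = claim(&mut bindings, target);
    let b = claim(&mut bindings, target);
    assert!(Arc::ptr_eq(&a, &b)); // same underlying refcount while `a` is alive
    drop(a);
    drop(b);
    assert!(bindings[&target].upgrade().is_none()); // binding is now collectable
}
```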
-use hyper::service::{make_service_fn, service_fn}; -use hyper::Body; +use imbl_value::InternedString; use models::ResultExt; +use serde::{Deserialize, Serialize}; use tokio::net::{TcpListener, TcpStream}; use tokio::sync::{Mutex, RwLock}; +use tokio_rustls::rustls::pki_types::{ + CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer, ServerName, +}; use tokio_rustls::rustls::server::Acceptor; use tokio_rustls::rustls::{RootCertStore, ServerConfig}; use tokio_rustls::{LazyConfigAcceptor, TlsConnector}; use tracing::instrument; +use ts_rs::TS; -use crate::net::keys::Key; -use crate::net::ssl::SslManager; -use crate::net::utils::SingleAccept; use crate::prelude::*; use crate::util::io::{BackTrackingReader, TimeoutStream}; +use crate::util::serde::MaybeUtf8String; // not allowed: <=1024, >=32768, 5355, 5432, 9050, 6010, 9051, 5353 pub struct VHostController { - ssl: Arc, + db: PatchDb, servers: Mutex>, } impl VHostController { - pub fn new(ssl: Arc) -> Self { + pub fn new(db: PatchDb) -> Self { Self { - ssl, + db, servers: Mutex::new(BTreeMap::new()), } } #[instrument(skip_all)] pub async fn add( &self, - key: Key, hostname: Option, external: u16, target: SocketAddr, - connect_ssl: Result<(), AlpnInfo>, + connect_ssl: Result<(), AlpnInfo>, // Ok: yes, connect using ssl, pass through alpn; Err: connect tcp, use provided strategy for alpn ) -> Result, Error> { let mut writable = self.servers.lock().await; let server = if let Some(server) = writable.remove(&external) { server } else { - VHostServer::new(external, self.ssl.clone()).await? + VHostServer::new(external, self.db.clone()).await? }; let rc = server .add( @@ -58,7 +56,6 @@ impl VHostController { TargetInfo { addr: target, connect_ssl, - key, }, ) .await; @@ -82,13 +79,19 @@ impl VHostController { struct TargetInfo { addr: SocketAddr, connect_ssl: Result<(), AlpnInfo>, - key: Key, } -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] pub enum AlpnInfo { Reflect, - Specified(Vec>), + Specified(Vec), +} +impl Default for AlpnInfo { + fn default() -> Self { + Self::Reflect + } } struct VHostServer { @@ -97,7 +100,7 @@ struct VHostServer { } impl VHostServer { #[instrument(skip_all)] - async fn new(port: u16, ssl: Arc) -> Result { + async fn new(port: u16, db: PatchDb) -> Result { // check if port allowed let listener = TcpListener::bind(SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), port)) .await @@ -108,13 +111,13 @@ impl VHostServer { _thread: tokio::spawn(async move { loop { match listener.accept().await { - Ok((stream, _)) => { + Ok((stream, sock_addr)) => { let stream = Box::pin(TimeoutStream::new(stream, Duration::from_secs(300))); let mut stream = BackTrackingReader::new(stream); stream.start_buffering(); let mapping = mapping.clone(); - let ssl = ssl.clone(); + let db = db.clone(); tokio::spawn(async move { if let Err(e) = async { let mid = match LazyConfigAcceptor::new( @@ -125,37 +128,38 @@ impl VHostServer { { Ok(a) => a, Err(_) => { - stream.rewind(); - return hyper::server::Server::builder( - SingleAccept::new(stream), - ) - .serve(make_service_fn(|_| async { - Ok::<_, Infallible>(service_fn(|req| async move { - let host = req - .headers() - .get(http::header::HOST) - .and_then(|host| host.to_str().ok()); - let uri = Uri::from_parts({ - let mut parts = - req.uri().to_owned().into_parts(); - parts.authority = host - .map(FromStr::from_str) - .transpose()?; - parts - })?; - Response::builder() - 
.status( - http::StatusCode::TEMPORARY_REDIRECT, - ) - .header( - http::header::LOCATION, - uri.to_string(), - ) - .body(Body::default()) - })) - })) - .await - .with_kind(crate::ErrorKind::Network); + // stream.rewind(); + // return hyper::server::Server::builder( + // SingleAccept::new(stream), + // ) + // .serve(make_service_fn(|_| async { + // Ok::<_, Infallible>(service_fn(|req| async move { + // let host = req + // .headers() + // .get(http::header::HOST) + // .and_then(|host| host.to_str().ok()); + // let uri = Uri::from_parts({ + // let mut parts = + // req.uri().to_owned().into_parts(); + // parts.authority = host + // .map(FromStr::from_str) + // .transpose()?; + // parts + // })?; + // Response::builder() + // .status( + // http::StatusCode::TEMPORARY_REDIRECT, + // ) + // .header( + // http::header::LOCATION, + // uri.to_string(), + // ) + // .body(Body::default()) + // })) + // })) + // .await + // .with_kind(crate::ErrorKind::Network); + todo!() } }; let target_name = @@ -169,6 +173,7 @@ impl VHostServer { .find(|(_, rc)| rc.strong_count() > 0) .or_else(|| { if target_name + .as_ref() .map(|s| s.parse::().is_ok()) .unwrap_or(true) { @@ -186,10 +191,23 @@ impl VHostServer { if let Some(target) = target { let mut tcp_stream = TcpStream::connect(target.addr).await?; - let key = - ssl.with_certs(target.key, target.addr.ip()).await?; + let hostnames = target_name + .as_ref() + .into_iter() + .map(InternedString::intern) + .chain(std::iter::once(InternedString::from_display( + &sock_addr.ip(), + ))) + .collect(); + let key = db + .mutate(|v| { + v.as_private_mut() + .as_key_store_mut() + .as_local_certs_mut() + .cert_for(&hostnames) + }) + .await?; let cfg = ServerConfig::builder() - .with_safe_defaults() .with_no_client_auth(); let mut cfg = if mid.client_hello().signature_schemes().contains( @@ -199,45 +217,46 @@ impl VHostServer { key.fullchain_ed25519() .into_iter() .map(|c| { - Ok(tokio_rustls::rustls::Certificate( + Ok(tokio_rustls::rustls::pki_types::CertificateDer::from( c.to_der()?, )) }) .collect::>()?, - tokio_rustls::rustls::PrivateKey( - key.key() - .openssl_key_ed25519() - .private_key_to_der()?, - ), + PrivateKeyDer::from(PrivatePkcs8KeyDer::from( + key.leaf + .keys + .ed25519 + .private_key_to_pkcs8()?, + )), ) } else { cfg.with_single_cert( key.fullchain_nistp256() .into_iter() .map(|c| { - Ok(tokio_rustls::rustls::Certificate( + Ok(tokio_rustls::rustls::pki_types::CertificateDer::from( c.to_der()?, )) }) .collect::>()?, - tokio_rustls::rustls::PrivateKey( - key.key() - .openssl_key_nistp256() - .private_key_to_der()?, - ), + PrivateKeyDer::from(PrivatePkcs8KeyDer::from( + key.leaf + .keys + .nistp256 + .private_key_to_pkcs8()?, + )), ) } .with_kind(crate::ErrorKind::OpenSsl)?; match target.connect_ssl { Ok(()) => { let mut client_cfg = - tokio_rustls::rustls::ClientConfig::builder() - .with_safe_defaults() + tokio_rustls::rustls::ClientConfig::builder() .with_root_certificates({ let mut store = RootCertStore::empty(); store.add( - &tokio_rustls::rustls::Certificate( - key.root_ca().to_der()?, + CertificateDer::from( + key.root.to_der()?, ), ).with_kind(crate::ErrorKind::OpenSsl)?; store @@ -253,13 +272,9 @@ impl VHostServer { let mut target_stream = TlsConnector::from(Arc::new(client_cfg)) .connect_with( - key.key() - .internal_address() - .as_str() - .try_into() - .with_kind( - crate::ErrorKind::OpenSsl, - )?, + ServerName::IpAddress( + target.addr.ip().into(), + ), tcp_stream, |conn| { cfg.alpn_protocols.extend( @@ -310,7 +325,7 @@ impl VHostServer { .await } 
Err(AlpnInfo::Specified(alpn)) => { - cfg.alpn_protocols = alpn; + cfg.alpn_protocols = alpn.into_iter().map(|a| a.0).collect(); let mut tls_stream = match mid.into_stream(Arc::new(cfg)).await { Ok(a) => a, diff --git a/core/startos/src/net/web_server.rs b/core/startos/src/net/web_server.rs index c2e25a4134..a89aae92f3 100644 --- a/core/startos/src/net/web_server.rs +++ b/core/startos/src/net/web_server.rs @@ -1,18 +1,15 @@ -use std::convert::Infallible; use std::net::SocketAddr; +use std::time::Duration; -use futures::future::ready; -use futures::FutureExt; +use axum::Router; +use axum_server::Handle; use helpers::NonDetachingJoinHandle; -use hyper::service::{make_service_fn, service_fn}; -use hyper::Server; use tokio::sync::oneshot; use crate::context::{DiagnosticContext, InstallContext, RpcContext, SetupContext}; use crate::net::static_server::{ diag_ui_file_router, install_ui_file_router, main_ui_server_router, setup_ui_file_router, }; -use crate::net::HttpHandler; use crate::Error; pub struct WebServer { @@ -20,18 +17,18 @@ pub struct WebServer { thread: NonDetachingJoinHandle<()>, } impl WebServer { - pub fn new(bind: SocketAddr, router: HttpHandler) -> Self { + pub fn new(bind: SocketAddr, router: Router) -> Self { let (shutdown, shutdown_recv) = oneshot::channel(); let thread = NonDetachingJoinHandle::from(tokio::spawn(async move { - let server = Server::bind(&bind) - .http1_preserve_header_case(true) - .http1_title_case_headers(true) - .serve(make_service_fn(move |_| { - let router = router.clone(); - ready(Ok::<_, Infallible>(service_fn(move |req| router(req)))) - })) - .with_graceful_shutdown(shutdown_recv.map(|_| ())); - if let Err(e) = server.await { + let handle = Handle::new(); + let mut server = axum_server::bind(bind).handle(handle.clone()); + server.http_builder().http1().preserve_header_case(true); + server.http_builder().http1().title_case_headers(true); + + if let (Err(e), _) = tokio::join!(server.serve(router.into_make_service()), async { + let _ = shutdown_recv.await; + handle.graceful_shutdown(Some(Duration::from_secs(0))); + }) { tracing::error!("Spawning hyper server error: {}", e); } })); @@ -43,19 +40,19 @@ impl WebServer { self.thread.await.unwrap() } - pub async fn main(bind: SocketAddr, ctx: RpcContext) -> Result { - Ok(Self::new(bind, main_ui_server_router(ctx).await?)) + pub fn main(bind: SocketAddr, ctx: RpcContext) -> Result { + Ok(Self::new(bind, main_ui_server_router(ctx))) } - pub async fn setup(bind: SocketAddr, ctx: SetupContext) -> Result { - Ok(Self::new(bind, setup_ui_file_router(ctx).await?)) + pub fn setup(bind: SocketAddr, ctx: SetupContext) -> Result { + Ok(Self::new(bind, setup_ui_file_router(ctx))) } - pub async fn diagnostic(bind: SocketAddr, ctx: DiagnosticContext) -> Result { - Ok(Self::new(bind, diag_ui_file_router(ctx).await?)) + pub fn diagnostic(bind: SocketAddr, ctx: DiagnosticContext) -> Result { + Ok(Self::new(bind, diag_ui_file_router(ctx))) } - pub async fn install(bind: SocketAddr, ctx: InstallContext) -> Result { - Ok(Self::new(bind, install_ui_file_router(ctx).await?)) + pub fn install(bind: SocketAddr, ctx: InstallContext) -> Result { + Ok(Self::new(bind, install_ui_file_router(ctx))) } } diff --git a/core/startos/src/net/wifi.rs b/core/startos/src/net/wifi.rs index 8429f9205a..2202f5e577 100644 --- a/core/startos/src/net/wifi.rs +++ b/core/startos/src/net/wifi.rs @@ -3,19 +3,21 @@ use std::path::Path; use std::sync::Arc; use std::time::Duration; -use clap::ArgMatches; +use clap::builder::TypedValueParser; +use clap::Parser; 
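For the `WebServer::new` rewrite above (in `web_server.rs`): the hyper `Server::bind` plus `with_graceful_shutdown` combination is replaced by `axum_server::bind` and a `Handle`, with `tokio::join!` racing the server against the shutdown signal. Below is a minimal sketch of that pattern, assuming axum 0.7 / axum-server 0.6 era APIs and omitting the HTTP/1 header tweaks; `serve` and the demo router are illustrative names, not from the codebase:

```rust
use std::net::SocketAddr;
use std::time::Duration;

use axum::routing::get;
use axum::Router;
use axum_server::Handle;
use tokio::sync::oneshot;

/// Serve `router` on `bind`, shutting down gracefully once `shutdown_recv` fires.
async fn serve(bind: SocketAddr, router: Router, shutdown_recv: oneshot::Receiver<()>) {
    let handle = Handle::new();
    let server = axum_server::bind(bind).handle(handle.clone());
    let (res, _) = tokio::join!(server.serve(router.into_make_service()), async {
        let _ = shutdown_recv.await; // resolves on send, or if the sender is dropped
        handle.graceful_shutdown(Some(Duration::from_secs(0))); // zero grace period
    });
    if let Err(e) = res {
        eprintln!("server error: {e}");
    }
}

#[tokio::main]
async fn main() {
    let (shutdown, shutdown_recv) = oneshot::channel();
    let app = Router::new().route("/", get(|| async { "ok" }));
    // End the example after one second so it terminates on its own.
    tokio::spawn(async move {
        tokio::time::sleep(Duration::from_secs(1)).await;
        let _ = shutdown.send(());
    });
    serve(([127, 0, 0, 1], 0).into(), app, shutdown_recv).await;
}
```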
use isocountry::CountryCode; use lazy_static::lazy_static; use regex::Regex; -use rpc_toolkit::command; +use rpc_toolkit::{command, from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; use tokio::process::Command; use tokio::sync::RwLock; use tracing::instrument; -use crate::context::RpcContext; +use crate::context::{CliContext, RpcContext}; use crate::prelude::*; -use crate::util::serde::{display_serializable, IoFormat}; -use crate::util::{display_none, Invoke}; +use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; +use crate::util::Invoke; use crate::{Error, ErrorKind}; type WifiManager = Arc>; @@ -31,28 +33,69 @@ pub fn wifi_manager(ctx: &RpcContext) -> Result<&WifiManager, Error> { } } -#[command(subcommands(add, connect, delete, get, country, available))] -pub async fn wifi() -> Result<(), Error> { - Ok(()) +pub fn wifi() -> ParentHandler { + ParentHandler::new() + .subcommand( + "add", + from_fn_async(add) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "connect", + from_fn_async(connect) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "delete", + from_fn_async(delete) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "get", + from_fn_async(get) + .with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(display_wifi_info(handle.params, result)) + }) + .with_remote_cli::(), + ) + .subcommand("country", country()) + .subcommand("available", available()) } -#[command(subcommands(get_available))] -pub async fn available() -> Result<(), Error> { - Ok(()) +pub fn available() -> ParentHandler { + ParentHandler::new().subcommand( + "get", + from_fn_async(get_available) + .with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(display_wifi_list(handle.params, result)) + }) + .with_remote_cli::(), + ) } -#[command(subcommands(set_country))] -pub async fn country() -> Result<(), Error> { - Ok(()) +pub fn country() -> ParentHandler { + ParentHandler::new().subcommand( + "set", + from_fn_async(set_country) + .no_display() + .with_remote_cli::(), + ) } -#[command(display(display_none))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct AddParams { + ssid: String, + password: String, +} #[instrument(skip_all)] -pub async fn add( - #[context] ctx: RpcContext, - #[arg] ssid: String, - #[arg] password: String, -) -> Result<(), Error> { +pub async fn add(ctx: RpcContext, AddParams { ssid, password }: AddParams) -> Result<(), Error> { let wifi_manager = wifi_manager(&ctx)?; if !ssid.is_ascii() { return Err(Error::new( @@ -95,10 +138,15 @@ pub async fn add( } Ok(()) } +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct SsidParams { + ssid: String, +} -#[command(display(display_none))] #[instrument(skip_all)] -pub async fn connect(#[context] ctx: RpcContext, #[arg] ssid: String) -> Result<(), Error> { +pub async fn connect(ctx: RpcContext, SsidParams { ssid }: SsidParams) -> Result<(), Error> { let wifi_manager = wifi_manager(&ctx)?; if !ssid.is_ascii() { return Err(Error::new( @@ -144,9 +192,8 @@ pub async fn connect(#[context] ctx: RpcContext, #[arg] ssid: String) -> Result< Ok(()) } -#[command(display(display_none))] #[instrument(skip_all)] -pub async fn delete(#[context] ctx: RpcContext, #[arg] ssid: String) -> Result<(), Error> { +pub async fn delete(ctx: RpcContext, SsidParams { ssid }: 
SsidParams) -> Result<(), Error> { let wifi_manager = wifi_manager(&ctx)?; if !ssid.is_ascii() { return Err(Error::new( @@ -170,7 +217,7 @@ pub async fn delete(#[context] ctx: RpcContext, #[arg] ssid: String) -> Result<( Ok(()) } #[derive(serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct WiFiInfo { ssids: HashMap, connected: Option, @@ -179,24 +226,24 @@ pub struct WiFiInfo { available_wifi: Vec, } #[derive(serde::Serialize, serde::Deserialize, Clone)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct WifiListInfo { strength: SignalStrength, security: Vec, } #[derive(serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct WifiListOut { ssid: Ssid, strength: SignalStrength, security: Vec, } pub type WifiList = HashMap; -fn display_wifi_info(info: WiFiInfo, matches: &ArgMatches) { +fn display_wifi_info(params: WithIoFormat, info: WiFiInfo) { use prettytable::*; - if matches.is_present("format") { - return display_serializable(info, matches); + if let Some(format) = params.format { + return display_serializable(format, info); } let mut table_global = Table::new(); @@ -256,11 +303,11 @@ fn display_wifi_info(info: WiFiInfo, matches: &ArgMatches) { table_global.print_tty(false).unwrap(); } -fn display_wifi_list(info: Vec, matches: &ArgMatches) { +fn display_wifi_list(params: WithIoFormat, info: Vec) { use prettytable::*; - if matches.is_present("format") { - return display_serializable(info, matches); + if let Some(format) = params.format { + return display_serializable(format, info); } let mut table_global = Table::new(); @@ -280,14 +327,9 @@ fn display_wifi_list(info: Vec, matches: &ArgMatches) { table_global.print_tty(false).unwrap(); } -#[command(display(display_wifi_info))] +// #[command(display(display_wifi_info))] #[instrument(skip_all)] -pub async fn get( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result { +pub async fn get(ctx: RpcContext, _: Empty) -> Result { let wifi_manager = wifi_manager(&ctx)?; let wpa_supplicant = wifi_manager.read().await; let (list_networks, current_res, country_res, ethernet_res, signal_strengths) = tokio::join!( @@ -334,14 +376,8 @@ pub async fn get( }) } -#[command(rename = "get", display(display_wifi_list))] #[instrument(skip_all)] -pub async fn get_available( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result, Error> { +pub async fn get_available(ctx: RpcContext, _: Empty) -> Result, Error> { let wifi_manager = wifi_manager(&ctx)?; let wpa_supplicant = wifi_manager.read().await; let (wifi_list, network_list) = tokio::join!( @@ -366,10 +402,16 @@ pub async fn get_available( Ok(wifi_list) } -#[command(rename = "set", display(display_none))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct SetCountryParams { + #[arg(value_parser = CountryCodeParser)] + country: CountryCode, +} pub async fn set_country( - #[context] ctx: RpcContext, - #[arg(parse(country_code_parse))] country: CountryCode, + ctx: RpcContext, + SetCountryParams { country }: SetCountryParams, ) -> Result<(), Error> { let wifi_manager = wifi_manager(&ctx)?; if !interface_connected(&ctx.ethernet_interface).await? 
{ @@ -640,7 +682,8 @@ impl WpaCli { pub async fn save_config(&mut self, db: PatchDb) -> Result<(), Error> { let new_country = self.get_country_low().await?; db.mutate(|d| { - d.as_server_info_mut() + d.as_public_mut() + .as_server_info_mut() .as_last_wifi_region_mut() .ser(&new_country) }) @@ -769,13 +812,24 @@ pub async fn interface_connected(interface: &str) -> Result { Ok(v.is_some()) } -pub fn country_code_parse(code: &str, _matches: &ArgMatches) -> Result { - CountryCode::for_alpha2(code).map_err(|_| { - Error::new( - color_eyre::eyre::eyre!("Invalid Country Code: {}", code), - ErrorKind::Wifi, - ) - }) +#[derive(Clone)] +struct CountryCodeParser; +impl TypedValueParser for CountryCodeParser { + type Value = CountryCode; + fn parse_ref( + &self, + _: &clap::Command, + _: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> Result { + let code = value.to_string_lossy(); + CountryCode::for_alpha2(&code).map_err(|_| { + clap::Error::raw( + clap::error::ErrorKind::ValueValidation, + color_eyre::eyre::eyre!("Invalid Country Code: {}", code), + ) + }) + } } #[instrument(skip_all)] diff --git a/core/startos/src/net/ws_server.rs b/core/startos/src/net/ws_server.rs deleted file mode 100644 index 16519c6c84..0000000000 --- a/core/startos/src/net/ws_server.rs +++ /dev/null @@ -1,94 +0,0 @@ -use crate::context::RpcContext; - -pub async fn ws_server_handle(rpc_ctx: RpcContext) { - - let ws_ctx = rpc_ctx.clone(); - let ws_server_handle = { - let builder = Server::bind(&ws_ctx.bind_ws); - - let make_svc = ::rpc_toolkit::hyper::service::make_service_fn(move |_| { - let ctx = ws_ctx.clone(); - async move { - Ok::<_, ::rpc_toolkit::hyper::Error>(::rpc_toolkit::hyper::service::service_fn( - move |req| { - let ctx = ctx.clone(); - async move { - tracing::debug!("Request to {}", req.uri().path()); - match req.uri().path() { - "/ws/db" => { - Ok(subscribe(ctx, req).await.unwrap_or_else(err_to_500)) - } - path if path.starts_with("/ws/rpc/") => { - match RequestGuid::from( - path.strip_prefix("/ws/rpc/").unwrap(), - ) { - None => { - tracing::debug!("No Guid Path"); - Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::empty()) - } - Some(guid) => { - match ctx.get_ws_continuation_handler(&guid).await { - Some(cont) => match cont(req).await { - Ok(r) => Ok(r), - Err(e) => Response::builder() - .status( - StatusCode::INTERNAL_SERVER_ERROR, - ) - .body(Body::from(format!("{}", e))), - }, - _ => Response::builder() - .status(StatusCode::NOT_FOUND) - .body(Body::empty()), - } - } - } - } - path if path.starts_with("/rest/rpc/") => { - match RequestGuid::from( - path.strip_prefix("/rest/rpc/").unwrap(), - ) { - None => { - tracing::debug!("No Guid Path"); - Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::empty()) - } - Some(guid) => { - match ctx.get_rest_continuation_handler(&guid).await - { - None => Response::builder() - .status(StatusCode::NOT_FOUND) - .body(Body::empty()), - Some(cont) => match cont(req).await { - Ok(r) => Ok(r), - Err(e) => Response::builder() - .status( - StatusCode::INTERNAL_SERVER_ERROR, - ) - .body(Body::from(format!("{}", e))), - }, - } - } - } - } - _ => Response::builder() - .status(StatusCode::NOT_FOUND) - .body(Body::empty()), - } - } - }, - )) - } - }); - builder.serve(make_svc) - } - .with_graceful_shutdown({ - let mut shutdown = rpc_ctx.shutdown.subscribe(); - async move { - shutdown.recv().await.expect("context dropped"); - } - }); - -} \ No newline at end of file diff --git a/core/startos/src/notifications.rs 
b/core/startos/src/notifications.rs index 73351471cd..e774d912e5 100644 --- a/core/startos/src/notifications.rs +++ b/core/startos/src/notifications.rs @@ -1,151 +1,179 @@ -use std::collections::HashMap; +use std::collections::BTreeMap; use std::fmt; use std::str::FromStr; use chrono::{DateTime, Utc}; +use clap::builder::ValueParserFactory; +use clap::Parser; use color_eyre::eyre::eyre; -use rpc_toolkit::command; -use sqlx::PgPool; -use tokio::sync::Mutex; +use imbl_value::InternedString; +use models::PackageId; +use rpc_toolkit::{command, from_fn_async, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; use tracing::instrument; use crate::backup::BackupReport; -use crate::context::RpcContext; +use crate::context::{CliContext, RpcContext}; +use crate::db::model::DatabaseModel; use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::util::display_none; -use crate::util::serde::display_serializable; -use crate::{Error, ErrorKind, ResultExt}; +use crate::util::clap::FromStrParser; +use crate::util::serde::HandlerExtSerde; -#[command(subcommands(list, delete, delete_before, create))] -pub async fn notification() -> Result<(), Error> { - Ok(()) +// #[command(subcommands(list, delete, delete_before, create))] +pub fn notification() -> ParentHandler { + ParentHandler::new() + .subcommand( + "list", + from_fn_async(list) + .with_display_serializable() + .with_remote_cli::(), + ) + .subcommand( + "delete", + from_fn_async(delete) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "delete-before", + from_fn_async(delete_before) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "create", + from_fn_async(create) + .no_display() + .with_remote_cli::(), + ) } -#[command(display(display_serializable))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ListParams { + before: Option, + limit: Option, +} +// #[command(display(display_serializable))] #[instrument(skip_all)] pub async fn list( - #[context] ctx: RpcContext, - #[arg] before: Option, - #[arg] limit: Option, -) -> Result, Error> { - let limit = limit.unwrap_or(40); - match before { - None => { - let records = sqlx::query!( - "SELECT id, package_id, created_at, code, level, title, message, data FROM notifications ORDER BY id DESC LIMIT $1", - limit as i64 - ).fetch_all(&ctx.secret_store).await?; - let notifs = records - .into_iter() - .map(|r| { - Ok(Notification { - id: r.id as u32, - package_id: r.package_id.and_then(|p| p.parse().ok()), - created_at: DateTime::from_utc(r.created_at, Utc), - code: r.code as u32, - level: match r.level.parse::() { - Ok(a) => a, - Err(e) => return Err(e.into()), - }, - title: r.title, - message: r.message, - data: match r.data { - None => serde_json::Value::Null, - Some(v) => match v.parse::() { - Ok(a) => a, - Err(e) => { - return Err(Error::new( - eyre!("Invalid Notification Data: {}", e), - ErrorKind::ParseDbField, - )) - } - }, - }, - }) - }) - .collect::, Error>>()?; - - ctx.db - .mutate(|d| { - d.as_server_info_mut() + ctx: RpcContext, + ListParams { before, limit }: ListParams, +) -> Result, Error> { + ctx.db + .mutate(|db| { + let limit = limit.unwrap_or(40); + match before { + None => { + let records = db + .as_private() + .as_notifications() + .as_entries()? 
+ .into_iter() + .take(limit); + let notifs = records + .into_iter() + .map(|(id, notification)| { + Ok(NotificationWithId { + id, + notification: notification.de()?, + }) + }) + .collect::, Error>>()?; + db.as_public_mut() + .as_server_info_mut() .as_unread_notification_count_mut() - .ser(&0) - }) - .await?; - Ok(notifs) - } - Some(before) => { - let records = sqlx::query!( - "SELECT id, package_id, created_at, code, level, title, message, data FROM notifications WHERE id < $1 ORDER BY id DESC LIMIT $2", - before, - limit as i64 - ).fetch_all(&ctx.secret_store).await?; - let res = records - .into_iter() - .map(|r| { - Ok(Notification { - id: r.id as u32, - package_id: r.package_id.and_then(|p| p.parse().ok()), - created_at: DateTime::from_utc(r.created_at, Utc), - code: r.code as u32, - level: match r.level.parse::() { - Ok(a) => a, - Err(e) => return Err(e.into()), - }, - title: r.title, - message: r.message, - data: match r.data { - None => serde_json::Value::Null, - Some(v) => match v.parse::() { - Ok(a) => a, - Err(e) => { - return Err(Error::new( - eyre!("Invalid Notification Data: {}", e), - ErrorKind::ParseDbField, - )) - } - }, - }, - }) - }) - .collect::, Error>>()?; - Ok(res) - } - } + .ser(&0)?; + Ok(notifs) + } + Some(before) => { + let records = db + .as_private() + .as_notifications() + .as_entries()? + .into_iter() + .filter(|(id, _)| *id < before) + .take(limit); + records + .into_iter() + .map(|(id, notification)| { + Ok(NotificationWithId { + id, + notification: notification.de()?, + }) + }) + .collect() + } + } + }) + .await +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct DeleteParams { + id: u32, } -#[command(display(display_none))] -pub async fn delete(#[context] ctx: RpcContext, #[arg] id: i32) -> Result<(), Error> { - sqlx::query!("DELETE FROM notifications WHERE id = $1", id) - .execute(&ctx.secret_store) - .await?; - Ok(()) +pub async fn delete(ctx: RpcContext, DeleteParams { id }: DeleteParams) -> Result<(), Error> { + ctx.db + .mutate(|db| { + db.as_private_mut().as_notifications_mut().remove(&id)?; + Ok(()) + }) + .await +} +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct DeleteBeforeParams { + before: u32, } -#[command(rename = "delete-before", display(display_none))] -pub async fn delete_before(#[context] ctx: RpcContext, #[arg] before: i32) -> Result<(), Error> { - sqlx::query!("DELETE FROM notifications WHERE id < $1", before) - .execute(&ctx.secret_store) - .await?; - Ok(()) +pub async fn delete_before( + ctx: RpcContext, + DeleteBeforeParams { before }: DeleteBeforeParams, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + for id in db.as_private().as_notifications().keys()? 
{ + if id < before { + db.as_private_mut().as_notifications_mut().remove(&id)?; + } + } + Ok(()) + }) + .await +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct CreateParams { + package: Option, + level: NotificationLevel, + title: String, + message: String, } -#[command(display(display_none))] pub async fn create( - #[context] ctx: RpcContext, - #[arg] package: Option, - #[arg] level: NotificationLevel, - #[arg] title: String, - #[arg] message: String, + ctx: RpcContext, + CreateParams { + package, + level, + title, + message, + }: CreateParams, ) -> Result<(), Error> { - ctx.notification_manager - .notify(ctx.db.clone(), package, level, title, message, (), None) + ctx.db + .mutate(|db| notify(db, package, level, title, message, ())) .await } #[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum NotificationLevel { Success, Info, @@ -162,6 +190,13 @@ impl fmt::Display for NotificationLevel { } } } +impl ValueParserFactory for NotificationLevel { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} + pub struct InvalidNotificationLevel(String); impl From for crate::Error { fn from(val: InvalidNotificationLevel) -> Self { @@ -188,115 +223,95 @@ impl fmt::Display for InvalidNotificationLevel { write!(f, "Invalid Notification Level: {}", self.0) } } -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "kebab-case")] + +#[derive(Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Notifications(pub BTreeMap); +impl Notifications { + pub fn new() -> Self { + Self(BTreeMap::new()) + } +} +impl Map for Notifications { + type Key = u32; + type Value = Notification; + fn key_str(key: &Self::Key) -> Result, Error> { + Self::key_string(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(InternedString::from_display(key)) + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct Notification { - id: u32, - package_id: Option, // TODO change for package id newtype + package_id: Option, created_at: DateTime, code: u32, level: NotificationLevel, title: String, message: String, - data: serde_json::Value, + data: Value, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct NotificationWithId { + id: u32, + #[serde(flatten)] + notification: Notification, } pub trait NotificationType: serde::Serialize + for<'de> serde::Deserialize<'de> + std::fmt::Debug { - const CODE: i32; + const CODE: u32; } impl NotificationType for () { - const CODE: i32 = 0; + const CODE: u32 = 0; } impl NotificationType for BackupReport { - const CODE: i32 = 1; + const CODE: u32 = 1; } -pub struct NotificationManager { - sqlite: PgPool, - cache: Mutex, NotificationLevel, String), i64>>, -} -impl NotificationManager { - pub fn new(sqlite: PgPool) -> Self { - NotificationManager { - sqlite, - cache: Mutex::new(HashMap::new()), - } - } - #[instrument(skip(db, subtype, self))] - pub async fn notify( - &self, - db: PatchDb, - package_id: Option, - level: NotificationLevel, - title: String, - message: String, - subtype: T, - debounce_interval: Option, - ) -> Result<(), Error> { - let peek = db.peek().await; - if !self - .should_notify(&package_id, &level, &title, debounce_interval) - .await - { - return Ok(()); - } - let mut count = 
peek.as_server_info().as_unread_notification_count().de()?; - let sql_package_id = package_id.as_ref().map(|p| &**p); - let sql_code = T::CODE; - let sql_level = format!("{}", level); - let sql_data = - serde_json::to_string(&subtype).with_kind(crate::ErrorKind::Serialization)?; - sqlx::query!( - "INSERT INTO notifications (package_id, code, level, title, message, data) VALUES ($1, $2, $3, $4, $5, $6)", - sql_package_id, - sql_code as i32, - sql_level, - title, - message, - sql_data - ).execute(&self.sqlite).await?; - count += 1; - db.mutate(|db| { - db.as_server_info_mut() - .as_unread_notification_count_mut() - .ser(&count) - }) - .await - } - async fn should_notify( - &self, - package_id: &Option, - level: &NotificationLevel, - title: &String, - debounce_interval: Option, - ) -> bool { - let mut guard = self.cache.lock().await; - let k = (package_id.clone(), level.clone(), title.clone()); - let v = (*guard).get(&k); - match v { - None => { - (*guard).insert(k, Utc::now().timestamp()); - true - } - Some(last_issued) => match debounce_interval { - None => { - (*guard).insert(k, Utc::now().timestamp()); - true - } - Some(interval) => { - if last_issued + interval as i64 > Utc::now().timestamp() { - false - } else { - (*guard).insert(k, Utc::now().timestamp()); - true - } - } - }, - } - } +#[instrument(skip(subtype, db))] +pub fn notify( + db: &mut DatabaseModel, + package_id: Option, + level: NotificationLevel, + title: String, + message: String, + subtype: T, +) -> Result<(), Error> { + let data = to_value(&subtype)?; + db.as_public_mut() + .as_server_info_mut() + .as_unread_notification_count_mut() + .mutate(|c| { + *c += 1; + Ok(()) + })?; + let id = db + .as_private() + .as_notifications() + .keys()? + .into_iter() + .max() + .map_or(0, |id| id + 1); + db.as_private_mut().as_notifications_mut().insert( + &id, + &Notification { + package_id, + created_at: Utc::now(), + code: T::CODE, + level, + title, + message, + data, + }, + ) } #[test] diff --git a/core/startos/src/os_install/mod.rs b/core/startos/src/os_install/mod.rs index 9e21e9f230..6d55741f4a 100644 --- a/core/startos/src/os_install/mod.rs +++ b/core/startos/src/os_install/mod.rs @@ -1,46 +1,55 @@ use std::path::{Path, PathBuf}; +use clap::Parser; use color_eyre::eyre::eyre; use models::Error; -use rpc_toolkit::command; +use rpc_toolkit::{command, from_fn_async, AnyContext, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tokio::process::Command; -use crate::context::InstallContext; +use crate::context::config::ServerConfig; +use crate::context::{CliContext, InstallContext}; use crate::disk::mount::filesystem::bind::Bind; use crate::disk::mount::filesystem::block_dev::BlockDev; use crate::disk::mount::filesystem::efivarfs::EfiVarFs; use crate::disk::mount::filesystem::{MountType, ReadWrite}; -use crate::disk::mount::guard::{MountGuard, TmpMountGuard}; +use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard}; use crate::disk::util::{DiskInfo, PartitionTable}; use crate::disk::OsPartitionInfo; use crate::net::utils::{find_eth_iface, find_wifi_iface}; use crate::util::serde::IoFormat; -use crate::util::{display_none, Invoke}; +use crate::util::Invoke; use crate::ARCH; mod gpt; mod mbr; -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct PostInstallConfig { - os_partitions: OsPartitionInfo, - ethernet_interface: String, - wifi_interface: Option, -} - -#[command(subcommands(disk, execute, reboot))] -pub fn install() -> Result<(), Error> { - Ok(()) +pub 
fn install() -> ParentHandler { + ParentHandler::new() + .subcommand("disk", disk()) + .subcommand( + "execute", + from_fn_async(execute) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "reboot", + from_fn_async(reboot) + .no_display() + .with_remote_cli::(), + ) } -#[command(subcommands(list))] -pub fn disk() -> Result<(), Error> { - Ok(()) +pub fn disk() -> ParentHandler { + ParentHandler::new().subcommand( + "list", + from_fn_async(list) + .no_display() + .with_remote_cli::(), + ) } -#[command(display(display_none))] pub async fn list() -> Result, Error> { let skip = match async { Ok::<_, Error>( @@ -103,10 +112,21 @@ async fn partition(disk: &mut DiskInfo, overwrite: bool) -> Result Result<(), Error> { let mut disk = crate::disk::util::list(&Default::default()) .await? @@ -153,21 +173,21 @@ pub async fn execute( { if let Err(e) = async { // cp -r ${guard}/config /tmp/config - if tokio::fs::metadata(guard.as_ref().join("config/upgrade")) + if tokio::fs::metadata(guard.path().join("config/upgrade")) .await .is_ok() { - tokio::fs::remove_file(guard.as_ref().join("config/upgrade")).await?; + tokio::fs::remove_file(guard.path().join("config/upgrade")).await?; } - if tokio::fs::metadata(guard.as_ref().join("config/disk.guid")) + if tokio::fs::metadata(guard.path().join("config/disk.guid")) .await .is_ok() { - tokio::fs::remove_file(guard.as_ref().join("config/disk.guid")).await?; + tokio::fs::remove_file(guard.path().join("config/disk.guid")).await?; } Command::new("cp") .arg("-r") - .arg(guard.as_ref().join("config")) + .arg(guard.path().join("config")) .arg("/tmp/config.bak") .invoke(crate::ErrorKind::Filesystem) .await?; @@ -201,14 +221,14 @@ pub async fn execute( Command::new("cp") .arg("-r") .arg("/tmp/config.bak") - .arg(rootfs.as_ref().join("config")) + .arg(rootfs.path().join("config")) .invoke(crate::ErrorKind::Filesystem) .await?; } else { - tokio::fs::create_dir(rootfs.as_ref().join("config")).await?; + tokio::fs::create_dir(rootfs.path().join("config")).await?; } - tokio::fs::create_dir(rootfs.as_ref().join("next")).await?; - let current = rootfs.as_ref().join("current"); + tokio::fs::create_dir(rootfs.path().join("next")).await?; + let current = rootfs.path().join("current"); tokio::fs::create_dir(¤t).await?; tokio::fs::create_dir(current.join("boot")).await?; @@ -235,11 +255,12 @@ pub async fn execute( .await?; tokio::fs::write( - rootfs.as_ref().join("config/config.yaml"), - IoFormat::Yaml.to_vec(&PostInstallConfig { - os_partitions: part_info.clone(), - ethernet_interface: eth_iface, + rootfs.path().join("config/config.yaml"), + IoFormat::Yaml.to_vec(&ServerConfig { + os_partitions: Some(part_info.clone()), + ethernet_interface: Some(eth_iface), wifi_interface: wifi_iface, + ..Default::default() })?, ) .await?; @@ -272,8 +293,8 @@ pub async fn execute( .invoke(crate::ErrorKind::OpenSsh) .await?; - let embassy_fs = MountGuard::mount( - &Bind::new(rootfs.as_ref()), + let start_os_fs = MountGuard::mount( + &Bind::new(rootfs.path()), current.join("media/embassy/embassyfs"), MountType::ReadOnly, ) @@ -321,7 +342,7 @@ pub async fn execute( } sys.unmount(false).await?; proc.unmount(false).await?; - embassy_fs.unmount(false).await?; + start_os_fs.unmount(false).await?; if let Some(efi) = efi { efi.unmount(false).await?; } @@ -330,8 +351,7 @@ pub async fn execute( Ok(()) } -#[command(display(display_none))] -pub async fn reboot(#[context] ctx: InstallContext) -> Result<(), Error> { +pub async fn reboot(ctx: InstallContext) -> Result<(), Error> { Command::new("sync") 
.invoke(crate::ErrorKind::Filesystem) .await?; diff --git a/core/startos/src/prelude.rs b/core/startos/src/prelude.rs index 3f70b7a2bb..dddc1ecda5 100644 --- a/core/startos/src/prelude.rs +++ b/core/startos/src/prelude.rs @@ -1,4 +1,5 @@ pub use color_eyre::eyre::eyre; +pub use lazy_format::lazy_format; pub use models::OptionExt; pub use tracing::instrument; diff --git a/core/startos/src/procedure/build.rs b/core/startos/src/procedure/build.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/core/startos/src/procedure/docker.rs b/core/startos/src/procedure/docker.rs deleted file mode 100644 index ad25953a3f..0000000000 --- a/core/startos/src/procedure/docker.rs +++ /dev/null @@ -1,970 +0,0 @@ -use std::borrow::Cow; -use std::collections::{BTreeMap, BTreeSet, VecDeque}; -use std::ffi::{OsStr, OsString}; -use std::net::Ipv4Addr; -use std::os::unix::prelude::FileTypeExt; -use std::path::{Path, PathBuf}; -use std::time::Duration; - -use color_eyre::eyre::eyre; -use futures::future::{BoxFuture, Either as EitherFuture}; -use futures::{FutureExt, TryStreamExt}; -use helpers::{NonDetachingJoinHandle, UnixRpcClient}; -use models::{Id, ImageId, SYSTEM_PACKAGE_ID}; -use nix::sys::signal; -use nix::unistd::Pid; -use serde::de::DeserializeOwned; -use serde::{Deserialize, Serialize}; -use serde_json::Value; -use tokio::io::{AsyncBufRead, AsyncBufReadExt, BufReader}; -use tokio::time::timeout; -use tracing::instrument; - -use super::ProcedureName; -use crate::context::RpcContext; -use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::util::docker::{remove_container, CONTAINER_TOOL}; -use crate::util::serde::{Duration as SerdeDuration, IoFormat}; -use crate::util::Version; -use crate::volume::{VolumeId, Volumes}; -use crate::{Error, ResultExt, HOST_IP}; - -pub const NET_TLD: &str = "embassy"; - -lazy_static::lazy_static! { - pub static ref SYSTEM_IMAGES: BTreeSet = { - let mut set = BTreeSet::new(); - - set.insert("compat".parse().unwrap()); - set.insert("utils".parse().unwrap()); - - set - }; -} - -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct DockerContainers { - pub main: DockerContainer, - // #[serde(default)] - // pub aux: BTreeMap, -} - -/// This is like the docker procedures of the past designs, -/// but this time all the entrypoints and args are not -/// part of this struct by choice. Used for the times that we are creating our own entry points -#[derive(Clone, Debug, Deserialize, Serialize, patch_db::HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct DockerContainer { - pub image: ImageId, - #[serde(default)] - pub mounts: BTreeMap, - #[serde(default)] - pub shm_size_mb: Option, // TODO: use postfix sizing? like 1k vs 1m vs 1g - #[serde(default)] - pub sigterm_timeout: Option, - #[serde(default)] - pub system: bool, - #[serde(default)] - pub gpu_acceleration: bool, -} - -impl DockerContainer { - /// We created a new exec runner, where we are going to be passing the commands for it to run. - /// Idea is that we are going to send it command and get the inputs be filtered back from the manager. - /// Then we could in theory run commands without the cost of running the docker exec which is known to have - /// a dely of > 200ms which is not acceptable. 
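// Annotation (illustrative, not part of the patch): the doc comment above describes the
// long-running executor this diff deletes — one persistent container per package with a
// bind-mounted socket directory, so commands are sent over a unix-socket RPC client instead
// of paying the >200ms cost of a fresh `docker exec` per call. A condensed sketch of that
// hand-off, using only pieces visible in the deleted body below (names and error handling
// are simplified; everything else is an assumption):
//
//     let socket_dir = Path::new("/tmp/embassy/containers")
//         .join(format!("{pkg_id}_{pkg_version}"));
//     tokio::fs::create_dir_all(&socket_dir).await?;               // shared with the container via a bind mount
//     let mut cmd = LongRunning::setup_long_running_docker_cmd(/* … */).await?;
//     let child = cmd.spawn()?;                                     // stays up for the life of the service
//     let client = UnixRpcClient::new(socket_dir.join("rpc.sock")); // container-init answers on this socket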
- #[instrument(skip_all)] - pub async fn long_running_execute( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - ) -> Result<(LongRunning, UnixRpcClient), Error> { - let container_name = DockerProcedure::container_name(pkg_id, None); - - let socket_path = - Path::new("/tmp/embassy/containers").join(format!("{pkg_id}_{pkg_version}")); - if tokio::fs::metadata(&socket_path).await.is_ok() { - tokio::fs::remove_dir_all(&socket_path).await?; - } - tokio::fs::create_dir_all(&socket_path).await?; - - let mut cmd = LongRunning::setup_long_running_docker_cmd( - self, - ctx, - &container_name, - volumes, - pkg_id, - pkg_version, - &socket_path, - ) - .await?; - - let mut handle = cmd.spawn().with_kind(crate::ErrorKind::Docker)?; - - let client = UnixRpcClient::new(socket_path.join("rpc.sock")); - - let running_output = NonDetachingJoinHandle::from(tokio::spawn(async move { - if let Err(err) = handle - .wait() - .await - .map_err(|e| eyre!("Runtime error: {e:?}")) - { - tracing::error!("{}", err); - tracing::debug!("{:?}", err); - } - })); - - { - let socket = socket_path.join("rpc.sock"); - if let Err(_err) = timeout(Duration::from_secs(1), async move { - while tokio::fs::metadata(&socket).await.is_err() { - tokio::time::sleep(Duration::from_millis(10)).await; - } - }) - .await - { - tracing::error!("Timed out waiting for init to create socket"); - } - } - - Ok((LongRunning { running_output }, client)) - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct DockerProcedure { - pub image: ImageId, - #[serde(default)] - pub system: bool, - pub entrypoint: String, - #[serde(default)] - pub args: Vec, - #[serde(default)] - pub inject: bool, - #[serde(default)] - pub mounts: BTreeMap, - #[serde(default)] - pub io_format: Option, - #[serde(default)] - pub sigterm_timeout: Option, - #[serde(default)] - pub shm_size_mb: Option, // TODO: use postfix sizing? 
like 1k vs 1m vs 1g - #[serde(default)] - pub gpu_acceleration: bool, -} - -#[derive(Clone, Debug, Deserialize, Serialize, Default)] -#[serde(rename_all = "kebab-case")] -pub struct DockerInject { - #[serde(default)] - pub system: bool, - pub entrypoint: String, - #[serde(default)] - pub args: Vec, - #[serde(default)] - pub io_format: Option, - #[serde(default)] - pub sigterm_timeout: Option, -} -impl DockerProcedure { - pub fn main_docker_procedure( - container: &DockerContainer, - injectable: &DockerInject, - ) -> DockerProcedure { - DockerProcedure { - image: container.image.clone(), - system: injectable.system, - entrypoint: injectable.entrypoint.clone(), - args: injectable.args.clone(), - inject: false, - mounts: container.mounts.clone(), - io_format: injectable.io_format, - sigterm_timeout: injectable.sigterm_timeout, - shm_size_mb: container.shm_size_mb, - gpu_acceleration: container.gpu_acceleration, - } - } - - pub fn validate( - &self, - _eos_version: &Version, - volumes: &Volumes, - image_ids: &BTreeSet, - expected_io: bool, - ) -> Result<(), color_eyre::eyre::Report> { - for volume in self.mounts.keys() { - if !volumes.contains_key(volume) && !matches!(&volume, &VolumeId::Backup) { - color_eyre::eyre::bail!("unknown volume: {}", volume); - } - } - if self.system { - if !SYSTEM_IMAGES.contains(&self.image) { - color_eyre::eyre::bail!("unknown system image: {}", self.image); - } - } else if !image_ids.contains(&self.image) { - color_eyre::eyre::bail!("image for {} not contained in package", self.image); - } - if expected_io && self.io_format.is_none() { - color_eyre::eyre::bail!("expected io-format"); - } - Ok(()) - } - - #[instrument(skip_all)] - pub async fn execute( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - name: ProcedureName, - volumes: &Volumes, - input: Option, - timeout: Option, - ) -> Result, Error> { - let name = name.docker_name(); - let name: Option<&str> = name.as_deref(); - let mut cmd = tokio::process::Command::new(CONTAINER_TOOL); - let container_name = Self::container_name(pkg_id, name); - cmd.arg("run") - .arg("--rm") - .arg("--network=start9") - .arg(format!("--add-host=embassy:{}", Ipv4Addr::from(HOST_IP))) - .arg("--name") - .arg(&container_name) - .arg(format!("--hostname={}", &container_name)) - .arg("--no-healthcheck") - .kill_on_drop(true); - remove_container(&container_name, true).await?; - cmd.args(self.docker_args(ctx, pkg_id, pkg_version, volumes).await?); - let input_buf = if let (Some(input), Some(format)) = (&input, &self.io_format) { - cmd.stdin(std::process::Stdio::piped()); - Some(format.to_vec(input)?) 
- } else { - None - }; - cmd.stdout(std::process::Stdio::piped()); - cmd.stderr(std::process::Stdio::piped()); - tracing::trace!( - "{}", - format!("{:?}", cmd) - .split(r#"" ""#) - .collect::>() - .join(" ") - ); - let mut handle = cmd.spawn().with_kind(crate::ErrorKind::Docker)?; - let id = handle.id(); - let timeout_fut = if let Some(timeout) = timeout { - EitherFuture::Right(async move { - tokio::time::sleep(timeout).await; - - Ok(()) - }) - } else { - EitherFuture::Left(futures::future::pending::>()) - }; - if let (Some(input), Some(mut stdin)) = (&input_buf, handle.stdin.take()) { - use tokio::io::AsyncWriteExt; - stdin - .write_all(input) - .await - .with_kind(crate::ErrorKind::Docker)?; - stdin.flush().await?; - stdin.shutdown().await?; - drop(stdin); - } - enum Race { - Done(T), - TimedOut, - } - - let io_format = self.io_format; - let mut output = BufReader::new( - handle - .stdout - .take() - .ok_or_else(|| eyre!("Can't takeout stdout in execute")) - .with_kind(crate::ErrorKind::Docker)?, - ); - let output = NonDetachingJoinHandle::from(tokio::spawn(async move { - match async { - if let Some(format) = io_format { - return match max_by_lines(&mut output, None).await { - MaxByLines::Done(buffer) => { - Ok::( - match format.from_slice(buffer.as_bytes()) { - Ok(a) => a, - Err(e) => { - tracing::trace!( - "Failed to deserialize stdout from {}: {}, falling back to UTF-8 string.", - format, - e - ); - Value::String(buffer) - } - }, - ) - }, - MaxByLines::Error(e) => Err(e), - MaxByLines::Overflow(buffer) => Ok(Value::String(buffer)) - } - } - - let lines = buf_reader_to_lines(&mut output, 1000).await?; - if lines.is_empty() { - return Ok(Value::Null); - } - - let joined_output = lines.join("\n"); - Ok(Value::String(joined_output)) - }.await { - Ok(a) => Ok((a, output)), - Err(e) => Err((e, output)) - } - })); - let err_output = BufReader::new( - handle - .stderr - .take() - .ok_or_else(|| eyre!("Can't takeout std err")) - .with_kind(crate::ErrorKind::Docker)?, - ); - - let err_output = NonDetachingJoinHandle::from(tokio::spawn(async move { - let lines = buf_reader_to_lines(err_output, 1000).await?; - let joined_output = lines.join("\n"); - Ok::<_, Error>(joined_output) - })); - - let res = tokio::select! { - res = handle.wait() => Race::Done(res.with_kind(crate::ErrorKind::Docker)?), - res = timeout_fut => { - res?; - Race::TimedOut - }, - }; - let exit_status = match res { - Race::Done(x) => x, - Race::TimedOut => { - if let Some(id) = id { - signal::kill(Pid::from_raw(id as i32), signal::SIGKILL) - .with_kind(crate::ErrorKind::Docker)?; - } - return Ok(Err((143, "Timed out. Retrying soon...".to_owned()))); - } - }; - Ok( - if exit_status.success() || exit_status.code() == Some(143) { - Ok(serde_json::from_value( - output - .await - .with_kind(crate::ErrorKind::Unknown)? - .map(|(v, _)| v) - .map_err(|(e, _)| tracing::warn!("{}", e)) - .unwrap_or_default(), - ) - .with_kind(crate::ErrorKind::Deserialization)?) 
- } else { - Err(( - exit_status.code().unwrap_or_default(), - err_output.await.with_kind(crate::ErrorKind::Unknown)??, - )) - }, - ) - } - - #[instrument(skip_all)] - pub async fn inject( - &self, - _ctx: &RpcContext, - pkg_id: &PackageId, - _pkg_version: &Version, - _name: ProcedureName, - _volumes: &Volumes, - input: Option, - timeout: Option, - ) -> Result, Error> { - let mut cmd = tokio::process::Command::new(CONTAINER_TOOL); - - cmd.arg("exec"); - - cmd.args(self.docker_args_inject(pkg_id)); - let input_buf = if let (Some(input), Some(format)) = (&input, &self.io_format) { - cmd.stdin(std::process::Stdio::piped()); - Some(format.to_vec(input)?) - } else { - None - }; - cmd.stdout(std::process::Stdio::piped()); - cmd.stderr(std::process::Stdio::piped()); - tracing::trace!( - "{}", - format!("{:?}", cmd) - .split(r#"" ""#) - .collect::>() - .join(" ") - ); - let mut handle = cmd.spawn().with_kind(crate::ErrorKind::Docker)?; - let id = handle.id(); - let timeout_fut = if let Some(timeout) = timeout { - EitherFuture::Right(async move { - tokio::time::sleep(timeout).await; - - Ok(()) - }) - } else { - EitherFuture::Left(futures::future::pending::>()) - }; - if let (Some(input), Some(mut stdin)) = (&input_buf, handle.stdin.take()) { - use tokio::io::AsyncWriteExt; - stdin - .write_all(input) - .await - .with_kind(crate::ErrorKind::Docker)?; - stdin.flush().await?; - stdin.shutdown().await?; - drop(stdin); - } - enum Race { - Done(T), - TimedOut, - } - - let io_format = self.io_format; - let mut output = BufReader::new( - handle - .stdout - .take() - .ok_or_else(|| eyre!("Can't takeout stdout in inject")) - .with_kind(crate::ErrorKind::Docker)?, - ); - let output = NonDetachingJoinHandle::from(tokio::spawn(async move { - match async { - if let Some(format) = io_format { - return match max_by_lines(&mut output, None).await { - MaxByLines::Done(buffer) => { - Ok::( - match format.from_slice(buffer.as_bytes()) { - Ok(a) => a, - Err(e) => { - tracing::trace!( - "Failed to deserialize stdout from {}: {}, falling back to UTF-8 string.", - format, - e - ); - Value::String(buffer) - } - }, - ) - }, - MaxByLines::Error(e) => Err(e), - MaxByLines::Overflow(buffer) => Ok(Value::String(buffer)) - } - } - - let lines = buf_reader_to_lines(&mut output, 1000).await?; - if lines.is_empty() { - return Ok(Value::Null); - } - - let joined_output = lines.join("\n"); - Ok(Value::String(joined_output)) - }.await { - Ok(a) => Ok((a, output)), - Err(e) => Err((e, output)) - } - })); - let err_output = BufReader::new( - handle - .stderr - .take() - .ok_or_else(|| eyre!("Can't takeout std err")) - .with_kind(crate::ErrorKind::Docker)?, - ); - - let err_output = NonDetachingJoinHandle::from(tokio::spawn(async move { - let lines = buf_reader_to_lines(err_output, 1000).await?; - let joined_output = lines.join("\n"); - Ok::<_, Error>(joined_output) - })); - - let res = tokio::select! { - res = handle.wait() => Race::Done(res.with_kind(crate::ErrorKind::Docker)?), - res = timeout_fut => { - res?; - Race::TimedOut - }, - }; - let exit_status = match res { - Race::Done(x) => x, - Race::TimedOut => { - if let Some(id) = id { - signal::kill(Pid::from_raw(id as i32), signal::SIGKILL) - .with_kind(crate::ErrorKind::Docker)?; - } - return Ok(Err((143, "Timed out. Retrying soon...".to_owned()))); - } - }; - Ok( - if exit_status.success() || exit_status.code() == Some(143) { - Ok(serde_json::from_value( - output - .await - .with_kind(crate::ErrorKind::Unknown)? 
- .map(|(v, _)| v) - .map_err(|(e, _)| tracing::warn!("{}", e)) - .unwrap_or_default(), - ) - .with_kind(crate::ErrorKind::Deserialization)?) - } else { - Err(( - exit_status.code().unwrap_or_default(), - err_output.await.with_kind(crate::ErrorKind::Unknown)??, - )) - }, - ) - } - - #[instrument(skip_all)] - pub async fn sandboxed( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - input: Option, - timeout: Option, - ) -> Result, Error> { - let mut cmd = tokio::process::Command::new(CONTAINER_TOOL); - cmd.arg("run").arg("--rm").arg("--network=none"); - cmd.args( - self.docker_args(ctx, pkg_id, pkg_version, &volumes.to_readonly()) - .await?, - ); - let input_buf = if let (Some(input), Some(format)) = (&input, &self.io_format) { - cmd.stdin(std::process::Stdio::piped()); - Some(format.to_vec(input)?) - } else { - None - }; - cmd.stdout(std::process::Stdio::piped()); - cmd.stderr(std::process::Stdio::piped()); - let mut handle = cmd.spawn().with_kind(crate::ErrorKind::Docker)?; - if let (Some(input), Some(stdin)) = (&input_buf, &mut handle.stdin) { - use tokio::io::AsyncWriteExt; - stdin - .write_all(input) - .await - .with_kind(crate::ErrorKind::Docker)?; - } - - let err_output = BufReader::new( - handle - .stderr - .take() - .ok_or_else(|| eyre!("Can't takeout std err")) - .with_kind(crate::ErrorKind::Docker)?, - ); - let err_output = NonDetachingJoinHandle::from(tokio::spawn(async move { - let lines = buf_reader_to_lines(err_output, 1000).await?; - let joined_output = lines.join("\n"); - Ok::<_, Error>(joined_output) - })); - - let io_format = self.io_format; - let mut output = BufReader::new( - handle - .stdout - .take() - .ok_or_else(|| eyre!("Can't takeout stdout in sandboxed")) - .with_kind(crate::ErrorKind::Docker)?, - ); - let output = NonDetachingJoinHandle::from(tokio::spawn(async move { - match async { - if let Some(format) = io_format { - return match max_by_lines(&mut output, None).await { - MaxByLines::Done(buffer) => { - Ok::( - match format.from_slice(buffer.as_bytes()) { - Ok(a) => a, - Err(e) => { - tracing::trace!( - "Failed to deserialize stdout from {}: {}, falling back to UTF-8 string.", - format, - e - ); - Value::String(buffer) - } - }, - ) - }, - MaxByLines::Error(e) => Err(e), - MaxByLines::Overflow(buffer) => Ok(Value::String(buffer)) - } - } - - let lines = buf_reader_to_lines(&mut output, 1000).await?; - if lines.is_empty() { - return Ok(Value::Null); - } - - let joined_output = lines.join("\n"); - Ok(Value::String(joined_output)) - }.await { - Ok(a) => Ok((a, output)), - Err(e) => Err((e, output)) - } - })); - - let handle = if let Some(dur) = timeout { - async move { - tokio::time::timeout(dur, handle.wait()) - .await - .with_kind(crate::ErrorKind::Docker)? - .with_kind(crate::ErrorKind::Docker) - } - .boxed() - } else { - async { handle.wait().await.with_kind(crate::ErrorKind::Docker) }.boxed() - }; - let exit_status = handle.await?; - Ok( - if exit_status.success() || exit_status.code() == Some(143) { - Ok(serde_json::from_value( - output - .await - .with_kind(crate::ErrorKind::Unknown)? - .map(|(v, _)| v) - .map_err(|(e, _)| tracing::warn!("{}", e)) - .unwrap_or_default(), - ) - .with_kind(crate::ErrorKind::Deserialization)?) 
- } else { - Err(( - exit_status.code().unwrap_or_default(), - err_output.await.with_kind(crate::ErrorKind::Unknown)??, - )) - }, - ) - } - - pub fn container_name(pkg_id: &PackageId, name: Option<&str>) -> String { - if let Some(name) = name { - format!("{}_{}.{}", pkg_id, name, NET_TLD) - } else { - format!("{}.{}", pkg_id, NET_TLD) - } - } - - pub fn uncontainer_name(name: &str) -> Option<(PackageId, Option<&str>)> { - let (pre_tld, _) = name.split_once('.')?; - if pre_tld.contains('_') { - let (pkg, name) = name.split_once('_')?; - Some((Id::try_from(pkg).ok()?.into(), Some(name))) - } else { - Some((Id::try_from(pre_tld).ok()?.into(), None)) - } - } - - async fn docker_args( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - ) -> Result>, Error> { - let mut res = self.new_docker_args(); - for (volume_id, dst) in &self.mounts { - let volume = if let Some(v) = volumes.get(volume_id) { - v - } else { - continue; - }; - let src = volume.path_for(&ctx.datadir, pkg_id, pkg_version, volume_id); - if let Err(_e) = tokio::fs::metadata(&src).await { - tokio::fs::create_dir_all(&src).await?; - } - res.push(OsStr::new("--mount").into()); - res.push( - OsString::from(format!( - "type=bind,src={},dst={}{}", - src.display(), - dst.display(), - if volume.readonly() { ",readonly" } else { "" } - )) - .into(), - ); - } - if let Some(shm_size_mb) = self.shm_size_mb { - res.push(OsStr::new("--shm-size").into()); - res.push(OsString::from(format!("{}m", shm_size_mb)).into()); - } - if self.gpu_acceleration { - fn get_devices<'a>( - path: &'a Path, - res: &'a mut Vec, - ) -> BoxFuture<'a, Result<(), Error>> { - async move { - let mut read_dir = tokio::fs::read_dir(path).await?; - while let Some(entry) = read_dir.next_entry().await? { - let fty = entry.metadata().await?.file_type(); - if fty.is_block_device() || fty.is_char_device() { - res.push(entry.path()); - } else if fty.is_dir() { - get_devices(&entry.path(), res).await?; - } - } - Ok(()) - } - .boxed() - } - let mut devices = Vec::new(); - get_devices(Path::new("/dev/dri"), &mut devices).await?; - for device in devices { - res.push(OsStr::new("--device").into()); - res.push(OsString::from(device).into()); - } - } - res.push(OsStr::new("--interactive").into()); - res.push(OsStr::new("--log-driver=journald").into()); - res.push(OsStr::new("--entrypoint").into()); - res.push(OsStr::new(&self.entrypoint).into()); - if self.system { - res.push(OsString::from(self.image.for_package(&SYSTEM_PACKAGE_ID, None)).into()); - } else { - res.push(OsString::from(self.image.for_package(pkg_id, Some(pkg_version))).into()); - } - - res.extend(self.args.iter().map(|s| OsStr::new(s).into())); - - Ok(res) - } - - fn new_docker_args(&self) -> Vec> { - Vec::with_capacity( - (2 * self.mounts.len()) // --mount - + (2 * self.shm_size_mb.is_some() as usize) // --shm-size - + 5 // --interactive --log-driver=journald --entrypoint - + self.args.len(), // [ARG...] 
- ) - } - fn docker_args_inject(&self, pkg_id: &PackageId) -> Vec> { - let mut res = self.new_docker_args(); - if let Some(shm_size_mb) = self.shm_size_mb { - res.push(OsStr::new("--shm-size").into()); - res.push(OsString::from(format!("{}m", shm_size_mb)).into()); - } - res.push(OsStr::new("--interactive").into()); - - res.push(OsString::from(Self::container_name(pkg_id, None)).into()); - res.push(OsStr::new(&self.entrypoint).into()); - - res.extend(self.args.iter().map(|s| OsStr::new(s).into())); - - res - } -} - -struct RingVec { - value: VecDeque, - capacity: usize, -} -impl RingVec { - fn new(capacity: usize) -> Self { - RingVec { - value: VecDeque::with_capacity(capacity), - capacity, - } - } - fn push(&mut self, item: T) -> Option { - let popped_item = if self.value.len() == self.capacity { - self.value.pop_front() - } else { - None - }; - self.value.push_back(item); - popped_item - } -} - -/// This is created when we wanted a long running docker executor that we could send commands to and get the responses back. -/// We wanted a long running since we want to be able to have the equivelent to the docker execute without the heavy costs of 400 + ms time lag. -/// Also the long running let's us have the ability to start/ end the services quicker. -pub struct LongRunning { - pub running_output: NonDetachingJoinHandle<()>, -} - -impl LongRunning { - async fn setup_long_running_docker_cmd( - docker: &DockerContainer, - ctx: &RpcContext, - container_name: &str, - volumes: &Volumes, - pkg_id: &PackageId, - pkg_version: &Version, - socket_path: &Path, - ) -> Result { - const INIT_EXEC: &str = "/start9/bin/container-init"; - const BIND_LOCATION: &str = "/usr/lib/startos/container/"; - tracing::trace!("setup_long_running_docker_cmd"); - - remove_container(container_name, true).await?; - - let image_architecture = { - let mut cmd = tokio::process::Command::new(CONTAINER_TOOL); - cmd.arg("image") - .arg("inspect") - .arg("--format") - .arg("'{{.Architecture}}'"); - - if docker.system { - cmd.arg(docker.image.for_package(&SYSTEM_PACKAGE_ID, None)); - } else { - cmd.arg(docker.image.for_package(pkg_id, Some(pkg_version))); - } - let arch = String::from_utf8(cmd.output().await?.stdout)?; - arch.replace('\'', "").trim().to_string() - }; - - let mut cmd = tokio::process::Command::new(CONTAINER_TOOL); - cmd.arg("run") - .arg("--network=start9") - .arg(format!("--add-host=embassy:{}", Ipv4Addr::from(HOST_IP))) - .arg("--mount") - .arg(format!( - "type=bind,src={BIND_LOCATION},dst=/start9/bin/,readonly" - )) - .arg("--mount") - .arg(format!( - "type=bind,src={input},dst=/start9/sockets/", - input = socket_path.display() - )) - .arg("--name") - .arg(container_name) - .arg(format!("--hostname={}", &container_name)) - .arg("--entrypoint") - .arg(format!("{INIT_EXEC}.{image_architecture}")) - .arg("-i") - .arg("--rm") - .kill_on_drop(true); - - for (volume_id, dst) in &docker.mounts { - let volume = if let Some(v) = volumes.get(volume_id) { - v - } else { - continue; - }; - let src = volume.path_for(&ctx.datadir, pkg_id, pkg_version, volume_id); - if let Err(_e) = tokio::fs::metadata(&src).await { - tokio::fs::create_dir_all(&src).await?; - } - cmd.arg("--mount").arg(format!( - "type=bind,src={},dst={}{}", - src.display(), - dst.display(), - if volume.readonly() { ",readonly" } else { "" } - )); - } - if let Some(shm_size_mb) = docker.shm_size_mb { - cmd.arg("--shm-size").arg(format!("{}m", shm_size_mb)); - } - cmd.arg("--log-driver=journald"); - if docker.system { - 
cmd.arg(docker.image.for_package(&SYSTEM_PACKAGE_ID, None)); - } else { - cmd.arg(docker.image.for_package(pkg_id, Some(pkg_version))); - } - cmd.stdout(std::process::Stdio::piped()); - cmd.stderr(std::process::Stdio::inherit()); - cmd.stdin(std::process::Stdio::piped()); - Ok(cmd) - } -} -async fn buf_reader_to_lines( - reader: impl AsyncBufRead + Unpin, - limit: impl Into>, -) -> Result, Error> { - let mut lines = reader.lines(); - let mut answer = RingVec::new(limit.into().unwrap_or(1000)); - while let Some(line) = lines.next_line().await? { - answer.push(line); - } - let output: Vec = answer.value.into_iter().collect(); - Ok(output) -} - -enum MaxByLines { - Done(String), - Overflow(String), - Error(Error), -} - -async fn max_by_lines( - reader: impl AsyncBufRead + Unpin, - max_items: impl Into>, -) -> MaxByLines { - let mut answer = String::new(); - - let mut lines = reader.lines(); - let mut has_over_blown = false; - let max_items = max_items.into().unwrap_or(10_000_000); - - while let Some(line) = { - match lines.next_line().await { - Ok(a) => a, - Err(e) => return MaxByLines::Error(e.into()), - } - } { - if has_over_blown { - continue; - } - if !answer.is_empty() { - answer.push('\n'); - } - answer.push_str(&line); - if answer.len() >= max_items { - has_over_blown = true; - tracing::warn!("Reading the buffer exceeding limits of {}", max_items); - } - } - if has_over_blown { - return MaxByLines::Overflow(answer); - } - MaxByLines::Done(answer) -} - -#[cfg(test)] -mod tests { - use super::*; - /// Note, this size doesn't mean the vec will match. The vec will go to the next size, 0 -> 7 = 7 and so forth 7-15 = 15 - /// Just how the vec with capacity works. - const CAPACITY_IN: usize = 7; - #[test] - fn default_capacity_is_set() { - let ring: RingVec = RingVec::new(CAPACITY_IN); - assert_eq!(CAPACITY_IN, ring.value.capacity()); - assert_eq!(0, ring.value.len()); - } - #[test] - fn capacity_can_not_be_exceeded() { - let mut ring = RingVec::new(CAPACITY_IN); - for i in 1..100usize { - ring.push(i); - } - assert_eq!(CAPACITY_IN, ring.value.capacity()); - assert_eq!(CAPACITY_IN, ring.value.len()); - } - - #[test] - fn tests_buf_reader_to_lines() { - let mut reader = BufReader::new("hello\nworld\n".as_bytes()); - let lines = futures::executor::block_on(buf_reader_to_lines(&mut reader, None)).unwrap(); - assert_eq!(lines, vec!["hello", "world"]); - } -} diff --git a/core/startos/src/procedure/js_scripts.rs b/core/startos/src/procedure/js_scripts.rs deleted file mode 100644 index 43553cee08..0000000000 --- a/core/startos/src/procedure/js_scripts.rs +++ /dev/null @@ -1,806 +0,0 @@ -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use std::time::Duration; - -use container_init::ProcessGroupId; -use helpers::UnixRpcClient; -pub use js_engine::JsError; -use js_engine::{JsExecutionEnvironment, PathForVolumeId}; -use models::VolumeId; -use serde::de::DeserializeOwned; -use serde::{Deserialize, Serialize}; -use tokio::process::Command; -use tracing::instrument; - -use super::ProcedureName; -use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::util::serde::IoFormat; -use crate::util::{Invoke, Version}; -use crate::volume::Volumes; - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(rename_all = "kebab-case")] - -enum ErrorValue { - Error(String), - ErrorCode((i32, String)), - Result(serde_json::Value), -} - -impl PathForVolumeId for Volumes { - fn path_for( - &self, - data_dir: &Path, - package_id: &PackageId, - version: &Version, - volume_id: &VolumeId, - ) -> 
Option { - let volume = self.get(volume_id)?; - Some(volume.path_for(data_dir, package_id, version, volume_id)) - } - - fn readonly(&self, volume_id: &VolumeId) -> bool { - self.get(volume_id).map(|x| x.readonly()).unwrap_or(false) - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct ExecuteArgs { - pub procedure: JsProcedure, - pub directory: PathBuf, - pub pkg_id: PackageId, - pub pkg_version: Version, - pub name: ProcedureName, - pub volumes: Volumes, - pub input: Option, -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct JsProcedure { - #[serde(default)] - args: Vec, -} - -impl JsProcedure { - pub fn validate(&self, _volumes: &Volumes) -> Result<(), color_eyre::eyre::Report> { - Ok(()) - } - - #[instrument(skip_all)] - pub async fn execute( - &self, - directory: &PathBuf, - pkg_id: &PackageId, - pkg_version: &Version, - name: ProcedureName, - volumes: &Volumes, - input: Option, - timeout: Option, - _gid: ProcessGroupId, - _rpc_client: Option>, - ) -> Result, Error> { - #[cfg(not(test))] - let mut cmd = Command::new("start-deno"); - #[cfg(test)] - let mut cmd = test_start_deno_command().await?; - - cmd.arg("execute") - .input(Some(&mut std::io::Cursor::new(IoFormat::Json.to_vec( - &ExecuteArgs { - procedure: self.clone(), - directory: directory.clone(), - pkg_id: pkg_id.clone(), - pkg_version: pkg_version.clone(), - name, - volumes: volumes.clone(), - input: input.and_then(|x| serde_json::to_value(x).ok()), - }, - )?))) - .timeout(timeout) - .invoke(ErrorKind::Javascript) - .await - .and_then(|res| IoFormat::Json.from_slice(&res)) - } - - #[instrument(skip_all)] - pub async fn sandboxed( - &self, - directory: &PathBuf, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - input: Option, - timeout: Option, - name: ProcedureName, - ) -> Result, Error> { - #[cfg(not(test))] - let mut cmd = Command::new("start-deno"); - #[cfg(test)] - let mut cmd = test_start_deno_command().await?; - - cmd.arg("sandbox") - .input(Some(&mut std::io::Cursor::new(IoFormat::Json.to_vec( - &ExecuteArgs { - procedure: self.clone(), - directory: directory.clone(), - pkg_id: pkg_id.clone(), - pkg_version: pkg_version.clone(), - name, - volumes: volumes.clone(), - input: input.and_then(|x| serde_json::to_value(x).ok()), - }, - )?))) - .timeout(timeout) - .invoke(ErrorKind::Javascript) - .await - .and_then(|res| IoFormat::Json.from_slice(&res)) - } - - #[instrument(skip_all)] - pub async fn execute_impl( - &self, - directory: &PathBuf, - pkg_id: &PackageId, - pkg_version: &Version, - name: ProcedureName, - volumes: &Volumes, - input: Option, - ) -> Result, Error> { - let res = async move { - let running_action = JsExecutionEnvironment::load_from_package( - directory, - pkg_id, - pkg_version, - Box::new(volumes.clone()), - ) - .await? - .run_action(name, input, self.args.clone()); - let output: Option = running_action.await?; - let output: O = unwrap_known_error(output)?; - Ok(output) - } - .await - .map_err(|(error, message)| (error.as_code_num(), message)); - - Ok(res) - } - - #[instrument(skip_all)] - pub async fn sandboxed_impl( - &self, - directory: &PathBuf, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - input: Option, - name: ProcedureName, - ) -> Result, Error> { - Ok(async move { - let running_action = JsExecutionEnvironment::load_from_package( - directory, - pkg_id, - pkg_version, - Box::new(volumes.clone()), - ) - .await? 
- .read_only_effects() - .run_action(name, input, self.args.clone()); - let output: Option = running_action.await?; - let output: O = unwrap_known_error(output)?; - Ok(output) - } - .await - .map_err(|(error, message)| (error.as_code_num(), message))) - } -} - -fn unwrap_known_error( - error_value: Option, -) -> Result { - let error_value = error_value.unwrap_or_else(|| ErrorValue::Result(serde_json::Value::Null)); - match error_value { - ErrorValue::Error(error) => Err((JsError::Javascript, error)), - ErrorValue::ErrorCode((code, message)) => Err((JsError::Code(code), message)), - ErrorValue::Result(ref value) => match serde_json::from_value(value.clone()) { - Ok(a) => Ok(a), - Err(err) => { - tracing::error!("{}", err); - tracing::debug!("{:?}", err); - Err(( - JsError::BoundryLayerSerDe, - format!( - "Couldn't convert output = {:#?} to the correct type", - serde_json::to_string_pretty(&error_value).unwrap_or_default() - ), - )) - } - }, - } -} - -#[cfg(test)] -async fn test_start_deno_command() -> Result { - Command::new("cargo") - .arg("build") - .invoke(ErrorKind::Unknown) - .await?; - if tokio::fs::metadata("../target/debug/start-deno") - .await - .is_err() - { - Command::new("ln") - .arg("-rsf") - .arg("../target/debug/startbox") - .arg("../target/debug/start-deno") - .invoke(crate::ErrorKind::Filesystem) - .await?; - } - Ok(Command::new("../target/debug/start-deno")) -} - -#[tokio::test] -async fn js_action_execute() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::GetConfig; - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = Some(serde_json::json!({"test":123})); - let timeout = Some(Duration::from_secs(10)); - let _output: crate::config::action::ConfigRes = js_action - .execute( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); - assert_eq!( - &std::fs::read_to_string( - "test/js_action_execute/package-data/volumes/test-package/data/main/test.log" - ) - .unwrap(), - "This is a test" - ); - std::fs::remove_file( - "test/js_action_execute/package-data/volumes/test-package/data/main/test.log", - ) - .unwrap(); -} - -#[tokio::test] -async fn js_action_execute_error() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::SetConfig; - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - let output: Result = js_action - .execute( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - 
None, - ) - .await - .unwrap(); - assert_eq!("Err((2, \"Not setup\"))", &format!("{:?}", output)); -} - -#[tokio::test] -async fn js_action_fetch() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("fetch".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} - -#[tokio::test] -async fn js_test_slow() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("slow".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - tracing::debug!("testing start"); - tokio::select! 
{ - a = js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) => { a.unwrap().unwrap(); }, - _ = tokio::time::sleep(Duration::from_secs(1)) => () - } - tracing::debug!("testing end should"); - tokio::time::sleep(Duration::from_secs(2)).await; - tracing::debug!("Done"); -} -#[tokio::test] -async fn js_action_var_arg() { - let js_action = JsProcedure { - args: vec![42.into()], - }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("js-action-var-arg".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} - -#[tokio::test] -async fn js_action_test_rename() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("test-rename".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} - -#[tokio::test] -async fn js_action_test_deep_dir() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("test-deep-dir".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} -#[tokio::test] -async fn js_action_test_deep_dir_escape() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = 
ProcedureName::Action("test-deep-dir-escape".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} -#[tokio::test] -async fn js_action_test_zero_dir() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("test-zero-dir".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} -#[tokio::test] -async fn js_action_test_read_dir() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("test-read-dir".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} - -#[tokio::test] -async fn js_rsync() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = "test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("test-rsync".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} - -#[tokio::test] -async fn js_disk_usage() { - let js_action = JsProcedure { args: vec![] }; - let path: PathBuf = 
"test/js_action_execute/" - .parse::() - .unwrap() - .canonicalize() - .unwrap(); - let package_id = "test-package".parse().unwrap(); - let package_version: Version = "0.3.0.3".parse().unwrap(); - let name = ProcedureName::Action("test-disk-usage".parse().unwrap()); - let volumes: Volumes = serde_json::from_value(serde_json::json!({ - "main": { - "type": "data" - }, - "compat": { - "type": "assets" - }, - "filebrowser" :{ - "package-id": "filebrowser", - "path": "data", - "readonly": true, - "type": "pointer", - "volume-id": "main", - } - })) - .unwrap(); - let input: Option = None; - let timeout = Some(Duration::from_secs(10)); - js_action - .execute::( - &path, - &package_id, - &package_version, - name, - &volumes, - input, - timeout, - ProcessGroupId(0), - None, - ) - .await - .unwrap() - .unwrap(); -} diff --git a/core/startos/src/procedure/mod.rs b/core/startos/src/procedure/mod.rs deleted file mode 100644 index f3a528713e..0000000000 --- a/core/startos/src/procedure/mod.rs +++ /dev/null @@ -1,185 +0,0 @@ -use std::collections::BTreeSet; -use std::time::Duration; - -use color_eyre::eyre::eyre; -use models::ImageId; -use patch_db::HasModel; -use serde::de::DeserializeOwned; -use serde::{Deserialize, Serialize}; -use tracing::instrument; - -use self::docker::DockerProcedure; -use crate::context::RpcContext; -use crate::prelude::*; -use crate::s9pk::manifest::PackageId; -use crate::util::Version; -use crate::volume::Volumes; -use crate::{Error, ErrorKind}; - -pub mod docker; -#[cfg(feature = "js-engine")] -pub mod js_scripts; -pub use models::ProcedureName; - -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[serde(tag = "type")] -#[model = "Model"] -pub enum PackageProcedure { - Docker(DockerProcedure), - - #[cfg(feature = "js-engine")] - Script(js_scripts::JsProcedure), -} - -impl PackageProcedure { - pub fn is_script(&self) -> bool { - match self { - #[cfg(feature = "js-engine")] - Self::Script(_) => true, - _ => false, - } - } - #[instrument(skip_all)] - pub fn validate( - &self, - eos_version: &Version, - volumes: &Volumes, - image_ids: &BTreeSet, - expected_io: bool, - ) -> Result<(), color_eyre::eyre::Report> { - match self { - PackageProcedure::Docker(action) => { - action.validate(eos_version, volumes, image_ids, expected_io) - } - #[cfg(feature = "js-engine")] - PackageProcedure::Script(action) => action.validate(volumes), - } - } - - #[instrument(skip_all)] - pub async fn execute( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - name: ProcedureName, - volumes: &Volumes, - input: Option, - timeout: Option, - ) -> Result, Error> { - tracing::trace!("Procedure execute {} {} - {:?}", self, pkg_id, name); - match self { - PackageProcedure::Docker(procedure) if procedure.inject == true => { - procedure - .inject(ctx, pkg_id, pkg_version, name, volumes, input, timeout) - .await - } - PackageProcedure::Docker(procedure) => { - procedure - .execute(ctx, pkg_id, pkg_version, name, volumes, input, timeout) - .await - } - #[cfg(feature = "js-engine")] - PackageProcedure::Script(procedure) => { - let man = ctx - .managers - .get(&(pkg_id.clone(), pkg_version.clone())) - .await - .ok_or_else(|| { - Error::new( - eyre!("No manager found for {}", pkg_id), - ErrorKind::NotFound, - ) - })?; - let rpc_client = man.rpc_client(); - let gid = if matches!(name, ProcedureName::Main) { - man.gid.new_main_gid() - } else { - man.gid.new_gid() - }; - - procedure - .execute( - &ctx.datadir, - pkg_id, - pkg_version, - name, - 
volumes, - input, - timeout, - gid, - rpc_client, - ) - .await - } - } - } - - #[instrument(skip_all)] - pub async fn sandboxed( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - input: Option, - timeout: Option, - name: ProcedureName, - ) -> Result, Error> { - tracing::trace!("Procedure sandboxed {} {} - {:?}", self, pkg_id, name); - match self { - PackageProcedure::Docker(procedure) => { - procedure - .sandboxed(ctx, pkg_id, pkg_version, volumes, input, timeout) - .await - } - #[cfg(feature = "js-engine")] - PackageProcedure::Script(procedure) => { - procedure - .sandboxed( - &ctx.datadir, - pkg_id, - pkg_version, - volumes, - input, - timeout, - name, - ) - .await - } - } - } -} - -impl std::fmt::Display for PackageProcedure { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - PackageProcedure::Docker(_) => write!(f, "Docker")?, - #[cfg(feature = "js-engine")] - PackageProcedure::Script(_) => write!(f, "JS")?, - } - Ok(()) - } -} - -// TODO: make this not allocate -#[derive(Debug)] -pub struct NoOutput; -impl<'de> Deserialize<'de> for NoOutput { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - let _ = Value::deserialize(deserializer); - Ok(NoOutput) - } -} - -#[test] -fn test_deser_no_output() { - serde_json::from_str::("").unwrap(); - serde_json::from_str::>("{\"Ok\": null}") - .unwrap() - .unwrap(); -} diff --git a/core/startos/src/progress.rs b/core/startos/src/progress.rs new file mode 100644 index 0000000000..eec6375751 --- /dev/null +++ b/core/startos/src/progress.rs @@ -0,0 +1,451 @@ +use std::panic::UnwindSafe; +use std::sync::Arc; +use std::time::Duration; + +use futures::Future; +use imbl_value::{InOMap, InternedString}; +use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; +use itertools::Itertools; +use serde::{Deserialize, Serialize}; +use tokio::io::{AsyncSeek, AsyncWrite}; +use tokio::sync::{mpsc, watch}; +use ts_rs::TS; + +use crate::db::model::DatabaseModel; +use crate::prelude::*; + +lazy_static::lazy_static! { + static ref SPINNER: ProgressStyle = ProgressStyle::with_template("{spinner} {msg}...").unwrap(); + static ref PERCENTAGE: ProgressStyle = ProgressStyle::with_template("{msg} {percent}% {wide_bar} [{bytes}/{total_bytes}] [{binary_bytes_per_sec} {eta}]").unwrap(); + static ref BYTES: ProgressStyle = ProgressStyle::with_template("{spinner} {wide_msg} [{bytes}/?] 
[{binary_bytes_per_sec} {elapsed}]").unwrap(); +} + +#[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, TS)] +#[serde(untagged)] +pub enum Progress { + Complete(bool), + Progress { + #[ts(type = "number")] + done: u64, + #[ts(type = "number | null")] + total: Option, + }, +} +impl Progress { + pub fn new() -> Self { + Progress::Complete(false) + } + pub fn update_bar(self, bar: &ProgressBar) { + match self { + Self::Complete(false) => { + bar.set_style(SPINNER.clone()); + bar.tick(); + } + Self::Complete(true) => { + bar.finish(); + } + Self::Progress { done, total: None } => { + bar.set_style(BYTES.clone()); + bar.set_position(done); + bar.tick(); + } + Self::Progress { + done, + total: Some(total), + } => { + bar.set_style(PERCENTAGE.clone()); + bar.set_position(done); + bar.set_length(total); + bar.tick(); + } + } + } + pub fn set_done(&mut self, done: u64) { + *self = match *self { + Self::Complete(false) => Self::Progress { done, total: None }, + Self::Progress { mut done, total } => { + if let Some(total) = total { + if done > total { + done = total; + } + } + Self::Progress { done, total } + } + Self::Complete(true) => Self::Complete(true), + }; + } + pub fn set_total(&mut self, total: u64) { + *self = match *self { + Self::Complete(false) => Self::Progress { + done: 0, + total: Some(total), + }, + Self::Progress { done, .. } => Self::Progress { + done, + total: Some(total), + }, + Self::Complete(true) => Self::Complete(true), + } + } + pub fn add_total(&mut self, total: u64) { + if let Self::Progress { + done, + total: Some(old), + } = *self + { + *self = Self::Progress { + done, + total: Some(old + total), + }; + } else { + self.set_total(total) + } + } + pub fn complete(&mut self) { + *self = Self::Complete(true); + } +} +impl std::ops::Add for Progress { + type Output = Self; + fn add(self, rhs: u64) -> Self::Output { + match self { + Self::Complete(false) => Self::Progress { + done: rhs, + total: None, + }, + Self::Progress { done, total } => { + let mut done = done + rhs; + if let Some(total) = total { + if done > total { + done = total; + } + } + Self::Progress { done, total } + } + Self::Complete(true) => Self::Complete(true), + } + } +} +impl std::ops::AddAssign for Progress { + fn add_assign(&mut self, rhs: u64) { + *self = *self + rhs; + } +} + +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[ts(export)] +pub struct NamedProgress { + #[ts(type = "string")] + pub name: InternedString, + pub progress: Progress, +} + +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[ts(export)] +pub struct FullProgress { + pub overall: Progress, + pub phases: Vec, +} +impl FullProgress { + pub fn new() -> Self { + Self { + overall: Progress::new(), + phases: Vec::new(), + } + } +} + +pub struct FullProgressTracker { + overall: Arc>, + overall_recv: watch::Receiver, + phases: InOMap>, + new_phase: ( + mpsc::UnboundedSender<(InternedString, watch::Receiver)>, + mpsc::UnboundedReceiver<(InternedString, watch::Receiver)>, + ), +} +impl FullProgressTracker { + pub fn new() -> Self { + let (overall, overall_recv) = watch::channel(Progress::new()); + Self { + overall: Arc::new(overall), + overall_recv, + phases: InOMap::new(), + new_phase: mpsc::unbounded_channel(), + } + } + fn fill_phases(&mut self) -> bool { + let mut changed = false; + while let Ok((name, phase)) = self.new_phase.1.try_recv() { + self.phases.insert(name, phase); + changed = true; + } + changed + } + pub fn snapshot(&mut self) -> FullProgress { + self.fill_phases(); + FullProgress { 
+ overall: *self.overall.borrow(), + phases: self + .phases + .iter() + .map(|(name, progress)| NamedProgress { + name: name.clone(), + progress: *progress.borrow(), + }) + .collect(), + } + } + pub async fn changed(&mut self) { + if self.fill_phases() { + return; + } + let phases = self + .phases + .iter_mut() + .map(|(_, p)| Box::pin(p.changed())) + .collect_vec(); + tokio::select! { + _ = self.overall_recv.changed() => (), + _ = futures::future::select_all(phases) => (), + } + } + pub fn handle(&self) -> FullProgressTrackerHandle { + FullProgressTrackerHandle { + overall: self.overall.clone(), + new_phase: self.new_phase.0.clone(), + } + } + pub fn sync_to_db( + mut self, + db: PatchDb, + deref: DerefFn, + min_interval: Option, + ) -> impl Future> + 'static + where + DerefFn: Fn(&mut DatabaseModel) -> Option<&mut Model> + 'static, + for<'a> &'a DerefFn: UnwindSafe + Send, + { + async move { + loop { + let progress = self.snapshot(); + if db + .mutate(|v| { + if let Some(p) = deref(v) { + p.ser(&progress)?; + Ok(false) + } else { + Ok(true) + } + }) + .await? + { + break; + } + tokio::join!(self.changed(), async { + if let Some(interval) = min_interval { + tokio::time::sleep(interval).await + } else { + futures::future::ready(()).await + } + }); + } + Ok(()) + } + } +} + +#[derive(Clone)] +pub struct FullProgressTrackerHandle { + overall: Arc>, + new_phase: mpsc::UnboundedSender<(InternedString, watch::Receiver)>, +} +impl FullProgressTrackerHandle { + pub fn add_phase( + &self, + name: InternedString, + overall_contribution: Option, + ) -> PhaseProgressTrackerHandle { + if let Some(overall_contribution) = overall_contribution { + self.overall + .send_modify(|o| o.add_total(overall_contribution)); + } + let (send, recv) = watch::channel(Progress::new()); + let _ = self.new_phase.send((name, recv)); + PhaseProgressTrackerHandle { + overall: self.overall.clone(), + overall_contribution, + contributed: 0, + progress: send, + } + } + pub fn complete(&self) { + self.overall.send_modify(|o| o.complete()); + } +} + +pub struct PhaseProgressTrackerHandle { + overall: Arc>, + overall_contribution: Option, + contributed: u64, + progress: watch::Sender, +} +impl PhaseProgressTrackerHandle { + fn update_overall(&mut self) { + if let Some(overall_contribution) = self.overall_contribution { + let contribution = match *self.progress.borrow() { + Progress::Complete(true) => overall_contribution, + Progress::Progress { + done, + total: Some(total), + } => ((done as f64 / total as f64) * overall_contribution as f64) as u64, + _ => 0, + }; + if contribution > self.contributed { + self.overall + .send_modify(|o| *o += contribution - self.contributed); + self.contributed = contribution; + } + } + } + pub fn set_done(&mut self, done: u64) { + self.progress.send_modify(|p| p.set_done(done)); + self.update_overall(); + } + pub fn set_total(&mut self, total: u64) { + self.progress.send_modify(|p| p.set_total(total)); + self.update_overall(); + } + pub fn add_total(&mut self, total: u64) { + self.progress.send_modify(|p| p.add_total(total)); + self.update_overall(); + } + pub fn complete(&mut self) { + self.progress.send_modify(|p| p.complete()); + self.update_overall(); + } +} +impl std::ops::AddAssign for PhaseProgressTrackerHandle { + fn add_assign(&mut self, rhs: u64) { + self.progress.send_modify(|p| *p += rhs); + self.update_overall(); + } +} + +#[pin_project::pin_project] +pub struct ProgressTrackerWriter { + #[pin] + writer: W, + progress: PhaseProgressTrackerHandle, +} +impl ProgressTrackerWriter { + pub 
fn new(writer: W, progress: PhaseProgressTrackerHandle) -> Self { + Self { writer, progress } + } + pub fn into_inner(self) -> (W, PhaseProgressTrackerHandle) { + (self.writer, self.progress) + } +} +impl AsyncWrite for ProgressTrackerWriter { + fn poll_write( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> std::task::Poll> { + let this = self.project(); + match this.writer.poll_write(cx, buf) { + std::task::Poll::Ready(Ok(n)) => { + *this.progress += n as u64; + std::task::Poll::Ready(Ok(n)) + } + a => a, + } + } + fn poll_flush( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.project().writer.poll_flush(cx) + } + fn poll_shutdown( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.project().writer.poll_shutdown(cx) + } + fn is_write_vectored(&self) -> bool { + self.writer.is_write_vectored() + } + fn poll_write_vectored( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + bufs: &[std::io::IoSlice<'_>], + ) -> std::task::Poll> { + self.project().writer.poll_write_vectored(cx, bufs) + } +} +impl AsyncSeek for ProgressTrackerWriter { + fn start_seek( + self: std::pin::Pin<&mut Self>, + position: std::io::SeekFrom, + ) -> std::io::Result<()> { + self.project().writer.start_seek(position) + } + fn poll_complete( + self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + let this = self.project(); + match this.writer.poll_complete(cx) { + std::task::Poll::Ready(Ok(n)) => { + this.progress.set_done(n); + std::task::Poll::Ready(Ok(n)) + } + a => a, + } + } +} + +pub struct PhasedProgressBar { + multi: MultiProgress, + overall: ProgressBar, + phases: InOMap, +} +impl PhasedProgressBar { + pub fn new(name: &str) -> Self { + let multi = MultiProgress::new(); + Self { + overall: multi.add( + ProgressBar::new(0) + .with_style(SPINNER.clone()) + .with_message(name.to_owned()), + ), + multi, + phases: InOMap::new(), + } + } + pub fn update(&mut self, progress: &FullProgress) { + for phase in progress.phases.iter() { + if !self.phases.contains_key(&phase.name) { + self.phases.insert( + phase.name.clone(), + self.multi + .add(ProgressBar::new(0).with_style(SPINNER.clone())) + .with_message((&*phase.name).to_owned()), + ); + } + } + progress.overall.update_bar(&self.overall); + for (name, bar) in self.phases.iter() { + if let Some(progress) = progress.phases.iter().find_map(|p| { + if &p.name == name { + Some(p.progress) + } else { + None + } + }) { + progress.update_bar(bar); + } + } + } +} diff --git a/core/startos/src/properties.rs b/core/startos/src/properties.rs index 851033b718..5aa8a01d49 100644 --- a/core/startos/src/properties.rs +++ b/core/startos/src/properties.rs @@ -1,50 +1,33 @@ -use clap::ArgMatches; -use color_eyre::eyre::eyre; +use clap::Parser; +use imbl_value::Value; +use models::PackageId; use rpc_toolkit::command; -use serde_json::Value; -use tracing::instrument; +use serde::{Deserialize, Serialize}; use crate::context::RpcContext; use crate::prelude::*; -use crate::procedure::ProcedureName; -use crate::s9pk::manifest::PackageId; -use crate::{Error, ErrorKind}; +use crate::Error; -pub fn display_properties(response: Value, _: &ArgMatches) { +pub fn display_properties(response: Value) { println!("{}", response); } -#[command(display(display_properties))] -pub async fn properties(#[context] ctx: RpcContext, #[arg] id: PackageId) -> Result { - Ok(fetch_properties(ctx, id).await?) 
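A minimal sketch of how the new progress primitives from `progress.rs` above might be wired together, using only the API introduced in that hunk (`FullProgressTracker`, `FullProgressTrackerHandle`, `PhaseProgressTrackerHandle`, `PhasedProgressBar`); the phase names, weights, and byte counts are illustrative, not part of this diff.

```rust
use imbl_value::InternedString;

use crate::progress::{FullProgressTracker, PhasedProgressBar};

fn demo_progress() {
    let mut tracker = FullProgressTracker::new();
    let handle = tracker.handle();

    // Each phase contributes a weighted share of the overall bar.
    let mut download = handle.add_phase(InternedString::intern("download"), Some(90));
    let mut verify = handle.add_phase(InternedString::intern("verify"), Some(10));

    let mut bar = PhasedProgressBar::new("installing demo-package");

    download.set_total(1_000_000);
    for _ in 0..10 {
        download += 100_000; // e.g. after each chunk is written
        bar.update(&tracker.snapshot());
    }
    download.complete();
    verify.complete();

    handle.complete();
    bar.update(&tracker.snapshot());
}
```

On the server side, `sync_to_db` plays the role the bar plays here: it pushes each snapshot into the chosen database model, rate-limited by the optional `min_interval`.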
+#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct PropertiesParam { + id: PackageId, } - -#[instrument(skip_all)] -pub async fn fetch_properties(ctx: RpcContext, id: PackageId) -> Result { - let peek = ctx.db.peek().await; - - let manifest = peek - .as_package_data() - .as_idx(&id) - .ok_or_else(|| Error::new(eyre!("{} is not installed", id), ErrorKind::NotFound))? - .expect_as_installed()? - .as_manifest() - .de()?; - if let Some(props) = manifest.properties { - props - .execute::<(), Value>( - &ctx, - &manifest.id, - &manifest.version, - ProcedureName::Properties, - &manifest.volumes, - None, - None, - ) - .await? - .map_err(|(_, e)| Error::new(eyre!("{}", e), ErrorKind::Docker)) - .and_then(|a| Ok(a)) - } else { - Ok(Value::Null) +// #[command(display(display_properties))] +pub async fn properties( + ctx: RpcContext, + PropertiesParam { id }: PropertiesParam, +) -> Result { + match &*ctx.services.get(&id).await { + Some(service) => service.properties().await, + None => Err(Error::new( + eyre!("Could not find a service with id {id}"), + ErrorKind::NotFound, + )), } } diff --git a/core/startos/src/registry/admin.rs b/core/startos/src/registry/admin.rs index 44b83d1616..83dc2ff106 100644 --- a/core/startos/src/registry/admin.rs +++ b/core/startos/src/registry/admin.rs @@ -1,15 +1,17 @@ use std::path::PathBuf; use std::time::Duration; +use clap::Parser; use color_eyre::eyre::eyre; use console::style; use futures::StreamExt; use indicatif::{ProgressBar, ProgressStyle}; use reqwest::{header, Body, Client, Url}; use rpc_toolkit::command; +use serde::{Deserialize, Serialize}; -use crate::s9pk::reader::S9pkReader; -use crate::util::display_none; +use crate::context::CliContext; +use crate::s9pk::S9pk; use crate::{Error, ErrorKind}; async fn registry_user_pass(location: &str) -> Result<(Url, String, String), Error> { @@ -88,13 +90,29 @@ async fn do_upload( Ok(()) } -#[command(cli_only, display(display_none))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct PublishParams { + location: String, + path: PathBuf, + #[arg(name = "no-verify", long = "no-verify")] + no_verify: bool, + #[arg(name = "no-upload", long = "no-upload")] + no_upload: bool, + #[arg(name = "no-index", long = "no-index")] + no_index: bool, +} + pub async fn publish( - #[arg] location: String, - #[arg] path: PathBuf, - #[arg(rename = "no-verify", long = "no-verify")] no_verify: bool, - #[arg(rename = "no-upload", long = "no-upload")] no_upload: bool, - #[arg(rename = "no-index", long = "no-index")] no_index: bool, + _: CliContext, + PublishParams { + location, + no_index, + no_upload, + no_verify, + path, + }: PublishParams, ) -> Result<(), Error> { // Prepare for progress bars. 
let bytes_bar_style = @@ -115,8 +133,8 @@ pub async fn publish( .with_prefix("[1/3]") .with_message("Querying s9pk"); pb.enable_steady_tick(Duration::from_millis(200)); - let mut s9pk = S9pkReader::open(&path, false).await?; - let m = s9pk.manifest().await?.clone(); + let s9pk = S9pk::open(&path, None).await?; + let m = s9pk.as_manifest().clone(); pb.set_style(plain_line_style.clone()); pb.abandon(); m @@ -126,9 +144,10 @@ pub async fn publish( .with_prefix("[1/3]") .with_message("Verifying s9pk"); pb.enable_steady_tick(Duration::from_millis(200)); - let mut s9pk = S9pkReader::open(&path, true).await?; - s9pk.validate().await?; - let m = s9pk.manifest().await?.clone(); + let s9pk = S9pk::open(&path, None).await?; + // s9pk.validate().await?; + todo!(); + let m = s9pk.as_manifest().clone(); pb.set_style(plain_line_style.clone()); pb.abandon(); m diff --git a/core/startos/src/registry/marketplace.rs b/core/startos/src/registry/marketplace.rs index 9797331986..1807b3397a 100644 --- a/core/startos/src/registry/marketplace.rs +++ b/core/startos/src/registry/marketplace.rs @@ -1,16 +1,17 @@ use base64::Engine; +use clap::Parser; use color_eyre::eyre::eyre; use reqwest::{StatusCode, Url}; -use rpc_toolkit::command; +use rpc_toolkit::{command, from_fn_async, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; use serde_json::Value; -use crate::context::RpcContext; +use crate::context::{CliContext, RpcContext}; use crate::version::VersionT; use crate::{Error, ResultExt}; -#[command(subcommands(get))] -pub fn marketplace() -> Result<(), Error> { - Ok(()) +pub fn marketplace() -> ParentHandler { + ParentHandler::new().subcommand("get", from_fn_async(get).with_remote_cli::()) } pub fn with_query_params(ctx: RpcContext, mut url: Url) -> Url { @@ -35,8 +36,14 @@ pub fn with_query_params(ctx: RpcContext, mut url: Url) -> Url { url } -#[command] -pub async fn get(#[context] ctx: RpcContext, #[arg] url: Url) -> Result { +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct GetParams { + url: Url, +} + +pub async fn get(ctx: RpcContext, GetParams { url }: GetParams) -> Result { let mut response = ctx .client .get(with_query_params(ctx.clone(), url)) diff --git a/core/startos/src/s9pk/builder.rs b/core/startos/src/s9pk/builder.rs deleted file mode 100644 index 1997424396..0000000000 --- a/core/startos/src/s9pk/builder.rs +++ /dev/null @@ -1,145 +0,0 @@ -use sha2::{Digest, Sha512}; -use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt, SeekFrom}; -use tracing::instrument; -use typed_builder::TypedBuilder; - -use super::header::{FileSection, Header}; -use super::manifest::Manifest; -use super::SIG_CONTEXT; -use crate::util::io::to_cbor_async_writer; -use crate::util::HashWriter; -use crate::{Error, ResultExt}; - -#[derive(TypedBuilder)] -pub struct S9pkPacker< - 'a, - W: AsyncWriteExt + AsyncSeekExt, - RLicense: AsyncReadExt + Unpin, - RInstructions: AsyncReadExt + Unpin, - RIcon: AsyncReadExt + Unpin, - RDockerImages: AsyncReadExt + Unpin, - RAssets: AsyncReadExt + Unpin, - RScripts: AsyncReadExt + Unpin, -> { - writer: W, - manifest: &'a Manifest, - license: RLicense, - instructions: RInstructions, - icon: RIcon, - docker_images: RDockerImages, - assets: RAssets, - scripts: Option, -} -impl< - 'a, - W: AsyncWriteExt + AsyncSeekExt + Unpin, - RLicense: AsyncReadExt + Unpin, - RInstructions: AsyncReadExt + Unpin, - RIcon: AsyncReadExt + Unpin, - RDockerImages: AsyncReadExt + Unpin, - RAssets: AsyncReadExt + Unpin, - 
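Since the old `#[command]` attribute macros are being replaced by explicit handler trees, a handler such as `properties` would presumably be mounted the same way `marketplace` mounts its `get` subcommand above. A hypothetical sketch follows; the parent handler name and the `CliContext` generic are assumptions, only the `ParentHandler::new().subcommand(...)` pattern is taken from this diff.

```rust
use rpc_toolkit::{from_fn_async, HandlerExt, ParentHandler};

use crate::context::CliContext;
use crate::properties::properties;

// Hypothetical parent handler mirroring the marketplace() example above.
pub fn package() -> ParentHandler {
    ParentHandler::new().subcommand(
        "properties",
        from_fn_async(properties).with_remote_cli::<CliContext>(),
    )
}
```

On the CLI side the same `PropertiesParam` struct doubles as the clap parser, which is why the new param types derive both `Parser` and `Deserialize`.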
RScripts: AsyncReadExt + Unpin, - > S9pkPacker<'a, W, RLicense, RInstructions, RIcon, RDockerImages, RAssets, RScripts> -{ - /// BLOCKING - #[instrument(skip_all)] - pub async fn pack(mut self, key: &ed25519_dalek::SigningKey) -> Result<(), Error> { - let header_pos = self.writer.stream_position().await?; - if header_pos != 0 { - tracing::warn!("Appending to non-empty file."); - } - let mut header = Header::placeholder(); - header.serialize(&mut self.writer).await.with_ctx(|_| { - ( - crate::ErrorKind::Serialization, - "Writing Placeholder Header", - ) - })?; - let mut position = self.writer.stream_position().await?; - - let mut writer = HashWriter::new(Sha512::new(), &mut self.writer); - // manifest - to_cbor_async_writer(&mut writer, self.manifest).await?; - let new_pos = writer.inner_mut().stream_position().await?; - header.table_of_contents.manifest = FileSection { - position, - length: new_pos - position, - }; - position = new_pos; - // license - tokio::io::copy(&mut self.license, &mut writer) - .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, "Copying License"))?; - let new_pos = writer.inner_mut().stream_position().await?; - header.table_of_contents.license = FileSection { - position, - length: new_pos - position, - }; - position = new_pos; - // instructions - tokio::io::copy(&mut self.instructions, &mut writer) - .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, "Copying Instructions"))?; - let new_pos = writer.inner_mut().stream_position().await?; - header.table_of_contents.instructions = FileSection { - position, - length: new_pos - position, - }; - position = new_pos; - // icon - tokio::io::copy(&mut self.icon, &mut writer) - .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, "Copying Icon"))?; - let new_pos = writer.inner_mut().stream_position().await?; - header.table_of_contents.icon = FileSection { - position, - length: new_pos - position, - }; - position = new_pos; - // docker_images - tokio::io::copy(&mut self.docker_images, &mut writer) - .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, "Copying Docker Images"))?; - let new_pos = writer.inner_mut().stream_position().await?; - header.table_of_contents.docker_images = FileSection { - position, - length: new_pos - position, - }; - position = new_pos; - // assets - tokio::io::copy(&mut self.assets, &mut writer) - .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, "Copying Assets"))?; - let new_pos = writer.inner_mut().stream_position().await?; - header.table_of_contents.assets = FileSection { - position, - length: new_pos - position, - }; - position = new_pos; - // scripts - if let Some(mut scripts) = self.scripts { - tokio::io::copy(&mut scripts, &mut writer) - .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, "Copying Scripts"))?; - let new_pos = writer.inner_mut().stream_position().await?; - header.table_of_contents.scripts = Some(FileSection { - position, - length: new_pos - position, - }); - position = new_pos; - } - - // header - let (hash, _) = writer.finish(); - self.writer.seek(SeekFrom::Start(header_pos)).await?; - header.pubkey = key.into(); - header.signature = key.sign_prehashed(hash, Some(SIG_CONTEXT))?; - header - .serialize(&mut self.writer) - .await - .with_ctx(|_| (crate::ErrorKind::Serialization, "Writing Header"))?; - self.writer.seek(SeekFrom::Start(position)).await?; - - Ok(()) - } -} diff --git a/core/startos/src/s9pk/docker.rs b/core/startos/src/s9pk/docker.rs deleted file mode 100644 index be93905fb6..0000000000 --- a/core/startos/src/s9pk/docker.rs +++ /dev/null @@ -1,95 +0,0 
@@ -use std::borrow::Cow; -use std::collections::BTreeSet; -use std::io::SeekFrom; -use std::path::Path; - -use color_eyre::eyre::eyre; -use futures::{FutureExt, TryStreamExt}; -use serde::{Deserialize, Serialize}; -use tokio::io::{AsyncRead, AsyncSeek, AsyncSeekExt}; -use tokio_tar::{Archive, Entry}; - -use crate::util::io::from_cbor_async_reader; -use crate::{Error, ErrorKind, ARCH}; - -#[derive(Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct DockerMultiArch { - pub default: String, - pub available: BTreeSet, -} - -#[pin_project::pin_project(project = DockerReaderProject)] -#[derive(Debug)] -pub enum DockerReader { - SingleArch(#[pin] R), - MultiArch(#[pin] Entry>), -} -impl DockerReader { - pub async fn new(mut rdr: R) -> Result { - let arch = if let Some(multiarch) = tokio_tar::Archive::new(&mut rdr) - .entries()? - .try_filter_map(|e| { - async move { - Ok(if &*e.path()? == Path::new("multiarch.cbor") { - Some(e) - } else { - None - }) - } - .boxed() - }) - .try_next() - .await? - { - let multiarch: DockerMultiArch = from_cbor_async_reader(multiarch).await?; - Some(if multiarch.available.contains(&**ARCH) { - Cow::Borrowed(&**ARCH) - } else { - Cow::Owned(multiarch.default) - }) - } else { - None - }; - rdr.seek(SeekFrom::Start(0)).await?; - if let Some(arch) = arch { - if let Some(image) = tokio_tar::Archive::new(rdr) - .entries()? - .try_filter_map(|e| { - let arch = arch.clone(); - async move { - Ok(if &*e.path()? == Path::new(&format!("{}.tar", arch)) { - Some(e) - } else { - None - }) - } - .boxed() - }) - .try_next() - .await? - { - Ok(Self::MultiArch(image)) - } else { - Err(Error::new( - eyre!("Docker image section does not contain tarball for architecture"), - ErrorKind::ParseS9pk, - )) - } - } else { - Ok(Self::SingleArch(rdr)) - } - } -} -impl AsyncRead for DockerReader { - fn poll_read( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - buf: &mut tokio::io::ReadBuf<'_>, - ) -> std::task::Poll> { - match self.project() { - DockerReaderProject::SingleArch(r) => r.poll_read(cx, buf), - DockerReaderProject::MultiArch(r) => r.poll_read(cx, buf), - } - } -} diff --git a/core/startos/src/s9pk/git_hash.rs b/core/startos/src/s9pk/git_hash.rs deleted file mode 100644 index b2990a111e..0000000000 --- a/core/startos/src/s9pk/git_hash.rs +++ /dev/null @@ -1,41 +0,0 @@ -use std::path::Path; - -use crate::Error; - -#[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] -pub struct GitHash(String); - -impl GitHash { - pub async fn from_path(path: impl AsRef) -> Result { - let hash = tokio::process::Command::new("git") - .args(["describe", "--always", "--abbrev=40", "--dirty=-modified"]) - .current_dir(path) - .output() - .await?; - if !hash.status.success() { - return Err(Error::new( - color_eyre::eyre::eyre!("Could not get hash: {}", String::from_utf8(hash.stderr)?), - crate::ErrorKind::Filesystem, - )); - } - Ok(GitHash(String::from_utf8(hash.stdout)?)) - } -} - -impl AsRef for GitHash { - fn as_ref(&self) -> &str { - &self.0 - } -} - -// #[tokio::test] -// async fn test_githash_for_current() { -// let answer: GitHash = GitHash::from_path(std::env::current_dir().unwrap()) -// .await -// .unwrap(); -// let answer_str: &str = answer.as_ref(); -// assert!( -// !answer_str.is_empty(), -// "Should have a hash for this current working" -// ); -// } diff --git a/core/startos/src/s9pk/header.rs b/core/startos/src/s9pk/header.rs deleted file mode 100644 index 4f77ad8550..0000000000 --- a/core/startos/src/s9pk/header.rs +++ /dev/null 
@@ -1,187 +0,0 @@ -use std::collections::BTreeMap; - -use color_eyre::eyre::eyre; -use ed25519_dalek::{Signature, VerifyingKey}; -use tokio::io::{AsyncRead, AsyncReadExt, AsyncWriteExt}; - -use crate::Error; - -pub const MAGIC: [u8; 2] = [59, 59]; -pub const VERSION: u8 = 1; - -#[derive(Debug)] -pub struct Header { - pub pubkey: VerifyingKey, - pub signature: Signature, - pub table_of_contents: TableOfContents, -} -impl Header { - pub fn placeholder() -> Self { - Header { - pubkey: VerifyingKey::default(), - signature: Signature::from_bytes(&[0; 64]), - table_of_contents: Default::default(), - } - } - // MUST BE SAME SIZE REGARDLESS OF DATA - pub async fn serialize(&self, mut writer: W) -> std::io::Result<()> { - writer.write_all(&MAGIC).await?; - writer.write_all(&[VERSION]).await?; - writer.write_all(self.pubkey.as_bytes()).await?; - writer.write_all(&self.signature.to_bytes()).await?; - self.table_of_contents.serialize(writer).await?; - Ok(()) - } - pub async fn deserialize(mut reader: R) -> Result { - let mut magic = [0; 2]; - reader.read_exact(&mut magic).await?; - if magic != MAGIC { - return Err(Error::new( - eyre!("Incorrect Magic: {:?}", magic), - crate::ErrorKind::ParseS9pk, - )); - } - let mut version = [0]; - reader.read_exact(&mut version).await?; - if version[0] != VERSION { - return Err(Error::new( - eyre!("Unknown Version: {}", version[0]), - crate::ErrorKind::ParseS9pk, - )); - } - let mut pubkey_bytes = [0; 32]; - reader.read_exact(&mut pubkey_bytes).await?; - let pubkey = VerifyingKey::from_bytes(&pubkey_bytes) - .map_err(|e| Error::new(e, crate::ErrorKind::ParseS9pk))?; - let mut sig_bytes = [0; 64]; - reader.read_exact(&mut sig_bytes).await?; - let signature = Signature::from_bytes(&sig_bytes); - let table_of_contents = TableOfContents::deserialize(reader).await?; - - Ok(Header { - pubkey, - signature, - table_of_contents, - }) - } -} - -#[derive(Debug, Default)] -pub struct TableOfContents { - pub manifest: FileSection, - pub license: FileSection, - pub instructions: FileSection, - pub icon: FileSection, - pub docker_images: FileSection, - pub assets: FileSection, - pub scripts: Option, -} -impl TableOfContents { - pub async fn serialize(&self, mut writer: W) -> std::io::Result<()> { - let len: u32 = ((1 + "manifest".len() + 16) - + (1 + "license".len() + 16) - + (1 + "instructions".len() + 16) - + (1 + "icon".len() + 16) - + (1 + "docker_images".len() + 16) - + (1 + "assets".len() + 16) - + (1 + "scripts".len() + 16)) as u32; - writer.write_all(&u32::to_be_bytes(len)).await?; - self.manifest - .serialize_entry("manifest", &mut writer) - .await?; - self.license.serialize_entry("license", &mut writer).await?; - self.instructions - .serialize_entry("instructions", &mut writer) - .await?; - self.icon.serialize_entry("icon", &mut writer).await?; - self.docker_images - .serialize_entry("docker_images", &mut writer) - .await?; - self.assets.serialize_entry("assets", &mut writer).await?; - self.scripts - .unwrap_or_default() - .serialize_entry("scripts", &mut writer) - .await?; - Ok(()) - } - pub async fn deserialize(mut reader: R) -> std::io::Result { - let mut toc_len = [0; 4]; - reader.read_exact(&mut toc_len).await?; - let toc_len = u32::from_be_bytes(toc_len); - let mut reader = reader.take(toc_len as u64); - let mut table = BTreeMap::new(); - while let Some((label, section)) = FileSection::deserialize_entry(&mut reader).await? 
{ - table.insert(label, section); - } - fn from_table( - table: &BTreeMap, FileSection>, - label: &str, - ) -> std::io::Result { - table.get(label.as_bytes()).copied().ok_or_else(|| { - std::io::Error::new( - std::io::ErrorKind::UnexpectedEof, - format!("Missing Required Label: {}", label), - ) - }) - } - #[allow(dead_code)] - fn as_opt(fs: FileSection) -> Option { - if fs.position | fs.length == 0 { - // 0/0 is not a valid file section - None - } else { - Some(fs) - } - } - Ok(TableOfContents { - manifest: from_table(&table, "manifest")?, - license: from_table(&table, "license")?, - instructions: from_table(&table, "instructions")?, - icon: from_table(&table, "icon")?, - docker_images: from_table(&table, "docker_images")?, - assets: from_table(&table, "assets")?, - scripts: table.get("scripts".as_bytes()).cloned(), - }) - } -} - -#[derive(Clone, Copy, Debug, Default)] -pub struct FileSection { - pub position: u64, - pub length: u64, -} -impl FileSection { - pub async fn serialize_entry( - self, - label: &str, - mut writer: W, - ) -> std::io::Result<()> { - writer.write_all(&[label.len() as u8]).await?; - writer.write_all(label.as_bytes()).await?; - writer.write_all(&u64::to_be_bytes(self.position)).await?; - writer.write_all(&u64::to_be_bytes(self.length)).await?; - Ok(()) - } - pub async fn deserialize_entry( - mut reader: R, - ) -> std::io::Result, Self)>> { - let mut label_len = [0]; - let read = reader.read(&mut label_len).await?; - if read == 0 { - return Ok(None); - } - let mut label = vec![0; label_len[0] as usize]; - reader.read_exact(&mut label).await?; - let mut pos = [0; 8]; - reader.read_exact(&mut pos).await?; - let mut len = [0; 8]; - reader.read_exact(&mut len).await?; - Ok(Some(( - label, - FileSection { - position: u64::from_be_bytes(pos), - length: u64::from_be_bytes(len), - }, - ))) - } -} diff --git a/core/startos/src/s9pk/manifest.rs b/core/startos/src/s9pk/manifest.rs deleted file mode 100644 index 3eee540ed1..0000000000 --- a/core/startos/src/s9pk/manifest.rs +++ /dev/null @@ -1,211 +0,0 @@ -use std::collections::BTreeMap; -use std::path::{Path, PathBuf}; - -use color_eyre::eyre::eyre; -pub use models::PackageId; -use serde::{Deserialize, Serialize}; -use url::Url; - -use super::git_hash::GitHash; -use crate::action::Actions; -use crate::backup::BackupActions; -use crate::config::action::ConfigActions; -use crate::dependencies::Dependencies; -use crate::migration::Migrations; -use crate::net::interface::Interfaces; -use crate::prelude::*; -use crate::procedure::docker::DockerContainers; -use crate::procedure::PackageProcedure; -use crate::status::health_check::HealthChecks; -use crate::util::serde::Regex; -use crate::util::Version; -use crate::version::{Current, VersionT}; -use crate::volume::Volumes; -use crate::Error; - -fn current_version() -> Version { - Current::new().semver().into() -} - -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] -#[model = "Model"] -pub struct Manifest { - #[serde(default = "current_version")] - pub eos_version: Version, - pub id: PackageId, - #[serde(default)] - pub git_hash: Option, - pub title: String, - pub version: Version, - pub description: Description, - #[serde(default)] - pub assets: Assets, - #[serde(default)] - pub build: Option>, - pub release_notes: String, - pub license: String, // type of license - pub wrapper_repo: Url, - pub upstream_repo: Url, - pub support_site: Option, - pub marketing_site: Option, - pub donation_url: Option, - #[serde(default)] - pub alerts: Alerts, - 
pub main: PackageProcedure, - pub health_checks: HealthChecks, - pub config: Option, - pub properties: Option, - pub volumes: Volumes, - // #[serde(default)] - pub interfaces: Interfaces, - // #[serde(default)] - pub backup: BackupActions, - #[serde(default)] - pub migrations: Migrations, - #[serde(default)] - pub actions: Actions, - // #[serde(default)] - // pub permissions: Permissions, - #[serde(default)] - pub dependencies: Dependencies, - pub containers: Option, - - #[serde(default)] - pub replaces: Vec, - - #[serde(default)] - pub hardware_requirements: HardwareRequirements, -} - -impl Manifest { - pub fn package_procedures(&self) -> impl Iterator { - use std::iter::once; - let main = once(&self.main); - let cfg_get = self.config.as_ref().map(|a| &a.get).into_iter(); - let cfg_set = self.config.as_ref().map(|a| &a.set).into_iter(); - let props = self.properties.iter(); - let backups = vec![&self.backup.create, &self.backup.restore].into_iter(); - let migrations = self - .migrations - .to - .values() - .chain(self.migrations.from.values()); - let actions = self.actions.0.values().map(|a| &a.implementation); - main.chain(cfg_get) - .chain(cfg_set) - .chain(props) - .chain(backups) - .chain(migrations) - .chain(actions) - } - - pub fn with_git_hash(mut self, git_hash: GitHash) -> Self { - self.git_hash = Some(git_hash); - self - } -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct HardwareRequirements { - #[serde(default)] - device: BTreeMap, - ram: Option, - pub arch: Option>, -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct Assets { - #[serde(default)] - pub license: Option, - #[serde(default)] - pub instructions: Option, - #[serde(default)] - pub icon: Option, - #[serde(default)] - pub docker_images: Option, - #[serde(default)] - pub assets: Option, - #[serde(default)] - pub scripts: Option, -} -impl Assets { - pub fn license_path(&self) -> &Path { - self.license - .as_ref() - .map(|a| a.as_path()) - .unwrap_or(Path::new("LICENSE.md")) - } - pub fn instructions_path(&self) -> &Path { - self.instructions - .as_ref() - .map(|a| a.as_path()) - .unwrap_or(Path::new("INSTRUCTIONS.md")) - } - pub fn icon_path(&self) -> &Path { - self.icon - .as_ref() - .map(|a| a.as_path()) - .unwrap_or(Path::new("icon.png")) - } - pub fn icon_type(&self) -> &str { - self.icon - .as_ref() - .and_then(|icon| icon.extension()) - .and_then(|ext| ext.to_str()) - .unwrap_or("png") - } - pub fn docker_images_path(&self) -> &Path { - self.docker_images - .as_ref() - .map(|a| a.as_path()) - .unwrap_or(Path::new("docker-images")) - } - pub fn assets_path(&self) -> &Path { - self.assets - .as_ref() - .map(|a| a.as_path()) - .unwrap_or(Path::new("assets")) - } - pub fn scripts_path(&self) -> &Path { - self.scripts - .as_ref() - .map(|a| a.as_path()) - .unwrap_or(Path::new("scripts")) - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct Description { - pub short: String, - pub long: String, -} -impl Description { - pub fn validate(&self) -> Result<(), Error> { - if self.short.chars().skip(160).next().is_some() { - return Err(Error::new( - eyre!("Short description must be 160 characters or less."), - crate::ErrorKind::ValidateS9pk, - )); - } - if self.long.chars().skip(5000).next().is_some() { - return Err(Error::new( - eyre!("Long description must be 5000 characters or less."), - crate::ErrorKind::ValidateS9pk, - )); - } - Ok(()) - } -} - -#[derive(Clone, Debug, Default, 
Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct Alerts { - pub install: Option, - pub uninstall: Option, - pub restore: Option, - pub start: Option, - pub stop: Option, -} diff --git a/core/startos/src/s9pk/merkle_archive/directory_contents.rs b/core/startos/src/s9pk/merkle_archive/directory_contents.rs index f662300b65..c5373a31bc 100644 --- a/core/startos/src/s9pk/merkle_archive/directory_contents.rs +++ b/core/startos/src/s9pk/merkle_archive/directory_contents.rs @@ -1,23 +1,48 @@ -use std::collections::BTreeMap; -use std::path::Path; +use std::ffi::OsStr; +use std::fmt::Debug; +use std::path::{Path, PathBuf}; +use std::sync::Arc; use futures::future::BoxFuture; use futures::FutureExt; +use imbl::OrdMap; use imbl_value::InternedString; +use itertools::Itertools; use tokio::io::AsyncRead; use crate::prelude::*; use crate::s9pk::merkle_archive::hash::{Hash, HashWriter}; use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter}; -use crate::s9pk::merkle_archive::source::{ArchiveSource, FileSource, Section}; +use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section}; use crate::s9pk::merkle_archive::write_queue::WriteQueue; use crate::s9pk::merkle_archive::{varint, Entry, EntryContents}; -#[derive(Debug)] -pub struct DirectoryContents(BTreeMap>); +#[derive(Clone)] +pub struct DirectoryContents { + contents: OrdMap>, + /// used to optimize files to have earliest needed information up front + sort_by: Option std::cmp::Ordering + Send + Sync>>, +} +impl Debug for DirectoryContents { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("DirectoryContents") + .field("contents", &self.contents) + .finish_non_exhaustive() + } +} impl DirectoryContents { pub fn new() -> Self { - Self(BTreeMap::new()) + Self { + contents: OrdMap::new(), + sort_by: None, + } + } + + pub fn sort_by( + &mut self, + sort_by: impl Fn(&str, &str) -> std::cmp::Ordering + Send + Sync + 'static, + ) { + self.sort_by = Some(Arc::new(sort_by)) } #[instrument(skip_all)] @@ -39,6 +64,57 @@ impl DirectoryContents { res } + pub fn file_paths(&self, prefix: impl AsRef) -> Vec { + let prefix = prefix.as_ref(); + let mut res = Vec::new(); + for (name, entry) in &self.contents { + let path = prefix.join(name); + if let EntryContents::Directory(d) = entry.as_contents() { + res.push(path.join("")); + res.append(&mut d.file_paths(path)); + } else { + res.push(path); + } + } + res + } + + pub const fn header_size() -> u64 { + 8 // position: u64 BE + + 8 // size: u64 BE + } + + #[instrument(skip_all)] + pub async fn serialize_header(&self, position: u64, w: &mut W) -> Result { + use tokio::io::AsyncWriteExt; + + let size = self.toc_size(); + + w.write_all(&position.to_be_bytes()).await?; + w.write_all(&size.to_be_bytes()).await?; + + Ok(position) + } + + pub fn toc_size(&self) -> u64 { + self.iter().fold( + varint::serialized_varint_size(self.len() as u64), + |acc, (name, entry)| { + acc + varint::serialized_varstring_size(&**name) + entry.header_size() + }, + ) + } +} +impl DirectoryContents { + pub fn with_stem(&self, stem: &str) -> impl Iterator)> { + let prefix = InternedString::intern(stem); + let (_, center, right) = self.split_lookup(&*stem); + center.map(|e| (prefix.clone(), e)).into_iter().chain( + right.into_iter().take_while(move |(k, _)| { + Path::new(&**k).file_stem() == Some(OsStr::new(&*prefix)) + }), + ) + } pub fn insert_path(&mut self, path: impl AsRef, entry: Entry) -> Result<(), Error> { let path = path.as_ref(); let 
(parent, Some(file)) = (path.parent(), path.file_name().and_then(|f| f.to_str())) @@ -73,32 +149,6 @@ impl DirectoryContents { dir.insert(file.into(), entry); Ok(()) } - - pub const fn header_size() -> u64 { - 8 // position: u64 BE - + 8 // size: u64 BE - } - - #[instrument(skip_all)] - pub async fn serialize_header(&self, position: u64, w: &mut W) -> Result { - use tokio::io::AsyncWriteExt; - - let size = self.toc_size(); - - w.write_all(&position.to_be_bytes()).await?; - w.write_all(&size.to_be_bytes()).await?; - - Ok(position) - } - - pub fn toc_size(&self) -> u64 { - self.0.iter().fold( - varint::serialized_varint_size(self.0.len() as u64), - |acc, (name, entry)| { - acc + varint::serialized_varstring_size(&**name) + entry.header_size() - }, - ) - } } impl DirectoryContents> { #[instrument(skip_all)] @@ -121,7 +171,7 @@ impl DirectoryContents> { let mut toc_reader = source.fetch(position, size).await?; let len = varint::deserialize_varint(&mut toc_reader).await?; - let mut entries = BTreeMap::new(); + let mut entries = OrdMap::new(); for _ in 0..len { entries.insert( varint::deserialize_varstring(&mut toc_reader).await?.into(), @@ -129,7 +179,10 @@ impl DirectoryContents> { ); } - let res = Self(entries); + let res = Self { + contents: entries, + sort_by: None, + }; if res.sighash().await? == sighash { Ok(res) @@ -144,11 +197,33 @@ impl DirectoryContents> { } } impl DirectoryContents { + pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> { + for k in self.keys().cloned().collect::>() { + let path = Path::new(&*k); + if let Some(v) = self.get_mut(&k) { + if !filter(path) { + if v.hash.is_none() { + return Err(Error::new( + eyre!("cannot filter out unhashed file, run `update_hashes` first"), + ErrorKind::InvalidRequest, + )); + } + v.contents = EntryContents::Missing; + } else { + let filter: Box bool> = Box::new(|p| filter(&path.join(p))); + v.filter(filter)?; + } + } + } + Ok(()) + } #[instrument(skip_all)] pub fn update_hashes<'a>(&'a mut self, only_missing: bool) -> BoxFuture<'a, Result<(), Error>> { async move { - for (_, entry) in &mut self.0 { - entry.update_hash(only_missing).await?; + for key in self.keys().cloned().collect::>() { + if let Some(entry) = self.get_mut(&key) { + entry.update_hash(only_missing).await?; + } } Ok(()) } @@ -159,13 +234,16 @@ impl DirectoryContents { pub fn sighash<'a>(&'a self) -> BoxFuture<'a, Result> { async move { let mut hasher = TrackingWriter::new(0, HashWriter::new()); - let mut sig_contents = BTreeMap::new(); - for (name, entry) in &self.0 { + let mut sig_contents = OrdMap::new(); + for (name, entry) in &**self { sig_contents.insert(name.clone(), entry.to_missing().await?); } - Self(sig_contents) - .serialize_toc(&mut WriteQueue::new(0), &mut hasher) - .await?; + Self { + contents: sig_contents, + sort_by: None, + } + .serialize_toc(&mut WriteQueue::new(0), &mut hasher) + .await?; Ok(hasher.into_inner().finalize()) } .boxed() @@ -177,23 +255,42 @@ impl DirectoryContents { queue: &mut WriteQueue<'a, S>, w: &mut W, ) -> Result<(), Error> { - varint::serialize_varint(self.0.len() as u64, w).await?; - for (name, entry) in self.0.iter() { + varint::serialize_varint(self.len() as u64, w).await?; + for (name, entry) in self.iter().sorted_by(|a, b| match (a, b, &self.sort_by) { + ((_, a), (_, b), _) if a.as_contents().is_dir() && !b.as_contents().is_dir() => { + std::cmp::Ordering::Less + } + ((_, a), (_, b), _) if !a.as_contents().is_dir() && b.as_contents().is_dir() => { + std::cmp::Ordering::Greater + } + ((a, _), (b, _), 
Some(sort_by)) => sort_by(&***a, &***b), + _ => std::cmp::Ordering::Equal, + }) { varint::serialize_varstring(&**name, w).await?; entry.serialize_header(queue.add(entry).await?, w).await?; } Ok(()) } + pub fn into_dyn(self) -> DirectoryContents { + DirectoryContents { + contents: self + .contents + .into_iter() + .map(|(k, v)| (k, v.into_dyn())) + .collect(), + sort_by: self.sort_by, + } + } } impl std::ops::Deref for DirectoryContents { - type Target = BTreeMap>; + type Target = OrdMap>; fn deref(&self) -> &Self::Target { - &self.0 + &self.contents } } impl std::ops::DerefMut for DirectoryContents { fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 + &mut self.contents } } diff --git a/core/startos/src/s9pk/merkle_archive/file_contents.rs b/core/startos/src/s9pk/merkle_archive/file_contents.rs index c02c0e8793..7529fd2d0f 100644 --- a/core/startos/src/s9pk/merkle_archive/file_contents.rs +++ b/core/startos/src/s9pk/merkle_archive/file_contents.rs @@ -3,9 +3,9 @@ use tokio::io::AsyncRead; use crate::prelude::*; use crate::s9pk::merkle_archive::hash::{Hash, HashWriter}; use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter}; -use crate::s9pk::merkle_archive::source::{ArchiveSource, FileSource, Section}; +use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section}; -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct FileContents(S); impl FileContents { pub fn new(source: S) -> Self { @@ -73,6 +73,9 @@ impl FileContents { } Ok(()) } + pub fn into_dyn(self) -> FileContents { + FileContents(DynFileSource::new(self.0)) + } } impl std::ops::Deref for FileContents { type Target = S; diff --git a/core/startos/src/s9pk/merkle_archive/mod.rs b/core/startos/src/s9pk/merkle_archive/mod.rs index f83cd24648..afd00032a2 100644 --- a/core/startos/src/s9pk/merkle_archive/mod.rs +++ b/core/startos/src/s9pk/merkle_archive/mod.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use ed25519_dalek::{Signature, SigningKey, VerifyingKey}; use tokio::io::AsyncRead; @@ -6,7 +8,7 @@ use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; use crate::s9pk::merkle_archive::file_contents::FileContents; use crate::s9pk::merkle_archive::hash::Hash; use crate::s9pk::merkle_archive::sink::Sink; -use crate::s9pk::merkle_archive::source::{ArchiveSource, FileSource, Section}; +use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section}; use crate::s9pk::merkle_archive::write_queue::WriteQueue; pub mod directory_contents; @@ -19,13 +21,13 @@ mod test; pub mod varint; pub mod write_queue; -#[derive(Debug)] +#[derive(Debug, Clone)] enum Signer { Signed(VerifyingKey, Signature), Signer(SigningKey), } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct MerkleArchive { signer: Signer, contents: DirectoryContents, @@ -37,14 +39,33 @@ impl MerkleArchive { contents, } } + pub fn signer(&self) -> VerifyingKey { + match &self.signer { + Signer::Signed(k, _) => *k, + Signer::Signer(k) => k.verifying_key(), + } + } pub const fn header_size() -> u64 { 32 // pubkey + 64 // signature + + 32 // sighash + DirectoryContents::>::header_size() } pub fn contents(&self) -> &DirectoryContents { &self.contents } + pub fn contents_mut(&mut self) -> &mut DirectoryContents { + &mut self.contents + } + pub fn set_signer(&mut self, key: SigningKey) { + self.signer = Signer::Signer(key); + } + pub fn sort_by( + &mut self, + sort_by: impl Fn(&str, &str) -> std::cmp::Ordering + Send + Sync + 'static, + ) { + self.contents.sort_by(sort_by) + } } impl MerkleArchive> { 
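The new `filter` on `DirectoryContents` replaces pruned entries with `EntryContents::Missing` instead of removing them, so existing hashes (and therefore any signature over the directory) remain valid. Below is a hedged sketch of how that could be used, assuming a per-architecture `images/` layout that is not part of this diff.

```rust
use std::path::Path;

use crate::prelude::*;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::source::FileSource;

async fn drop_other_arches<S: FileSource>(dir: &mut DirectoryContents<S>) -> Result<(), Error> {
    // filter() refuses to drop unhashed entries, so hash everything first.
    dir.update_hashes(false).await?;
    // Keep everything outside images/, the images/ directory itself, and the
    // aarch64 subtree; everything else becomes EntryContents::Missing.
    dir.filter(|path: &Path| {
        !path.starts_with("images")
            || path == Path::new("images")
            || path.starts_with("images/aarch64")
    })
}
```

Note that a parent directory must itself pass the predicate, since `filter` only recurses into entries it keeps.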
#[instrument(skip_all)] @@ -80,6 +101,9 @@ impl MerkleArchive { pub async fn update_hashes(&mut self, only_missing: bool) -> Result<(), Error> { self.contents.update_hashes(only_missing).await } + pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> { + self.contents.filter(filter) + } #[instrument(skip_all)] pub async fn serialize(&self, w: &mut W, verify: bool) -> Result<(), Error> { use tokio::io::AsyncWriteExt; @@ -103,9 +127,15 @@ impl MerkleArchive { queue.serialize(w, verify).await?; Ok(()) } + pub fn into_dyn(self) -> MerkleArchive { + MerkleArchive { + signer: self.signer, + contents: self.contents.into_dyn(), + } + } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Entry { hash: Option, contents: EntryContents, @@ -117,12 +147,27 @@ impl Entry { contents, } } + pub fn file(source: S) -> Self { + Self::new(EntryContents::File(FileContents::new(source))) + } pub fn hash(&self) -> Option { self.hash } pub fn as_contents(&self) -> &EntryContents { &self.contents } + pub fn as_file(&self) -> Option<&FileContents> { + match self.as_contents() { + EntryContents::File(f) => Some(f), + _ => None, + } + } + pub fn as_directory(&self) -> Option<&DirectoryContents> { + match self.as_contents() { + EntryContents::Directory(d) => Some(d), + _ => None, + } + } pub fn as_contents_mut(&mut self) -> &mut EntryContents { self.hash = None; &mut self.contents @@ -130,11 +175,24 @@ impl Entry { pub fn into_contents(self) -> EntryContents { self.contents } + pub fn into_file(self) -> Option> { + match self.into_contents() { + EntryContents::File(f) => Some(f), + _ => None, + } + } + pub fn into_directory(self) -> Option> { + match self.into_contents() { + EntryContents::Directory(d) => Some(d), + _ => None, + } + } pub fn header_size(&self) -> u64 { 32 // hash + self.contents.header_size() } } +impl Entry {} impl Entry> { #[instrument(skip_all)] pub async fn deserialize( @@ -156,6 +214,24 @@ impl Entry> { } } impl Entry { + pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> { + if let EntryContents::Directory(d) = &mut self.contents { + d.filter(filter)?; + } + Ok(()) + } + pub async fn read_file_to_vec(&self) -> Result, Error> { + match self.as_contents() { + EntryContents::File(f) => Ok(f.to_vec(self.hash).await?), + EntryContents::Directory(_) => Err(Error::new( + eyre!("expected file, found directory"), + ErrorKind::ParseS9pk, + )), + EntryContents::Missing => { + Err(Error::new(eyre!("entry is missing"), ErrorKind::ParseS9pk)) + } + } + } pub async fn to_missing(&self) -> Result { let hash = if let Some(hash) = self.hash { hash @@ -190,9 +266,15 @@ impl Entry { w.write_all(hash.as_bytes()).await?; self.contents.serialize_header(position, w).await } + pub fn into_dyn(self) -> Entry { + Entry { + hash: self.hash, + contents: self.contents.into_dyn(), + } + } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum EntryContents { Missing, File(FileContents), @@ -214,6 +296,9 @@ impl EntryContents { Self::Directory(_) => DirectoryContents::::header_size(), } } + pub fn is_dir(&self) -> bool { + matches!(self, &EntryContents::Directory(_)) + } } impl EntryContents> { #[instrument(skip_all)] @@ -265,4 +350,11 @@ impl EntryContents { Self::Directory(d) => Some(d.serialize_header(position, w).await?), }) } + pub fn into_dyn(self) -> EntryContents { + match self { + Self::Missing => EntryContents::Missing, + Self::File(f) => EntryContents::File(f.into_dyn()), + Self::Directory(d) => EntryContents::Directory(d.into_dyn()), + } + } } diff --git 
a/core/startos/src/s9pk/merkle_archive/source/http.rs b/core/startos/src/s9pk/merkle_archive/source/http.rs index f38fd70287..1cb9ba961c 100644 --- a/core/startos/src/s9pk/merkle_archive/source/http.rs +++ b/core/startos/src/s9pk/merkle_archive/source/http.rs @@ -1,12 +1,9 @@ -use std::sync::Arc; - use bytes::Bytes; use futures::stream::BoxStream; use futures::{StreamExt, TryStreamExt}; -use http::header::{ACCEPT_RANGES, RANGE}; +use reqwest::header::{ACCEPT_RANGES, CONTENT_LENGTH, RANGE}; use reqwest::{Client, Url}; use tokio::io::AsyncRead; -use tokio::sync::Mutex; use tokio_util::io::StreamReader; use crate::prelude::*; @@ -16,6 +13,7 @@ use crate::s9pk::merkle_archive::source::ArchiveSource; pub struct HttpSource { url: Url, client: Client, + size: Option, range_support: Result< (), (), // Arc>> @@ -23,24 +21,31 @@ pub struct HttpSource { } impl HttpSource { pub async fn new(client: Client, url: Url) -> Result { - let range_support = client + let head = client .head(url.clone()) .send() .await .with_kind(ErrorKind::Network)? .error_for_status() - .with_kind(ErrorKind::Network)? + .with_kind(ErrorKind::Network)?; + let range_support = head .headers() .get(ACCEPT_RANGES) .and_then(|s| s.to_str().ok()) == Some("bytes"); + let size = head + .headers() + .get(CONTENT_LENGTH) + .and_then(|s| s.to_str().ok()) + .and_then(|s| s.parse().ok()); Ok(Self { url, client, + size, range_support: if range_support { Ok(()) } else { - todo!() // Err(Arc::new(Mutex::new(None))) + Err(()) // Err(Arc::new(Mutex::new(None))) }, }) } @@ -48,6 +53,9 @@ impl HttpSource { #[async_trait::async_trait] impl ArchiveSource for HttpSource { type Reader = HttpReader; + async fn size(&self) -> Option { + self.size + } async fn fetch(&self, position: u64, size: u64) -> Result { match self.range_support { Ok(_) => Ok(HttpReader::Range(StreamReader::new(if size > 0 { diff --git a/core/startos/src/s9pk/merkle_archive/source/mod.rs b/core/startos/src/s9pk/merkle_archive/source/mod.rs index 3a7d60a40f..97c94b4803 100644 --- a/core/startos/src/s9pk/merkle_archive/source/mod.rs +++ b/core/startos/src/s9pk/merkle_archive/source/mod.rs @@ -12,15 +12,15 @@ pub mod http; pub mod multi_cursor_file; #[async_trait::async_trait] -pub trait FileSource: Send + Sync + Sized + 'static { +pub trait FileSource: Clone + Send + Sync + Sized + 'static { type Reader: AsyncRead + Unpin + Send; async fn size(&self) -> Result; async fn reader(&self) -> Result; - async fn copy(&self, w: &mut W) -> Result<(), Error> { + async fn copy(&self, w: &mut W) -> Result<(), Error> { tokio::io::copy(&mut self.reader().await?, w).await?; Ok(()) } - async fn copy_verify( + async fn copy_verify( &self, w: &mut W, verify: Option, @@ -37,6 +37,75 @@ pub trait FileSource: Send + Sync + Sized + 'static { } } +#[derive(Clone)] +pub struct DynFileSource(Arc); +impl DynFileSource { + pub fn new(source: T) -> Self { + Self(Arc::new(source)) + } +} +#[async_trait::async_trait] +impl FileSource for DynFileSource { + type Reader = Box; + async fn size(&self) -> Result { + self.0.size().await + } + async fn reader(&self) -> Result { + self.0.reader().await + } + async fn copy( + &self, + mut w: &mut W, + ) -> Result<(), Error> { + self.0.copy(&mut w).await + } + async fn copy_verify( + &self, + mut w: &mut W, + verify: Option, + ) -> Result<(), Error> { + self.0.copy_verify(&mut w, verify).await + } + async fn to_vec(&self, verify: Option) -> Result, Error> { + self.0.to_vec(verify).await + } +} + +#[async_trait::async_trait] +trait DynableFileSource: Send + Sync + 'static { 
+ async fn size(&self) -> Result; + async fn reader(&self) -> Result, Error>; + async fn copy(&self, w: &mut (dyn AsyncWrite + Unpin + Send)) -> Result<(), Error>; + async fn copy_verify( + &self, + w: &mut (dyn AsyncWrite + Unpin + Send), + verify: Option, + ) -> Result<(), Error>; + async fn to_vec(&self, verify: Option) -> Result, Error>; +} +#[async_trait::async_trait] +impl DynableFileSource for T { + async fn size(&self) -> Result { + FileSource::size(self).await + } + async fn reader(&self) -> Result, Error> { + Ok(Box::new(FileSource::reader(self).await?)) + } + async fn copy(&self, w: &mut (dyn AsyncWrite + Unpin + Send)) -> Result<(), Error> { + FileSource::copy(self, w).await + } + async fn copy_verify( + &self, + w: &mut (dyn AsyncWrite + Unpin + Send), + verify: Option, + ) -> Result<(), Error> { + FileSource::copy_verify(self, w, verify).await + } + async fn to_vec(&self, verify: Option) -> Result, Error> { + FileSource::to_vec(self, verify).await + } +} + #[async_trait::async_trait] impl FileSource for PathBuf { type Reader = File; @@ -57,7 +126,7 @@ impl FileSource for Arc<[u8]> { async fn reader(&self) -> Result { Ok(std::io::Cursor::new(self.clone())) } - async fn copy(&self, w: &mut W) -> Result<(), Error> { + async fn copy(&self, w: &mut W) -> Result<(), Error> { use tokio::io::AsyncWriteExt; w.write_all(&*self).await?; @@ -68,8 +137,11 @@ impl FileSource for Arc<[u8]> { #[async_trait::async_trait] pub trait ArchiveSource: Clone + Send + Sync + Sized + 'static { type Reader: AsyncRead + Unpin + Send; + async fn size(&self) -> Option { + None + } async fn fetch(&self, position: u64, size: u64) -> Result; - async fn copy_to( + async fn copy_to( &self, position: u64, size: u64, @@ -99,7 +171,7 @@ impl ArchiveSource for Arc<[u8]> { } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Section { source: S, position: u64, @@ -114,7 +186,7 @@ impl FileSource for Section { async fn reader(&self) -> Result { self.source.fetch(self.position, self.size).await } - async fn copy(&self, w: &mut W) -> Result<(), Error> { + async fn copy(&self, w: &mut W) -> Result<(), Error> { self.source.copy_to(self.position, self.size, w).await } } diff --git a/core/startos/src/s9pk/merkle_archive/source/multi_cursor_file.rs b/core/startos/src/s9pk/merkle_archive/source/multi_cursor_file.rs index cda3e51033..7add68e6f7 100644 --- a/core/startos/src/s9pk/merkle_archive/source/multi_cursor_file.rs +++ b/core/startos/src/s9pk/merkle_archive/source/multi_cursor_file.rs @@ -4,13 +4,17 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use tokio::fs::File; -use tokio::io::AsyncRead; +use tokio::io::{AsyncRead, AsyncReadExt}; use tokio::sync::{Mutex, OwnedMutexGuard}; use crate::disk::mount::filesystem::loop_dev::LoopDev; use crate::prelude::*; use crate::s9pk::merkle_archive::source::{ArchiveSource, Section}; +fn path_from_fd(fd: RawFd) -> PathBuf { + Path::new("/proc/self/fd").join(fd.to_string()) +} + #[derive(Clone)] pub struct MultiCursorFile { fd: RawFd, @@ -18,7 +22,14 @@ pub struct MultiCursorFile { } impl MultiCursorFile { fn path(&self) -> PathBuf { - Path::new("/proc/self/fd").join(self.fd.to_string()) + path_from_fd(self.fd) + } + pub async fn open(fd: &impl AsRawFd) -> Result { + let fd = fd.as_raw_fd(); + Ok(Self { + fd, + file: Arc::new(Mutex::new(File::open(path_from_fd(fd)).await?)), + }) } } impl From for MultiCursorFile { @@ -47,8 +58,8 @@ impl AsyncRead for FileSectionReader { return std::task::Poll::Ready(Ok(())); } let before = buf.filled().len() as u64; - let res = 
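`DynFileSource` is effectively a type-erased `FileSource` (an `Arc` over the object-safe `DynableFileSource` shim with a blanket impl), which lets a single archive mix heterogeneous sources. A small sketch under that assumption; the entry names and contents are made up.

```rust
use std::path::PathBuf;
use std::sync::Arc;

use crate::prelude::*;
use crate::s9pk::merkle_archive::directory_contents::DirectoryContents;
use crate::s9pk::merkle_archive::source::DynFileSource;
use crate::s9pk::merkle_archive::Entry;

fn mixed_dir() -> Result<DirectoryContents<DynFileSource>, Error> {
    let mut dir = DirectoryContents::<DynFileSource>::new();
    // In-memory bytes (Arc<[u8]> implements FileSource)...
    let manifest: Arc<[u8]> = Arc::from(&b"{}"[..]);
    dir.insert_path("manifest.json", Entry::file(DynFileSource::new(manifest)))?;
    // ...alongside an on-disk file (PathBuf implements FileSource).
    dir.insert_path(
        "icon.png",
        Entry::file(DynFileSource::new(PathBuf::from("./icon.png"))),
    )?;
    Ok(dir)
}
```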
std::pin::Pin::new(&mut **this.file.get_mut()) - .poll_read(cx, &mut buf.take(*this.remaining as usize)); + let res = std::pin::Pin::new(&mut (&mut **this.file.get_mut()).take(*this.remaining)) + .poll_read(cx, buf); *this.remaining = this .remaining .saturating_sub(buf.filled().len() as u64 - before); @@ -59,13 +70,36 @@ impl AsyncRead for FileSectionReader { #[async_trait::async_trait] impl ArchiveSource for MultiCursorFile { type Reader = FileSectionReader; + async fn size(&self) -> Option { + tokio::fs::metadata(self.path()).await.ok().map(|m| m.len()) + } async fn fetch(&self, position: u64, size: u64) -> Result { use tokio::io::AsyncSeekExt; let mut file = if let Ok(file) = self.file.clone().try_lock_owned() { file } else { - Arc::new(Mutex::new(File::open(self.path()).await?)) + #[cfg(target_os = "linux")] + let file = File::open(self.path()).await?; + #[cfg(target_os = "macos")] // here be dragons + let file = unsafe { + let mut buf = [0u8; libc::PATH_MAX as usize]; + if libc::fcntl( + self.fd, + libc::F_GETPATH, + buf.as_mut_ptr().cast::(), + ) == -1 + { + return Err(std::io::Error::last_os_error().into()); + } + File::open( + &*std::ffi::CStr::from_bytes_until_nul(&buf) + .with_kind(ErrorKind::Utf8)? + .to_string_lossy(), + ) + .await? + }; + Arc::new(Mutex::new(file)) .try_lock_owned() .expect("freshly created") }; @@ -77,8 +111,8 @@ impl ArchiveSource for MultiCursorFile { } } -impl From> for LoopDev { - fn from(value: Section) -> Self { +impl From<&Section> for LoopDev { + fn from(value: &Section) -> Self { LoopDev::new(value.source.path(), value.position, value.size) } } diff --git a/core/startos/src/s9pk/merkle_archive/write_queue.rs b/core/startos/src/s9pk/merkle_archive/write_queue.rs index 973ffcf30f..9496d5e83f 100644 --- a/core/startos/src/s9pk/merkle_archive/write_queue.rs +++ b/core/startos/src/s9pk/merkle_archive/write_queue.rs @@ -4,7 +4,6 @@ use crate::prelude::*; use crate::s9pk::merkle_archive::sink::Sink; use crate::s9pk::merkle_archive::source::FileSource; use crate::s9pk::merkle_archive::{Entry, EntryContents}; -use crate::util::MaybeOwned; pub struct WriteQueue<'a, S> { next_available_position: u64, diff --git a/core/startos/src/s9pk/mod.rs b/core/startos/src/s9pk/mod.rs index 6720f29992..83924293a8 100644 --- a/core/startos/src/s9pk/mod.rs +++ b/core/startos/src/s9pk/mod.rs @@ -1,5 +1,39 @@ pub mod merkle_archive; +pub mod rpc; pub mod v1; pub mod v2; -pub use v1::*; +use std::io::SeekFrom; +use std::path::Path; + +use tokio::fs::File; +use tokio::io::{AsyncReadExt, AsyncSeekExt}; +pub use v2::{manifest, S9pk}; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::s9pk::v1::reader::S9pkReader; +use crate::s9pk::v2::compat::MAGIC_AND_VERSION; + +pub async fn load(ctx: &CliContext, path: impl AsRef) -> Result { + // TODO: return s9pk + const MAGIC_LEN: usize = MAGIC_AND_VERSION.len(); + let mut magic = [0_u8; MAGIC_LEN]; + let mut file = tokio::fs::File::open(&path).await?; + file.read_exact(&mut magic).await?; + file.seek(SeekFrom::Start(0)).await?; + if magic == v2::compat::MAGIC_AND_VERSION { + tracing::info!("Converting package to v2 s9pk"); + let new_path = path.as_ref().with_extension("compat.s9pk"); + S9pk::from_v1( + S9pkReader::from_reader(file, true).await?, + &new_path, + ctx.developer_key()?.clone(), + ) + .await?; + tokio::fs::rename(&new_path, &path).await?; + file = tokio::fs::File::open(&path).await?; + tracing::info!("Converted s9pk successfully"); + } + Ok(file) +} diff --git a/core/startos/src/s9pk/reader.rs 
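// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// The new `s9pk::load` above decides between the legacy and current formats by sniffing
// the first bytes of the file and rewinding before handing the handle on. A stripped-down
// version of that dispatch (magic constants copied from elsewhere in this diff; error
// handling reduced to std::io::Error):
use std::io::SeekFrom;
use tokio::fs::File;
use tokio::io::{AsyncReadExt, AsyncSeekExt};

const V1_COMPAT_MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x01];
const V2_MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x02];

async fn sniff_s9pk_version(path: &str) -> std::io::Result<Option<u8>> {
    let mut file = File::open(path).await?;
    let mut magic = [0u8; 3];
    file.read_exact(&mut magic).await?;
    // Rewind so the caller can pass the same handle to the real parser (or converter).
    file.seek(SeekFrom::Start(0)).await?;
    Ok(if &magic[..] == V1_COMPAT_MAGIC_AND_VERSION {
        Some(1)
    } else if &magic[..] == V2_MAGIC_AND_VERSION {
        Some(2)
    } else {
        None
    })
}
// ----------------------------------------------------------------------------------------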
b/core/startos/src/s9pk/reader.rs deleted file mode 100644 index 61b5e46a85..0000000000 --- a/core/startos/src/s9pk/reader.rs +++ /dev/null @@ -1,406 +0,0 @@ -use std::collections::BTreeSet; -use std::io::SeekFrom; -use std::ops::Range; -use std::path::Path; -use std::pin::Pin; -use std::str::FromStr; -use std::task::{Context, Poll}; - -use color_eyre::eyre::eyre; -use digest::Output; -use ed25519_dalek::VerifyingKey; -use futures::TryStreamExt; -use models::ImageId; -use sha2::{Digest, Sha512}; -use tokio::fs::File; -use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, ReadBuf}; -use tracing::instrument; - -use super::header::{FileSection, Header, TableOfContents}; -use super::manifest::{Manifest, PackageId}; -use super::SIG_CONTEXT; -use crate::install::progress::InstallProgressTracker; -use crate::s9pk::docker::DockerReader; -use crate::util::Version; -use crate::{Error, ResultExt}; - -const MAX_REPLACES: usize = 10; -const MAX_TITLE_LEN: usize = 30; - -#[pin_project::pin_project] -#[derive(Debug)] -pub struct ReadHandle<'a, R = File> { - pos: &'a mut u64, - range: Range, - #[pin] - rdr: &'a mut R, -} -impl<'a, R: AsyncRead + Unpin> ReadHandle<'a, R> { - pub async fn to_vec(mut self) -> std::io::Result> { - let mut buf = vec![0; (self.range.end - self.range.start) as usize]; - self.read_exact(&mut buf).await?; - Ok(buf) - } -} -impl<'a, R: AsyncRead + Unpin> AsyncRead for ReadHandle<'a, R> { - fn poll_read( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - buf: &mut ReadBuf<'_>, - ) -> Poll> { - let this = self.project(); - let start = buf.filled().len(); - let mut take_buf = buf.take(this.range.end.saturating_sub(**this.pos) as usize); - let res = AsyncRead::poll_read(this.rdr, cx, &mut take_buf); - let n = take_buf.filled().len(); - unsafe { buf.assume_init(start + n) }; - buf.advance(n); - **this.pos += n as u64; - res - } -} -impl<'a, R: AsyncSeek + Unpin> AsyncSeek for ReadHandle<'a, R> { - fn start_seek(self: Pin<&mut Self>, position: SeekFrom) -> std::io::Result<()> { - let this = self.project(); - AsyncSeek::start_seek( - this.rdr, - match position { - SeekFrom::Current(n) => SeekFrom::Current(n), - SeekFrom::End(n) => SeekFrom::Start((this.range.end as i64 + n) as u64), - SeekFrom::Start(n) => SeekFrom::Start(this.range.start + n), - }, - ) - } - fn poll_complete(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - let this = self.project(); - match AsyncSeek::poll_complete(this.rdr, cx) { - Poll::Ready(Ok(n)) => { - let res = n.saturating_sub(this.range.start); - **this.pos = this.range.start + res; - Poll::Ready(Ok(res)) - } - a => a, - } - } -} - -#[derive(Debug)] -pub struct ImageTag { - pub package_id: PackageId, - pub image_id: ImageId, - pub version: Version, -} -impl ImageTag { - #[instrument(skip_all)] - pub fn validate(&self, id: &PackageId, version: &Version) -> Result<(), Error> { - if id != &self.package_id { - return Err(Error::new( - eyre!( - "Contains image for incorrect package: id {}", - self.package_id, - ), - crate::ErrorKind::ValidateS9pk, - )); - } - if version != &self.version { - return Err(Error::new( - eyre!( - "Contains image with incorrect version: expected {} received {}", - version, - self.version, - ), - crate::ErrorKind::ValidateS9pk, - )); - } - Ok(()) - } -} -impl FromStr for ImageTag { - type Err = Error; - fn from_str(s: &str) -> Result { - let rest = s.strip_prefix("start9/").ok_or_else(|| { - Error::new( - eyre!("Invalid image tag prefix: expected start9/"), - crate::ErrorKind::ValidateS9pk, - ) - })?; - let (package, rest) 
= rest.split_once("/").ok_or_else(|| { - Error::new( - eyre!("Image tag missing image id"), - crate::ErrorKind::ValidateS9pk, - ) - })?; - let (image, version) = rest.split_once(":").ok_or_else(|| { - Error::new( - eyre!("Image tag missing version"), - crate::ErrorKind::ValidateS9pk, - ) - })?; - Ok(ImageTag { - package_id: package.parse()?, - image_id: image.parse()?, - version: version.parse()?, - }) - } -} - -pub struct S9pkReader { - hash: Option>, - hash_string: Option, - developer_key: VerifyingKey, - toc: TableOfContents, - pos: u64, - rdr: R, -} -impl S9pkReader { - pub async fn open>(path: P, check_sig: bool) -> Result { - let p = path.as_ref(); - let rdr = File::open(p) - .await - .with_ctx(|_| (crate::error::ErrorKind::Filesystem, p.display().to_string()))?; - - Self::from_reader(rdr, check_sig).await - } -} -impl S9pkReader> { - pub fn validated(&mut self) { - self.rdr.validated() - } -} -impl S9pkReader { - #[instrument(skip_all)] - pub async fn validate(&mut self) -> Result<(), Error> { - if self.toc.icon.length > 102_400 { - // 100 KiB - return Err(Error::new( - eyre!("icon must be less than 100KiB"), - crate::ErrorKind::ValidateS9pk, - )); - } - let image_tags = self.image_tags().await?; - let man = self.manifest().await?; - let containers = &man.containers; - let validated_image_ids = image_tags - .into_iter() - .map(|i| i.validate(&man.id, &man.version).map(|_| i.image_id)) - .collect::, _>>()?; - man.description.validate()?; - man.actions.0.iter().try_for_each(|(_, action)| { - action.validate( - containers, - &man.eos_version, - &man.volumes, - &validated_image_ids, - ) - })?; - man.backup.validate( - containers, - &man.eos_version, - &man.volumes, - &validated_image_ids, - )?; - if let Some(cfg) = &man.config { - cfg.validate( - containers, - &man.eos_version, - &man.volumes, - &validated_image_ids, - )?; - } - man.health_checks - .validate(&man.eos_version, &man.volumes, &validated_image_ids)?; - man.interfaces.validate()?; - man.main - .validate(&man.eos_version, &man.volumes, &validated_image_ids, false) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, "Main"))?; - man.migrations.validate( - containers, - &man.eos_version, - &man.volumes, - &validated_image_ids, - )?; - - #[cfg(feature = "js-engine")] - if man.containers.is_some() - || matches!(man.main, crate::procedure::PackageProcedure::Script(_)) - { - return Err(Error::new( - eyre!("Right now we don't support the containers and the long running main"), - crate::ErrorKind::ValidateS9pk, - )); - } - - if man.replaces.len() >= MAX_REPLACES { - return Err(Error::new( - eyre!("Cannot have more than {MAX_REPLACES} replaces"), - crate::ErrorKind::ValidateS9pk, - )); - } - if let Some(too_big) = man.replaces.iter().find(|x| x.len() >= MAX_REPLACES) { - return Err(Error::new( - eyre!("We have found a replaces of ({too_big}) that exceeds the max length of {MAX_TITLE_LEN} "), - crate::ErrorKind::ValidateS9pk, - )); - } - if man.title.len() >= MAX_TITLE_LEN { - return Err(Error::new( - eyre!("Cannot have more than a length of {MAX_TITLE_LEN} for title"), - crate::ErrorKind::ValidateS9pk, - )); - } - - if man.containers.is_some() - && matches!(man.main, crate::procedure::PackageProcedure::Docker(_)) - { - return Err(Error::new( - eyre!("Cannot have a main docker and a main in containers"), - crate::ErrorKind::ValidateS9pk, - )); - } - if let Some(props) = &man.properties { - props - .validate(&man.eos_version, &man.volumes, &validated_image_ids, true) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, "Properties"))?; - } - 
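// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// The legacy `ImageTag` parser above expects tags shaped like
// `start9/<package>/<image>:<version>`. The same parse, reduced to a dependency-free
// helper that returns plain string slices instead of the crate's typed IDs (the tag used
// in the test is made up for illustration):
fn parse_image_tag(tag: &str) -> Option<(&str, &str, &str)> {
    let rest = tag.strip_prefix("start9/")?;
    let (package, rest) = rest.split_once('/')?;
    let (image, version) = rest.split_once(':')?;
    Some((package, image, version))
}

#[test]
fn parses_well_formed_tag() {
    assert_eq!(
        parse_image_tag("start9/hello-world/main:1.0.0"),
        Some(("hello-world", "main", "1.0.0"))
    );
    assert_eq!(parse_image_tag("library/alpine:3"), None);
}
// ----------------------------------------------------------------------------------------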
man.volumes.validate(&man.interfaces)?; - - Ok(()) - } - #[instrument(skip_all)] - pub async fn image_tags(&mut self) -> Result, Error> { - let mut tar = tokio_tar::Archive::new(self.docker_images().await?); - let mut entries = tar.entries()?; - while let Some(mut entry) = entries.try_next().await? { - if &*entry.path()? != Path::new("manifest.json") { - continue; - } - let mut buf = Vec::with_capacity(entry.header().size()? as usize); - entry.read_to_end(&mut buf).await?; - #[derive(serde::Deserialize)] - struct ManEntry { - #[serde(rename = "RepoTags")] - tags: Vec, - } - let man_entries = serde_json::from_slice::>(&buf) - .with_ctx(|_| (crate::ErrorKind::Deserialization, "manifest.json"))?; - return man_entries - .iter() - .flat_map(|e| &e.tags) - .map(|t| t.parse()) - .collect(); - } - Err(Error::new( - eyre!("image.tar missing manifest.json"), - crate::ErrorKind::ParseS9pk, - )) - } - #[instrument(skip_all)] - pub async fn from_reader(mut rdr: R, check_sig: bool) -> Result { - let header = Header::deserialize(&mut rdr).await?; - - let (hash, hash_string) = if check_sig { - let mut hasher = Sha512::new(); - let mut buf = [0; 1024]; - let mut read; - while { - read = rdr.read(&mut buf).await?; - read != 0 - } { - hasher.update(&buf[0..read]); - } - let hash = hasher.clone().finalize(); - header - .pubkey - .verify_prehashed(hasher, Some(SIG_CONTEXT), &header.signature)?; - ( - Some(hash), - Some(base32::encode( - base32::Alphabet::RFC4648 { padding: false }, - hash.as_slice(), - )), - ) - } else { - (None, None) - }; - - let pos = rdr.stream_position().await?; - - Ok(S9pkReader { - hash_string, - hash, - developer_key: header.pubkey, - toc: header.table_of_contents, - pos, - rdr, - }) - } - - pub fn hash(&self) -> Option<&Output> { - self.hash.as_ref() - } - - pub fn hash_str(&self) -> Option<&str> { - self.hash_string.as_ref().map(|s| s.as_str()) - } - - pub fn developer_key(&self) -> &VerifyingKey { - &self.developer_key - } - - pub async fn reset(&mut self) -> Result<(), Error> { - self.rdr.seek(SeekFrom::Start(0)).await?; - Ok(()) - } - - async fn read_handle<'a>( - &'a mut self, - section: FileSection, - ) -> Result, Error> { - if self.pos != section.position { - self.rdr.seek(SeekFrom::Start(section.position)).await?; - self.pos = section.position; - } - Ok(ReadHandle { - range: self.pos..(self.pos + section.length), - pos: &mut self.pos, - rdr: &mut self.rdr, - }) - } - - pub async fn manifest_raw(&mut self) -> Result, Error> { - self.read_handle(self.toc.manifest).await - } - - pub async fn manifest(&mut self) -> Result { - let slice = self.manifest_raw().await?.to_vec().await?; - serde_cbor::de::from_reader(slice.as_slice()) - .with_ctx(|_| (crate::ErrorKind::ParseS9pk, "Deserializing Manifest (CBOR)")) - } - - pub async fn license(&mut self) -> Result, Error> { - self.read_handle(self.toc.license).await - } - - pub async fn instructions(&mut self) -> Result, Error> { - self.read_handle(self.toc.instructions).await - } - - pub async fn icon(&mut self) -> Result, Error> { - self.read_handle(self.toc.icon).await - } - - pub async fn docker_images(&mut self) -> Result>, Error> { - DockerReader::new(self.read_handle(self.toc.docker_images).await?).await - } - - pub async fn assets(&mut self) -> Result, Error> { - self.read_handle(self.toc.assets).await - } - - pub async fn scripts(&mut self) -> Result>, Error> { - Ok(match self.toc.scripts { - None => None, - Some(a) => Some(self.read_handle(a).await?), - }) - } -} diff --git a/core/startos/src/s9pk/rpc.rs 
b/core/startos/src/s9pk/rpc.rs new file mode 100644 index 0000000000..3a98da02b7 --- /dev/null +++ b/core/startos/src/s9pk/rpc.rs @@ -0,0 +1,227 @@ +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use clap::Parser; +use itertools::Itertools; +use models::ImageId; +use rpc_toolkit::{from_fn_async, Empty, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use tokio::fs::File; +use tokio::process::Command; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::s9pk::manifest::Manifest; +use crate::s9pk::merkle_archive::source::DynFileSource; +use crate::s9pk::merkle_archive::Entry; +use crate::s9pk::v2::compat::CONTAINER_TOOL; +use crate::s9pk::S9pk; +use crate::util::io::TmpDir; +use crate::util::serde::{apply_expr, HandlerExtSerde}; +use crate::util::Invoke; + +pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"]; + +pub fn s9pk() -> ParentHandler { + ParentHandler::new() + .subcommand("edit", edit()) + .subcommand("inspect", inspect()) +} + +#[derive(Deserialize, Serialize, Parser)] +struct S9pkPath { + s9pk: PathBuf, +} + +fn edit() -> ParentHandler { + let only_parent = |a, _| a; + ParentHandler::::new() + .subcommand( + "add-image", + from_fn_async(add_image) + .with_inherited(only_parent) + .no_display(), + ) + .subcommand( + "manifest", + from_fn_async(edit_manifest) + .with_inherited(only_parent) + .with_display_serializable(), + ) +} + +fn inspect() -> ParentHandler { + let only_parent = |a, _| a; + ParentHandler::::new() + .subcommand( + "file-tree", + from_fn_async(file_tree) + .with_inherited(only_parent) + .with_display_serializable(), + ) + .subcommand( + "manifest", + from_fn_async(inspect_manifest) + .with_inherited(only_parent) + .with_display_serializable(), + ) +} + +#[derive(Deserialize, Serialize, Parser)] +struct AddImageParams { + id: ImageId, + image: String, +} +async fn add_image( + ctx: CliContext, + AddImageParams { id, image }: AddImageParams, + S9pkPath { s9pk: s9pk_path }: S9pkPath, +) -> Result<(), Error> { + let tmpdir = TmpDir::new().await?; + let sqfs_path = tmpdir.join("image.squashfs"); + let arch = String::from_utf8( + Command::new(CONTAINER_TOOL) + .arg("run") + .arg("--rm") + .arg("--entrypoint") + .arg("uname") + .arg(&image) + .arg("-m") + .invoke(ErrorKind::Docker) + .await?, + )?; + let env = String::from_utf8( + Command::new(CONTAINER_TOOL) + .arg("run") + .arg("--rm") + .arg("--entrypoint") + .arg("env") + .arg(&image) + .invoke(ErrorKind::Docker) + .await?, + )? + .lines() + .filter(|l| { + l.trim() + .split_once("=") + .map_or(false, |(v, _)| !SKIP_ENV.contains(&v)) + }) + .join("\n") + + "\n"; + let workdir = Path::new( + String::from_utf8( + Command::new(CONTAINER_TOOL) + .arg("run") + .arg("--rm") + .arg("--entrypoint") + .arg("pwd") + .arg(&image) + .invoke(ErrorKind::Docker) + .await?, + )? + .trim(), + ) + .to_owned(); + let container_id = String::from_utf8( + Command::new(CONTAINER_TOOL) + .arg("create") + .arg(&image) + .invoke(ErrorKind::Docker) + .await?, + )?; + Command::new("bash") + .arg("-c") + .arg(format!( + "{CONTAINER_TOOL} export {container_id} | mksquashfs - {sqfs} -tar", + container_id = container_id.trim(), + sqfs = sqfs_path.display() + )) + .invoke(ErrorKind::Docker) + .await?; + Command::new(CONTAINER_TOOL) + .arg("rm") + .arg(container_id.trim()) + .invoke(ErrorKind::Docker) + .await?; + let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?) + .await? 
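// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// `add-image` above flattens a container image into a squashfs by piping
// `docker|podman export` into `mksquashfs - <out> -tar`. The same shell pipeline without
// the crate's `Invoke` helper, using only tokio (assumes bash, the container tool, and
// squashfs-tools are on PATH):
use std::path::Path;
use tokio::process::Command;

async fn export_to_squashfs(
    container_tool: &str, // "docker" or "podman"
    container_id: &str,
    out: &Path,
) -> std::io::Result<()> {
    let status = Command::new("bash")
        .arg("-c")
        .arg(format!(
            "{container_tool} export {id} | mksquashfs - {sqfs} -tar",
            id = container_id.trim(),
            sqfs = out.display(),
        ))
        .status()
        .await?;
    if !status.success() {
        return Err(std::io::Error::new(
            std::io::ErrorKind::Other,
            format!("export pipeline failed: {status}"),
        ));
    }
    Ok(())
}
// ----------------------------------------------------------------------------------------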
+ .into_dyn(); + let archive = s9pk.as_archive_mut(); + archive.set_signer(ctx.developer_key()?.clone()); + archive.contents_mut().insert_path( + Path::new("images") + .join(arch.trim()) + .join(&id) + .with_extension("squashfs"), + Entry::file(DynFileSource::new(sqfs_path)), + )?; + archive.contents_mut().insert_path( + Path::new("images") + .join(arch.trim()) + .join(&id) + .with_extension("env"), + Entry::file(DynFileSource::new(Arc::from(Vec::from(env)))), + )?; + archive.contents_mut().insert_path( + Path::new("images") + .join(arch.trim()) + .join(&id) + .with_extension("json"), + Entry::file(DynFileSource::new(Arc::from( + serde_json::to_vec(&serde_json::json!({ + "workdir": workdir + })) + .with_kind(ErrorKind::Serialization)?, + ))), + )?; + let tmp_path = s9pk_path.with_extension("s9pk.tmp"); + let mut tmp_file = File::create(&tmp_path).await?; + s9pk.serialize(&mut tmp_file, true).await?; + tmp_file.sync_all().await?; + tokio::fs::rename(&tmp_path, &s9pk_path).await?; + + Ok(()) +} + +#[derive(Deserialize, Serialize, Parser)] +struct EditManifestParams { + expression: String, +} +async fn edit_manifest( + ctx: CliContext, + EditManifestParams { expression }: EditManifestParams, + S9pkPath { s9pk: s9pk_path }: S9pkPath, +) -> Result { + let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?).await?; + let old = serde_json::to_value(s9pk.as_manifest()).with_kind(ErrorKind::Serialization)?; + *s9pk.as_manifest_mut() = serde_json::from_value(apply_expr(old.into(), &expression)?.into()) + .with_kind(ErrorKind::Serialization)?; + let manifest = s9pk.as_manifest().clone(); + let tmp_path = s9pk_path.with_extension("s9pk.tmp"); + let mut tmp_file = File::create(&tmp_path).await?; + s9pk.as_archive_mut() + .set_signer(ctx.developer_key()?.clone()); + s9pk.serialize(&mut tmp_file, true).await?; + tmp_file.sync_all().await?; + tokio::fs::rename(&tmp_path, &s9pk_path).await?; + + Ok(manifest) +} + +async fn file_tree( + ctx: CliContext, + _: Empty, + S9pkPath { s9pk }: S9pkPath, +) -> Result, Error> { + let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?; + Ok(s9pk.as_archive().contents().file_paths("")) +} + +async fn inspect_manifest( + ctx: CliContext, + _: Empty, + S9pkPath { s9pk }: S9pkPath, +) -> Result { + let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?; + Ok(s9pk.as_manifest().clone()) +} diff --git a/core/startos/src/s9pk/specv2.md b/core/startos/src/s9pk/specv2.md deleted file mode 100644 index 9bf993463a..0000000000 --- a/core/startos/src/s9pk/specv2.md +++ /dev/null @@ -1,28 +0,0 @@ -## Header - -### Magic - -2B: `0x3b3b` - -### Version - -varint: `0x02` - -### Pubkey - -32B: ed25519 pubkey - -### TOC - -- number of sections (varint) -- FOREACH section - - sig (32B: ed25519 signature of BLAKE-3 of rest of section) - - name (varstring) - - TYPE (varint) - - TYPE=FILE (`0x01`) - - mime (varstring) - - pos (32B: u64 BE) - - len (32B: u64 BE) - - hash (32B: BLAKE-3 of file contents) - - TYPE=TOC (`0x02`) - - recursively defined diff --git a/core/startos/src/s9pk/v1/docker.rs b/core/startos/src/s9pk/v1/docker.rs index be93905fb6..7f15077039 100644 --- a/core/startos/src/s9pk/v1/docker.rs +++ b/core/startos/src/s9pk/v1/docker.rs @@ -13,7 +13,7 @@ use crate::util::io::from_cbor_async_reader; use crate::{Error, ErrorKind, ARCH}; #[derive(Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct DockerMultiArch { pub default: String, pub available: BTreeSet, diff --git 
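// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// Both `add-image` and `edit manifest` above rewrite the package through a temporary
// `.s9pk.tmp` file, fsync it, and only then rename it over the original, so an interrupted
// edit never leaves a truncated .s9pk behind. The skeleton of that pattern:
use std::path::Path;
use tokio::io::AsyncWriteExt;

async fn replace_atomically(path: &Path, contents: &[u8]) -> std::io::Result<()> {
    let tmp = path.with_extension("s9pk.tmp");
    let mut file = tokio::fs::File::create(&tmp).await?;
    file.write_all(contents).await?;
    // Flush data to disk before the rename makes the new file visible under the old name.
    file.sync_all().await?;
    tokio::fs::rename(&tmp, path).await?;
    Ok(())
}
// ----------------------------------------------------------------------------------------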
a/core/startos/src/s9pk/v1/manifest.rs b/core/startos/src/s9pk/v1/manifest.rs index 3eee540ed1..ef346ad2bc 100644 --- a/core/startos/src/s9pk/v1/manifest.rs +++ b/core/startos/src/s9pk/v1/manifest.rs @@ -1,34 +1,25 @@ use std::collections::BTreeMap; use std::path::{Path, PathBuf}; -use color_eyre::eyre::eyre; +use emver::VersionRange; +use imbl_value::InOMap; pub use models::PackageId; +use models::VolumeId; use serde::{Deserialize, Serialize}; use url::Url; use super::git_hash::GitHash; -use crate::action::Actions; -use crate::backup::BackupActions; -use crate::config::action::ConfigActions; -use crate::dependencies::Dependencies; -use crate::migration::Migrations; -use crate::net::interface::Interfaces; use crate::prelude::*; -use crate::procedure::docker::DockerContainers; -use crate::procedure::PackageProcedure; -use crate::status::health_check::HealthChecks; -use crate::util::serde::Regex; +use crate::s9pk::manifest::{Alerts, Description, HardwareRequirements}; use crate::util::Version; use crate::version::{Current, VersionT}; -use crate::volume::Volumes; -use crate::Error; fn current_version() -> Version { Current::new().semver().into() } #[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] #[model = "Model"] pub struct Manifest { #[serde(default = "current_version")] @@ -36,13 +27,11 @@ pub struct Manifest { pub id: PackageId, #[serde(default)] pub git_hash: Option, + #[serde(default)] + pub assets: Assets, pub title: String, pub version: Version, pub description: Description, - #[serde(default)] - pub assets: Assets, - #[serde(default)] - pub build: Option>, pub release_notes: String, pub license: String, // type of license pub wrapper_repo: Url, @@ -52,24 +41,10 @@ pub struct Manifest { pub donation_url: Option, #[serde(default)] pub alerts: Alerts, - pub main: PackageProcedure, - pub health_checks: HealthChecks, - pub config: Option, - pub properties: Option, - pub volumes: Volumes, - // #[serde(default)] - pub interfaces: Interfaces, - // #[serde(default)] - pub backup: BackupActions, - #[serde(default)] - pub migrations: Migrations, + pub volumes: BTreeMap, #[serde(default)] - pub actions: Actions, - // #[serde(default)] - // pub permissions: Permissions, - #[serde(default)] - pub dependencies: Dependencies, - pub containers: Option, + pub dependencies: BTreeMap, + pub config: Option>, #[serde(default)] pub replaces: Vec, @@ -78,45 +53,31 @@ pub struct Manifest { pub hardware_requirements: HardwareRequirements, } -impl Manifest { - pub fn package_procedures(&self) -> impl Iterator { - use std::iter::once; - let main = once(&self.main); - let cfg_get = self.config.as_ref().map(|a| &a.get).into_iter(); - let cfg_set = self.config.as_ref().map(|a| &a.set).into_iter(); - let props = self.properties.iter(); - let backups = vec![&self.backup.create, &self.backup.restore].into_iter(); - let migrations = self - .migrations - .to - .values() - .chain(self.migrations.from.values()); - let actions = self.actions.0.values().map(|a| &a.implementation); - main.chain(cfg_get) - .chain(cfg_set) - .chain(props) - .chain(backups) - .chain(migrations) - .chain(actions) - } - - pub fn with_git_hash(mut self, git_hash: GitHash) -> Self { - self.git_hash = Some(git_hash); - self +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +#[serde(tag = "type")] +pub enum DependencyRequirement { + OptIn { how: String }, + OptOut { how: String }, + Required, +} +impl DependencyRequirement { + pub fn 
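// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// The reworked v1 manifest switches to camelCase names and models dependency requirements
// as an internally tagged enum. What those serde attributes produce on the wire, shown on
// a copy of the enum (assumes serde and serde_json are available):
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type")]
enum DependencyRequirement {
    OptIn { how: String },
    OptOut { how: String },
    Required,
}

#[test]
fn tagged_representation() {
    // `tag = "type"` folds the variant name into the object; `rename_all` camelCases it.
    assert_eq!(
        serde_json::to_string(&DependencyRequirement::OptIn { how: "enable it".into() }).unwrap(),
        r#"{"type":"optIn","how":"enable it"}"#
    );
    assert_eq!(
        serde_json::to_string(&DependencyRequirement::Required).unwrap(),
        r#"{"type":"required"}"#
    );
}
// ----------------------------------------------------------------------------------------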
required(&self) -> bool { + matches!(self, &DependencyRequirement::Required) } } -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct HardwareRequirements { - #[serde(default)] - device: BTreeMap, - ram: Option, - pub arch: Option>, +#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +pub struct DepInfo { + pub version: VersionRange, + pub requirement: DependencyRequirement, + pub description: Option, } #[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct Assets { #[serde(default)] pub license: Option, @@ -176,36 +137,3 @@ impl Assets { .unwrap_or(Path::new("scripts")) } } - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct Description { - pub short: String, - pub long: String, -} -impl Description { - pub fn validate(&self) -> Result<(), Error> { - if self.short.chars().skip(160).next().is_some() { - return Err(Error::new( - eyre!("Short description must be 160 characters or less."), - crate::ErrorKind::ValidateS9pk, - )); - } - if self.long.chars().skip(5000).next().is_some() { - return Err(Error::new( - eyre!("Long description must be 5000 characters or less."), - crate::ErrorKind::ValidateS9pk, - )); - } - Ok(()) - } -} - -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct Alerts { - pub install: Option, - pub uninstall: Option, - pub restore: Option, - pub start: Option, - pub stop: Option, -} diff --git a/core/startos/src/s9pk/v1/mod.rs b/core/startos/src/s9pk/v1/mod.rs index e1bf4caba2..2f95f0813d 100644 --- a/core/startos/src/s9pk/v1/mod.rs +++ b/core/startos/src/s9pk/v1/mod.rs @@ -1,25 +1,7 @@ -use std::ffi::OsStr; use std::path::PathBuf; -use color_eyre::eyre::eyre; -use futures::TryStreamExt; -use imbl::OrdMap; -use rpc_toolkit::command; -use serde_json::Value; -use tokio::io::AsyncRead; -use tracing::instrument; - -use crate::context::SdkContext; -use crate::s9pk::builder::S9pkPacker; -use crate::s9pk::docker::DockerMultiArch; -use crate::s9pk::git_hash::GitHash; -use crate::s9pk::manifest::Manifest; -use crate::s9pk::reader::S9pkReader; -use crate::util::display_none; -use crate::util::io::BufferedWriteReader; -use crate::util::serde::IoFormat; -use crate::volume::Volume; -use crate::{Error, ErrorKind, ResultExt}; +use clap::Parser; +use serde::{Deserialize, Serialize}; pub mod builder; pub mod docker; @@ -30,217 +12,9 @@ pub mod reader; pub const SIG_CONTEXT: &[u8] = b"s9pk"; -#[command(cli_only, display(display_none))] -#[instrument(skip_all)] -pub async fn pack(#[context] ctx: SdkContext, #[arg] path: Option) -> Result<(), Error> { - use tokio::fs::File; - - let path = if let Some(path) = path { - path - } else { - std::env::current_dir()? - }; - let manifest_value: Value = if path.join("manifest.toml").exists() { - IoFormat::Toml - .from_async_reader(File::open(path.join("manifest.toml")).await?) - .await? - } else if path.join("manifest.yaml").exists() { - IoFormat::Yaml - .from_async_reader(File::open(path.join("manifest.yaml")).await?) - .await? - } else if path.join("manifest.json").exists() { - IoFormat::Json - .from_async_reader(File::open(path.join("manifest.json")).await?) - .await? 
- } else { - return Err(Error::new( - eyre!("manifest not found"), - crate::ErrorKind::Pack, - )); - }; - - let manifest: Manifest = serde_json::from_value::(manifest_value.clone()) - .with_kind(crate::ErrorKind::Deserialization)? - .with_git_hash(GitHash::from_path(&path).await?); - let extra_keys = - enumerate_extra_keys(&serde_json::to_value(&manifest).unwrap(), &manifest_value); - for k in extra_keys { - tracing::warn!("Unrecognized Manifest Key: {}", k); - } - - let outfile_path = path.join(format!("{}.s9pk", manifest.id)); - let mut outfile = File::create(outfile_path).await?; - S9pkPacker::builder() - .manifest(&manifest) - .writer(&mut outfile) - .license( - File::open(path.join(manifest.assets.license_path())) - .await - .with_ctx(|_| { - ( - crate::ErrorKind::Filesystem, - manifest.assets.license_path().display().to_string(), - ) - })?, - ) - .icon( - File::open(path.join(manifest.assets.icon_path())) - .await - .with_ctx(|_| { - ( - crate::ErrorKind::Filesystem, - manifest.assets.icon_path().display().to_string(), - ) - })?, - ) - .instructions( - File::open(path.join(manifest.assets.instructions_path())) - .await - .with_ctx(|_| { - ( - crate::ErrorKind::Filesystem, - manifest.assets.instructions_path().display().to_string(), - ) - })?, - ) - .docker_images({ - let docker_images_path = path.join(manifest.assets.docker_images_path()); - let res: Box = if tokio::fs::metadata(&docker_images_path).await?.is_dir() { - let tars: Vec<_> = tokio_stream::wrappers::ReadDirStream::new(tokio::fs::read_dir(&docker_images_path).await?).try_collect().await?; - let mut arch_info = DockerMultiArch::default(); - for tar in &tars { - if tar.path().extension() == Some(OsStr::new("tar")) { - arch_info.available.insert(tar.path().file_stem().unwrap_or_default().to_str().unwrap_or_default().to_owned()); - } - } - if arch_info.available.contains("aarch64") { - arch_info.default = "aarch64".to_owned(); - } else { - arch_info.default = arch_info.available.iter().next().cloned().unwrap_or_default(); - } - let arch_info_cbor = IoFormat::Cbor.to_vec(&arch_info)?; - Box::new(BufferedWriteReader::new(|w| async move { - let mut docker_images = tokio_tar::Builder::new(w); - let mut multiarch_header = tokio_tar::Header::new_gnu(); - multiarch_header.set_path("multiarch.cbor")?; - multiarch_header.set_size(arch_info_cbor.len() as u64); - multiarch_header.set_cksum(); - docker_images.append(&multiarch_header, std::io::Cursor::new(arch_info_cbor)).await?; - for tar in tars - { - docker_images - .append_path_with_name( - tar.path(), - tar.file_name(), - ) - .await?; - } - Ok::<_, std::io::Error>(()) - }, 1024 * 1024)) - } else { - Box::new(File::open(docker_images_path) - .await - .with_ctx(|_| { - ( - crate::ErrorKind::Filesystem, - manifest.assets.docker_images_path().display().to_string(), - ) - })?) 
- }; - res - }) - .assets({ - let asset_volumes = manifest - .volumes - .iter() - .filter(|(_, v)| matches!(v, &&Volume::Assets {})).map(|(id, _)| id.clone()).collect::>(); - let assets_path = manifest.assets.assets_path().to_owned(); - let path = path.clone(); - - BufferedWriteReader::new(|w| async move { - let mut assets = tokio_tar::Builder::new(w); - for asset_volume in asset_volumes - { - assets - .append_dir_all( - &asset_volume, - path.join(&assets_path).join(&asset_volume), - ) - .await?; - } - Ok::<_, std::io::Error>(()) - }, 1024 * 1024) - }) - .scripts({ - let script_path = path.join(manifest.assets.scripts_path()).join("embassy.js"); - let needs_script = manifest.package_procedures().any(|a| a.is_script()); - let has_script = script_path.exists(); - match (needs_script, has_script) { - (true, true) => Some(File::open(script_path).await?), - (true, false) => { - return Err(Error::new(eyre!("Script is declared in manifest, but no such script exists at ./scripts/embassy.js"), ErrorKind::Pack).into()) - } - (false, true) => { - tracing::warn!("Manifest does not declare any actions that use scripts, but a script exists at ./scripts/embassy.js"); - None - } - (false, false) => None - } - }) - .build() - .pack(&ctx.developer_key()?) - .await?; - outfile.sync_all().await?; - - Ok(()) -} - -#[command(rename = "s9pk", cli_only, display(display_none))] -pub async fn verify(#[arg] path: PathBuf) -> Result<(), Error> { - let mut s9pk = S9pkReader::open(path, true).await?; - s9pk.validate().await?; - - Ok(()) -} - -fn enumerate_extra_keys(reference: &Value, candidate: &Value) -> Vec { - match (reference, candidate) { - (Value::Object(m_r), Value::Object(m_c)) => { - let om_r: OrdMap = m_r.clone().into_iter().collect(); - let om_c: OrdMap = m_c.clone().into_iter().collect(); - let common = om_r.clone().intersection(om_c.clone()); - let top_extra = common.clone().symmetric_difference(om_c.clone()); - let mut all_extra = top_extra - .keys() - .map(|s| format!(".{}", s)) - .collect::>(); - for (k, v) in common { - all_extra.extend( - enumerate_extra_keys(&v, om_c.get(&k).unwrap()) - .into_iter() - .map(|s| format!(".{}{}", k, s)), - ) - } - all_extra - } - (_, Value::Object(m1)) => m1.clone().keys().map(|s| format!(".{}", s)).collect(), - _ => Vec::new(), - } -} - -#[test] -fn test_enumerate_extra_keys() { - use serde_json::json; - let extras = enumerate_extra_keys( - &json!({ - "test": 1, - "test2": null, - }), - &json!({ - "test": 1, - "test2": { "test3": null }, - "test4": null - }), - ); - println!("{:?}", extras) +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct VerifyParams { + pub path: PathBuf, } diff --git a/core/startos/src/s9pk/v1/reader.rs b/core/startos/src/s9pk/v1/reader.rs index 61b5e46a85..82f62e1dfa 100644 --- a/core/startos/src/s9pk/v1/reader.rs +++ b/core/startos/src/s9pk/v1/reader.rs @@ -1,4 +1,3 @@ -use std::collections::BTreeSet; use std::io::SeekFrom; use std::ops::Range; use std::path::Path; @@ -10,22 +9,17 @@ use color_eyre::eyre::eyre; use digest::Output; use ed25519_dalek::VerifyingKey; use futures::TryStreamExt; -use models::ImageId; +use models::{ImageId, PackageId}; use sha2::{Digest, Sha512}; use tokio::fs::File; -use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, ReadBuf}; +use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, AsyncSeekExt, BufReader, ReadBuf}; use tracing::instrument; use super::header::{FileSection, Header, TableOfContents}; -use 
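// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// The deleted `pack` command warned about manifest keys it did not recognize by
// re-serializing the typed manifest and diffing it against the raw input value
// (`enumerate_extra_keys` above). The same check in a compact form over serde_json values
// (the original used imbl's OrdMap; this version likewise reports dotted key paths):
use serde_json::Value;

fn extra_keys(reference: &Value, candidate: &Value, prefix: &str, out: &mut Vec<String>) {
    if let (Value::Object(known), Value::Object(raw)) = (reference, candidate) {
        for (key, value) in raw {
            let path = format!("{prefix}.{key}");
            match known.get(key) {
                // The typed model knows this key: recurse in case a nested object grew.
                Some(reference_value) => extra_keys(reference_value, value, &path, out),
                // Only present in the raw input: the typed model will silently drop it.
                None => out.push(path),
            }
        }
    }
}
// ----------------------------------------------------------------------------------------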
super::manifest::{Manifest, PackageId}; use super::SIG_CONTEXT; -use crate::install::progress::InstallProgressTracker; -use crate::s9pk::docker::DockerReader; +use crate::prelude::*; +use crate::s9pk::v1::docker::DockerReader; use crate::util::Version; -use crate::{Error, ResultExt}; - -const MAX_REPLACES: usize = 10; -const MAX_TITLE_LEN: usize = 30; #[pin_project::pin_project] #[derive(Debug)] @@ -144,7 +138,7 @@ impl FromStr for ImageTag { } } -pub struct S9pkReader { +pub struct S9pkReader> { hash: Option>, hash_string: Option, developer_key: VerifyingKey, @@ -159,113 +153,10 @@ impl S9pkReader { .await .with_ctx(|_| (crate::error::ErrorKind::Filesystem, p.display().to_string()))?; - Self::from_reader(rdr, check_sig).await - } -} -impl S9pkReader> { - pub fn validated(&mut self) { - self.rdr.validated() + Self::from_reader(BufReader::new(rdr), check_sig).await } } impl S9pkReader { - #[instrument(skip_all)] - pub async fn validate(&mut self) -> Result<(), Error> { - if self.toc.icon.length > 102_400 { - // 100 KiB - return Err(Error::new( - eyre!("icon must be less than 100KiB"), - crate::ErrorKind::ValidateS9pk, - )); - } - let image_tags = self.image_tags().await?; - let man = self.manifest().await?; - let containers = &man.containers; - let validated_image_ids = image_tags - .into_iter() - .map(|i| i.validate(&man.id, &man.version).map(|_| i.image_id)) - .collect::, _>>()?; - man.description.validate()?; - man.actions.0.iter().try_for_each(|(_, action)| { - action.validate( - containers, - &man.eos_version, - &man.volumes, - &validated_image_ids, - ) - })?; - man.backup.validate( - containers, - &man.eos_version, - &man.volumes, - &validated_image_ids, - )?; - if let Some(cfg) = &man.config { - cfg.validate( - containers, - &man.eos_version, - &man.volumes, - &validated_image_ids, - )?; - } - man.health_checks - .validate(&man.eos_version, &man.volumes, &validated_image_ids)?; - man.interfaces.validate()?; - man.main - .validate(&man.eos_version, &man.volumes, &validated_image_ids, false) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, "Main"))?; - man.migrations.validate( - containers, - &man.eos_version, - &man.volumes, - &validated_image_ids, - )?; - - #[cfg(feature = "js-engine")] - if man.containers.is_some() - || matches!(man.main, crate::procedure::PackageProcedure::Script(_)) - { - return Err(Error::new( - eyre!("Right now we don't support the containers and the long running main"), - crate::ErrorKind::ValidateS9pk, - )); - } - - if man.replaces.len() >= MAX_REPLACES { - return Err(Error::new( - eyre!("Cannot have more than {MAX_REPLACES} replaces"), - crate::ErrorKind::ValidateS9pk, - )); - } - if let Some(too_big) = man.replaces.iter().find(|x| x.len() >= MAX_REPLACES) { - return Err(Error::new( - eyre!("We have found a replaces of ({too_big}) that exceeds the max length of {MAX_TITLE_LEN} "), - crate::ErrorKind::ValidateS9pk, - )); - } - if man.title.len() >= MAX_TITLE_LEN { - return Err(Error::new( - eyre!("Cannot have more than a length of {MAX_TITLE_LEN} for title"), - crate::ErrorKind::ValidateS9pk, - )); - } - - if man.containers.is_some() - && matches!(man.main, crate::procedure::PackageProcedure::Docker(_)) - { - return Err(Error::new( - eyre!("Cannot have a main docker and a main in containers"), - crate::ErrorKind::ValidateS9pk, - )); - } - if let Some(props) = &man.properties { - props - .validate(&man.eos_version, &man.volumes, &validated_image_ids, true) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, "Properties"))?; - } - 
man.volumes.validate(&man.interfaces)?; - - Ok(()) - } #[instrument(skip_all)] pub async fn image_tags(&mut self) -> Result, Error> { let mut tar = tokio_tar::Archive::new(self.docker_images().await?); @@ -371,7 +262,7 @@ impl S9pkReader { self.read_handle(self.toc.manifest).await } - pub async fn manifest(&mut self) -> Result { + pub async fn manifest(&mut self) -> Result { let slice = self.manifest_raw().await?.to_vec().await?; serde_cbor::de::from_reader(slice.as_slice()) .with_ctx(|_| (crate::ErrorKind::ParseS9pk, "Deserializing Manifest (CBOR)")) diff --git a/core/startos/src/s9pk/v2/compat.rs b/core/startos/src/s9pk/v2/compat.rs new file mode 100644 index 0000000000..5a98538dc0 --- /dev/null +++ b/core/startos/src/s9pk/v2/compat.rs @@ -0,0 +1,372 @@ +use std::io::Cursor; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use itertools::Itertools; +use tokio::fs::File; +use tokio::io::{AsyncRead, AsyncSeek, AsyncWriteExt}; +use tokio::process::Command; + +use crate::dependencies::{DepInfo, Dependencies}; +use crate::prelude::*; +use crate::s9pk::manifest::Manifest; +use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::s9pk::merkle_archive::source::{FileSource, Section}; +use crate::s9pk::merkle_archive::{Entry, MerkleArchive}; +use crate::s9pk::rpc::SKIP_ENV; +use crate::s9pk::v1::manifest::Manifest as ManifestV1; +use crate::s9pk::v1::reader::S9pkReader; +use crate::s9pk::v2::S9pk; +use crate::util::io::TmpDir; +use crate::util::Invoke; +use crate::ARCH; + +pub const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x01]; + +#[cfg(not(feature = "docker"))] +pub const CONTAINER_TOOL: &str = "podman"; + +#[cfg(feature = "docker")] +pub const CONTAINER_TOOL: &str = "docker"; + +type DynRead = Box; +fn into_dyn_read(r: R) -> DynRead { + Box::new(r) +} + +#[derive(Clone)] +enum CompatSource { + Buffered(Arc<[u8]>), + File(PathBuf), +} +#[async_trait::async_trait] +impl FileSource for CompatSource { + type Reader = Box; + async fn size(&self) -> Result { + match self { + Self::Buffered(a) => Ok(a.len() as u64), + Self::File(f) => Ok(tokio::fs::metadata(f).await?.len()), + } + } + async fn reader(&self) -> Result { + match self { + Self::Buffered(a) => Ok(into_dyn_read(Cursor::new(a.clone()))), + Self::File(f) => Ok(into_dyn_read(File::open(f).await?)), + } + } +} + +impl S9pk> { + #[instrument(skip_all)] + pub async fn from_v1( + mut reader: S9pkReader, + destination: impl AsRef, + signer: ed25519_dalek::SigningKey, + ) -> Result { + let scratch_dir = TmpDir::new().await?; + + let mut archive = DirectoryContents::::new(); + + // manifest.json + let manifest_raw = reader.manifest().await?; + let manifest = from_value::(manifest_raw.clone())?; + let mut new_manifest = Manifest::from(manifest.clone()); + + // LICENSE.md + let license: Arc<[u8]> = reader.license().await?.to_vec().await?.into(); + archive.insert_path( + "LICENSE.md", + Entry::file(CompatSource::Buffered(license.into())), + )?; + + // instructions.md + let instructions: Arc<[u8]> = reader.instructions().await?.to_vec().await?.into(); + archive.insert_path( + "instructions.md", + Entry::file(CompatSource::Buffered(instructions.into())), + )?; + + // icon.md + let icon: Arc<[u8]> = reader.icon().await?.to_vec().await?.into(); + archive.insert_path( + format!("icon.{}", manifest.assets.icon_type()), + Entry::file(CompatSource::Buffered(icon.into())), + )?; + + // images + let images_dir = scratch_dir.join("images"); + 
tokio::fs::create_dir_all(&images_dir).await?; + Command::new(CONTAINER_TOOL) + .arg("load") + .input(Some(&mut reader.docker_images().await?)) + .invoke(ErrorKind::Docker) + .await?; + #[derive(serde::Deserialize)] + #[serde(rename_all = "PascalCase")] + struct DockerImagesOut { + repository: Option, + tag: Option, + #[serde(default)] + names: Vec, + } + for image in { + #[cfg(feature = "docker")] + let images = std::str::from_utf8( + &Command::new(CONTAINER_TOOL) + .arg("images") + .arg("--format=json") + .invoke(ErrorKind::Docker) + .await?, + )? + .lines() + .map(|l| serde_json::from_str::(l)) + .collect::, _>>() + .with_kind(ErrorKind::Deserialization)? + .into_iter(); + #[cfg(not(feature = "docker"))] + let images = serde_json::from_slice::>( + &Command::new(CONTAINER_TOOL) + .arg("images") + .arg("--format=json") + .invoke(ErrorKind::Docker) + .await?, + ) + .with_kind(ErrorKind::Deserialization)? + .into_iter(); + images + } + .flat_map(|i| { + if let (Some(repository), Some(tag)) = (i.repository, i.tag) { + vec![format!("{repository}:{tag}")] + } else { + i.names + .into_iter() + .filter_map(|i| i.strip_prefix("docker.io/").map(|s| s.to_owned())) + .collect() + } + }) + .filter_map(|i| { + i.strip_suffix(&format!(":{}", manifest.version)) + .map(|s| s.to_owned()) + }) + .filter_map(|i| { + i.strip_prefix(&format!("start9/{}/", manifest.id)) + .map(|s| s.to_owned()) + }) { + new_manifest.images.push(image.parse()?); + let sqfs_path = images_dir.join(&image).with_extension("squashfs"); + let image_name = format!("start9/{}/{}:{}", manifest.id, image, manifest.version); + let id = String::from_utf8( + Command::new(CONTAINER_TOOL) + .arg("create") + .arg(&image_name) + .invoke(ErrorKind::Docker) + .await?, + )?; + let env = String::from_utf8( + Command::new(CONTAINER_TOOL) + .arg("run") + .arg("--rm") + .arg("--entrypoint") + .arg("env") + .arg(&image_name) + .invoke(ErrorKind::Docker) + .await?, + )? + .lines() + .filter(|l| { + l.trim() + .split_once("=") + .map_or(false, |(v, _)| !SKIP_ENV.contains(&v)) + }) + .join("\n") + + "\n"; + let workdir = Path::new( + String::from_utf8( + Command::new(CONTAINER_TOOL) + .arg("run") + .arg("--rm") + .arg("--entrypoint") + .arg("pwd") + .arg(&image_name) + .invoke(ErrorKind::Docker) + .await?, + )? + .trim(), + ) + .to_owned(); + Command::new("bash") + .arg("-c") + .arg(format!( + "{CONTAINER_TOOL} export {id} | mksquashfs - {sqfs} -tar", + id = id.trim(), + sqfs = sqfs_path.display() + )) + .invoke(ErrorKind::Docker) + .await?; + Command::new(CONTAINER_TOOL) + .arg("rm") + .arg(id.trim()) + .invoke(ErrorKind::Docker) + .await?; + archive.insert_path( + Path::new("images") + .join(&*ARCH) + .join(&image) + .with_extension("squashfs"), + Entry::file(CompatSource::File(sqfs_path)), + )?; + archive.insert_path( + Path::new("images") + .join(&*ARCH) + .join(&image) + .with_extension("env"), + Entry::file(CompatSource::Buffered(Vec::from(env).into())), + )?; + archive.insert_path( + Path::new("images") + .join(&*ARCH) + .join(&image) + .with_extension("json"), + Entry::file(CompatSource::Buffered( + serde_json::to_vec(&serde_json::json!({ + "workdir": workdir + })) + .with_kind(ErrorKind::Serialization)? + .into(), + )), + )?; + } + Command::new(CONTAINER_TOOL) + .arg("image") + .arg("prune") + .arg("-af") + .invoke(ErrorKind::Docker) + .await?; + + // assets + let asset_dir = scratch_dir.join("assets"); + tokio::fs::create_dir_all(&asset_dir).await?; + tokio_tar::Archive::new(reader.assets().await?) 
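// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// The v1-to-v2 conversion above streams the legacy docker-images tar straight into
// `docker load` / `podman load` over stdin. Without the crate's `Invoke` extension trait,
// that step looks roughly like this in plain tokio:
use std::process::Stdio;
use tokio::io::AsyncRead;
use tokio::process::Command;

async fn load_images<R: AsyncRead + Unpin>(container_tool: &str, mut tar: R) -> std::io::Result<()> {
    let mut child = Command::new(container_tool)
        .arg("load")
        .stdin(Stdio::piped())
        .spawn()?;
    // Stream the tar archive into the child's stdin, then drop it so `load` sees EOF.
    let mut stdin = child.stdin.take().expect("stdin was piped");
    tokio::io::copy(&mut tar, &mut stdin).await?;
    drop(stdin);
    let status = child.wait().await?;
    if !status.success() {
        return Err(std::io::Error::new(
            std::io::ErrorKind::Other,
            format!("{container_tool} load failed: {status}"),
        ));
    }
    Ok(())
}
// ----------------------------------------------------------------------------------------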
+ .unpack(&asset_dir) + .await?; + for (asset_id, _) in manifest + .volumes + .iter() + .filter(|(_, v)| v.get("type").and_then(|v| v.as_str()) == Some("assets")) + { + let assets_path = asset_dir.join(&asset_id); + let sqfs_path = assets_path.with_extension("squashfs"); + Command::new("mksquashfs") + .arg(&assets_path) + .arg(&sqfs_path) + .invoke(ErrorKind::Filesystem) + .await?; + archive.insert_path( + Path::new("assets").join(&asset_id), + Entry::file(CompatSource::File(sqfs_path)), + )?; + } + + // javascript + let js_dir = scratch_dir.join("javascript"); + let sqfs_path = js_dir.with_extension("squashfs"); + tokio::fs::create_dir_all(&js_dir).await?; + if let Some(mut scripts) = reader.scripts().await? { + let mut js_file = File::create(js_dir.join("embassy.js")).await?; + tokio::io::copy(&mut scripts, &mut js_file).await?; + js_file.sync_all().await?; + } + { + let mut js_file = File::create(js_dir.join("embassyManifest.json")).await?; + js_file + .write_all(&serde_json::to_vec(&manifest_raw).with_kind(ErrorKind::Serialization)?) + .await?; + js_file.sync_all().await?; + } + Command::new("mksquashfs") + .arg(&js_dir) + .arg(&sqfs_path) + .invoke(ErrorKind::Filesystem) + .await?; + archive.insert_path( + Path::new("javascript.squashfs"), + Entry::file(CompatSource::File(sqfs_path)), + )?; + + archive.insert_path( + "manifest.json", + Entry::file(CompatSource::Buffered( + serde_json::to_vec::(&new_manifest) + .with_kind(ErrorKind::Serialization)? + .into(), + )), + )?; + + let mut s9pk = S9pk::new(MerkleArchive::new(archive, signer), None).await?; + let mut dest_file = File::create(destination.as_ref()).await?; + s9pk.serialize(&mut dest_file, false).await?; + dest_file.sync_all().await?; + + scratch_dir.delete().await?; + + Ok(S9pk::deserialize(&MultiCursorFile::from( + File::open(destination.as_ref()).await?, + )) + .await?) 
+ } +} + +impl From for Manifest { + fn from(value: ManifestV1) -> Self { + let default_url = value.upstream_repo.clone(); + Self { + id: value.id, + title: value.title, + version: value.version, + release_notes: value.release_notes, + license: value.license, + replaces: value.replaces, + wrapper_repo: value.wrapper_repo, + upstream_repo: value.upstream_repo, + support_site: value.support_site.unwrap_or_else(|| default_url.clone()), + marketing_site: value.marketing_site.unwrap_or_else(|| default_url.clone()), + donation_url: value.donation_url, + description: value.description, + images: Vec::new(), + assets: value + .volumes + .iter() + .filter(|(_, v)| v.get("type").and_then(|v| v.as_str()) == Some("assets")) + .map(|(id, _)| id.clone()) + .collect(), + volumes: value + .volumes + .iter() + .filter(|(_, v)| v.get("type").and_then(|v| v.as_str()) == Some("data")) + .map(|(id, _)| id.clone()) + .collect(), + alerts: value.alerts, + dependencies: Dependencies( + value + .dependencies + .into_iter() + .map(|(id, value)| { + ( + id, + DepInfo { + description: value.description, + optional: !value.requirement.required(), + }, + ) + }) + .collect(), + ), + hardware_requirements: value.hardware_requirements, + git_hash: value.git_hash, + os_version: value.eos_version, + has_config: value.config.is_some(), + } + } +} diff --git a/core/startos/src/s9pk/v2/manifest.rs b/core/startos/src/s9pk/v2/manifest.rs new file mode 100644 index 0000000000..ea4524400f --- /dev/null +++ b/core/startos/src/s9pk/v2/manifest.rs @@ -0,0 +1,109 @@ +use std::collections::BTreeMap; + +use color_eyre::eyre::eyre; +use helpers::const_true; +pub use models::PackageId; +use models::{ImageId, VolumeId}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; +use url::Url; + +use crate::dependencies::Dependencies; +use crate::prelude::*; +use crate::s9pk::v1::git_hash::GitHash; +use crate::util::serde::Regex; +use crate::util::Version; +use crate::version::{Current, VersionT}; + +fn current_version() -> Version { + Current::new().semver().into() +} + +#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct Manifest { + pub id: PackageId, + pub title: String, + #[ts(type = "string")] + pub version: Version, + pub release_notes: String, + pub license: String, // type of license + #[serde(default)] + pub replaces: Vec, + #[ts(type = "string")] + pub wrapper_repo: Url, + #[ts(type = "string")] + pub upstream_repo: Url, + #[ts(type = "string")] + pub support_site: Url, + #[ts(type = "string")] + pub marketing_site: Url, + #[ts(type = "string | null")] + pub donation_url: Option, + pub description: Description, + pub images: Vec, + pub assets: Vec, // TODO: AssetsId + pub volumes: Vec, + #[serde(default)] + pub alerts: Alerts, + #[serde(default)] + pub dependencies: Dependencies, + #[serde(default)] + pub hardware_requirements: HardwareRequirements, + #[serde(default)] + #[ts(type = "string | null")] + pub git_hash: Option, + #[serde(default = "current_version")] + #[ts(type = "string")] + pub os_version: Version, + #[serde(default = "const_true")] + pub has_config: bool, +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct HardwareRequirements { + #[serde(default)] + #[ts(type = "{ [key: string]: string }")] + device: BTreeMap, + ram: Option, + pub arch: Option>, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] +pub struct Description { + pub 
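// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// The v2 manifest above derives `ts_rs::TS` with `#[ts(export)]`, which is how TypeScript
// bindings are generated from these Rust types; fields whose types don't implement `TS`
// (Url, Version, ...) are overridden with `#[ts(type = "...")]`. A minimal example of the
// same pattern (assumes the ts-rs crate and url with its serde feature; ts-rs writes the
// bindings when `cargo test` runs):
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use url::Url;

#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
struct ExampleManifest {
    id: String,
    #[ts(type = "string")]
    wrapper_repo: Url,
    #[ts(type = "string | null")]
    donation_url: Option<Url>,
}
// ----------------------------------------------------------------------------------------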
short: String, + pub long: String, +} +impl Description { + pub fn validate(&self) -> Result<(), Error> { + if self.short.chars().skip(160).next().is_some() { + return Err(Error::new( + eyre!("Short description must be 160 characters or less."), + crate::ErrorKind::ValidateS9pk, + )); + } + if self.long.chars().skip(5000).next().is_some() { + return Err(Error::new( + eyre!("Long description must be 5000 characters or less."), + crate::ErrorKind::ValidateS9pk, + )); + } + Ok(()) + } +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct Alerts { + pub install: Option, + pub uninstall: Option, + pub restore: Option, + pub start: Option, + pub stop: Option, +} diff --git a/core/startos/src/s9pk/v2/mod.rs b/core/startos/src/s9pk/v2/mod.rs index be42d06126..af1cd1c17a 100644 --- a/core/startos/src/s9pk/v2/mod.rs +++ b/core/startos/src/s9pk/v2/mod.rs @@ -1,23 +1,178 @@ +use std::ffi::OsStr; +use std::path::Path; +use std::sync::Arc; + +use imbl_value::InternedString; +use models::{mime, DataUrl, PackageId}; +use tokio::fs::File; + use crate::prelude::*; +use crate::s9pk::manifest::Manifest; +use crate::s9pk::merkle_archive::file_contents::FileContents; use crate::s9pk::merkle_archive::sink::Sink; -use crate::s9pk::merkle_archive::source::{ArchiveSource, FileSource, Section}; -use crate::s9pk::merkle_archive::MerkleArchive; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section}; +use crate::s9pk::merkle_archive::{Entry, MerkleArchive}; +use crate::ARCH; const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x02]; -pub struct S9pk(MerkleArchive); +pub mod compat; +pub mod manifest; + +/** + / + ├── manifest.json + ├── icon. 
+ ├── LICENSE.md + ├── instructions.md + ├── javascript.squashfs + ├── assets + │ └── .squashfs (xN) + └── images + └── + ├── .env (xN) + └── .squashfs (xN) +*/ + +fn priority(s: &str) -> Option { + match s { + "manifest.json" => Some(0), + a if Path::new(a).file_stem() == Some(OsStr::new("icon")) => Some(1), + "LICENSE.md" => Some(2), + "instructions.md" => Some(3), + "javascript.squashfs" => Some(4), + "assets" => Some(5), + "images" => Some(6), + _ => None, + } +} + +fn filter(p: &Path) -> bool { + match p.iter().count() { + 1 if p.file_name() == Some(OsStr::new("manifest.json")) => true, + 1 if p.file_stem() == Some(OsStr::new("icon")) => true, + 1 if p.file_name() == Some(OsStr::new("LICENSE.md")) => true, + 1 if p.file_name() == Some(OsStr::new("instructions.md")) => true, + 1 if p.file_name() == Some(OsStr::new("javascript.squashfs")) => true, + 1 if p.file_name() == Some(OsStr::new("assets")) => true, + 1 if p.file_name() == Some(OsStr::new("images")) => true, + 2 if p.parent() == Some(Path::new("assets")) => { + p.extension().map_or(false, |ext| ext == "squashfs") + } + 2 if p.parent() == Some(Path::new("images")) => p.file_name() == Some(OsStr::new(&*ARCH)), + 3 if p.parent() == Some(&*Path::new("images").join(&*ARCH)) => p + .extension() + .map_or(false, |ext| ext == "squashfs" || ext == "env"), + _ => false, + } +} + +#[derive(Clone)] +pub struct S9pk> { + manifest: Manifest, + manifest_dirty: bool, + archive: MerkleArchive, + size: Option, +} +impl S9pk { + pub fn as_manifest(&self) -> &Manifest { + &self.manifest + } + pub fn as_manifest_mut(&mut self) -> &mut Manifest { + self.manifest_dirty = true; + &mut self.manifest + } + pub fn as_archive(&self) -> &MerkleArchive { + &self.archive + } + pub fn as_archive_mut(&mut self) -> &mut MerkleArchive { + &mut self.archive + } + pub fn size(&self) -> Option { + self.size + } +} + impl S9pk { + pub async fn new(archive: MerkleArchive, size: Option) -> Result { + let manifest = extract_manifest(&archive).await?; + Ok(Self { + manifest, + manifest_dirty: false, + archive, + size, + }) + } + + pub async fn icon(&self) -> Result<(InternedString, FileContents), Error> { + let mut best_icon = None; + for (path, icon) in self + .archive + .contents() + .with_stem("icon") + .filter(|(p, _)| { + Path::new(&*p) + .extension() + .and_then(|e| e.to_str()) + .and_then(mime) + .map_or(false, |e| e.starts_with("image/")) + }) + .filter_map(|(k, v)| v.into_file().map(|f| (k, f))) + { + let size = icon.size().await?; + best_icon = match best_icon { + Some((s, a)) if s >= size => Some((s, a)), + _ => Some((size, (path, icon))), + }; + } + best_icon + .map(|(_, a)| a) + .ok_or_else(|| Error::new(eyre!("no icon found in archive"), ErrorKind::ParseS9pk)) + } + + pub async fn icon_data_url(&self) -> Result, Error> { + let (name, contents) = self.icon().await?; + let mime = Path::new(&*name) + .extension() + .and_then(|e| e.to_str()) + .and_then(mime) + .unwrap_or("image/png"); + DataUrl::from_reader(mime, contents.reader().await?, Some(contents.size().await?)).await + } + pub async fn serialize(&mut self, w: &mut W, verify: bool) -> Result<(), Error> { use tokio::io::AsyncWriteExt; w.write_all(MAGIC_AND_VERSION).await?; - self.0.serialize(w, verify).await?; + if !self.manifest_dirty { + self.archive.serialize(w, verify).await?; + } else { + let mut dyn_s9pk = self.clone().into_dyn(); + dyn_s9pk.as_archive_mut().contents_mut().insert_path( + "manifest.json", + Entry::file(DynFileSource::new(Arc::<[u8]>::from( + 
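// --- Editor's aside (illustrative sketch, not part of the patch) ------------------------
// The `priority` table above is used a little further down, in `deserialize`, to order the
// archive's top-level entries: known metadata (manifest, icon, license, instructions)
// sorts ahead of the large squashfs payloads, and unknown names sink to the end. The
// ordering rule in isolation, applied to a plain list of entry names:
use std::cmp::Ordering;

fn sort_entries(names: &mut Vec<&str>, priority: impl Fn(&str) -> Option<usize>) {
    names.sort_by(|a, b| match (priority(a), priority(b)) {
        (Some(a), Some(b)) => a.cmp(&b),
        (Some(_), None) => Ordering::Less,
        (None, Some(_)) => Ordering::Greater,
        (None, None) => Ordering::Equal,
    });
}
// ----------------------------------------------------------------------------------------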
serde_json::to_vec(&self.manifest).with_kind(ErrorKind::Serialization)?, + ))), + )?; + dyn_s9pk.archive.serialize(w, verify).await?; + } Ok(()) } + + pub fn into_dyn(self) -> S9pk { + S9pk { + manifest: self.manifest, + manifest_dirty: self.manifest_dirty, + archive: self.archive.into_dyn(), + size: self.size, + } + } } impl S9pk> { + #[instrument(skip_all)] pub async fn deserialize(source: &S) -> Result { use tokio::io::AsyncReadExt; @@ -36,6 +191,46 @@ impl S9pk> { "Invalid Magic or Unexpected Version" ); - Ok(Self(MerkleArchive::deserialize(source, &mut header).await?)) + let mut archive = MerkleArchive::deserialize(source, &mut header).await?; + + archive.filter(filter)?; + + archive.sort_by(|a, b| match (priority(a), priority(b)) { + (Some(a), Some(b)) => a.cmp(&b), + (Some(_), None) => std::cmp::Ordering::Less, + (None, Some(_)) => std::cmp::Ordering::Greater, + (None, None) => std::cmp::Ordering::Equal, + }); + + Self::new(archive, source.size().await).await + } +} +impl S9pk { + pub async fn from_file(file: File) -> Result { + Self::deserialize(&MultiCursorFile::from(file)).await + } + pub async fn open(path: impl AsRef, id: Option<&PackageId>) -> Result { + let res = Self::from_file(tokio::fs::File::open(path).await?).await?; + if let Some(id) = id { + ensure_code!( + &res.as_manifest().id == id, + ErrorKind::ValidateS9pk, + "manifest.id does not match expected" + ); + } + Ok(res) } } + +async fn extract_manifest(archive: &MerkleArchive) -> Result { + let manifest = serde_json::from_slice( + &archive + .contents() + .get_path("manifest.json") + .or_not_found("manifest.json")? + .read_file_to_vec() + .await?, + ) + .with_kind(ErrorKind::Deserialization)?; + Ok(manifest) +} diff --git a/core/startos/src/service/cli.rs b/core/startos/src/service/cli.rs new file mode 100644 index 0000000000..d3bdccd72b --- /dev/null +++ b/core/startos/src/service/cli.rs @@ -0,0 +1,66 @@ +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use clap::Parser; +use imbl_value::Value; +use once_cell::sync::OnceCell; +use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{call_remote_socket, yajrc, CallRemote, Context}; +use tokio::runtime::Runtime; + +use crate::lxc::HOST_RPC_SERVER_SOCKET; + +#[derive(Debug, Default, Parser)] +pub struct ContainerClientConfig { + #[arg(long = "socket")] + pub socket: Option, +} + +pub struct ContainerCliSeed { + socket: PathBuf, + runtime: OnceCell, +} + +#[derive(Clone)] +pub struct ContainerCliContext(Arc); +impl ContainerCliContext { + pub fn init(cfg: ContainerClientConfig) -> Self { + Self(Arc::new(ContainerCliSeed { + socket: cfg + .socket + .unwrap_or_else(|| Path::new("/").join(HOST_RPC_SERVER_SOCKET)), + runtime: OnceCell::new(), + })) + } +} +impl Context for ContainerCliContext { + fn runtime(&self) -> tokio::runtime::Handle { + self.0 + .runtime + .get_or_init(|| { + tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap() + }) + .handle() + .clone() + } +} + +#[async_trait::async_trait] +impl CallRemote for ContainerCliContext { + async fn call_remote(&self, method: &str, params: Value) -> Result { + call_remote_socket( + tokio::net::UnixStream::connect(&self.0.socket) + .await + .map_err(|e| RpcError { + data: Some(e.to_string().into()), + ..yajrc::INTERNAL_ERROR + })?, + method, + params, + ) + .await + } +} diff --git a/core/startos/src/service/config.rs b/core/startos/src/service/config.rs new file mode 100644 index 0000000000..e1294a465e --- /dev/null +++ b/core/startos/src/service/config.rs @@ -0,0 +1,19 @@ +use 
models::ProcedureName; + +use crate::config::ConfigureContext; +use crate::prelude::*; +use crate::service::Service; + +impl Service { + pub async fn configure( + &self, + ConfigureContext { timeout, config }: ConfigureContext, + ) -> Result<(), Error> { + let container = &self.seed.persistent_container; + container + .execute::(ProcedureName::SetConfig, to_value(&config)?, timeout) + .await + .with_kind(ErrorKind::Action)?; + Ok(()) + } +} diff --git a/core/startos/src/service/control.rs b/core/startos/src/service/control.rs new file mode 100644 index 0000000000..88d66d97cd --- /dev/null +++ b/core/startos/src/service/control.rs @@ -0,0 +1,46 @@ +use crate::prelude::*; +use crate::service::start_stop::StartStop; +use crate::service::transition::TransitionKind; +use crate::service::{Service, ServiceActor}; +use crate::util::actor::{BackgroundJobs, Handler}; + +struct Start; +#[async_trait::async_trait] +impl Handler for ServiceActor { + type Response = (); + async fn handle(&mut self, _: Start, _: &mut BackgroundJobs) -> Self::Response { + self.0.persistent_container.state.send_modify(|x| { + x.desired_state = StartStop::Start; + }); + self.0.synchronized.notified().await + } +} +impl Service { + pub async fn start(&self) -> Result<(), Error> { + self.actor.send(Start).await + } +} + +struct Stop; +#[async_trait::async_trait] +impl Handler for ServiceActor { + type Response = (); + async fn handle(&mut self, _: Stop, _: &mut BackgroundJobs) -> Self::Response { + let mut transition_state = None; + self.0.persistent_container.state.send_modify(|x| { + x.desired_state = StartStop::Stop; + if x.transition_state.as_ref().map(|x| x.kind()) == Some(TransitionKind::Restarting) { + transition_state = std::mem::take(&mut x.transition_state); + } + }); + if let Some(restart) = transition_state { + restart.abort().await; + } + self.0.synchronized.notified().await + } +} +impl Service { + pub async fn stop(&self) -> Result<(), Error> { + self.actor.send(Stop).await + } +} diff --git a/core/startos/src/service/fake.cert.key b/core/startos/src/service/fake.cert.key new file mode 100644 index 0000000000..a4eb56cb7f --- /dev/null +++ b/core/startos/src/service/fake.cert.key @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEINn5jiv9VFgEwdUJsDksSTAjPKwkl2DCmCmumu4D1GnNoAoGCCqGSM49 +AwEHoUQDQgAE5KuqP+Wdn8pzmNMxK2hya6mKj1H0j5b47y97tIXqf5ajTi8koRPl +yao3YcqdtBtN37aw4rVlXVwEJIozZgyiyA== +-----END EC PRIVATE KEY----- \ No newline at end of file diff --git a/core/startos/src/service/fake.cert.pem b/core/startos/src/service/fake.cert.pem new file mode 100644 index 0000000000..fdacaff160 --- /dev/null +++ b/core/startos/src/service/fake.cert.pem @@ -0,0 +1,13 @@ +-----BEGIN CERTIFICATE----- +MIIB9DCCAZmgAwIBAgIUIWsFiA8JqIqeUo+Psn91oCQIcdwwCgYIKoZIzj0EAwIw +TzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNPMRowGAYDVQQKDBFTdGFydDkgTGFi +cywgSW5jLjEXMBUGA1UEAwwOZmFrZW5hbWUubG9jYWwwHhcNMjQwMjE0MTk1MTUz +WhcNMjUwMjEzMTk1MTUzWjBPMQswCQYDVQQGEwJVUzELMAkGA1UECAwCQ08xGjAY +BgNVBAoMEVN0YXJ0OSBMYWJzLCBJbmMuMRcwFQYDVQQDDA5mYWtlbmFtZS5sb2Nh +bDBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABOSrqj/lnZ/Kc5jTMStocmupio9R +9I+W+O8ve7SF6n+Wo04vJKET5cmqN2HKnbQbTd+2sOK1ZV1cBCSKM2YMosijUzBR +MB0GA1UdDgQWBBR+qd4W//H34Eg90yAPjYz3nZK79DAfBgNVHSMEGDAWgBR+qd4W +//H34Eg90yAPjYz3nZK79DAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49BAMCA0kA +MEYCIQDNSN9YWkGbntG+nC+NzEyqE9FcvYZ8TaF3sOnthqSVKwIhAM2N+WJG/p4C +cPl4HSPPgDaOIhVZzxSje2ycb7wvFtpH +-----END CERTIFICATE----- \ No newline at end of file diff --git a/core/startos/src/service/mod.rs b/core/startos/src/service/mod.rs new 
file mode 100644 index 0000000000..a10857c5a6 --- /dev/null +++ b/core/startos/src/service/mod.rs @@ -0,0 +1,536 @@ +use std::sync::Arc; +use std::time::Duration; + +use chrono::{DateTime, Utc}; +use clap::Parser; +use futures::future::BoxFuture; +use imbl::OrdMap; +use models::{ActionId, HealthCheckId, PackageId, ProcedureName}; +use persistent_container::PersistentContainer; +use rpc_toolkit::{from_fn_async, CallRemoteHandler, Empty, Handler, HandlerArgs}; +use serde::{Deserialize, Serialize}; +use start_stop::StartStop; +use tokio::sync::Notify; + +use crate::config::action::ConfigRes; +use crate::context::{CliContext, RpcContext}; +use crate::core::rpc_continuations::RequestGuid; +use crate::db::model::package::{ + InstalledState, PackageDataEntry, PackageState, PackageStateMatchModelRef, UpdatingState, +}; +use crate::disk::mount::guard::GenericMountGuard; +use crate::install::PKG_ARCHIVE_DIR; +use crate::prelude::*; +use crate::progress::{NamedProgress, Progress}; +use crate::s9pk::S9pk; +use crate::service::service_map::InstallProgressHandles; +use crate::service::transition::TransitionKind; +use crate::status::health_check::HealthCheckResult; +use crate::status::MainStatus; +use crate::util::actor::{Actor, BackgroundJobs, SimpleActor}; +use crate::volume::data_dir; +use crate::{action::ActionResult, util::serde::Pem}; + +pub mod cli; +mod config; +mod control; +pub mod persistent_container; +mod rpc; +pub mod service_effect_handler; +pub mod service_map; +mod start_stop; +mod transition; +mod util; + +pub use service_map::ServiceMap; + +pub const HEALTH_CHECK_COOLDOWN_SECONDS: u64 = 15; +pub const HEALTH_CHECK_GRACE_PERIOD_SECONDS: u64 = 5; +pub const SYNC_RETRY_COOLDOWN_SECONDS: u64 = 10; + +pub type Task<'a> = BoxFuture<'a, Result<(), Error>>; + +/// TODO +pub enum BackupReturn { + TODO, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum LoadDisposition { + Retry, + Undo, +} + +pub struct Service { + actor: SimpleActor, + seed: Arc, +} +impl Service { + #[instrument(skip_all)] + async fn new(ctx: RpcContext, s9pk: S9pk, start: StartStop) -> Result { + let id = s9pk.as_manifest().id.clone(); + let persistent_container = PersistentContainer::new( + &ctx, s9pk, + start, + // desired_state.subscribe(), + // temp_desired_state.subscribe(), + ) + .await?; + let seed = Arc::new(ServiceActorSeed { + id, + persistent_container, + ctx, + synchronized: Arc::new(Notify::new()), + }); + seed.persistent_container + .init(Arc::downgrade(&seed)) + .await?; + Ok(Self { + actor: SimpleActor::new(ServiceActor(seed.clone())), + seed, + }) + } + + #[instrument(skip_all)] + pub async fn load( + ctx: &RpcContext, + id: &PackageId, + disposition: LoadDisposition, + ) -> Result, Error> { + let handle_installed = { + let ctx = ctx.clone(); + move |s9pk: S9pk, i: Model| async move { + for volume_id in &s9pk.as_manifest().volumes { + let tmp_path = + data_dir(&ctx.datadir, &s9pk.as_manifest().id.clone(), volume_id); + if tokio::fs::metadata(&tmp_path).await.is_err() { + tokio::fs::create_dir_all(&tmp_path).await?; + } + } + let start_stop = if i.as_status().as_main().de()?.running() { + StartStop::Start + } else { + StartStop::Stop + }; + Self::new(ctx, s9pk, start_stop).await.map(Some) + } + }; + let s9pk_dir = ctx.datadir.join(PKG_ARCHIVE_DIR).join("installed"); // TODO: make this based on hash + let s9pk_path = s9pk_dir.join(id).with_extension("s9pk"); + let Some(entry) = ctx + .db + .peek() + .await + .into_public() + .into_package_data() + .into_idx(id) + else { + return 
Ok(None); + }; + match entry.as_state_info().as_match() { + PackageStateMatchModelRef::Installing(_) => { + if disposition == LoadDisposition::Retry { + if let Ok(s9pk) = S9pk::open(s9pk_path, Some(id)).await.map_err(|e| { + tracing::error!("Error opening s9pk for install: {e}"); + tracing::debug!("{e:?}") + }) { + if let Ok(service) = Self::install(ctx.clone(), s9pk, None, None) + .await + .map_err(|e| { + tracing::error!("Error installing service: {e}"); + tracing::debug!("{e:?}") + }) + { + return Ok(Some(service)); + } + } + } + // TODO: delete s9pk? + ctx.db + .mutate(|v| v.as_public_mut().as_package_data_mut().remove(id)) + .await?; + Ok(None) + } + PackageStateMatchModelRef::Updating(s) => { + if disposition == LoadDisposition::Retry + && s.as_installing_info() + .as_progress() + .de()? + .phases + .iter() + .any(|NamedProgress { name, progress }| { + name.eq_ignore_ascii_case("download") + && progress == &Progress::Complete(true) + }) + { + if let Ok(s9pk) = S9pk::open(&s9pk_path, Some(id)).await.map_err(|e| { + tracing::error!("Error opening s9pk for update: {e}"); + tracing::debug!("{e:?}") + }) { + if let Ok(service) = Self::install( + ctx.clone(), + s9pk, + Some(s.as_manifest().as_version().de()?), + None, + ) + .await + .map_err(|e| { + tracing::error!("Error installing service: {e}"); + tracing::debug!("{e:?}") + }) { + return Ok(Some(service)); + } + } + } + let s9pk = S9pk::open(s9pk_path, Some(id)).await?; + ctx.db + .mutate({ + |db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(&id) + .or_not_found(&id)? + .as_state_info_mut() + .map_mutate(|s| { + if let PackageState::Updating(UpdatingState { + manifest, .. + }) = s + { + Ok(PackageState::Installed(InstalledState { manifest })) + } else { + Err(Error::new(eyre!("Race condition detected - package state changed during load"), ErrorKind::Database)) + } + }) + } + }) + .await?; + handle_installed(s9pk, entry).await + } + PackageStateMatchModelRef::Removing(_) | PackageStateMatchModelRef::Restoring(_) => { + if let Ok(s9pk) = S9pk::open(s9pk_path, Some(id)).await.map_err(|e| { + tracing::error!("Error opening s9pk for removal: {e}"); + tracing::debug!("{e:?}") + }) { + if let Ok(service) = Self::new(ctx.clone(), s9pk, StartStop::Stop) + .await + .map_err(|e| { + tracing::error!("Error loading service for removal: {e}"); + tracing::debug!("{e:?}") + }) + { + if service + .uninstall(None) + .await + .map_err(|e| { + tracing::error!("Error uninstalling service: {e}"); + tracing::debug!("{e:?}") + }) + .is_ok() + { + return Ok(None); + } + } + } + + ctx.db + .mutate(|v| v.as_public_mut().as_package_data_mut().remove(id)) + .await?; + + Ok(None) + } + PackageStateMatchModelRef::Installed(_) => { + handle_installed(S9pk::open(s9pk_path, Some(id)).await?, entry).await + } + PackageStateMatchModelRef::Error(e) => Err(Error::new( + eyre!("Failed to parse PackageDataEntry, found {e:?}"), + ErrorKind::Deserialization, + )), + } + } + + #[instrument(skip_all)] + pub async fn install( + ctx: RpcContext, + s9pk: S9pk, + src_version: Option, + progress: Option, + ) -> Result { + let manifest = s9pk.as_manifest().clone(); + let developer_key = s9pk.as_archive().signer(); + let icon = s9pk.icon_data_url().await?; + let service = Self::new(ctx.clone(), s9pk, StartStop::Stop).await?; + service + .seed + .persistent_container + .execute(ProcedureName::Init, to_value(&src_version)?, None) // TODO timeout + .await + .with_kind(ErrorKind::MigrationFailed)?; // TODO: handle cancellation + if let Some(mut progress) = progress { + 
progress.finalization_progress.complete(); + progress.progress_handle.complete(); + tokio::task::yield_now().await; + } + ctx.db + .mutate(|d| { + let entry = d + .as_public_mut() + .as_package_data_mut() + .as_idx_mut(&manifest.id) + .or_not_found(&manifest.id)?; + entry + .as_state_info_mut() + .ser(&PackageState::Installed(InstalledState { manifest }))?; + entry.as_developer_key_mut().ser(&Pem::new(developer_key))?; + entry.as_icon_mut().ser(&icon)?; + // TODO: marketplace url + // TODO: dependency info + Ok(()) + }) + .await?; + Ok(service) + } + + pub async fn restore( + ctx: RpcContext, + s9pk: S9pk, + guard: impl GenericMountGuard, + progress: Option, + ) -> Result { + // TODO + Err(Error::new(eyre!("not yet implemented"), ErrorKind::Unknown)) + } + + pub async fn get_config(&self) -> Result { + let container = &self.seed.persistent_container; + container + .execute::( + ProcedureName::GetConfig, + Value::Null, + Some(Duration::from_secs(30)), // TODO timeout + ) + .await + .with_kind(ErrorKind::ConfigGen) + } + + // TODO DO the Action Get + + pub async fn action(&self, id: ActionId, input: Value) -> Result { + let container = &self.seed.persistent_container; + container + .execute::( + ProcedureName::RunAction(id), + input, + Some(Duration::from_secs(30)), + ) + .await + .with_kind(ErrorKind::Action) + } + pub async fn properties(&self) -> Result { + let container = &self.seed.persistent_container; + container + .execute::( + ProcedureName::Properties, + Value::Null, + Some(Duration::from_secs(30)), + ) + .await + .with_kind(ErrorKind::Unknown) + } + + pub async fn shutdown(self) -> Result<(), Error> { + self.actor + .shutdown(crate::util::actor::PendingMessageStrategy::FinishAll { timeout: None }) // TODO timeout + .await; + if let Some((hdl, shutdown)) = self.seed.persistent_container.rpc_server.send_replace(None) + { + self.seed + .persistent_container + .rpc_client + .request(rpc::Exit, Empty {}) + .await?; + shutdown.shutdown(); + hdl.await.with_kind(ErrorKind::Cancelled)?; + } + Arc::try_unwrap(self.seed) + .map_err(|_| { + Error::new( + eyre!("ServiceActorSeed held somewhere after actor shutdown"), + ErrorKind::Unknown, + ) + })? 
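+            // Arc::try_unwrap succeeded, so this is the last reference to the seed and the container can be torn down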
+ .persistent_container + .exit() + .await?; + Ok(()) + } + + pub async fn uninstall(self, target_version: Option) -> Result<(), Error> { + self.seed + .persistent_container + .execute(ProcedureName::Uninit, to_value(&target_version)?, None) // TODO timeout + .await?; + let id = self.seed.persistent_container.s9pk.as_manifest().id.clone(); + self.seed + .ctx + .db + .mutate(|d| d.as_public_mut().as_package_data_mut().remove(&id)) + .await?; + self.shutdown().await + } + pub async fn backup(&self, _guard: impl GenericMountGuard) -> Result { + // TODO + Err(Error::new(eyre!("not yet implemented"), ErrorKind::Unknown)) + } +} + +#[derive(Debug, Clone)] +struct RunningStatus { + health: OrdMap, + started: DateTime, +} + +struct ServiceActorSeed { + ctx: RpcContext, + id: PackageId, + /// Needed to interact with the container for the service + persistent_container: PersistentContainer, + /// This is notified every time the background job created in ServiceActor::init responds to a change + synchronized: Arc, +} + +impl ServiceActorSeed { + /// Used to indicate that we have finished the task of starting the service + pub fn started(&self) { + self.persistent_container.state.send_modify(|state| { + state.running_status = + Some( + state + .running_status + .take() + .unwrap_or_else(|| RunningStatus { + health: Default::default(), + started: Utc::now(), + }), + ); + }); + } + /// Used to indicate that we have finished the task of stopping the service + pub fn stopped(&self) { + self.persistent_container.state.send_modify(|state| { + state.running_status = None; + }); + } +} +struct ServiceActor(Arc); + +impl Actor for ServiceActor { + fn init(&mut self, jobs: &mut BackgroundJobs) { + let seed = self.0.clone(); + jobs.add_job(async move { + let id = seed.id.clone(); + let mut current = seed.persistent_container.state.subscribe(); + + loop { + let kinds = dbg!(current.borrow().kinds()); + + if let Err(e) = async { + let main_status = match ( + kinds.transition_state, + kinds.desired_state, + kinds.running_status, + ) { + (Some(TransitionKind::Restarting), _, _) => MainStatus::Restarting, + (Some(TransitionKind::BackingUp), _, Some(status)) => { + MainStatus::BackingUp { + started: Some(status.started), + health: status.health.clone(), + } + } + (Some(TransitionKind::BackingUp), _, None) => MainStatus::BackingUp { + started: None, + health: OrdMap::new(), + }, + (None, StartStop::Stop, None) => MainStatus::Stopped, + (None, StartStop::Stop, Some(_)) => MainStatus::Stopping { + timeout: seed.persistent_container.stop().await?.into(), + }, + (None, StartStop::Start, Some(status)) => MainStatus::Running { + started: status.started, + health: status.health.clone(), + }, + (None, StartStop::Start, None) => { + seed.persistent_container.start().await?; + MainStatus::Starting + } + }; + seed.ctx + .db + .mutate(|d| { + if let Some(i) = d.as_public_mut().as_package_data_mut().as_idx_mut(&id) + { + i.as_status_mut().as_main_mut().ser(&main_status)?; + } + Ok(()) + }) + .await?; + + Ok::<_, Error>(()) + } + .await + { + tracing::error!("error synchronizing state of service: {e}"); + tracing::debug!("{e:?}"); + + seed.synchronized.notify_waiters(); + + tracing::error!("Retrying in {}s...", SYNC_RETRY_COOLDOWN_SECONDS); + tokio::time::sleep(Duration::from_secs(SYNC_RETRY_COOLDOWN_SECONDS)).await; + continue; + } + + seed.synchronized.notify_waiters(); + + tokio::select! 
{ + _ = current.changed() => (), + } + } + }) + } +} + +#[derive(Deserialize, Serialize, Parser)] +pub struct ConnectParams { + pub id: PackageId, +} + +pub async fn connect_rpc( + ctx: RpcContext, + ConnectParams { id }: ConnectParams, +) -> Result { + let id_ref = &id; + crate::lxc::connect( + &ctx, + ctx.services + .get(&id) + .await + .as_ref() + .or_not_found(lazy_format!("service for {id_ref}"))? + .seed + .persistent_container + .lxc_container + .get() + .or_not_found(lazy_format!("container for {id_ref}"))?, + ) + .await +} + +pub async fn connect_rpc_cli( + handle_args: HandlerArgs, +) -> Result<(), Error> { + let ctx = handle_args.context.clone(); + let guid = CallRemoteHandler::::new(from_fn_async(connect_rpc)) + .handle_async(handle_args) + .await?; + + crate::lxc::connect_cli(&ctx, guid).await +} diff --git a/core/startos/src/service/persistent_container.rs b/core/startos/src/service/persistent_container.rs new file mode 100644 index 0000000000..038661aced --- /dev/null +++ b/core/startos/src/service/persistent_container.rs @@ -0,0 +1,414 @@ +use std::collections::BTreeMap; +use std::path::Path; +use std::sync::{Arc, Weak}; +use std::time::Duration; + +use futures::future::ready; +use futures::Future; +use helpers::NonDetachingJoinHandle; +use imbl_value::InternedString; +use models::{ProcedureName, VolumeId}; +use rpc_toolkit::{Empty, Server, ShutdownHandle}; +use serde::de::DeserializeOwned; +use tokio::fs::File; +use tokio::process::Command; +use tokio::sync::{oneshot, watch, Mutex, OnceCell}; +use tracing::instrument; + +use super::service_effect_handler::{service_effect_handler, EffectContext}; +use super::transition::{TransitionKind, TransitionState}; +use super::ServiceActorSeed; +use crate::context::RpcContext; +use crate::disk::mount::filesystem::bind::Bind; +use crate::disk::mount::filesystem::idmapped::IdMapped; +use crate::disk::mount::filesystem::loop_dev::LoopDev; +use crate::disk::mount::filesystem::overlayfs::OverlayGuard; +use crate::disk::mount::filesystem::{MountType, ReadOnly}; +use crate::disk::mount::guard::MountGuard; +use crate::lxc::{LxcConfig, LxcContainer, HOST_RPC_SERVER_SOCKET}; +use crate::net::net_controller::NetService; +use crate::prelude::*; +use crate::s9pk::merkle_archive::source::FileSource; +use crate::s9pk::S9pk; +use crate::service::start_stop::StartStop; +use crate::service::{rpc, RunningStatus}; +use crate::util::rpc_client::UnixRpcClient; +use crate::util::Invoke; +use crate::volume::{asset_dir, data_dir}; +use crate::ARCH; + +const RPC_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); + +struct ProcedureId(u64); + +#[derive(Debug)] +pub struct ServiceState { + // This contains the start time and health check information for when the service is running. Note: Will be overwritting to the db, + pub(super) running_status: Option, + /// Setting this value causes the service actor to try to bring the service to the specified state. This is done in the background job created in ServiceActor::init + pub(super) desired_state: StartStop, + /// Override the current desired state for the service during a transition (this is protected by a guard that sets this value to null on drop) + pub(super) temp_desired_state: Option, + /// This represents a currently running task that affects the service's shown state, such as BackingUp or Restarting. 
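+    /// (the Stop handler, for example, takes and aborts a pending Restarting transition before stopping)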
+ pub(super) transition_state: Option, +} + +#[derive(Debug)] +pub struct ServiceStateKinds { + pub transition_state: Option, + pub running_status: Option, + pub desired_state: StartStop, +} + +impl ServiceState { + pub fn new(desired_state: StartStop) -> Self { + Self { + running_status: Default::default(), + temp_desired_state: Default::default(), + transition_state: Default::default(), + desired_state, + } + } + pub fn kinds(&self) -> ServiceStateKinds { + ServiceStateKinds { + transition_state: self.transition_state.as_ref().map(|x| x.kind()), + desired_state: self.temp_desired_state.unwrap_or(self.desired_state), + running_status: self.running_status.clone(), + } + } +} + +// @DRB On top of this we need to also have the procedures to have the effects and get the results back for them, maybe lock them to the running instance? +/// This contains the LXC container running the javascript init system +/// that can be used via a JSON RPC Client connected to a unix domain +/// socket served by the container +pub struct PersistentContainer { + pub(super) s9pk: S9pk, + pub(super) lxc_container: OnceCell, + pub(super) rpc_client: UnixRpcClient, + pub(super) rpc_server: watch::Sender, ShutdownHandle)>>, + // procedures: Mutex>, + js_mount: MountGuard, + volumes: BTreeMap, + assets: BTreeMap, + pub(super) overlays: Arc>>, + pub(super) state: Arc>, + pub(super) net_service: Mutex, +} + +impl PersistentContainer { + #[instrument(skip_all)] + pub async fn new(ctx: &RpcContext, s9pk: S9pk, start: StartStop) -> Result { + let lxc_container = ctx.lxc_manager.create(LxcConfig::default()).await?; + let rpc_client = lxc_container.connect_rpc(Some(RPC_CONNECT_TIMEOUT)).await?; + let js_mount = MountGuard::mount( + &LoopDev::from( + &**s9pk + .as_archive() + .contents() + .get_path("javascript.squashfs") + .and_then(|f| f.as_file()) + .or_not_found("javascript")?, + ), + lxc_container.rootfs_dir().join("usr/lib/startos/package"), + ReadOnly, + ) + .await?; + let mut volumes = BTreeMap::new(); + for volume in &s9pk.as_manifest().volumes { + let mountpoint = lxc_container + .rootfs_dir() + .join("media/startos/volumes") + .join(volume); + tokio::fs::create_dir_all(&mountpoint).await?; + Command::new("chown") + .arg("100000:100000") + .arg(&mountpoint) + .invoke(crate::ErrorKind::Filesystem) + .await?; + let mount = MountGuard::mount( + &IdMapped::new( + Bind::new(data_dir(&ctx.datadir, &s9pk.as_manifest().id, volume)), + 0, + 100000, + 65536, + ), + mountpoint, + MountType::ReadWrite, + ) + .await?; + volumes.insert(volume.clone(), mount); + } + let mut assets = BTreeMap::new(); + for asset in &s9pk.as_manifest().assets { + let mountpoint = lxc_container + .rootfs_dir() + .join("media/startos/assets") + .join(asset); + tokio::fs::create_dir_all(&mountpoint).await?; + Command::new("chown") + .arg("100000:100000") + .arg(&mountpoint) + .invoke(crate::ErrorKind::Filesystem) + .await?; + assets.insert( + asset.clone(), + MountGuard::mount( + &Bind::new( + asset_dir( + &ctx.datadir, + &s9pk.as_manifest().id, + &s9pk.as_manifest().version, + ) + .join(asset), + ), + mountpoint, + MountType::ReadWrite, + ) + .await?, + ); + } + let image_path = lxc_container.rootfs_dir().join("media/startos/images"); + tokio::fs::create_dir_all(&image_path).await?; + for image in &s9pk.as_manifest().images { + let env_filename = Path::new(image.as_ref()).with_extension("env"); + if let Some(env) = s9pk + .as_archive() + .contents() + .get_path(Path::new("images").join(&*ARCH).join(&env_filename)) + .and_then(|e| e.as_file()) + { + 
env.copy(&mut File::create(image_path.join(&env_filename)).await?) + .await?; + } + let json_filename = Path::new(image.as_ref()).with_extension("json"); + if let Some(json) = s9pk + .as_archive() + .contents() + .get_path(Path::new("images").join(&*ARCH).join(&json_filename)) + .and_then(|e| e.as_file()) + { + json.copy(&mut File::create(image_path.join(&json_filename)).await?) + .await?; + } + } + let net_service = ctx + .net_controller + .create_service(s9pk.as_manifest().id.clone(), lxc_container.ip().await?) + .await?; + Ok(Self { + s9pk, + lxc_container: OnceCell::new_with(Some(lxc_container)), + rpc_client, + rpc_server: watch::channel(None).0, + // procedures: Default::default(), + js_mount, + volumes, + assets, + overlays: Arc::new(Mutex::new(BTreeMap::new())), + state: Arc::new(watch::channel(ServiceState::new(start)).0), + net_service: Mutex::new(net_service), + }) + } + + #[instrument(skip_all)] + pub async fn init(&self, seed: Weak) -> Result<(), Error> { + let socket_server_context = EffectContext::new(seed); + let server = Server::new( + move || ready(Ok(socket_server_context.clone())), + service_effect_handler(), + ); + let path = self + .lxc_container + .get() + .ok_or_else(|| { + Error::new( + eyre!("PersistentContainer has been destroyed"), + ErrorKind::Incoherent, + ) + })? + .rpc_dir() + .join(HOST_RPC_SERVER_SOCKET); + let (send, recv) = oneshot::channel(); + let handle = NonDetachingJoinHandle::from(tokio::spawn(async move { + let (shutdown, fut) = match async { + let res = server.run_unix(&path, |err| { + tracing::error!("error on unix socket {}: {err}", path.display()) + })?; + Command::new("chown") + .arg("100000:100000") + .arg(&path) + .invoke(ErrorKind::Filesystem) + .await?; + Ok::<_, Error>(res) + } + .await + { + Ok((shutdown, fut)) => (Ok(shutdown), Some(fut)), + Err(e) => (Err(e), None), + }; + if send.send(shutdown).is_err() { + panic!("failed to send shutdown handle"); + } + if let Some(fut) = fut { + fut.await; + } + })); + let shutdown = recv.await.map_err(|_| { + Error::new( + eyre!("unix socket server thread panicked"), + ErrorKind::Unknown, + ) + })??; + if self + .rpc_server + .send_replace(Some((handle, shutdown))) + .is_some() + { + return Err(Error::new( + eyre!("PersistentContainer already initialized"), + ErrorKind::InvalidRequest, + )); + } + + self.rpc_client.request(rpc::Init, Empty {}).await?; + + Ok(()) + } + + #[instrument(skip_all)] + fn destroy(&mut self) -> impl Future> + 'static { + let rpc_client = self.rpc_client.clone(); + let rpc_server = self.rpc_server.send_replace(None); + let js_mount = self.js_mount.take(); + let volumes = std::mem::take(&mut self.volumes); + let assets = std::mem::take(&mut self.assets); + let overlays = self.overlays.clone(); + let lxc_container = self.lxc_container.take(); + async move { + let mut errs = ErrorCollection::new(); + if let Some((hdl, shutdown)) = rpc_server { + errs.handle(rpc_client.request(rpc::Exit, Empty {}).await); + shutdown.shutdown(); + errs.handle(hdl.await.with_kind(ErrorKind::Cancelled)); + } + for (_, volume) in volumes { + errs.handle(volume.unmount(true).await); + } + for (_, assets) in assets { + errs.handle(assets.unmount(true).await); + } + for (_, overlay) in std::mem::take(&mut *overlays.lock().await) { + errs.handle(overlay.unmount(true).await); + } + errs.handle(js_mount.unmount(true).await); + if let Some(lxc_container) = lxc_container { + errs.handle(lxc_container.exit().await); + } + errs.into_result() + } + } + + #[instrument(skip_all)] + pub async fn exit(mut self) 
-> Result<(), Error> { + self.destroy().await?; + + Ok(()) + } + + #[instrument(skip_all)] + pub async fn start(&self) -> Result<(), Error> { + self.execute( + ProcedureName::StartMain, + Value::Null, + Some(Duration::from_secs(5)), // TODO + ) + .await?; + Ok(()) + } + + #[instrument(skip_all)] + pub async fn stop(&self) -> Result { + let timeout: Option = self + .execute(ProcedureName::StopMain, Value::Null, None) + .await?; + Ok(timeout.map(|a| *a).unwrap_or(Duration::from_secs(30))) + } + + #[instrument(skip_all)] + pub async fn execute( + &self, + name: ProcedureName, + input: Value, + timeout: Option, + ) -> Result + where + O: DeserializeOwned, + { + self._execute(name, input, timeout) + .await + .and_then(from_value) + } + + #[instrument(skip_all)] + pub async fn sanboxed( + &self, + name: ProcedureName, + input: Value, + timeout: Option, + ) -> Result + where + O: DeserializeOwned, + { + self._sandboxed(name, input, timeout) + .await + .and_then(from_value) + } + + #[instrument(skip_all)] + async fn _execute( + &self, + name: ProcedureName, + input: Value, + timeout: Option, + ) -> Result { + let fut = self + .rpc_client + .request(rpc::Execute, rpc::ExecuteParams::new(name, input, timeout)); + + Ok(if let Some(timeout) = timeout { + tokio::time::timeout(timeout, fut) + .await + .with_kind(ErrorKind::Timeout)?? + } else { + fut.await? + }) + } + + #[instrument(skip_all)] + async fn _sandboxed( + &self, + name: ProcedureName, + input: Value, + timeout: Option, + ) -> Result { + let fut = self + .rpc_client + .request(rpc::Sandbox, rpc::ExecuteParams::new(name, input, timeout)); + + Ok(if let Some(timeout) = timeout { + tokio::time::timeout(timeout, fut) + .await + .with_kind(ErrorKind::Timeout)?? + } else { + fut.await? + }) + } +} + +impl Drop for PersistentContainer { + fn drop(&mut self) { + let destroy = self.destroy(); + tokio::spawn(async move { destroy.await.unwrap() }); + } +} diff --git a/core/startos/src/service/rpc.rs b/core/startos/src/service/rpc.rs new file mode 100644 index 0000000000..6823a71890 --- /dev/null +++ b/core/startos/src/service/rpc.rs @@ -0,0 +1,96 @@ +use std::time::Duration; + +use imbl_value::Value; +use models::ProcedureName; +use rpc_toolkit::yajrc::RpcMethod; +use rpc_toolkit::Empty; + +use crate::prelude::*; + +#[derive(Clone)] +pub struct Init; +impl RpcMethod for Init { + type Params = Empty; + type Response = (); + fn as_str<'a>(&'a self) -> &'a str { + "init" + } +} +impl serde::Serialize for Init { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(self.as_str()) + } +} + +#[derive(Clone)] +pub struct Exit; +impl RpcMethod for Exit { + type Params = Empty; + type Response = (); + fn as_str<'a>(&'a self) -> &'a str { + "exit" + } +} +impl serde::Serialize for Exit { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(self.as_str()) + } +} + +#[derive(Clone, serde::Deserialize, serde::Serialize)] +pub struct ExecuteParams { + procedure: String, + input: Value, + timeout: Option, +} +impl ExecuteParams { + pub fn new(procedure: ProcedureName, input: Value, timeout: Option) -> Self { + Self { + procedure: procedure.js_function_name(), + input, + timeout: timeout.map(|d| d.as_millis()), + } + } +} + +#[derive(Clone)] +pub struct Execute; +impl RpcMethod for Execute { + type Params = ExecuteParams; + type Response = Value; + fn as_str<'a>(&'a self) -> &'a str { + "execute" + } +} +impl serde::Serialize for Execute { + fn 
serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(self.as_str()) + } +} + +#[derive(Clone)] +pub struct Sandbox; +impl RpcMethod for Sandbox { + type Params = ExecuteParams; + type Response = Value; + fn as_str<'a>(&'a self) -> &'a str { + "sandbox" + } +} +impl serde::Serialize for Sandbox { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(self.as_str()) + } +} diff --git a/core/startos/src/service/service_effect_handler.rs b/core/startos/src/service/service_effect_handler.rs new file mode 100644 index 0000000000..4041cbb985 --- /dev/null +++ b/core/startos/src/service/service_effect_handler.rs @@ -0,0 +1,1170 @@ +use std::collections::BTreeSet; +use std::ffi::OsString; +use std::net::Ipv4Addr; +use std::os::unix::process::CommandExt; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::sync::{Arc, Weak}; + +use clap::builder::ValueParserFactory; +use clap::Parser; +use emver::VersionRange; +use imbl::OrdMap; +use imbl_value::{json, InternedString}; +use models::{ActionId, HealthCheckId, HostId, ImageId, PackageId, VolumeId}; +use patch_db::json_ptr::JsonPointer; +use rpc_toolkit::{from_fn, from_fn_async, AnyContext, Context, Empty, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use tokio::process::Command; +use ts_rs::TS; +use url::Url; + +use crate::db::model::package::{ + ActionMetadata, CurrentDependencies, CurrentDependencyInfo, CurrentDependencyKind, +}; +use crate::disk::mount::filesystem::idmapped::IdMapped; +use crate::disk::mount::filesystem::loop_dev::LoopDev; +use crate::disk::mount::filesystem::overlayfs::OverlayGuard; +use crate::net::host::binding::BindOptions; +use crate::net::host::HostKind; +use crate::prelude::*; +use crate::s9pk::rpc::SKIP_ENV; +use crate::service::cli::ContainerCliContext; +use crate::service::ServiceActorSeed; +use crate::status::health_check::HealthCheckResult; +use crate::status::MainStatus; +use crate::util::clap::FromStrParser; +use crate::util::{new_guid, Invoke}; +use crate::{echo, ARCH}; + +#[derive(Clone)] +pub(super) struct EffectContext(Weak); +impl EffectContext { + pub fn new(seed: Weak) -> Self { + Self(seed) + } +} +impl Context for EffectContext {} +impl EffectContext { + fn deref(&self) -> Result, Error> { + if let Some(seed) = Weak::upgrade(&self.0) { + Ok(seed) + } else { + Err(Error::new( + eyre!("Service has already been destroyed"), + ErrorKind::InvalidRequest, + )) + } + } +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +struct RpcData { + id: i64, + method: String, + params: Value, +} +pub fn service_effect_handler() -> ParentHandler { + ParentHandler::new() + .subcommand("gitInfo", from_fn(crate::version::git_info)) + .subcommand( + "echo", + from_fn(echo).with_remote_cli::(), + ) + .subcommand("chroot", from_fn(chroot).no_display()) + .subcommand("exists", from_fn_async(exists).no_cli()) + .subcommand("executeAction", from_fn_async(execute_action).no_cli()) + .subcommand("getConfigured", from_fn_async(get_configured).no_cli()) + .subcommand( + "stopped", + from_fn_async(stopped) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "running", + from_fn_async(running) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "restart", + from_fn_async(restart) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "shutdown", + from_fn_async(shutdown) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "setConfigured", + 
from_fn_async(set_configured) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "setMainStatus", + from_fn_async(set_main_status).with_remote_cli::(), + ) + .subcommand("setHealth", from_fn_async(set_health).no_cli()) + .subcommand("getStore", from_fn_async(get_store).no_cli()) + .subcommand("setStore", from_fn_async(set_store).no_cli()) + .subcommand( + "exposeForDependents", + from_fn_async(expose_for_dependents).no_cli(), + ) + .subcommand( + "createOverlayedImage", + from_fn_async(create_overlayed_image) + .with_custom_display_fn::(|_, (path, _)| { + Ok(println!("{}", path.display())) + }) + .with_remote_cli::(), + ) + .subcommand( + "destroyOverlayedImage", + from_fn_async(destroy_overlayed_image).no_cli(), + ) + .subcommand( + "getSslCertificate", + from_fn_async(get_ssl_certificate).no_cli(), + ) + .subcommand("getSslKey", from_fn_async(get_ssl_key).no_cli()) + .subcommand( + "getServiceInterface", + from_fn_async(get_service_interface).no_cli(), + ) + .subcommand("clearBindings", from_fn_async(clear_bindings).no_cli()) + .subcommand("bind", from_fn_async(bind).no_cli()) + .subcommand("getHostInfo", from_fn_async(get_host_info).no_cli()) + .subcommand( + "setDependencies", + from_fn_async(set_dependencies) + .no_display() + .with_remote_cli::(), + ) + .subcommand("getSystemSmtp", from_fn_async(get_system_smtp).no_cli()) + .subcommand("getContainerIp", from_fn_async(get_container_ip).no_cli()) + .subcommand( + "getServicePortForward", + from_fn_async(get_service_port_forward).no_cli(), + ) + .subcommand( + "clearServiceInterfaces", + from_fn_async(clear_network_interfaces).no_cli(), + ) + .subcommand( + "exportServiceInterface", + from_fn_async(export_service_interface).no_cli(), + ) + .subcommand("getPrimaryUrl", from_fn_async(get_primary_url).no_cli()) + .subcommand( + "listServiceInterfaces", + from_fn_async(list_service_interfaces).no_cli(), + ) + .subcommand("removeAddress", from_fn_async(remove_address).no_cli()) + .subcommand("exportAction", from_fn_async(export_action).no_cli()) + .subcommand("removeAction", from_fn_async(remove_action).no_cli()) + .subcommand("reverseProxy", from_fn_async(reverse_proxy).no_cli()) + .subcommand("mount", from_fn_async(mount).no_cli()) + // TODO Callbacks +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct GetSystemSmtpParams { + callback: Callback, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct GetServicePortForwardParams { + #[ts(type = "string | null")] + package_id: Option, + internal_port: u32, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct AddressInfo { + username: Option, + host_id: String, + bind_options: BindOptions, + suffix: String, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +enum ServiceInterfaceType { + Ui, + P2p, + Api, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct ExportServiceInterfaceParams { + id: String, + name: String, + description: String, + has_primary: bool, + disabled: bool, + masked: bool, + address_info: AddressInfo, + r#type: ServiceInterfaceType, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct GetPrimaryUrlParams { + 
#[ts(type = "string | null")] + package_id: Option, + service_interface_id: String, + callback: Callback, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct ListServiceInterfacesParams { + #[ts(type = "string | null")] + package_id: Option, + callback: Callback, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct RemoveAddressParams { + id: String, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct ExportActionParams { + #[ts(type = "string")] + id: ActionId, + metadata: ActionMetadata, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct RemoveActionParams { + #[ts(type = "string")] + id: ActionId, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct ReverseProxyBind { + ip: Option, + port: u32, + ssl: bool, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct ReverseProxyDestination { + ip: Option, + port: u32, + ssl: bool, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct ReverseProxyHttp { + #[ts(type = "null | {[key: string]: string}")] + headers: Option>, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct ReverseProxyParams { + bind: ReverseProxyBind, + dst: ReverseProxyDestination, + http: ReverseProxyHttp, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct MountTarget { + #[ts(type = "string")] + package_id: PackageId, + #[ts(type = "string")] + volume_id: VolumeId, + subpath: Option, + readonly: bool, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct MountParams { + location: String, + target: MountTarget, +} +async fn get_system_smtp( + context: EffectContext, + data: GetSystemSmtpParams, +) -> Result { + todo!() +} +async fn get_container_ip(context: EffectContext, _: Empty) -> Result { + match context.0.upgrade() { + Some(c) => { + let net_service = c.persistent_container.net_service.lock().await; + Ok(net_service.get_ip()) + } + None => Err(Error::new( + eyre!("Upgrade on Weak resulted in a None variant"), + crate::ErrorKind::NotFound, + )), + } +} +async fn get_service_port_forward( + context: EffectContext, + data: GetServicePortForwardParams, +) -> Result { + todo!() +} +async fn clear_network_interfaces(context: EffectContext, _: Empty) -> Result { + todo!() +} +async fn export_service_interface( + context: EffectContext, + data: ExportServiceInterfaceParams, +) -> Result { + todo!() +} +async fn get_primary_url( + context: EffectContext, + data: GetPrimaryUrlParams, +) -> Result { + todo!() +} +async fn list_service_interfaces( + context: EffectContext, + data: ListServiceInterfacesParams, +) -> Result { + todo!() +} +async fn remove_address(context: EffectContext, data: RemoveAddressParams) -> Result { + todo!() +} +async fn export_action(context: EffectContext, data: ExportActionParams) -> Result<(), Error> { + let context = context.deref()?; + let package_id = context.id.clone(); + context + .ctx + .db + 
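+        // record the exported action's metadata in this package's action map in the db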
.mutate(|db| { + let model = db + .as_public_mut() + .as_package_data_mut() + .as_idx_mut(&package_id) + .or_not_found(&package_id)? + .as_actions_mut(); + let mut value = model.de()?; + value + .insert(data.id, data.metadata) + .map(|_| ()) + .unwrap_or_default(); + model.ser(&value) + }) + .await?; + Ok(()) +} +async fn remove_action(context: EffectContext, data: RemoveActionParams) -> Result<(), Error> { + let context = context.deref()?; + let package_id = context.id.clone(); + context + .ctx + .db + .mutate(|db| { + let model = db + .as_public_mut() + .as_package_data_mut() + .as_idx_mut(&package_id) + .or_not_found(&package_id)? + .as_actions_mut(); + let mut value = model.de()?; + value.remove(&data.id).map(|_| ()).unwrap_or_default(); + model.ser(&value) + }) + .await?; + Ok(()) +} +async fn reverse_proxy(context: EffectContext, data: ReverseProxyParams) -> Result { + todo!() +} +async fn mount(context: EffectContext, data: MountParams) -> Result { + todo!() +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] +struct Callback(#[ts(type = "() => void")] i64); + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +enum GetHostInfoParamsKind { + Multi, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct GetHostInfoParams { + kind: Option, + service_interface_id: String, + #[ts(type = "string | null")] + package_id: Option, + callback: Callback, +} +async fn get_host_info( + _: AnyContext, + GetHostInfoParams { .. }: GetHostInfoParams, +) -> Result { + todo!() +} + +async fn clear_bindings(context: EffectContext, _: Empty) -> Result { + todo!() +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct BindParams { + kind: HostKind, + id: HostId, + internal_port: u16, + #[serde(flatten)] + options: BindOptions, +} +async fn bind(_: AnyContext, BindParams { .. 
}: BindParams) -> Result { + todo!() +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct GetServiceInterfaceParams { + #[ts(type = "string | null")] + package_id: Option, + service_interface_id: String, + callback: Callback, +} +async fn get_service_interface( + _: AnyContext, + GetServiceInterfaceParams { + callback, + package_id, + service_interface_id, + }: GetServiceInterfaceParams, +) -> Result { + // TODO @Dr_Bonez + Ok(json!({ + "id": service_interface_id, + "name": service_interface_id, + "description": "This is a fake", + "hasPrimary": true, + "disabled": false, + "masked": false, + "addressInfo": json!({ + "username": Value::Null, + "hostId": "HostId?", + "options": json!({ + "scheme": Value::Null, + "preferredExternalPort": 80, + "addSsl":Value::Null, + "secure": false, + "ssl": false + }), + "suffix": "http" + }), + "type": "api" + })) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct ChrootParams { + #[arg(short = 'e', long = "env")] + env: Option, + #[arg(short = 'w', long = "workdir")] + workdir: Option, + #[arg(short = 'u', long = "user")] + user: Option, + path: PathBuf, + #[ts(type = "string")] + command: OsString, + #[ts(type = "string[]")] + args: Vec, +} +fn chroot( + _: AnyContext, + ChrootParams { + env, + workdir, + user, + path, + command, + args, + }: ChrootParams, +) -> Result<(), Error> { + let mut cmd = std::process::Command::new(command); + if let Some(env) = env { + for (k, v) in std::fs::read_to_string(env)? + .lines() + .map(|l| l.trim()) + .filter_map(|l| l.split_once("=")) + .filter(|(k, _)| !SKIP_ENV.contains(&k)) + { + cmd.env(k, v); + } + } + std::os::unix::fs::chroot(path)?; + if let Some(uid) = user.as_deref().and_then(|u| u.parse::().ok()) { + cmd.uid(uid); + } else if let Some(user) = user { + let (uid, gid) = std::fs::read_to_string("/etc/passwd")? + .lines() + .find_map(|l| { + let mut split = l.trim().split(":"); + if user != split.next()? 
{ + return None; + } + split.next(); // throw away x + Some((split.next()?.parse().ok()?, split.next()?.parse().ok()?)) + // uid gid + }) + .or_not_found(lazy_format!("{user} in /etc/passwd"))?; + cmd.uid(uid); + cmd.gid(gid); + }; + if let Some(workdir) = workdir { + cmd.current_dir(workdir); + } + cmd.args(args); + Err(cmd.exec().into()) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +enum Algorithm { + Ecdsa, + Ed25519, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct GetSslCertificateParams { + package_id: Option, + host_id: String, + algorithm: Option, //"ecdsa" | "ed25519" +} + +async fn get_ssl_certificate( + context: EffectContext, + GetSslCertificateParams { + package_id, + algorithm, + host_id, + }: GetSslCertificateParams, +) -> Result { + let fake = include_str!("./fake.cert.pem"); + Ok(json!([fake, fake, fake])) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct GetSslKeyParams { + package_id: Option, + host_id: String, + algorithm: Option, +} + +async fn get_ssl_key( + context: EffectContext, + GetSslKeyParams { + package_id, + host_id, + algorithm, + }: GetSslKeyParams, +) -> Result { + let fake = include_str!("./fake.cert.key"); + Ok(json!(fake)) +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct GetStoreParams { + #[ts(type = "string | null")] + package_id: Option, + #[ts(type = "string")] + path: JsonPointer, +} + +async fn get_store( + context: EffectContext, + GetStoreParams { package_id, path }: GetStoreParams, +) -> Result { + let context = context.deref()?; + let peeked = context.ctx.db.peek().await; + let package_id = package_id.unwrap_or(context.id.clone()); + let value = peeked + .as_private() + .as_package_stores() + .as_idx(&package_id) + .or_not_found(&package_id)? + .de()?; + + Ok(path + .get(&value) + .ok_or_else(|| Error::new(eyre!("Did not find value at path"), ErrorKind::NotFound))? 
+ .clone()) +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct SetStoreParams { + #[ts(type = "any")] + value: Value, + #[ts(type = "string")] + path: JsonPointer, +} + +async fn set_store( + context: EffectContext, + SetStoreParams { value, path }: SetStoreParams, +) -> Result<(), Error> { + let context = context.deref()?; + let package_id = context.id.clone(); + context + .ctx + .db + .mutate(|db| { + let model = db + .as_private_mut() + .as_package_stores_mut() + .upsert(&package_id, || Box::new(json!({})))?; + let mut model_value = model.de()?; + if model_value.is_null() { + model_value = json!({}); + } + path.set(&mut model_value, value, true) + .with_kind(ErrorKind::ParseDbField)?; + model.ser(&model_value) + }) + .await?; + Ok(()) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct ExposeForDependentsParams { + #[ts(type = "string[]")] + paths: Vec, +} + +async fn expose_for_dependents( + context: EffectContext, + ExposeForDependentsParams { paths }: ExposeForDependentsParams, +) -> Result<(), Error> { + Ok(()) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +struct ParamsPackageId { + #[ts(type = "string")] + package_id: PackageId, +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "camelCase")] +#[ts(export)] +struct ParamsMaybePackageId { + #[ts(type = "string | null")] + package_id: Option, +} + +async fn exists(context: EffectContext, params: ParamsPackageId) -> Result { + let context = context.deref()?; + let peeked = context.ctx.db.peek().await; + let package = peeked + .as_public() + .as_package_data() + .as_idx(¶ms.package_id) + .is_some(); + Ok(json!(package)) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct ExecuteAction { + #[ts(type = "string | null")] + service_id: Option, + #[ts(type = "string")] + action_id: ActionId, + #[ts(type = "any")] + input: Value, +} +async fn execute_action( + context: EffectContext, + ExecuteAction { + action_id, + input, + service_id, + }: ExecuteAction, +) -> Result { + let context = context.deref()?; + let package_id = service_id.clone().unwrap_or_else(|| context.id.clone()); + let service = context.ctx.services.get(&package_id).await; + let service = service.as_ref().ok_or_else(|| { + Error::new( + eyre!("Could not find package {package_id}"), + ErrorKind::Unknown, + ) + })?; + + Ok(json!(service.action(action_id, input).await?)) +} +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +struct FromService {} +async fn get_configured(context: EffectContext, _: Empty) -> Result { + let context = context.deref()?; + let peeked = context.ctx.db.peek().await; + let package_id = &context.id; + let package = peeked + .as_public() + .as_package_data() + .as_idx(package_id) + .or_not_found(package_id)? 
+ .as_status() + .as_configured() + .de()?; + Ok(json!(package)) +} + +async fn stopped(context: EffectContext, params: ParamsMaybePackageId) -> Result { + let context = context.deref()?; + let peeked = context.ctx.db.peek().await; + let package_id = params.package_id.unwrap_or_else(|| context.id.clone()); + let package = peeked + .as_public() + .as_package_data() + .as_idx(&package_id) + .or_not_found(&package_id)? + .as_status() + .as_main() + .de()?; + Ok(json!(matches!(package, MainStatus::Stopped))) +} +async fn running(context: EffectContext, params: ParamsPackageId) -> Result { + dbg!("Starting the running {params:?}"); + let context = context.deref()?; + let peeked = context.ctx.db.peek().await; + let package_id = params.package_id; + let package = peeked + .as_public() + .as_package_data() + .as_idx(&package_id) + .or_not_found(&package_id)? + .as_status() + .as_main() + .de()?; + Ok(json!(matches!(package, MainStatus::Running { .. }))) +} + +async fn restart(context: EffectContext, _: Empty) -> Result { + let context = context.deref()?; + let service = context.ctx.services.get(&context.id).await; + let service = service.as_ref().ok_or_else(|| { + Error::new( + eyre!("Could not find package {}", context.id), + ErrorKind::Unknown, + ) + })?; + service.restart().await?; + Ok(json!(())) +} + +async fn shutdown(context: EffectContext, _: Empty) -> Result { + let context = context.deref()?; + let service = context.ctx.services.get(&context.id).await; + let service = service.as_ref().ok_or_else(|| { + Error::new( + eyre!("Could not find package {}", context.id), + ErrorKind::Unknown, + ) + })?; + service.stop().await?; + Ok(json!(())) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "camelCase")] +#[ts(export)] +struct SetConfigured { + configured: bool, +} +async fn set_configured(context: EffectContext, params: SetConfigured) -> Result { + let context = context.deref()?; + let package_id = &context.id; + context + .ctx + .db + .mutate(|db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(package_id) + .or_not_found(package_id)? 
+ .as_status_mut() + .as_configured_mut() + .ser(¶ms.configured) + }) + .await?; + Ok(json!(())) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +enum Status { + Running, + Stopped, +} +impl FromStr for Status { + type Err = color_eyre::eyre::Report; + fn from_str(s: &str) -> Result { + match s { + "running" => Ok(Self::Running), + "stopped" => Ok(Self::Stopped), + _ => Err(eyre!("unknown status {s}")), + } + } +} +impl ValueParserFactory for Status { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "camelCase")] +#[ts(export)] +struct SetMainStatus { + status: Status, +} +async fn set_main_status(context: EffectContext, params: SetMainStatus) -> Result { + dbg!(format!("Status for main will be is {params:?}")); + let context = context.deref()?; + match params.status { + Status::Running => context.started(), + Status::Stopped => context.stopped(), + } + Ok(Value::Null) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct SetHealth { + id: HealthCheckId, + #[serde(flatten)] + result: HealthCheckResult, +} + +async fn set_health( + context: EffectContext, + SetHealth { id, result }: SetHealth, +) -> Result { + let context = context.deref()?; + + let package_id = &context.id; + context + .ctx + .db + .mutate(move |db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(package_id) + .or_not_found(package_id)? + .as_status_mut() + .as_main_mut() + .mutate(|main| { + match main { + &mut MainStatus::Running { ref mut health, .. } + | &mut MainStatus::BackingUp { ref mut health, .. 
} => { + health.insert(id, result); + } + _ => (), + } + Ok(()) + }) + }) + .await?; + Ok(json!(())) +} +#[derive(serde::Deserialize, serde::Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "camelCase")] +#[ts(export)] +pub struct DestroyOverlayedImageParams { + #[ts(type = "string")] + guid: InternedString, +} + +#[instrument(skip_all)] +pub async fn destroy_overlayed_image( + ctx: EffectContext, + DestroyOverlayedImageParams { guid }: DestroyOverlayedImageParams, +) -> Result<(), Error> { + let ctx = ctx.deref()?; + if ctx + .persistent_container + .overlays + .lock() + .await + .remove(&guid) + .is_none() + { + tracing::warn!("Could not find a guard to remove on the destroy overlayed image; assumming that it already is removed and will be skipping"); + } + Ok(()) +} +#[derive(serde::Deserialize, serde::Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "camelCase")] +#[ts(export)] +pub struct CreateOverlayedImageParams { + #[ts(type = "string")] + image_id: ImageId, +} + +#[instrument(skip_all)] +pub async fn create_overlayed_image( + ctx: EffectContext, + CreateOverlayedImageParams { image_id }: CreateOverlayedImageParams, +) -> Result<(PathBuf, InternedString), Error> { + let ctx = ctx.deref()?; + let path = Path::new("images") + .join(*ARCH) + .join(&image_id) + .with_extension("squashfs"); + if let Some(image) = ctx + .persistent_container + .s9pk + .as_archive() + .contents() + .get_path(dbg!(&path)) + .and_then(|e| e.as_file()) + { + let guid = new_guid(); + let rootfs_dir = ctx + .persistent_container + .lxc_container + .get() + .ok_or_else(|| { + Error::new( + eyre!("PersistentContainer has been destroyed"), + ErrorKind::Incoherent, + ) + })? + .rootfs_dir(); + let mountpoint = rootfs_dir.join("media/startos/overlays").join(&*guid); + tokio::fs::create_dir_all(&mountpoint).await?; + Command::new("chown") + .arg("100000:100000") + .arg(&mountpoint) + .invoke(ErrorKind::Filesystem) + .await?; + let container_mountpoint = Path::new("/").join( + mountpoint + .strip_prefix(rootfs_dir) + .with_kind(ErrorKind::Incoherent)?, + ); + tracing::info!("Mounting overlay {guid} for {image_id}"); + let guard = OverlayGuard::mount( + &IdMapped::new(LoopDev::from(&**image), 0, 100000, 65536), + mountpoint, + ) + .await?; + tracing::info!("Mounted overlay {guid} for {image_id}"); + ctx.persistent_container + .overlays + .lock() + .await + .insert(guid.clone(), guard); + Ok((container_mountpoint, guid)) + } else { + Err(Error::new( + eyre!("image {image_id} not found in s9pk"), + ErrorKind::NotFound, + )) + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +enum DependencyKind { + Exists, + Running, +} + +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase", tag = "kind")] +#[ts(export)] +enum DependencyRequirement { + #[serde(rename_all = "camelCase")] + Running { + #[ts(type = "string")] + id: PackageId, + #[ts(type = "string[]")] + health_checks: BTreeSet, + #[ts(type = "string")] + version_spec: VersionRange, + #[ts(type = "string")] + registry_url: Url, + }, + #[serde(rename_all = "camelCase")] + Exists { + #[ts(type = "string")] + id: PackageId, + #[ts(type = "string")] + version_spec: VersionRange, + #[ts(type = "string")] + registry_url: Url, + }, +} +// filebrowser:exists,bitcoind:running:foo+bar+baz +impl FromStr for DependencyRequirement { + type Err = Error; + fn from_str(s: &str) -> Result 
{ + match s.split_once(':') { + Some((id, "e")) | Some((id, "exists")) => Ok(Self::Exists { + id: id.parse()?, + registry_url: "".parse()?, // TODO + version_spec: "*".parse()?, // TODO + }), + Some((id, rest)) => { + let health_checks = match rest.split_once(':') { + Some(("r", rest)) | Some(("running", rest)) => rest + .split('+') + .map(|id| id.parse().map_err(Error::from)) + .collect(), + Some((kind, _)) => Err(Error::new( + eyre!("unknown dependency kind {kind}"), + ErrorKind::InvalidRequest, + )), + None => match rest { + "r" | "running" => Ok(BTreeSet::new()), + kind => Err(Error::new( + eyre!("unknown dependency kind {kind}"), + ErrorKind::InvalidRequest, + )), + }, + }?; + Ok(Self::Running { + id: id.parse()?, + health_checks, + registry_url: "".parse()?, // TODO + version_spec: "*".parse()?, // TODO + }) + } + None => Ok(Self::Running { + id: s.parse()?, + health_checks: BTreeSet::new(), + registry_url: "".parse()?, // TODO + version_spec: "*".parse()?, // TODO + }), + } + } +} +impl ValueParserFactory for DependencyRequirement { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} + +#[derive(Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "camelCase")] +#[ts(export)] +struct SetDependenciesParams { + dependencies: Vec, +} + +async fn set_dependencies( + ctx: EffectContext, + SetDependenciesParams { dependencies }: SetDependenciesParams, +) -> Result<(), Error> { + let ctx = ctx.deref()?; + let id = &ctx.id; + ctx.ctx + .db + .mutate(|db| { + let dependencies = CurrentDependencies( + dependencies + .into_iter() + .map(|dependency| match dependency { + DependencyRequirement::Exists { + id, + registry_url, + version_spec, + } => ( + id, + CurrentDependencyInfo { + kind: CurrentDependencyKind::Exists, + registry_url, + version_spec, + icon: todo!(), + title: todo!(), + }, + ), + DependencyRequirement::Running { + id, + health_checks, + registry_url, + version_spec, + } => ( + id, + CurrentDependencyInfo { + kind: CurrentDependencyKind::Running { health_checks }, + registry_url, + version_spec, + icon: todo!(), + title: todo!(), + }, + ), + }) + .collect(), + ); + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(id) + .or_not_found(id)? 
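// Worked examples of the dependency string grammar parsed by DependencyRequirement::from_str
// above; the "filebrowser:exists,bitcoind:running:foo+bar+baz" comment shows several
// requirements joined by commas, while from_str itself parses one requirement at a time:
//   "filebrowser:exists" or "filebrowser:e"  -> Exists  { id: filebrowser }
//   "bitcoind:running:foo+bar+baz"           -> Running { id: bitcoind, health_checks: {foo, bar, baz} }
//   "bitcoind:running" or "bitcoind:r"       -> Running { id: bitcoind, health_checks: {} }
//   "bitcoind"                               -> Running { id: bitcoind, health_checks: {} }
//   "bitcoind:watching"                      -> Err (unknown dependency kind "watching")
// In every case registry_url and version_spec currently fall back to "" and "*" per the TODOs above.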
+ .as_current_dependencies_mut() + .ser(&dependencies) + }) + .await +} diff --git a/core/startos/src/service/service_map.rs b/core/startos/src/service/service_map.rs new file mode 100644 index 0000000000..934497eb96 --- /dev/null +++ b/core/startos/src/service/service_map.rs @@ -0,0 +1,393 @@ +use std::sync::Arc; +use std::time::Duration; + +use color_eyre::eyre::eyre; +use futures::future::BoxFuture; +use futures::{Future, FutureExt}; +use helpers::NonDetachingJoinHandle; +use imbl::OrdMap; +use imbl_value::InternedString; +use tokio::sync::{Mutex, OwnedRwLockReadGuard, OwnedRwLockWriteGuard, RwLock}; +use tracing::instrument; + +use crate::context::RpcContext; +use crate::db::model::package::{ + InstallingInfo, InstallingState, PackageDataEntry, PackageState, UpdatingState, +}; +use crate::disk::mount::guard::GenericMountGuard; +use crate::install::PKG_ARCHIVE_DIR; +use crate::notifications::{notify, NotificationLevel}; +use crate::prelude::*; +use crate::progress::{ + FullProgressTracker, FullProgressTrackerHandle, PhaseProgressTrackerHandle, + ProgressTrackerWriter, +}; +use crate::s9pk::manifest::PackageId; +use crate::s9pk::merkle_archive::source::FileSource; +use crate::s9pk::S9pk; +use crate::service::{LoadDisposition, Service}; +use crate::status::{MainStatus, Status}; +use crate::util::serde::Pem; + +pub type DownloadInstallFuture = BoxFuture<'static, Result>; +pub type InstallFuture = BoxFuture<'static, Result<(), Error>>; + +pub(super) struct InstallProgressHandles { + pub(super) finalization_progress: PhaseProgressTrackerHandle, + pub(super) progress_handle: FullProgressTrackerHandle, +} + +/// This is the structure to contain all the services +#[derive(Default)] +pub struct ServiceMap(Mutex>>>>); +impl ServiceMap { + async fn entry(&self, id: &PackageId) -> Arc>> { + self.0 + .lock() + .await + .entry(id.clone()) + .or_insert_with(|| Arc::new(RwLock::new(None))) + .clone() + } + + #[instrument(skip_all)] + pub async fn get(&self, id: &PackageId) -> OwnedRwLockReadGuard> { + self.entry(id).await.read_owned().await + } + + #[instrument(skip_all)] + pub async fn get_mut(&self, id: &PackageId) -> OwnedRwLockWriteGuard> { + self.entry(id).await.write_owned().await + } + + #[instrument(skip_all)] + pub async fn init(&self, ctx: &RpcContext) -> Result<(), Error> { + for id in ctx.db.peek().await.as_public().as_package_data().keys()? { + if let Err(e) = self.load(ctx, &id, LoadDisposition::Retry).await { + tracing::error!("Error loading installed package as service: {e}"); + tracing::debug!("{e:?}"); + } + } + Ok(()) + } + + #[instrument(skip_all)] + pub async fn load( + &self, + ctx: &RpcContext, + id: &PackageId, + disposition: LoadDisposition, + ) -> Result<(), Error> { + let mut shutdown_err = Ok(()); + let mut service = self.get_mut(id).await; + if let Some(service) = service.take() { + shutdown_err = service.shutdown().await; + } + // TODO: retry on error? 
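// Judging by the usage above (entry() lazily creating a slot, get()/get_mut() taking owned
// guards), ServiceMap is roughly a Mutex over a map of Arc<RwLock<Option<Service>>> keyed by
// PackageId. A minimal standalone sketch of that per-package locking pattern, with Service
// stubbed out and BTreeMap standing in for OrdMap (names here are illustrative only):
use std::collections::BTreeMap;
use std::sync::Arc;
use tokio::sync::{Mutex, OwnedRwLockWriteGuard, RwLock};

struct StubService; // stand-in for the real Service type

#[derive(Default)]
struct ServiceSlots(Mutex<BTreeMap<String, Arc<RwLock<Option<StubService>>>>>);

impl ServiceSlots {
    // Create the slot lazily so callers can lock a package that is not installed yet.
    async fn entry(&self, id: &str) -> Arc<RwLock<Option<StubService>>> {
        self.0
            .lock()
            .await
            .entry(id.to_owned())
            .or_insert_with(|| Arc::new(RwLock::new(None)))
            .clone()
    }

    // Holding the owned write guard serializes install/load/uninstall for one package
    // while leaving every other package's slot free.
    async fn get_mut(&self, id: &str) -> OwnedRwLockWriteGuard<Option<StubService>> {
        self.entry(id).await.write_owned().await
    }
}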
+ *service = Service::load(ctx, id, disposition).await?; + shutdown_err?; + Ok(()) + } + + #[instrument(skip_all)] + pub async fn install( + &self, + ctx: RpcContext, + mut s9pk: S9pk, + recovery_source: Option, + ) -> Result { + let manifest = s9pk.as_manifest().clone(); + let id = manifest.id.clone(); + let icon = s9pk.icon_data_url().await?; + let developer_key = s9pk.as_archive().signer(); + let mut service = self.get_mut(&id).await; + + let op_name = if recovery_source.is_none() { + if service.is_none() { + "Install" + } else { + "Update" + } + } else { + "Restore" + }; + + let size = s9pk.size(); + let mut progress = FullProgressTracker::new(); + let download_progress_contribution = size.unwrap_or(60); + let progress_handle = progress.handle(); + let mut download_progress = progress_handle.add_phase( + InternedString::intern("Download"), + Some(download_progress_contribution), + ); + if let Some(size) = size { + download_progress.set_total(size); + } + let mut finalization_progress = progress_handle.add_phase( + InternedString::intern(op_name), + Some(download_progress_contribution / 2), + ); + let restoring = recovery_source.is_some(); + + let mut reload_guard = ServiceReloadGuard::new(ctx.clone(), id.clone(), op_name); + + reload_guard + .handle(ctx.db.mutate({ + let manifest = manifest.clone(); + let id = id.clone(); + let install_progress = progress.snapshot(); + move |db| { + if let Some(pde) = db.as_public_mut().as_package_data_mut().as_idx_mut(&id) { + let prev = pde.as_state_info().expect_installed()?.de()?; + pde.as_state_info_mut() + .ser(&PackageState::Updating(UpdatingState { + manifest: prev.manifest, + installing_info: InstallingInfo { + new_manifest: manifest, + progress: install_progress, + }, + }))?; + } else { + let installing = InstallingState { + installing_info: InstallingInfo { + new_manifest: manifest, + progress: install_progress, + }, + }; + db.as_public_mut().as_package_data_mut().insert( + &id, + &PackageDataEntry { + state_info: if restoring { + PackageState::Restoring(installing) + } else { + PackageState::Installing(installing) + }, + status: Status { + configured: false, + main: MainStatus::Stopped, + dependency_config_errors: Default::default(), + }, + marketplace_url: None, + developer_key: Pem::new(developer_key), + icon, + last_backup: None, + current_dependencies: Default::default(), + actions: Default::default(), + service_interfaces: Default::default(), + hosts: Default::default(), + store_exposed_dependents: Default::default(), + }, + )?; + }; + Ok(()) + } + })) + .await?; + + Ok(async move { + let (installed_path, sync_progress_task) = reload_guard + .handle(async { + let download_path = ctx + .datadir + .join(PKG_ARCHIVE_DIR) + .join("downloading") + .join(&id) + .with_extension("s9pk"); + + let deref_id = id.clone(); + let sync_progress_task = + NonDetachingJoinHandle::from(tokio::spawn(progress.sync_to_db( + ctx.db.clone(), + move |v| { + v.as_public_mut() + .as_package_data_mut() + .as_idx_mut(&deref_id) + .and_then(|e| e.as_state_info_mut().as_installing_info_mut()) + .map(|i| i.as_progress_mut()) + }, + Some(Duration::from_millis(100)), + ))); + + let mut progress_writer = ProgressTrackerWriter::new( + crate::util::io::create_file(&download_path).await?, + download_progress, + ); + s9pk.serialize(&mut progress_writer, true).await?; + let (file, mut download_progress) = progress_writer.into_inner(); + file.sync_all().await?; + download_progress.complete(); + + let installed_path = ctx + .datadir + .join(PKG_ARCHIVE_DIR) + 
.join("installed") + .join(&id) + .with_extension("s9pk"); + + crate::util::io::rename(&download_path, &installed_path).await?; + + Ok::<_, Error>((installed_path, sync_progress_task)) + }) + .await?; + Ok(reload_guard + .handle_last(async move { + let s9pk = S9pk::open(&installed_path, Some(&id)).await?; + let prev = if let Some(service) = service.take() { + ensure_code!( + recovery_source.is_none(), + ErrorKind::InvalidRequest, + "cannot restore over existing package" + ); + let version = service + .seed + .persistent_container + .s9pk + .as_manifest() + .version + .clone(); + service + .uninstall(Some(s9pk.as_manifest().version.clone())) + .await?; + finalization_progress.complete(); + progress_handle.complete(); + Some(version) + } else { + None + }; + if let Some(recovery_source) = recovery_source { + *service = Some( + Service::restore( + ctx, + s9pk, + recovery_source, + Some(InstallProgressHandles { + finalization_progress, + progress_handle, + }), + ) + .await?, + ); + } else { + *service = Some( + Service::install( + ctx, + s9pk, + prev, + Some(InstallProgressHandles { + finalization_progress, + progress_handle, + }), + ) + .await?, + ); + } + sync_progress_task.await.map_err(|_| { + Error::new(eyre!("progress sync task panicked"), ErrorKind::Unknown) + })??; + Ok(()) + }) + .boxed()) + } + .boxed()) + } + + /// This is ran during the cleanup, so when we are uninstalling the service + #[instrument(skip_all)] + pub async fn uninstall(&self, ctx: &RpcContext, id: &PackageId) -> Result<(), Error> { + if let Some(service) = self.get_mut(id).await.take() { + ServiceReloadGuard::new(ctx.clone(), id.clone(), "Uninstall") + .handle_last(service.uninstall(None)) + .await?; + } + Ok(()) + } + + pub async fn shutdown_all(&self) -> Result<(), Error> { + let lock = self.0.lock().await; + let mut futs = Vec::with_capacity(lock.len()); + for service in lock.values().cloned() { + futs.push(async move { + if let Some(service) = service.write_owned().await.take() { + service.shutdown().await? + } + Ok::<_, Error>(()) + }); + } + drop(lock); + let mut errors = ErrorCollection::new(); + for res in futures::future::join_all(futs).await { + errors.handle(res); + } + errors.into_result() + } +} + +pub struct ServiceReloadGuard(Option); +impl Drop for ServiceReloadGuard { + fn drop(&mut self) { + if let Some(info) = self.0.take() { + tokio::spawn(info.reload(None)); + } + } +} +impl ServiceReloadGuard { + pub fn new(ctx: RpcContext, id: PackageId, operation: &'static str) -> Self { + Self(Some(ServiceReloadInfo { ctx, id, operation })) + } + + pub async fn handle( + &mut self, + operation: impl Future>, + ) -> Result { + let mut errors = ErrorCollection::new(); + match operation.await { + Ok(a) => Ok(a), + Err(e) => { + if let Some(info) = self.0.take() { + errors.handle(info.reload(Some(e.clone_output())).await); + } + errors.handle::<(), _>(Err(e)); + errors.into_result().map(|_| unreachable!()) // TODO: there's gotta be a more elegant way? 
+ } + } + } + pub async fn handle_last( + mut self, + operation: impl Future>, + ) -> Result { + let res = self.handle(operation).await; + self.0.take(); + res + } +} + +struct ServiceReloadInfo { + ctx: RpcContext, + id: PackageId, + operation: &'static str, +} +impl ServiceReloadInfo { + async fn reload(self, error: Option) -> Result<(), Error> { + self.ctx + .services + .load(&self.ctx, &self.id, LoadDisposition::Undo) + .await?; + if let Some(error) = error { + let error_string = error.to_string(); + self.ctx + .db + .mutate(|db| { + notify( + db, + Some(self.id.clone()), + NotificationLevel::Error, + format!("{} Failed", self.operation), + error_string, + (), + ) + }) + .await?; + } + Ok(()) + } +} diff --git a/core/startos/src/manager/start_stop.rs b/core/startos/src/service/start_stop.rs similarity index 93% rename from core/startos/src/manager/start_stop.rs rename to core/startos/src/service/start_stop.rs index 3842abe574..bc24574ac5 100644 --- a/core/startos/src/manager/start_stop.rs +++ b/core/startos/src/service/start_stop.rs @@ -16,7 +16,7 @@ impl From for StartStop { match value { MainStatus::Stopped => StartStop::Stop, MainStatus::Restarting => StartStop::Start, - MainStatus::Stopping => StartStop::Stop, + MainStatus::Stopping { .. } => StartStop::Stop, MainStatus::Starting => StartStop::Start, MainStatus::Running { started: _, diff --git a/core/startos/src/service/transition/backup.rs b/core/startos/src/service/transition/backup.rs new file mode 100644 index 0000000000..8b13789179 --- /dev/null +++ b/core/startos/src/service/transition/backup.rs @@ -0,0 +1 @@ + diff --git a/core/startos/src/service/transition/mod.rs b/core/startos/src/service/transition/mod.rs new file mode 100644 index 0000000000..af62ccc1c2 --- /dev/null +++ b/core/startos/src/service/transition/mod.rs @@ -0,0 +1,86 @@ +use std::sync::Arc; + +use futures::{Future, FutureExt}; +use tokio::sync::watch; + +use super::persistent_container::ServiceState; +use crate::service::start_stop::StartStop; +use crate::util::actor::BackgroundJobs; +use crate::util::future::{CancellationHandle, RemoteCancellable}; + +pub mod backup; +pub mod restart; + +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum TransitionKind { + BackingUp, + Restarting, +} + +/// Used only in the manager/mod and is used to keep track of the state of the manager during the +/// transitional states +pub struct TransitionState { + cancel_handle: CancellationHandle, + kind: TransitionKind, +} +impl ::std::fmt::Debug for TransitionState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TransitionState") + .field("kind", &self.kind) + .finish_non_exhaustive() + } +} + +impl TransitionState { + pub fn kind(&self) -> TransitionKind { + self.kind + } + pub async fn abort(mut self) { + self.cancel_handle.cancel_and_wait().await + } + fn new( + task: impl Future + Send + 'static, + kind: TransitionKind, + jobs: &mut BackgroundJobs, + ) -> Self { + let task = RemoteCancellable::new(task); + let cancel_handle = task.cancellation_handle(); + jobs.add_job(task.map(|_| ())); + Self { + cancel_handle, + kind, + } + } +} +impl Drop for TransitionState { + fn drop(&mut self) { + self.cancel_handle.cancel(); + } +} + +#[derive(Debug, Clone)] +pub struct TempDesiredState(pub(super) Arc>); +impl TempDesiredState { + pub fn new(state: &Arc>) -> Self { + Self(state.clone()) + } + pub fn stop(&self) { + self.0 + .send_modify(|s| s.temp_desired_state = Some(StartStop::Stop)); + } + pub fn start(&self) { + 
self.0 + .send_modify(|s| s.temp_desired_state = Some(StartStop::Start)); + } +} +impl Drop for TempDesiredState { + fn drop(&mut self) { + self.0.send_modify(|s| s.temp_desired_state = None); + } +} +// impl Deref for TempDesiredState { +// type Target = watch::Sender>; +// fn deref(&self) -> &Self::Target { +// &*self.0 +// } +// } diff --git a/core/startos/src/service/transition/restart.rs b/core/startos/src/service/transition/restart.rs new file mode 100644 index 0000000000..9c82d02829 --- /dev/null +++ b/core/startos/src/service/transition/restart.rs @@ -0,0 +1,52 @@ +use std::sync::Arc; + +use futures::FutureExt; + +use crate::prelude::*; +use crate::service::transition::{TransitionKind, TransitionState}; +use crate::service::{Service, ServiceActor}; +use crate::util::actor::{BackgroundJobs, Handler}; +use crate::util::future::RemoteCancellable; + +use super::TempDesiredState; + +struct Restart; +#[async_trait::async_trait] +impl Handler for ServiceActor { + type Response = (); + async fn handle(&mut self, _: Restart, jobs: &mut BackgroundJobs) -> Self::Response { + // So Need a handle to just a single field in the state + let temp = TempDesiredState::new(&self.0.persistent_container.state); + let mut current = self.0.persistent_container.state.subscribe(); + let transition = RemoteCancellable::new(async move { + temp.stop(); + current.wait_for(|s| s.running_status.is_none()).await; + temp.start(); + current.wait_for(|s| s.running_status.is_some()).await; + drop(temp); + }); + let cancel_handle = transition.cancellation_handle(); + jobs.add_job(transition.map(|_| ())); + let notified = self.0.synchronized.notified(); + + let mut old = None; + self.0.persistent_container.state.send_modify(|s| { + old = std::mem::replace( + &mut s.transition_state, + Some(TransitionState { + kind: TransitionKind::Restarting, + cancel_handle, + }), + ) + }); + if let Some(t) = old { + t.abort().await; + } + notified.await + } +} +impl Service { + pub async fn restart(&self) -> Result<(), Error> { + self.actor.send(Restart).await + } +} diff --git a/core/startos/src/service/util.rs b/core/startos/src/service/util.rs new file mode 100644 index 0000000000..3c53c23660 --- /dev/null +++ b/core/startos/src/service/util.rs @@ -0,0 +1,14 @@ +use futures::Future; +use tokio::sync::Notify; + +use crate::prelude::*; + +pub async fn cancellable( + cancel_transition: &Notify, + transition: impl Future, +) -> Result { + tokio::select! 
{ + a = transition => Ok(a), + _ = cancel_transition.notified() => Err(Error::new(eyre!("transition was cancelled"), ErrorKind::Cancelled)), + } +} diff --git a/core/startos/src/setup.rs b/core/startos/src/setup.rs index 64c3240952..ad76066e86 100644 --- a/core/startos/src/setup.rs +++ b/core/startos/src/setup.rs @@ -5,10 +5,10 @@ use std::time::Duration; use color_eyre::eyre::eyre; use josekit::jwk::Jwk; use openssl::x509::X509; -use rpc_toolkit::command; +use patch_db::json_ptr::ROOT; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{from_fn_async, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; -use sqlx::Connection; use tokio::fs::File; use tokio::io::AsyncWriteExt; use tokio::try_join; @@ -18,36 +18,56 @@ use tracing::instrument; use crate::account::AccountInfo; use crate::backup::restore::recover_full_embassy; use crate::backup::target::BackupTargetFS; -use crate::context::rpc::RpcContextConfig; use crate::context::setup::SetupResult; use crate::context::SetupContext; +use crate::db::model::Database; use crate::disk::fsck::RepairStrategy; use crate::disk::main::DEFAULT_PASSWORD; use crate::disk::mount::filesystem::cifs::Cifs; use crate::disk::mount::filesystem::ReadWrite; -use crate::disk::mount::guard::TmpMountGuard; +use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::disk::util::{pvscan, recovery_info, DiskInfo, EmbassyOsRecoveryInfo}; use crate::disk::REPAIR_DISK_PATH; use crate::hostname::Hostname; use crate::init::{init, InitResult}; -use crate::middleware::encrypt::EncryptedWire; use crate::net::ssl::root_ca_start_time; use crate::prelude::*; +use crate::util::crypto::EncryptedWire; use crate::util::io::{dir_copy, dir_size, Counter}; use crate::{Error, ErrorKind, ResultExt}; -#[command(subcommands(status, disk, attach, execute, cifs, complete, get_pubkey, exit))] -pub fn setup() -> Result<(), Error> { - Ok(()) +pub fn setup() -> ParentHandler { + ParentHandler::new() + .subcommand( + "status", + from_fn_async(status) + .with_metadata("authenticated", Value::Bool(false)) + .no_cli(), + ) + .subcommand("disk", disk()) + .subcommand("attach", from_fn_async(attach).no_cli()) + .subcommand("execute", from_fn_async(execute).no_cli()) + .subcommand("cifs", cifs()) + .subcommand("complete", from_fn_async(complete).no_cli()) + .subcommand( + "get-pubkey", + from_fn_async(get_pubkey) + .with_metadata("authenticated", Value::Bool(false)) + .no_cli(), + ) + .subcommand("exit", from_fn_async(exit).no_cli()) } -#[command(subcommands(list_disks))] -pub fn disk() -> Result<(), Error> { - Ok(()) +pub fn disk() -> ParentHandler { + ParentHandler::new().subcommand( + "list", + from_fn_async(list_disks) + .with_metadata("authenticated", Value::Bool(false)) + .no_cli(), + ) } -#[command(rename = "list", rpc_only, metadata(authenticated = false))] -pub async fn list_disks(#[context] ctx: SetupContext) -> Result, Error> { +pub async fn list_disks(ctx: SetupContext) -> Result, Error> { crate::disk::util::list(&ctx.os_partitions).await } @@ -55,38 +75,41 @@ async fn setup_init( ctx: &SetupContext, password: Option, ) -> Result<(Hostname, OnionAddressV3, X509), Error> { - let InitResult { secret_store, db } = - init(&RpcContextConfig::load(ctx.config_path.clone()).await?).await?; - let mut secrets_handle = secret_store.acquire().await?; - let mut secrets_tx = secrets_handle.begin().await?; - - let mut account = AccountInfo::load(secrets_tx.as_mut()).await?; - - if let Some(password) = password { - account.set_password(&password)?; - 
account.save(secrets_tx.as_mut()).await?; - db.mutate(|m| { - m.as_server_info_mut() + let InitResult { db } = init(&ctx.config).await?; + + let account = db + .mutate(|m| { + let mut account = AccountInfo::load(m)?; + if let Some(password) = password { + account.set_password(&password)?; + } + account.save(m)?; + m.as_public_mut() + .as_server_info_mut() .as_password_hash_mut() - .ser(&account.password) + .ser(&account.password)?; + Ok(account) }) .await?; - } - - secrets_tx.commit().await?; Ok(( account.hostname, - account.key.tor_address(), + account.tor_key.public().get_onion_address(), account.root_ca_cert, )) } -#[command(rpc_only)] +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct AttachParams { + #[serde(rename = "embassy-password")] + password: Option, + guid: Arc, +} + pub async fn attach( - #[context] ctx: SetupContext, - #[arg] guid: Arc, - #[arg(rename = "embassy-password")] password: Option, + ctx: SetupContext, + AttachParams { password, guid }: AttachParams, ) -> Result<(), Error> { let mut status = ctx.setup_status.write().await; if status.is_some() { @@ -162,15 +185,14 @@ pub async fn attach( } #[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct SetupStatus { pub bytes_transferred: u64, pub total_bytes: Option, pub complete: bool, } -#[command(rpc_only, metadata(authenticated = false))] -pub async fn status(#[context] ctx: SetupContext) -> Result, RpcError> { +pub async fn status(ctx: SetupContext) -> Result, RpcError> { ctx.setup_status.read().await.clone().transpose() } @@ -178,25 +200,34 @@ pub async fn status(#[context] ctx: SetupContext) -> Result, /// This way the frontend can send a secret, like the password for the setup/ recovory /// without knowing the password over clearnet. We use the public key shared across the network /// since it is fine to share the public, and encrypt against the public. 
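// The flow the comment above describes, modeled with stand-in types: the frontend fetches
// the setup public key (get-pubkey), encrypts the password against it, and sends only the
// ciphertext; the handler decrypts with the context's key material and treats None as a
// decryption failure, mirroring how execute() below handles EncryptedWire::decrypt(&ctx).
// SealedSecret and its decrypt are illustrative stand-ins, not the real EncryptedWire.
struct SealedSecret(Vec<u8>); // ciphertext produced client-side against the published key

impl SealedSecret {
    fn decrypt(&self, _private_key: &[u8]) -> Option<String> {
        // The real EncryptedWire decrypts with the SetupContext's private key.
        None
    }
}

fn password_from_wire(sealed: SealedSecret, private_key: &[u8]) -> Result<String, String> {
    match sealed.decrypt(private_key) {
        Some(plaintext) => Ok(plaintext),
        None => Err("password could not be decrypted".to_owned()),
    }
}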
-#[command(rename = "get-pubkey", rpc_only, metadata(authenticated = false))] -pub async fn get_pubkey(#[context] ctx: SetupContext) -> Result { +pub async fn get_pubkey(ctx: SetupContext) -> Result { let secret = ctx.as_ref().clone(); let pub_key = secret.to_public_key()?; Ok(pub_key) } -#[command(subcommands(verify_cifs))] -pub fn cifs() -> Result<(), Error> { - Ok(()) +pub fn cifs() -> ParentHandler { + ParentHandler::new().subcommand("verify", from_fn_async(verify_cifs).no_cli()) +} + +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct VerifyCifsParams { + hostname: String, + path: PathBuf, + username: String, + password: Option, } -#[command(rename = "verify", rpc_only)] +// #[command(rename = "verify", rpc_only)] pub async fn verify_cifs( - #[context] ctx: SetupContext, - #[arg] hostname: String, - #[arg] path: PathBuf, - #[arg] username: String, - #[arg] password: Option, + ctx: SetupContext, + VerifyCifsParams { + hostname, + path, + username, + password, + }: VerifyCifsParams, ) -> Result { let password: Option = password.map(|x| x.decrypt(&*ctx)).flatten(); let guard = TmpMountGuard::mount( @@ -209,28 +240,39 @@ pub async fn verify_cifs( ReadWrite, ) .await?; - let embassy_os = recovery_info(&guard).await?; + let start_os = recovery_info(guard.path()).await?; guard.unmount().await?; - embassy_os.ok_or_else(|| Error::new(eyre!("No Backup Found"), crate::ErrorKind::NotFound)) + start_os.ok_or_else(|| Error::new(eyre!("No Backup Found"), crate::ErrorKind::NotFound)) } -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(tag = "type")] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum RecoverySource { Migrate { guid: String }, Backup { target: BackupTargetFS }, } -#[command(rpc_only)] +#[derive(Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ExecuteParams { + start_os_logicalname: PathBuf, + start_os_password: EncryptedWire, + recovery_source: Option, + recovery_password: Option, +} + +// #[command(rpc_only)] pub async fn execute( - #[context] ctx: SetupContext, - #[arg(rename = "embassy-logicalname")] embassy_logicalname: PathBuf, - #[arg(rename = "embassy-password")] embassy_password: EncryptedWire, - #[arg(rename = "recovery-source")] recovery_source: Option, - #[arg(rename = "recovery-password")] recovery_password: Option, + ctx: SetupContext, + ExecuteParams { + start_os_logicalname, + start_os_password, + recovery_source, + recovery_password, + }: ExecuteParams, ) -> Result<(), Error> { - let embassy_password = match embassy_password.decrypt(&*ctx) { + let start_os_password = match start_os_password.decrypt(&*ctx) { Some(a) => a, None => { return Err(Error::new( @@ -267,15 +309,10 @@ pub async fn execute( tokio::task::spawn({ async move { let ctx = ctx.clone(); - let recovery_source = recovery_source; - - let embassy_password = embassy_password; - let recovery_source = recovery_source; - let recovery_password = recovery_password; match execute_inner( ctx.clone(), - embassy_logicalname, - embassy_password, + start_os_logicalname, + start_os_password, recovery_source, recovery_password, ) @@ -312,8 +349,8 @@ pub async fn execute( } #[instrument(skip_all)] -#[command(rpc_only)] -pub async fn complete(#[context] ctx: SetupContext) -> Result { +// #[command(rpc_only)] +pub async fn complete(ctx: SetupContext) -> Result { let (guid, setup_result) = if let Some((guid, setup_result)) = &*ctx.setup_result.read().await { (guid.clone(), setup_result.clone()) } else { @@ 
-329,8 +366,8 @@ pub async fn complete(#[context] ctx: SetupContext) -> Result Result<(), Error> { +// #[command(rpc_only)] +pub async fn exit(ctx: SetupContext) -> Result<(), Error> { ctx.shutdown.send(()).expect("failed to shutdown"); Ok(()) } @@ -338,8 +375,8 @@ pub async fn exit(#[context] ctx: SetupContext) -> Result<(), Error> { #[instrument(skip_all)] pub async fn execute_inner( ctx: SetupContext, - embassy_logicalname: PathBuf, - embassy_password: String, + start_os_logicalname: PathBuf, + start_os_password: String, recovery_source: Option, recovery_password: Option, ) -> Result<(Arc, Hostname, OnionAddressV3, X509), Error> { @@ -350,7 +387,7 @@ pub async fn execute_inner( }; let guid = Arc::new( crate::disk::main::create( - &[embassy_logicalname], + &[start_os_logicalname], &pvscan().await?, &ctx.datadir, encryption_password, @@ -366,30 +403,28 @@ pub async fn execute_inner( .await?; if let Some(RecoverySource::Backup { target }) = recovery_source { - recover(ctx, guid, embassy_password, target, recovery_password).await + recover(ctx, guid, start_os_password, target, recovery_password).await } else if let Some(RecoverySource::Migrate { guid: old_guid }) = recovery_source { - migrate(ctx, guid, &old_guid, embassy_password).await + migrate(ctx, guid, &old_guid, start_os_password).await } else { - let (hostname, tor_addr, root_ca) = fresh_setup(&ctx, &embassy_password).await?; + let (hostname, tor_addr, root_ca) = fresh_setup(&ctx, &start_os_password).await?; Ok((guid, hostname, tor_addr, root_ca)) } } async fn fresh_setup( ctx: &SetupContext, - embassy_password: &str, + start_os_password: &str, ) -> Result<(Hostname, OnionAddressV3, X509), Error> { - let account = AccountInfo::new(embassy_password, root_ca_start_time().await?)?; - let sqlite_pool = ctx.secret_store().await?; - account.save(&sqlite_pool).await?; - sqlite_pool.close().await; - let InitResult { secret_store, .. 
} = - init(&RpcContextConfig::load(ctx.config_path.clone()).await?).await?; - secret_store.close().await; + let account = AccountInfo::new(start_os_password, root_ca_start_time().await?)?; + let db = ctx.db().await?; + db.put(&ROOT, &Database::init(&account)?).await?; + drop(db); + init(&ctx.config).await?; Ok(( - account.hostname.clone(), - account.key.tor_address(), - account.root_ca_cert.clone(), + account.hostname, + account.tor_key.public().get_onion_address(), + account.root_ca_cert, )) } @@ -397,7 +432,7 @@ async fn fresh_setup( async fn recover( ctx: SetupContext, guid: Arc, - embassy_password: String, + start_os_password: String, recovery_source: BackupTargetFS, recovery_password: Option, ) -> Result<(Arc, Hostname, OnionAddressV3, X509), Error> { @@ -405,7 +440,7 @@ async fn recover( recover_full_embassy( ctx, guid.clone(), - embassy_password, + start_os_password, recovery_source, recovery_password, ) @@ -417,7 +452,7 @@ async fn migrate( ctx: SetupContext, guid: Arc, old_guid: &str, - embassy_password: String, + start_os_password: String, ) -> Result<(Arc, Hostname, OnionAddressV3, X509), Error> { *ctx.setup_status.write().await = Some(Ok(SetupStatus { bytes_transferred: 0, @@ -502,7 +537,7 @@ async fn migrate( } => res, } - let (hostname, tor_addr, root_ca) = setup_init(&ctx, Some(embassy_password)).await?; + let (hostname, tor_addr, root_ca) = setup_init(&ctx, Some(start_os_password)).await?; crate::disk::main::export(&old_guid, "/media/embassy/migrate").await?; diff --git a/core/startos/src/shutdown.rs b/core/startos/src/shutdown.rs index e5ff969b6a..f6a9848975 100644 --- a/core/startos/src/shutdown.rs +++ b/core/startos/src/shutdown.rs @@ -1,15 +1,12 @@ use std::path::PathBuf; use std::sync::Arc; -use rpc_toolkit::command; - use crate::context::RpcContext; use crate::disk::main::export; use crate::init::{STANDBY_MODE_PATH, SYSTEM_REBUILD_PATH}; use crate::prelude::*; use crate::sound::SHUTDOWN; -use crate::util::docker::CONTAINER_TOOL; -use crate::util::{display_none, Invoke}; +use crate::util::Invoke; use crate::PLATFORM; #[derive(Debug, Clone)] @@ -44,28 +41,6 @@ impl Shutdown { tracing::error!("Error Stopping Journald: {}", e); tracing::debug!("{:?}", e); } - if CONTAINER_TOOL == "docker" { - if let Err(e) = Command::new("systemctl") - .arg("stop") - .arg("docker") - .invoke(crate::ErrorKind::Docker) - .await - { - tracing::error!("Error Stopping Docker: {}", e); - tracing::debug!("{:?}", e); - } - } else if CONTAINER_TOOL == "podman" { - if let Err(e) = Command::new("podman") - .arg("rm") - .arg("-f") - .arg("netdummy") - .invoke(crate::ErrorKind::Docker) - .await - { - tracing::error!("Error Stopping Podman: {}", e); - tracing::debug!("{:?}", e); - } - } if let Some((guid, datadir)) = &self.export_args { if let Err(e) = export(guid, datadir).await { tracing::error!("Error Exporting Volume Group: {}", e); @@ -100,11 +75,11 @@ impl Shutdown { } } -#[command(display(display_none))] -pub async fn shutdown(#[context] ctx: RpcContext) -> Result<(), Error> { +pub async fn shutdown(ctx: RpcContext) -> Result<(), Error> { ctx.db .mutate(|db| { - db.as_server_info_mut() + db.as_public_mut() + .as_server_info_mut() .as_status_info_mut() .as_shutting_down_mut() .ser(&true) @@ -120,11 +95,11 @@ pub async fn shutdown(#[context] ctx: RpcContext) -> Result<(), Error> { Ok(()) } -#[command(display(display_none))] -pub async fn restart(#[context] ctx: RpcContext) -> Result<(), Error> { +pub async fn restart(ctx: RpcContext) -> Result<(), Error> { ctx.db .mutate(|db| { - 
db.as_server_info_mut() + db.as_public_mut() + .as_server_info_mut() .as_status_info_mut() .as_restarting_mut() .ser(&true) @@ -140,8 +115,7 @@ pub async fn restart(#[context] ctx: RpcContext) -> Result<(), Error> { Ok(()) } -#[command(display(display_none))] -pub async fn rebuild(#[context] ctx: RpcContext) -> Result<(), Error> { +pub async fn rebuild(ctx: RpcContext) -> Result<(), Error> { tokio::fs::write(SYSTEM_REBUILD_PATH, b"").await?; restart(ctx).await } diff --git a/core/startos/src/ssh.rs b/core/startos/src/ssh.rs index 697e05727f..aaf941e1f3 100644 --- a/core/startos/src/ssh.rs +++ b/core/startos/src/ssh.rs @@ -1,31 +1,57 @@ +use std::collections::BTreeMap; use std::path::Path; -use chrono::Utc; -use clap::ArgMatches; +use clap::builder::ValueParserFactory; +use clap::Parser; use color_eyre::eyre::eyre; -use rpc_toolkit::command; -use sqlx::{Pool, Postgres}; +use imbl_value::InternedString; +use rpc_toolkit::{command, from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; use tracing::instrument; -use crate::context::RpcContext; -use crate::util::display_none; -use crate::util::serde::{display_serializable, IoFormat}; -use crate::{Error, ErrorKind}; +use crate::context::{CliContext, RpcContext}; +use crate::prelude::*; +use crate::util::clap::FromStrParser; +use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; -static SSH_AUTHORIZED_KEYS_FILE: &str = "/home/start9/.ssh/authorized_keys"; +pub const SSH_AUTHORIZED_KEYS_FILE: &str = "/home/start9/.ssh/authorized_keys"; -#[derive(Debug, serde::Deserialize, serde::Serialize)] -pub struct PubKey( +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct SshKeys(BTreeMap>); +impl SshKeys { + pub fn new() -> Self { + Self(BTreeMap::new()) + } +} +impl Map for SshKeys { + type Key = InternedString; + type Value = WithTimeData; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(key.clone()) + } +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct SshPubKey( #[serde(serialize_with = "crate::util::serde::serialize_display")] #[serde(deserialize_with = "crate::util::serde::deserialize_from_str")] openssh_keys::PublicKey, ); +impl ValueParserFactory for SshPubKey { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} #[derive(serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct SshKeyResponse { pub alg: String, - pub fingerprint: String, + pub fingerprint: InternedString, pub hostname: String, pub created_at: String, } @@ -39,10 +65,10 @@ impl std::fmt::Display for SshKeyResponse { } } -impl std::str::FromStr for PubKey { +impl std::str::FromStr for SshPubKey { type Err = Error; fn from_str(s: &str) -> Result { - s.parse().map(|pk| PubKey(pk)).map_err(|e| Error { + s.parse().map(|pk| SshPubKey(pk)).map_err(|e| Error { source: e.into(), kind: crate::ErrorKind::ParseSshKey, revision: None, @@ -50,75 +76,100 @@ impl std::str::FromStr for PubKey { } } -#[command(subcommands(add, delete, list,))] -pub fn ssh() -> Result<(), Error> { - Ok(()) +// #[command(subcommands(add, delete, list,))] +pub fn ssh() -> ParentHandler { + ParentHandler::new() + .subcommand( + "add", + from_fn_async(add) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "delete", + from_fn_async(delete) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "list", + from_fn_async(list) + 
.with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(display_all_ssh_keys(handle.params, result)) + }) + .with_remote_cli::(), + ) +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct AddParams { + key: SshPubKey, } -#[command(display(display_none))] #[instrument(skip_all)] -pub async fn add(#[context] ctx: RpcContext, #[arg] key: PubKey) -> Result { - let pool = &ctx.secret_store; - // check fingerprint for duplicates - let fp = key.0.fingerprint_md5(); - match sqlx::query!("SELECT * FROM ssh_keys WHERE fingerprint = $1", fp) - .fetch_optional(pool) - .await? - { - None => { - // if no duplicates, insert into DB - let raw_key = format!("{}", key.0); - let created_at = Utc::now().to_rfc3339(); - sqlx::query!( - "INSERT INTO ssh_keys (fingerprint, openssh_pubkey, created_at) VALUES ($1, $2, $3)", - fp, - raw_key, - created_at - ) - .execute(pool) - .await?; - // insert into live key file, for now we actually do a wholesale replacement of the keys file, for maximum - // consistency - sync_keys_from_db(pool, Path::new(SSH_AUTHORIZED_KEYS_FILE)).await?; - Ok(SshKeyResponse { - alg: key.0.keytype().to_owned(), - fingerprint: fp, - hostname: key.0.comment.unwrap_or(String::new()).to_owned(), - created_at, - }) - } - Some(_) => Err(Error::new(eyre!("Duplicate ssh key"), ErrorKind::Duplicate)), - } +pub async fn add(ctx: RpcContext, AddParams { key }: AddParams) -> Result { + let mut key = WithTimeData::new(key); + let fingerprint = InternedString::intern(key.0.fingerprint_md5()); + let (keys, res) = ctx + .db + .mutate(move |m| { + m.as_private_mut() + .as_ssh_pubkeys_mut() + .insert(&fingerprint, &key)?; + + Ok(( + m.as_private().as_ssh_pubkeys().de()?, + SshKeyResponse { + alg: key.0.keytype().to_owned(), + fingerprint, + hostname: key.0.comment.take().unwrap_or_default(), + created_at: key.created_at.to_rfc3339(), + }, + )) + }) + .await?; + sync_keys(&keys, SSH_AUTHORIZED_KEYS_FILE).await?; + Ok(res) +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct DeleteParams { + fingerprint: InternedString, } -#[command(display(display_none))] + #[instrument(skip_all)] -pub async fn delete(#[context] ctx: RpcContext, #[arg] fingerprint: String) -> Result<(), Error> { - let pool = &ctx.secret_store; - // check if fingerprint is in DB - // if in DB, remove it from DB - let n = sqlx::query!("DELETE FROM ssh_keys WHERE fingerprint = $1", fingerprint) - .execute(pool) - .await? 
- .rows_affected(); - // if not in DB, Err404 - if n == 0 { - Err(Error { - source: color_eyre::eyre::eyre!("SSH Key Not Found"), - kind: crate::error::ErrorKind::NotFound, - revision: None, +pub async fn delete( + ctx: RpcContext, + DeleteParams { fingerprint }: DeleteParams, +) -> Result<(), Error> { + let keys = ctx + .db + .mutate(|m| { + let keys_ref = m.as_private_mut().as_ssh_pubkeys_mut(); + if keys_ref.remove(&fingerprint)?.is_some() { + keys_ref.de() + } else { + Err(Error { + source: color_eyre::eyre::eyre!("SSH Key Not Found"), + kind: crate::error::ErrorKind::NotFound, + revision: None, + }) + } }) - } else { - // AND overlay key file - sync_keys_from_db(pool, Path::new(SSH_AUTHORIZED_KEYS_FILE)).await?; - Ok(()) - } + .await?; + sync_keys(&keys, SSH_AUTHORIZED_KEYS_FILE).await } -fn display_all_ssh_keys(all: Vec, matches: &ArgMatches) { +fn display_all_ssh_keys(params: WithIoFormat, result: Vec) { use prettytable::*; - if matches.is_present("format") { - return display_serializable(all, matches); + if let Some(format) = params.format { + return display_serializable(format, params); } let mut table = Table::new(); @@ -128,7 +179,7 @@ fn display_all_ssh_keys(all: Vec, matches: &ArgMatches) { "FINGERPRINT", "HOSTNAME", ]); - for key in all { + for key in result { let row = row![ &format!("{}", key.created_at), &key.alg, @@ -140,50 +191,32 @@ fn display_all_ssh_keys(all: Vec, matches: &ArgMatches) { table.print_tty(false).unwrap(); } -#[command(display(display_all_ssh_keys))] #[instrument(skip_all)] -pub async fn list( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result, Error> { - let pool = &ctx.secret_store; - // list keys in DB and return them - let entries = sqlx::query!("SELECT fingerprint, openssh_pubkey, created_at FROM ssh_keys") - .fetch_all(pool) - .await?; - Ok(entries +pub async fn list(ctx: RpcContext) -> Result, Error> { + ctx.db + .peek() + .await + .into_private() + .into_ssh_pubkeys() + .into_entries()? 
.into_iter() - .map(|r| { - let k = PubKey(r.openssh_pubkey.parse().unwrap()).0; - let alg = k.keytype().to_owned(); - let fingerprint = k.fingerprint_md5(); - let hostname = k.comment.unwrap_or("".to_owned()); - let created_at = r.created_at; - SshKeyResponse { - alg, + .map(|(fingerprint, key)| { + let mut key = key.de()?; + Ok(SshKeyResponse { + alg: key.0.keytype().to_owned(), fingerprint, - hostname, - created_at, - } + hostname: key.0.comment.take().unwrap_or_default(), + created_at: key.created_at.to_rfc3339(), + }) }) - .collect()) + .collect() } #[instrument(skip_all)] -pub async fn sync_keys_from_db>( - pool: &Pool, - dest: P, -) -> Result<(), Error> { +pub async fn sync_keys>(keys: &SshKeys, dest: P) -> Result<(), Error> { + use tokio::io::AsyncWriteExt; + let dest = dest.as_ref(); - let keys = sqlx::query!("SELECT openssh_pubkey FROM ssh_keys") - .fetch_all(pool) - .await?; - let contents: String = keys - .into_iter() - .map(|k| format!("{}\n", k.openssh_pubkey)) - .collect(); let ssh_dir = dest.parent().ok_or_else(|| { Error::new( eyre!("SSH Key File cannot be \"/\""), @@ -193,5 +226,10 @@ pub async fn sync_keys_from_db>( if tokio::fs::metadata(ssh_dir).await.is_err() { tokio::fs::create_dir_all(ssh_dir).await?; } - std::fs::write(dest, contents).map_err(|e| e.into()) + let mut f = tokio::fs::File::create(dest).await?; + for key in keys.0.values() { + f.write_all(key.0.to_key_format().as_bytes()).await?; + f.write_all(b"\n").await?; + } + Ok(()) } diff --git a/core/startos/src/status/health_check.rs b/core/startos/src/status/health_check.rs index 1b3e8f6b5f..cd56165275 100644 --- a/core/startos/src/status/health_check.rs +++ b/core/startos/src/status/health_check.rs @@ -1,126 +1,52 @@ -use std::collections::{BTreeMap, BTreeSet}; - -use chrono::{DateTime, Utc}; pub use models::HealthCheckId; -use models::ImageId; use serde::{Deserialize, Serialize}; -use tracing::instrument; - -use crate::context::RpcContext; -use crate::procedure::{NoOutput, PackageProcedure, ProcedureName}; -use crate::s9pk::manifest::PackageId; -use crate::util::serde::Duration; -use crate::util::Version; -use crate::volume::Volumes; -use crate::{Error, ResultExt}; +use ts_rs::TS; -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct HealthChecks(pub BTreeMap); -impl HealthChecks { - #[instrument(skip_all)] - pub fn validate( - &self, - eos_version: &Version, - volumes: &Volumes, - image_ids: &BTreeSet, - ) -> Result<(), Error> { - for check in self.0.values() { - check - .implementation - .validate(eos_version, volumes, image_ids, false) - .with_ctx(|_| { - ( - crate::ErrorKind::ValidateS9pk, - format!("Health Check {}", check.name), - ) - })?; - } - Ok(()) - } - pub async fn check_all( - &self, - ctx: &RpcContext, - started: DateTime, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - ) -> Result, Error> { - let res = futures::future::try_join_all(self.0.iter().map(|(id, check)| async move { - Ok::<_, Error>(( - id.clone(), - check - .check(ctx, id, started, pkg_id, pkg_version, volumes) - .await?, - )) - })) - .await?; - Ok(res.into_iter().collect()) - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct HealthCheck { +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, TS)] +#[serde(rename_all = "camelCase")] +pub struct HealthCheckResult { pub name: String, - pub success_message: Option, #[serde(flatten)] - implementation: PackageProcedure, - pub timeout: Option, -} -impl HealthCheck { - #[instrument(skip_all)] - pub 
async fn check( - &self, - ctx: &RpcContext, - id: &HealthCheckId, - started: DateTime, - pkg_id: &PackageId, - pkg_version: &Version, - volumes: &Volumes, - ) -> Result { - let res = self - .implementation - .execute( - ctx, - pkg_id, - pkg_version, - ProcedureName::Health(id.clone()), - volumes, - Some(Utc::now().signed_duration_since(started).num_milliseconds()), - Some( - self.timeout - .map_or(std::time::Duration::from_secs(30), |d| *d), - ), - ) - .await?; - Ok(match res { - Ok(NoOutput) => HealthCheckResult::Success, - Err((59, _)) => HealthCheckResult::Disabled, - Err((60, _)) => HealthCheckResult::Starting, - Err((61, message)) => HealthCheckResult::Loading { message }, - Err((_, error)) => HealthCheckResult::Failure { error }, - }) - } + pub kind: HealthCheckResultKind, } -#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] -#[serde(rename_all = "kebab-case")] +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, TS)] +#[serde(rename_all = "camelCase")] #[serde(tag = "result")] -pub enum HealthCheckResult { - Success, - Disabled, - Starting, +pub enum HealthCheckResultKind { + Success { message: Option }, + Disabled { message: Option }, + Starting { message: Option }, Loading { message: String }, - Failure { error: String }, + Failure { message: String }, } impl std::fmt::Display for HealthCheckResult { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - HealthCheckResult::Success => write!(f, "Succeeded"), - HealthCheckResult::Disabled => write!(f, "Disabled"), - HealthCheckResult::Starting => write!(f, "Starting"), - HealthCheckResult::Loading { message } => write!(f, "Loading ({})", message), - HealthCheckResult::Failure { error } => write!(f, "Failed ({})", error), + let name = &self.name; + match &self.kind { + HealthCheckResultKind::Success { message } => { + if let Some(message) = message { + write!(f, "{name}: Succeeded ({message})") + } else { + write!(f, "{name}: Succeeded") + } + } + HealthCheckResultKind::Disabled { message } => { + if let Some(message) = message { + write!(f, "{name}: Disabled ({message})") + } else { + write!(f, "{name}: Disabled") + } + } + HealthCheckResultKind::Starting { message } => { + if let Some(message) = message { + write!(f, "{name}: Starting ({message})") + } else { + write!(f, "{name}: Starting") + } + } + HealthCheckResultKind::Loading { message } => write!(f, "{name}: Loading ({message})"), + HealthCheckResultKind::Failure { message } => write!(f, "{name}: Failed ({message})"), } } } diff --git a/core/startos/src/status/mod.rs b/core/startos/src/status/mod.rs index 2a5a9391fe..2faa90e795 100644 --- a/core/startos/src/status/mod.rs +++ b/core/startos/src/status/mod.rs @@ -1,17 +1,20 @@ use std::collections::BTreeMap; use chrono::{DateTime, Utc}; +use imbl::OrdMap; use models::PackageId; use serde::{Deserialize, Serialize}; +use ts_rs::TS; use self::health_check::HealthCheckId; use crate::prelude::*; use crate::status::health_check::HealthCheckResult; pub mod health_check; -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "kebab-case")] +#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] #[model = "Model"] +#[ts(export)] pub struct Status { pub configured: bool, pub main: MainStatus, @@ -19,30 +22,45 @@ pub struct Status { pub dependency_config_errors: DependencyConfigErrors, } -#[derive(Clone, Debug, Deserialize, Serialize, HasModel, Default)] -#[serde(rename_all = "kebab-case")] +#[derive(Clone, Debug, 
Deserialize, Serialize, HasModel, Default, TS)] #[model = "Model"] +#[ts(export)] pub struct DependencyConfigErrors(pub BTreeMap); impl Map for DependencyConfigErrors { type Key = PackageId; type Value = String; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(key.clone().into()) + } } -#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, TS)] #[serde(tag = "status")] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum MainStatus { Stopped, Restarting, - Stopping, + #[serde(rename_all = "camelCase")] + Stopping { + timeout: crate::util::serde::Duration, + }, Starting, + #[serde(rename_all = "camelCase")] Running { + #[ts(type = "string")] started: DateTime, - health: BTreeMap, + #[ts(as = "BTreeMap")] + health: OrdMap, }, + #[serde(rename_all = "camelCase")] BackingUp { + #[ts(type = "string | null")] started: Option>, - health: BTreeMap, + #[ts(as = "BTreeMap")] + health: OrdMap, }, } impl MainStatus { @@ -54,29 +72,29 @@ impl MainStatus { started: Some(_), .. } => true, MainStatus::Stopped - | MainStatus::Stopping + | MainStatus::Stopping { .. } | MainStatus::Restarting | MainStatus::BackingUp { started: None, .. } => false, } } - pub fn stop(&mut self) { - match self { - MainStatus::Starting { .. } | MainStatus::Running { .. } => { - *self = MainStatus::Stopping; - } - MainStatus::BackingUp { started, .. } => { - *started = None; - } - MainStatus::Stopped | MainStatus::Stopping | MainStatus::Restarting => (), - } - } + // pub fn stop(&mut self) { + // match self { + // MainStatus::Starting { .. } | MainStatus::Running { .. } => { + // *self = MainStatus::Stopping; + // } + // MainStatus::BackingUp { started, .. } => { + // *started = None; + // } + // MainStatus::Stopped | MainStatus::Stopping | MainStatus::Restarting => (), + // } + // } pub fn started(&self) -> Option> { match self { MainStatus::Running { started, .. } => Some(*started), MainStatus::BackingUp { started, .. } => *started, MainStatus::Stopped => None, MainStatus::Restarting => None, - MainStatus::Stopping => None, + MainStatus::Stopping { .. } => None, MainStatus::Starting { .. } => None, } } @@ -84,7 +102,7 @@ impl MainStatus { let (started, health) = match self { MainStatus::Starting { .. } => (Some(Utc::now()), Default::default()), MainStatus::Running { started, health } => (Some(started.clone()), health.clone()), - MainStatus::Stopped | MainStatus::Stopping | MainStatus::Restarting => { + MainStatus::Stopped | MainStatus::Stopping { .. } | MainStatus::Restarting => { (None, Default::default()) } MainStatus::BackingUp { .. 
} => return self.clone(), diff --git a/core/startos/src/system.rs b/core/startos/src/system.rs index b5cd428446..faead07381 100644 --- a/core/startos/src/system.rs +++ b/core/startos/src/system.rs @@ -2,11 +2,11 @@ use std::collections::BTreeSet; use std::fmt; use chrono::Utc; -use clap::ArgMatches; +use clap::Parser; use color_eyre::eyre::eyre; use futures::FutureExt; -use rpc_toolkit::command; use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{command, from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use tokio::process::Command; use tokio::sync::broadcast::Receiver; @@ -22,13 +22,27 @@ use crate::logs::{ use crate::prelude::*; use crate::shutdown::Shutdown; use crate::util::cpupower::{get_available_governors, set_governor, Governor}; -use crate::util::serde::{display_serializable, IoFormat}; -use crate::util::{display_none, Invoke}; +use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; +use crate::util::Invoke; use crate::{Error, ErrorKind, ResultExt}; -#[command(subcommands(zram, governor))] -pub async fn experimental() -> Result<(), Error> { - Ok(()) +pub fn experimental() -> ParentHandler { + ParentHandler::new() + .subcommand( + "zram", + from_fn_async(zram) + .no_display() + .with_remote_cli::(), + ) + .subcommand( + "governor", + from_fn_async(governor) + .with_display_serializable() + .with_custom_display_fn::(|handle, result| { + Ok(display_governor_info(handle.params, result)) + }) + .with_remote_cli::(), + ) } pub async fn enable_zram() -> Result<(), Error> { @@ -59,11 +73,17 @@ pub async fn enable_zram() -> Result<(), Error> { Ok(()) } -#[command(display(display_none))] -pub async fn zram(#[context] ctx: RpcContext, #[arg] enable: bool) -> Result<(), Error> { +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ZramParams { + enable: bool, +} + +pub async fn zram(ctx: RpcContext, ZramParams { enable }: ZramParams) -> Result<(), Error> { let db = ctx.db.peek().await; - let zram = db.as_server_info().as_zram().de()?; + let zram = db.as_public().as_server_info().as_zram().de()?; if enable == zram { return Ok(()); } @@ -80,7 +100,10 @@ pub async fn zram(#[context] ctx: RpcContext, #[arg] enable: bool) -> Result<(), } ctx.db .mutate(|v| { - v.as_server_info_mut().as_zram_mut().ser(&enable)?; + v.as_public_mut() + .as_server_info_mut() + .as_zram_mut() + .ser(&enable)?; Ok(()) }) .await?; @@ -93,17 +116,17 @@ pub struct GovernorInfo { available: BTreeSet, } -fn display_governor_info(arg: GovernorInfo, matches: &ArgMatches) { +fn display_governor_info(params: WithIoFormat, result: GovernorInfo) { use prettytable::*; - if matches.is_present("format") { - return display_serializable(arg, matches); + if let Some(format) = params.format { + return display_serializable(format, params); } let mut table = Table::new(); table.add_row(row![bc -> "GOVERNORS"]); - for entry in arg.available { - if Some(&entry) == arg.current.as_ref() { + for entry in result.available { + if Some(&entry) == result.current.as_ref() { table.add_row(row![g -> format!("* {entry} (current)")]); } else { table.add_row(row![entry]); @@ -112,13 +135,16 @@ fn display_governor_info(arg: GovernorInfo, matches: &ArgMatches) { table.print_tty(false).unwrap(); } -#[command(display(display_governor_info))] +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct GovernorParams 
{ + set: Option, +} + pub async fn governor( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, - #[arg] set: Option, + ctx: RpcContext, + GovernorParams { set, .. }: GovernorParams, ) -> Result { let available = get_available_governors().await?; if let Some(set) = set { @@ -130,10 +156,22 @@ pub async fn governor( } set_governor(&set).await?; ctx.db - .mutate(|d| d.as_server_info_mut().as_governor_mut().ser(&Some(set))) + .mutate(|d| { + d.as_public_mut() + .as_server_info_mut() + .as_governor_mut() + .ser(&Some(set)) + }) .await?; } - let current = ctx.db.peek().await.as_server_info().as_governor().de()?; + let current = ctx + .db + .peek() + .await + .as_public() + .as_server_info() + .as_governor() + .de()?; Ok(GovernorInfo { current, available }) } @@ -143,13 +181,13 @@ pub struct TimeInfo { uptime: u64, } -fn display_time(arg: TimeInfo, matches: &ArgMatches) { +pub fn display_time(params: WithIoFormat, arg: TimeInfo) { use std::fmt::Write; use prettytable::*; - if matches.is_present("format") { - return display_serializable(arg, matches); + if let Some(format) = params.format { + return display_serializable(format, arg); } let days = arg.uptime / (24 * 60 * 60); @@ -185,35 +223,57 @@ fn display_time(arg: TimeInfo, matches: &ArgMatches) { table.print_tty(false).unwrap(); } -#[command(display(display_time))] -pub async fn time( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result { +pub async fn time(ctx: RpcContext, _: Empty) -> Result { Ok(TimeInfo { now: Utc::now().to_rfc3339(), uptime: ctx.start_time.elapsed().as_secs(), }) } - -#[command( - custom_cli(cli_logs(async, context(CliContext))), - subcommands(self(logs_nofollow(async)), logs_follow), - display(display_none) -)] -pub async fn logs( - #[arg(short = 'l', long = "limit")] limit: Option, - #[arg(short = 'c', long = "cursor")] cursor: Option, - #[arg(short = 'B', long = "before", default)] before: bool, - #[arg(short = 'f', long = "follow", default)] follow: bool, -) -> Result<(Option, Option, bool, bool), Error> { - Ok((limit, cursor, before, follow)) +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct LogsParams { + #[arg(short = 'l', long = "limit")] + limit: Option, + #[arg(short = 'c', long = "cursor")] + cursor: Option, + #[arg(short = 'B', long = "before")] + #[serde(default)] + before: bool, + #[arg(short = 'f', long = "follow")] + #[serde(default)] + follow: bool, +} + +pub fn logs() -> ParentHandler { + ParentHandler::new() + .root_handler( + from_fn_async(cli_logs) + .no_display() + .with_inherited(|params, _| params), + ) + .root_handler( + from_fn_async(logs_nofollow) + .with_inherited(|params, _| params) + .no_cli(), + ) + .subcommand( + "follow", + from_fn_async(logs_follow) + .with_inherited(|params, _| params) + .no_cli(), + ) } + pub async fn cli_logs( ctx: CliContext, - (limit, cursor, before, follow): (Option, Option, bool, bool), + _: Empty, + LogsParams { + limit, + cursor, + before, + follow, + }: LogsParams, ) -> Result<(), RpcError> { if follow { if cursor.is_some() { @@ -234,37 +294,68 @@ pub async fn cli_logs( } } pub async fn logs_nofollow( - _ctx: (), - (limit, cursor, before, _): (Option, Option, bool, bool), + _ctx: AnyContext, + _: Empty, + LogsParams { + limit, + cursor, + before, + .. 
+ }: LogsParams, ) -> Result { fetch_logs(LogSource::System, limit, cursor, before).await } -#[command(rpc_only, rename = "follow", display(display_none))] pub async fn logs_follow( - #[context] ctx: RpcContext, - #[parent_data] (limit, _, _, _): (Option, Option, bool, bool), + ctx: RpcContext, + _: Empty, + LogsParams { limit, .. }: LogsParams, ) -> Result { follow_logs(ctx, LogSource::System, limit).await } - -#[command( - rename = "kernel-logs", - custom_cli(cli_kernel_logs(async, context(CliContext))), - subcommands(self(kernel_logs_nofollow(async)), kernel_logs_follow), - display(display_none) -)] -pub async fn kernel_logs( - #[arg(short = 'l', long = "limit")] limit: Option, - #[arg(short = 'c', long = "cursor")] cursor: Option, - #[arg(short = 'B', long = "before", default)] before: bool, - #[arg(short = 'f', long = "follow", default)] follow: bool, -) -> Result<(Option, Option, bool, bool), Error> { - Ok((limit, cursor, before, follow)) +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct KernelLogsParams { + #[arg(short = 'l', long = "limit")] + limit: Option, + #[arg(short = 'c', long = "cursor")] + cursor: Option, + #[arg(short = 'B', long = "before")] + #[serde(default)] + before: bool, + #[arg(short = 'f', long = "follow")] + #[serde(default)] + follow: bool, +} +pub fn kernel_logs() -> ParentHandler { + ParentHandler::new() + .root_handler( + from_fn_async(cli_kernel_logs) + .no_display() + .with_inherited(|params, _| params), + ) + .root_handler( + from_fn_async(kernel_logs_nofollow) + .with_inherited(|params, _| params) + .no_cli(), + ) + .subcommand( + "follow", + from_fn_async(kernel_logs_follow) + .with_inherited(|params, _| params) + .no_cli(), + ) } pub async fn cli_kernel_logs( ctx: CliContext, - (limit, cursor, before, follow): (Option, Option, bool, bool), + _: Empty, + KernelLogsParams { + limit, + cursor, + before, + follow, + }: KernelLogsParams, ) -> Result<(), RpcError> { if follow { if cursor.is_some() { @@ -285,16 +376,22 @@ pub async fn cli_kernel_logs( } } pub async fn kernel_logs_nofollow( - _ctx: (), - (limit, cursor, before, _): (Option, Option, bool, bool), + _ctx: AnyContext, + _: Empty, + KernelLogsParams { + limit, + cursor, + before, + .. + }: KernelLogsParams, ) -> Result { fetch_logs(LogSource::Kernel, limit, cursor, before).await } -#[command(rpc_only, rename = "follow", display(display_none))] pub async fn kernel_logs_follow( - #[context] ctx: RpcContext, - #[parent_data] (limit, _, _, _): (Option, Option, bool, bool), + ctx: RpcContext, + _: Empty, + KernelLogsParams { limit, .. 
}: KernelLogsParams, ) -> Result { follow_logs(ctx, LogSource::Kernel, limit).await } @@ -412,12 +509,12 @@ impl<'de> Deserialize<'de> for GigaBytes { } #[derive(Deserialize, Serialize, Clone, Debug)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct MetricsGeneral { pub temperature: Option, } #[derive(Deserialize, Serialize, Clone, Debug)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct MetricsMemory { pub percentage_used: Percentage, pub total: MebiBytes, @@ -428,7 +525,7 @@ pub struct MetricsMemory { pub zram_used: MebiBytes, } #[derive(Deserialize, Serialize, Clone, Debug)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct MetricsCpu { percentage_used: Percentage, idle: Percentage, @@ -437,7 +534,7 @@ pub struct MetricsCpu { wait: Percentage, } #[derive(Deserialize, Serialize, Clone, Debug)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct MetricsDisk { percentage_used: Percentage, used: GigaBytes, @@ -445,7 +542,7 @@ pub struct MetricsDisk { capacity: GigaBytes, } #[derive(Deserialize, Serialize, Clone, Debug)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub struct Metrics { general: MetricsGeneral, memory: MetricsMemory, @@ -453,13 +550,8 @@ pub struct Metrics { disk: MetricsDisk, } -#[command(display(display_serializable))] -pub async fn metrics( - #[context] ctx: RpcContext, - #[allow(unused_variables)] - #[arg(long = "format")] - format: Option, -) -> Result { +// #[command(display(display_serializable))] +pub async fn metrics(ctx: RpcContext, _: Empty) -> Result { match ctx.metrics_cache.read().await.clone() { None => Err(Error { source: color_eyre::eyre::eyre!("No Metrics Found"), diff --git a/core/startos/src/update/mod.rs b/core/startos/src/update/mod.rs index 4ce57a8d17..26bf0357db 100644 --- a/core/startos/src/update/mod.rs +++ b/core/startos/src/update/mod.rs @@ -1,23 +1,24 @@ use std::path::PathBuf; use std::sync::atomic::{AtomicBool, Ordering}; -use clap::ArgMatches; +use clap::Parser; use color_eyre::eyre::{eyre, Result}; use emver::Version; use helpers::{Rsync, RsyncOptions}; use lazy_static::lazy_static; use reqwest::Url; use rpc_toolkit::command; +use serde::{Deserialize, Serialize}; use tokio::process::Command; use tokio_stream::StreamExt; use tracing::instrument; use crate::context::RpcContext; -use crate::db::model::UpdateProgress; +use crate::db::model::public::UpdateProgress; use crate::disk::mount::filesystem::bind::Bind; use crate::disk::mount::filesystem::ReadWrite; use crate::disk::mount::guard::MountGuard; -use crate::notifications::NotificationLevel; +use crate::notifications::{notify, NotificationLevel}; use crate::prelude::*; use crate::registry::marketplace::with_query_params; use crate::sound::{ @@ -33,17 +34,19 @@ lazy_static! { static ref UPDATED: AtomicBool = AtomicBool::new(false); } +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct UpdateSystemParams { + marketplace_url: Url, +} + /// A user or daemon calls this to update the system to the latest version and apply any available updates; /// it returns whether an update was found, in which case a restart will be required.
-#[command( - rename = "update", - display(display_update_result), - metadata(sync_db = true) -)] #[instrument(skip_all)] pub async fn update_system( - #[context] ctx: RpcContext, - #[arg(rename = "marketplace-url")] marketplace_url: Url, + ctx: RpcContext, + UpdateSystemParams { marketplace_url }: UpdateSystemParams, ) -> Result { if UPDATED.load(Ordering::SeqCst) { return Ok(UpdateResult::NoUpdates); @@ -57,13 +60,13 @@ pub async fn update_system( /// What is the status of the updates? #[derive(serde::Serialize, serde::Deserialize, Clone, Debug)] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum UpdateResult { NoUpdates, Updating, } -fn display_update_result(status: UpdateResult, _: &ArgMatches) { +pub fn display_update_result(_: UpdateSystemParams, status: UpdateResult) { match status { UpdateResult::Updating => { println!("Updating..."); @@ -90,7 +93,7 @@ async fn maybe_do_update(ctx: RpcContext, marketplace_url: Url) -> Result: Send { + async fn handle_with( + self: Box, + actor: &mut A, + jobs: &mut BackgroundJobs, + ) -> Box; +} +#[async_trait::async_trait] +impl Message for M +where + A: Handler, +{ + async fn handle_with( + self: Box, + actor: &mut A, + jobs: &mut BackgroundJobs, + ) -> Box { + Box::new(actor.handle(*self, jobs).await) + } +} + +type Request = (Box>, oneshot::Sender>); + +#[derive(Default)] +pub struct BackgroundJobs { + jobs: Vec>, +} +impl BackgroundJobs { + pub fn add_job(&mut self, fut: impl Future + Send + 'static) { + self.jobs.push(fut.boxed()); + } +} +impl Future for BackgroundJobs { + type Output = Never; + fn poll( + mut self: std::pin::Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + let complete = self + .jobs + .iter_mut() + .enumerate() + .filter_map(|(i, f)| match f.poll_unpin(cx) { + std::task::Poll::Pending => None, + std::task::Poll::Ready(_) => Some(i), + }) + .collect::>(); + for idx in complete.into_iter().rev() { + #[allow(clippy::let_underscore_future)] + let _ = self.jobs.swap_remove(idx); + } + std::task::Poll::Pending + } +} + +pub struct SimpleActor { + shutdown: oneshot::Sender<()>, + runtime: NonDetachingJoinHandle<()>, + messenger: mpsc::UnboundedSender>, +} +impl SimpleActor { + pub fn new(mut actor: A) -> Self { + let (shutdown_send, mut shutdown_recv) = oneshot::channel(); + let (messenger_send, mut messenger_recv) = mpsc::unbounded_channel::>(); + let runtime = NonDetachingJoinHandle::from(tokio::spawn(async move { + let mut bg = BackgroundJobs::default(); + actor.init(&mut bg); + loop { + tokio::select! { + _ = &mut bg => (), + msg = messenger_recv.recv() => match msg { + Some((msg, reply)) if shutdown_recv.try_recv() == Err(TryRecvError::Empty) => { + let mut new_bg = BackgroundJobs::default(); + tokio::select! 
{ + res = msg.handle_with(&mut actor, &mut new_bg) => { let _ = reply.send(res); }, + _ = &mut bg => (), + } + bg.jobs.append(&mut new_bg.jobs); + } + _ => break, + }, + } + } + })); + Self { + shutdown: shutdown_send, + runtime, + messenger: messenger_send, + } + } + + /// Message is guaranteed to be queued immediately + pub fn queue( + &self, + message: M, + ) -> impl Future> + where + A: Handler, + { + if self.runtime.is_finished() { + return futures::future::Either::Left(ready(Err(Error::new( + eyre!("actor runtime has exited"), + ErrorKind::Unknown, + )))); + } + let (reply_send, reply_recv) = oneshot::channel(); + self.messenger + .send((Box::new(message), reply_send)) + .unwrap(); + futures::future::Either::Right( + reply_recv + .map_err(|_| Error::new(eyre!("actor runtime has exited"), ErrorKind::Unknown)) + .and_then(|a| { + ready( + a.downcast() + .map_err(|_| { + Error::new( + eyre!("received incorrect type in response"), + ErrorKind::Incoherent, + ) + }) + .map(|a| *a), + ) + }), + ) + } + + pub async fn send(&self, message: M) -> Result + where + A: Handler, + { + self.queue(message).await + } + + pub async fn shutdown(self, strategy: PendingMessageStrategy) { + drop(self.messenger); + let timeout = match strategy { + PendingMessageStrategy::CancelAll => { + self.shutdown.send(()).unwrap(); + Some(Duration::from_secs(0)) + } + PendingMessageStrategy::FinishCurrentCancelPending { timeout } => { + self.shutdown.send(()).unwrap(); + timeout + } + PendingMessageStrategy::FinishAll { timeout } => timeout, + }; + let aborter = if let Some(timeout) = timeout { + let hdl = self.runtime.abort_handle(); + async move { + tokio::time::sleep(timeout).await; + hdl.abort(); + } + .boxed() + } else { + futures::future::pending().boxed() + }; + tokio::select! 
{ + _ = aborter => (), + _ = self.runtime => (), + } + } +} + +pub enum PendingMessageStrategy { + CancelAll, + FinishCurrentCancelPending { timeout: Option }, + FinishAll { timeout: Option }, +} diff --git a/core/startos/src/util/clap.rs b/core/startos/src/util/clap.rs new file mode 100644 index 0000000000..7c3b5a0bce --- /dev/null +++ b/core/startos/src/util/clap.rs @@ -0,0 +1,36 @@ +use std::marker::PhantomData; +use std::str::FromStr; + +use clap::builder::TypedValueParser; + +use crate::prelude::*; + +pub struct FromStrParser(PhantomData); +impl FromStrParser { + pub fn new() -> Self { + Self(PhantomData) + } +} +impl Clone for FromStrParser { + fn clone(&self) -> Self { + Self(PhantomData) + } +} +impl TypedValueParser for FromStrParser +where + T: FromStr + Clone + Send + Sync + 'static, + T::Err: std::fmt::Display, +{ + type Value = T; + fn parse_ref( + &self, + _: &clap::Command, + _: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> Result { + value + .to_string_lossy() + .parse() + .map_err(|e| clap::Error::raw(clap::error::ErrorKind::ValueValidation, e)) + } +} diff --git a/core/startos/src/util/config.rs b/core/startos/src/util/config.rs deleted file mode 100644 index f719f563f3..0000000000 --- a/core/startos/src/util/config.rs +++ /dev/null @@ -1,58 +0,0 @@ -use std::fs::File; -use std::path::{Path, PathBuf}; - -use patch_db::Value; -use serde::Deserialize; - -use crate::prelude::*; -use crate::util::serde::IoFormat; -use crate::{Config, Error}; - -pub const DEVICE_CONFIG_PATH: &str = "/media/embassy/config/config.yaml"; -pub const CONFIG_PATH: &str = "/etc/embassy/config.yaml"; -pub const CONFIG_PATH_LOCAL: &str = ".embassy/config.yaml"; - -pub fn local_config_path() -> Option { - if let Ok(home) = std::env::var("HOME") { - Some(Path::new(&home).join(CONFIG_PATH_LOCAL)) - } else { - None - } -} - -/// BLOCKING -pub fn load_config_from_paths<'a, T: for<'de> Deserialize<'de>>( - paths: impl IntoIterator>, -) -> Result { - let mut config = Default::default(); - for path in paths { - if path.as_ref().exists() { - let format: IoFormat = path - .as_ref() - .extension() - .and_then(|s| s.to_str()) - .map(|f| f.parse()) - .transpose()? 
- .unwrap_or_default(); - let new = format.from_reader(File::open(path)?)?; - config = merge_configs(config, new); - } - } - from_value(Value::Object(config)) -} - -pub fn merge_configs(mut first: Config, second: Config) -> Config { - for (k, v) in second.into_iter() { - let new = match first.remove(&k) { - None => v, - Some(old) => match (old, v) { - (Value::Object(first), Value::Object(second)) => { - Value::Object(merge_configs(first, second)) - } - (first, _) => first, - }, - }; - first.insert(k, new); - } - first -} diff --git a/core/startos/src/util/cpupower.rs b/core/startos/src/util/cpupower.rs index cc4ac5ef4b..db625f90e1 100644 --- a/core/startos/src/util/cpupower.rs +++ b/core/startos/src/util/cpupower.rs @@ -3,6 +3,7 @@ use std::collections::BTreeSet; use imbl::OrdMap; use tokio::process::Command; +use ts_rs::TS; use crate::prelude::*; use crate::util::Invoke; @@ -13,7 +14,10 @@ pub const GOVERNOR_HEIRARCHY: &[Governor] = &[ Governor(Cow::Borrowed("conservative")), ]; -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)] +#[derive( + Debug, Clone, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize, TS, +)] +#[ts(export, type = "string")] pub struct Governor(Cow<'static, str>); impl std::str::FromStr for Governor { type Err = std::convert::Infallible; diff --git a/core/startos/src/util/crypto.rs b/core/startos/src/util/crypto.rs index 5c1aed01ea..aaafe6536f 100644 --- a/core/startos/src/util/crypto.rs +++ b/core/startos/src/util/crypto.rs @@ -7,3 +7,119 @@ pub fn ed25519_expand_key(key: &SecretKey) -> [u8; EXPANDED_SECRET_KEY_LENGTH] { ) .to_bytes() } + +use aes::cipher::{CipherKey, NewCipher, Nonce, StreamCipher}; +use aes::Aes256Ctr; +use hmac::Hmac; +use josekit::jwk::Jwk; +use serde::{Deserialize, Serialize}; +use sha2::Sha256; +use tracing::instrument; + +pub fn pbkdf2(password: impl AsRef<[u8]>, salt: impl AsRef<[u8]>) -> CipherKey { + let mut aeskey = CipherKey::::default(); + pbkdf2::pbkdf2::>( + password.as_ref(), + salt.as_ref(), + 1000, + aeskey.as_mut_slice(), + ) + .unwrap(); + aeskey +} + +pub fn encrypt_slice(input: impl AsRef<[u8]>, password: impl AsRef<[u8]>) -> Vec { + let prefix: [u8; 32] = rand::random(); + let aeskey = pbkdf2(password.as_ref(), &prefix[16..]); + let ctr = Nonce::::from_slice(&prefix[..16]); + let mut aes = Aes256Ctr::new(&aeskey, ctr); + let mut res = Vec::with_capacity(32 + input.as_ref().len()); + res.extend_from_slice(&prefix[..]); + res.extend_from_slice(input.as_ref()); + aes.apply_keystream(&mut res[32..]); + res +} + +pub fn decrypt_slice(input: impl AsRef<[u8]>, password: impl AsRef<[u8]>) -> Vec { + if input.as_ref().len() < 32 { + return Vec::new(); + } + let (prefix, rest) = input.as_ref().split_at(32); + let aeskey = pbkdf2(password.as_ref(), &prefix[16..]); + let ctr = Nonce::::from_slice(&prefix[..16]); + let mut aes = Aes256Ctr::new(&aeskey, ctr); + let mut res = rest.to_vec(); + aes.apply_keystream(&mut res); + res +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct EncryptedWire { + encrypted: serde_json::Value, +} +impl EncryptedWire { + #[instrument(skip_all)] + pub fn decrypt(self, current_secret: impl AsRef) -> Option { + let current_secret = current_secret.as_ref(); + + let decrypter = match josekit::jwe::alg::ecdh_es::EcdhEsJweAlgorithm::EcdhEs + .decrypter_from_jwk(current_secret) + { + Ok(a) => a, + Err(e) => { + tracing::warn!("Could not setup awk"); + tracing::debug!("{:?}", e); + return None; + } + }; + let encrypted = match 
serde_json::to_string(&self.encrypted) { + Ok(a) => a, + Err(e) => { + tracing::warn!("Could not deserialize"); + tracing::debug!("{:?}", e); + + return None; + } + }; + let (decoded, _) = match josekit::jwe::deserialize_json(&encrypted, &decrypter) { + Ok(a) => a, + Err(e) => { + tracing::warn!("Could not decrypt"); + tracing::debug!("{:?}", e); + return None; + } + }; + match String::from_utf8(decoded) { + Ok(a) => Some(a), + Err(e) => { + tracing::warn!("Could not decrypt into utf8"); + tracing::debug!("{:?}", e); + return None; + } + } + } +} + +/// We created this test by first making the private key, then restoring from this private key for recreatability. +/// After this the frontend then encoded an password, then we are testing that the output that we got (hand coded) +/// will be the shape we want. +#[test] +fn test_gen_awk() { + let private_key: Jwk = serde_json::from_str( + r#"{ + "kty": "EC", + "crv": "P-256", + "d": "3P-MxbUJtEhdGGpBCRFXkUneGgdyz_DGZWfIAGSCHOU", + "x": "yHTDYSfjU809fkSv9MmN4wuojf5c3cnD7ZDN13n-jz4", + "y": "8Mpkn744A5KDag0DmX2YivB63srjbugYZzWc3JOpQXI" + }"#, + ) + .unwrap(); + let encrypted: EncryptedWire = serde_json::from_str(r#"{ + "encrypted": { "protected": "eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiRUNESC1FUyIsImtpZCI6ImgtZnNXUVh2Tm95dmJEazM5dUNsQ0NUdWc5N3MyZnJockJnWUVBUWVtclUiLCJlcGsiOnsia3R5IjoiRUMiLCJjcnYiOiJQLTI1NiIsIngiOiJmRkF0LXNWYWU2aGNkdWZJeUlmVVdUd3ZvWExaTkdKRHZIWVhIckxwOXNNIiwieSI6IjFvVFN6b00teHlFZC1SLUlBaUFHdXgzS1dJZmNYZHRMQ0JHLUh6MVkzY2sifX0", "iv": "NbwvfvWOdLpZfYRIZUrkcw", "ciphertext": "Zc5Br5kYOlhPkIjQKOLMJw", "tag": "EPoch52lDuCsbUUulzZGfg" } + }"#).unwrap(); + assert_eq!( + "testing12345", + &encrypted.decrypt(std::sync::Arc::new(private_key)).unwrap() + ); +} diff --git a/core/startos/src/util/docker.rs b/core/startos/src/util/docker.rs deleted file mode 100644 index fb6bc15f44..0000000000 --- a/core/startos/src/util/docker.rs +++ /dev/null @@ -1,239 +0,0 @@ -use std::net::Ipv4Addr; -use std::time::Duration; - -use models::{Error, ErrorKind, PackageId, ResultExt, Version}; -use nix::sys::signal::Signal; -use tokio::process::Command; - -use crate::util::Invoke; - -#[cfg(feature = "docker")] -pub const CONTAINER_TOOL: &str = "docker"; -#[cfg(not(feature = "docker"))] -pub const CONTAINER_TOOL: &str = "podman"; - -#[cfg(feature = "docker")] -pub const CONTAINER_DATADIR: &str = "/var/lib/docker"; -#[cfg(not(feature = "docker"))] -pub const CONTAINER_DATADIR: &str = "/var/lib/containers"; - -pub struct DockerImageSha(String); - -// docker images start9/${package}/*:${version} -q --no-trunc -pub async fn images_for( - package: &PackageId, - version: &Version, -) -> Result, Error> { - Ok(String::from_utf8( - Command::new(CONTAINER_TOOL) - .arg("images") - .arg(format!("start9/{package}/*:{version}")) - .arg("--no-trunc") - .arg("-q") - .invoke(ErrorKind::Docker) - .await?, - )? 
- .lines() - .map(|l| DockerImageSha(l.trim().to_owned())) - .collect()) -} - -// docker rmi -f ${sha} -pub async fn remove_image(sha: &DockerImageSha) -> Result<(), Error> { - match Command::new(CONTAINER_TOOL) - .arg("rmi") - .arg("-f") - .arg(&sha.0) - .invoke(ErrorKind::Docker) - .await - .map(|_| ()) - { - Err(e) - if e.source - .to_string() - .to_ascii_lowercase() - .contains("no such image") => - { - Ok(()) - } - a => a, - }?; - Ok(()) -} - -// docker image prune -f -pub async fn prune_images() -> Result<(), Error> { - Command::new(CONTAINER_TOOL) - .arg("image") - .arg("prune") - .arg("-f") - .invoke(ErrorKind::Docker) - .await?; - Ok(()) -} - -// docker container inspect ${name} --format '{{.NetworkSettings.Networks.start9.IPAddress}}' -pub async fn get_container_ip(name: &str) -> Result, Error> { - match Command::new(CONTAINER_TOOL) - .arg("container") - .arg("inspect") - .arg(name) - .arg("--format") - .arg("{{.NetworkSettings.Networks.start9.IPAddress}}") - .invoke(ErrorKind::Docker) - .await - { - Err(e) - if e.source - .to_string() - .to_ascii_lowercase() - .contains("no such container") => - { - Ok(None) - } - Err(e) => Err(e), - Ok(a) => { - let out = std::str::from_utf8(&a)?.trim(); - if out.is_empty() { - Ok(None) - } else { - Ok(Some({ - out.parse() - .with_ctx(|_| (ErrorKind::ParseNetAddress, out.to_string()))? - })) - } - } - } -} - -// docker stop -t ${timeout} -s ${signal} ${name} -pub async fn stop_container( - name: &str, - timeout: Option, - signal: Option, -) -> Result<(), Error> { - let mut cmd = Command::new(CONTAINER_TOOL); - cmd.arg("stop"); - if let Some(dur) = timeout { - cmd.arg("-t").arg(dur.as_secs().to_string()); - } - if let Some(sig) = signal { - cmd.arg("-s").arg(sig.to_string()); - } - cmd.arg(name); - match cmd.invoke(ErrorKind::Docker).await { - Ok(_) => Ok(()), - Err(mut e) - if e.source - .to_string() - .to_ascii_lowercase() - .contains("no such container") => - { - e.kind = ErrorKind::NotFound; - Err(e) - } - Err(e) => Err(e), - } -} - -// docker kill -s ${signal} ${name} -pub async fn kill_container(name: &str, signal: Option) -> Result<(), Error> { - let mut cmd = Command::new(CONTAINER_TOOL); - cmd.arg("kill"); - if let Some(sig) = signal { - cmd.arg("-s").arg(sig.to_string()); - } - cmd.arg(name); - match cmd.invoke(ErrorKind::Docker).await { - Ok(_) => Ok(()), - Err(mut e) - if e.source - .to_string() - .to_ascii_lowercase() - .contains("no such container") => - { - e.kind = ErrorKind::NotFound; - Err(e) - } - Err(e) => Err(e), - } -} - -// docker pause ${name} -pub async fn pause_container(name: &str) -> Result<(), Error> { - let mut cmd = Command::new(CONTAINER_TOOL); - cmd.arg("pause"); - cmd.arg(name); - match cmd.invoke(ErrorKind::Docker).await { - Ok(_) => Ok(()), - Err(mut e) - if e.source - .to_string() - .to_ascii_lowercase() - .contains("no such container") => - { - e.kind = ErrorKind::NotFound; - Err(e) - } - Err(e) => Err(e), - } -} - -// docker unpause ${name} -pub async fn unpause_container(name: &str) -> Result<(), Error> { - let mut cmd = Command::new(CONTAINER_TOOL); - cmd.arg("unpause"); - cmd.arg(name); - match cmd.invoke(ErrorKind::Docker).await { - Ok(_) => Ok(()), - Err(mut e) - if e.source - .to_string() - .to_ascii_lowercase() - .contains("no such container") => - { - e.kind = ErrorKind::NotFound; - Err(e) - } - Err(e) => Err(e), - } -} - -// docker rm -f ${name} -pub async fn remove_container(name: &str, force: bool) -> Result<(), Error> { - let mut cmd = Command::new(CONTAINER_TOOL); - cmd.arg("rm"); - if force { 
- cmd.arg("-f"); - } - cmd.arg(name); - match cmd.invoke(ErrorKind::Docker).await { - Ok(_) => Ok(()), - Err(e) - if e.source - .to_string() - .to_ascii_lowercase() - .contains("no such container") => - { - Ok(()) - } - Err(e) => Err(e), - } -} - -// docker network create -d bridge --subnet ${subnet} --opt com.podman.network.bridge.name=${bridge_name} -pub async fn create_bridge_network( - name: &str, - subnet: &str, - bridge_name: &str, -) -> Result<(), Error> { - let mut cmd = Command::new(CONTAINER_TOOL); - cmd.arg("network").arg("create"); - cmd.arg("-d").arg("bridge"); - cmd.arg("--subnet").arg(subnet); - cmd.arg("--opt") - .arg(format!("com.docker.network.bridge.name={bridge_name}")); - cmd.arg(name); - cmd.invoke(ErrorKind::Docker).await?; - Ok(()) -} diff --git a/core/startos/src/util/future.rs b/core/startos/src/util/future.rs new file mode 100644 index 0000000000..f40e847bf1 --- /dev/null +++ b/core/startos/src/util/future.rs @@ -0,0 +1,119 @@ +use std::pin::Pin; +use std::task::{Context, Poll}; + +use futures::future::abortable; +use futures::stream::{AbortHandle, Abortable}; +use futures::Future; +use tokio::sync::watch; + +#[pin_project::pin_project(PinnedDrop)] +pub struct DropSignaling { + #[pin] + fut: F, + on_drop: watch::Sender, +} +impl DropSignaling { + pub fn new(fut: F) -> Self { + Self { + fut, + on_drop: watch::channel(false).0, + } + } + pub fn subscribe(&self) -> DropHandle { + DropHandle(self.on_drop.subscribe()) + } +} +impl Future for DropSignaling +where + F: Future, +{ + type Output = F::Output; + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.project(); + this.fut.poll(cx) + } +} +#[pin_project::pinned_drop] +impl PinnedDrop for DropSignaling { + fn drop(self: Pin<&mut Self>) { + let _ = self.on_drop.send(true); + } +} + +#[derive(Clone)] +pub struct DropHandle(watch::Receiver); +impl DropHandle { + pub async fn wait(&mut self) { + let _ = self.0.wait_for(|a| *a).await; + } +} + +#[pin_project::pin_project] +pub struct RemoteCancellable { + #[pin] + fut: Abortable>, + on_drop: DropHandle, + handle: AbortHandle, +} +impl RemoteCancellable { + pub fn new(fut: F) -> Self { + let sig_fut = DropSignaling::new(fut); + let on_drop = sig_fut.subscribe(); + let (fut, handle) = abortable(sig_fut); + Self { + fut, + on_drop, + handle, + } + } +} +impl RemoteCancellable { + pub fn cancellation_handle(&self) -> CancellationHandle { + CancellationHandle { + on_drop: self.on_drop.clone(), + handle: self.handle.clone(), + } + } +} +impl Future for RemoteCancellable +where + F: Future, +{ + type Output = Option; + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.project(); + this.fut.poll(cx).map(|a| a.ok()) + } +} + +#[derive(Clone)] +pub struct CancellationHandle { + on_drop: DropHandle, + handle: AbortHandle, +} +impl CancellationHandle { + pub fn cancel(&mut self) { + self.handle.abort(); + } + + pub async fn cancel_and_wait(&mut self) { + self.handle.abort(); + self.on_drop.wait().await + } +} + +#[tokio::test] +async fn test_cancellable() { + use std::sync::Arc; + + let arc = Arc::new(()); + let weak = Arc::downgrade(&arc); + let cancellable = RemoteCancellable::new(async move { + futures::future::pending::<()>().await; + drop(arc) + }); + let mut handle = cancellable.cancellation_handle(); + tokio::spawn(cancellable); + handle.cancel_and_wait().await; + assert!(weak.strong_count() == 0); +} diff --git a/core/startos/src/util/http_reader.rs b/core/startos/src/util/http_reader.rs index 
87e8c114ea..02a9f57ae3 100644 --- a/core/startos/src/util/http_reader.rs +++ b/core/startos/src/util/http_reader.rs @@ -6,11 +6,11 @@ use std::io::Error as StdIOError; use std::pin::Pin; use std::task::{Context, Poll}; +use bytes::Bytes; use color_eyre::eyre::eyre; use futures::Stream; -use http::header::{ACCEPT_RANGES, CONTENT_LENGTH, RANGE}; -use hyper::body::Bytes; use pin_project::pin_project; +use reqwest::header::{ACCEPT_RANGES, CONTENT_LENGTH, RANGE}; use reqwest::{Client, Url}; use tokio::io::{AsyncRead, AsyncSeek}; @@ -359,22 +359,3 @@ async fn main_test() { assert_eq!(buf.len(), test_reader.total_bytes) } - -#[tokio::test] -#[ignore] -async fn s9pk_test() { - use tokio::io::BufReader; - - let http_url = Url::parse("http://qhc6ac47cytstejcepk2ia3ipadzjhlkc5qsktsbl4e7u2krfmfuaqqd.onion/content/files/2022/09/ghost.s9pk").unwrap(); - - println!("Getting this resource: {}", http_url); - let test_reader = - BufReader::with_capacity(1024 * 1024, HttpReader::new(http_url).await.unwrap()); - - let mut s9pk = crate::s9pk::reader::S9pkReader::from_reader(test_reader, false) - .await - .unwrap(); - - let manifest = s9pk.manifest().await.unwrap(); - assert_eq!(&manifest.id.to_string(), "ghost"); -} diff --git a/core/startos/src/util/io.rs b/core/startos/src/util/io.rs index 282a2db8e0..f5a951142d 100644 --- a/core/startos/src/util/io.rs +++ b/core/startos/src/util/io.rs @@ -1,7 +1,7 @@ use std::future::Future; use std::io::Cursor; use std::os::unix::prelude::MetadataExt; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::sync::atomic::AtomicU64; use std::task::Poll; use std::time::Duration; @@ -10,13 +10,14 @@ use futures::future::{BoxFuture, Fuse}; use futures::{AsyncSeek, FutureExt, TryStreamExt}; use helpers::NonDetachingJoinHandle; use nix::unistd::{Gid, Uid}; +use tokio::fs::File; use tokio::io::{ duplex, AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, DuplexStream, ReadBuf, WriteHalf, }; use tokio::net::TcpStream; use tokio::time::{Instant, Sleep}; -use crate::ResultExt; +use crate::prelude::*; pub trait AsyncReadSeek: AsyncRead + AsyncSeek {} impl AsyncReadSeek for T {} @@ -669,3 +670,77 @@ impl AsyncWrite for TimeoutStream { res } } + +pub struct TmpFile {} + +#[derive(Debug)] +pub struct TmpDir { + path: PathBuf, +} +impl TmpDir { + pub async fn new() -> Result { + let path = Path::new("/var/tmp/startos").join(base32::encode( + base32::Alphabet::RFC4648 { padding: false }, + &rand::random::<[u8; 8]>(), + )); + if tokio::fs::metadata(&path).await.is_ok() { + return Err(Error::new( + eyre!("{path:?} already exists"), + ErrorKind::Filesystem, + )); + } + tokio::fs::create_dir_all(&path).await?; + Ok(Self { path }) + } + + pub async fn delete(self) -> Result<(), Error> { + tokio::fs::remove_dir_all(&self.path).await?; + Ok(()) + } +} +impl std::ops::Deref for TmpDir { + type Target = Path; + fn deref(&self) -> &Self::Target { + &self.path + } +} +impl AsRef for TmpDir { + fn as_ref(&self) -> &Path { + &*self + } +} +impl Drop for TmpDir { + fn drop(&mut self) { + if self.path.exists() { + let path = std::mem::take(&mut self.path); + tokio::spawn(async move { + tokio::fs::remove_dir_all(&path).await.unwrap(); + }); + } + } +} + +pub async fn create_file(path: impl AsRef) -> Result { + let path = path.as_ref(); + if let Some(parent) = path.parent() { + tokio::fs::create_dir_all(parent) + .await + .with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("mkdir -p {parent:?}")))?; + } + File::create(path) + .await + .with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("create 
{path:?}"))) +} + +pub async fn rename(src: impl AsRef, dst: impl AsRef) -> Result<(), Error> { + let src = src.as_ref(); + let dst = dst.as_ref(); + if let Some(parent) = dst.parent() { + tokio::fs::create_dir_all(parent) + .await + .with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("mkdir -p {parent:?}")))?; + } + tokio::fs::rename(src, dst) + .await + .with_ctx(|_| (ErrorKind::Filesystem, lazy_format!("mv {src:?} -> {dst:?}"))) +} diff --git a/core/startos/src/util/lshw.rs b/core/startos/src/util/lshw.rs index dd260f6440..2a658a8c8f 100644 --- a/core/startos/src/util/lshw.rs +++ b/core/startos/src/util/lshw.rs @@ -8,7 +8,7 @@ const KNOWN_CLASSES: &[&str] = &["processor", "display"]; #[derive(Debug, Deserialize, Serialize)] #[serde(tag = "class")] -#[serde(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] pub enum LshwDevice { Processor(LshwProcessor), Display(LshwDisplay), diff --git a/core/startos/src/util/mod.rs b/core/startos/src/util/mod.rs index 34c05934b7..772a64a323 100644 --- a/core/startos/src/util/mod.rs +++ b/core/startos/src/util/mod.rs @@ -9,11 +9,11 @@ use std::task::{Context, Poll}; use std::time::Duration; use async_trait::async_trait; -use clap::ArgMatches; use color_eyre::eyre::{self, eyre}; use fd_lock_rs::FdLock; use helpers::canonicalize; pub use helpers::NonDetachingJoinHandle; +use imbl_value::InternedString; use lazy_static::lazy_static; pub use models::Version; use pin_project::pin_project; @@ -24,14 +24,16 @@ use tracing::instrument; use crate::shutdown::Shutdown; use crate::{Error, ErrorKind, ResultExt as _}; -pub mod config; +pub mod actor; +pub mod clap; pub mod cpupower; pub mod crypto; -pub mod docker; +pub mod future; pub mod http_reader; pub mod io; pub mod logger; pub mod lshw; +pub mod rpc_client; pub mod serde; #[derive(Clone, Copy, Debug, ::serde::Deserialize, ::serde::Serialize)] @@ -48,8 +50,12 @@ impl std::fmt::Display for Never { } } impl std::error::Error for Never {} +impl AsRef for Never { + fn as_ref(&self) -> &T { + match *self {} + } +} -#[async_trait::async_trait] pub trait Invoke<'a> { type Extended<'ext> where @@ -60,7 +66,10 @@ pub trait Invoke<'a> { &'ext mut self, input: Option<&'ext mut Input>, ) -> Self::Extended<'ext>; - async fn invoke(&mut self, error_kind: crate::ErrorKind) -> Result, Error>; + fn invoke( + &mut self, + error_kind: crate::ErrorKind, + ) -> impl Future, Error>> + Send; } pub struct ExtendedCommand<'a> { @@ -80,7 +89,6 @@ impl<'a> std::ops::DerefMut for ExtendedCommand<'a> { } } -#[async_trait::async_trait] impl<'a> Invoke<'a> for tokio::process::Command { type Extended<'ext> = ExtendedCommand<'ext> where @@ -118,7 +126,6 @@ impl<'a> Invoke<'a> for tokio::process::Command { } } -#[async_trait::async_trait] impl<'a> Invoke<'a> for ExtendedCommand<'a> { type Extended<'ext> = &'ext mut ExtendedCommand<'ext> where @@ -146,7 +153,7 @@ impl<'a> Invoke<'a> for ExtendedCommand<'a> { } self.cmd.stdout(Stdio::piped()); self.cmd.stderr(Stdio::piped()); - let mut child = self.cmd.spawn()?; + let mut child = self.cmd.spawn().with_kind(error_kind)?; if let (Some(mut stdin), Some(input)) = (child.stdin.take(), self.input.take()) { use tokio::io::AsyncWriteExt; tokio::io::copy(input, &mut stdin).await?; @@ -275,8 +282,6 @@ impl std::io::Write for FmtWriter { } } -pub fn display_none(_: T, _: &ArgMatches) {} - pub struct Container(RwLock>); impl Container { pub fn new(value: Option) -> Self { @@ -490,3 +495,13 @@ impl<'a, T> From<&'a T> for MaybeOwned<'a, T> { MaybeOwned::Borrowed(value) } } + +pub fn new_guid() -> 
InternedString { + use rand::RngCore; + let mut buf = [0; 40]; + rand::thread_rng().fill_bytes(&mut buf); + InternedString::intern(base32::encode( + base32::Alphabet::RFC4648 { padding: false }, + &buf, + )) +} diff --git a/core/helpers/src/rpc_client.rs b/core/startos/src/util/rpc_client.rs similarity index 69% rename from core/helpers/src/rpc_client.rs rename to core/startos/src/util/rpc_client.rs index bdb505b400..36fe0031a9 100644 --- a/core/helpers/src/rpc_client.rs +++ b/core/startos/src/util/rpc_client.rs @@ -5,17 +5,18 @@ use std::sync::{Arc, Weak}; use futures::future::BoxFuture; use futures::{FutureExt, TryFutureExt}; +use helpers::NonDetachingJoinHandle; use lazy_async_pool::Pool; use models::{Error, ErrorKind, ResultExt}; +use rpc_toolkit::yajrc::{self, Id, RpcError, RpcMethod, RpcRequest, RpcResponse}; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; use tokio::io::{AsyncBufReadExt, AsyncRead, AsyncWrite, AsyncWriteExt, BufReader}; use tokio::net::UnixStream; use tokio::runtime::Handle; -use tokio::sync::{oneshot, Mutex}; -use yajrc::{Id, RpcError, RpcMethod, RpcRequest, RpcResponse}; +use tokio::sync::{oneshot, Mutex, OnceCell}; -use crate::NonDetachingJoinHandle; +use crate::util::io::TmpDir; type DynWrite = Box; type ResponseMap = BTreeMap>>; @@ -24,7 +25,7 @@ const MAX_TRIES: u64 = 3; pub struct RpcClient { id: Arc, - _handler: NonDetachingJoinHandle<()>, + handler: NonDetachingJoinHandle<()>, writer: DynWrite, responses: Weak>, } @@ -42,11 +43,11 @@ impl RpcClient { let weak_responses = Arc::downgrade(&responses); RpcClient { id, - _handler: tokio::spawn(async move { + handler: tokio::spawn(async move { let mut lines = BufReader::new(reader).lines(); while let Some(line) = lines.next_line().await.transpose() { match line.map_err(Error::from).and_then(|l| { - serde_json::from_str::(&l) + serde_json::from_str::(dbg!(&l)) .with_kind(ErrorKind::Deserialization) }) { Ok(l) => { @@ -54,7 +55,7 @@ impl RpcClient { if let Some(res) = responses.lock().await.remove(&id) { if let Err(e) = res.send(l.result) { tracing::warn!( - "RpcClient Response for Unknown ID: {:?}", + "RpcClient Response after request aborted: {:?}", e ); } @@ -74,6 +75,14 @@ impl RpcClient { } } } + for (_, res) in std::mem::take(&mut *responses.lock().await) { + if let Err(e) = res.send(Err(RpcError { + data: Some("client disconnected before response received".into()), + ..yajrc::INTERNAL_ERROR + })) { + tracing::warn!("RpcClient Response after request aborted: {:?}", e); + } + } }) .into(), writer, @@ -105,10 +114,10 @@ impl RpcClient { let (send, recv) = oneshot::channel(); w.lock().await.insert(id.clone(), send); self.writer - .write_all((serde_json::to_string(&request)? + "\n").as_bytes()) + .write_all((dbg!(serde_json::to_string(&request))? 
+ "\n").as_bytes()) .await .map_err(|e| { - let mut err = yajrc::INTERNAL_ERROR.clone(); + let mut err = rpc_toolkit::yajrc::INTERNAL_ERROR.clone(); err.data = Some(json!(e.to_string())); err })?; @@ -123,14 +132,15 @@ impl RpcClient { } tracing::debug!( "Client has finished {:?}", - futures::poll!(&mut self._handler) + futures::poll!(&mut self.handler) ); - let mut err = yajrc::INTERNAL_ERROR.clone(); + let mut err = rpc_toolkit::yajrc::INTERNAL_ERROR.clone(); err.data = Some(json!("RpcClient thread has terminated")); Err(err) } } +#[derive(Clone)] pub struct UnixRpcClient { pool: Pool< RpcClient, @@ -141,18 +151,35 @@ pub struct UnixRpcClient { } impl UnixRpcClient { pub fn new(path: PathBuf) -> Self { + let tmpdir = Arc::new(OnceCell::new()); let rt = Handle::current(); let id = Arc::new(AtomicUsize::new(0)); Self { pool: Pool::new( 0, Box::new(move || { - let path = path.clone(); + let mut path = path.clone(); let id = id.clone(); - rt.spawn(async move { + let tmpdir = tmpdir.clone(); + NonDetachingJoinHandle::from(rt.spawn(async move { + if path.as_os_str().len() >= 108 + // libc::sockaddr_un.sun_path.len() + { + let new_path = tmpdir + .get_or_try_init(|| TmpDir::new()) + .await + .map_err(|e| { + std::io::Error::new(std::io::ErrorKind::Other, e.source) + })? + .join("link.sock"); + if tokio::fs::metadata(&new_path).await.is_err() { + tokio::fs::symlink(&path, &new_path).await?; + } + path = new_path; + } let (r, w) = UnixStream::connect(&path).await?.into_split(); Ok(RpcClient::new(w, r, id)) - }) + })) .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)) .and_then(|x| async move { x }) .boxed() @@ -173,15 +200,23 @@ impl UnixRpcClient { { let mut tries = 0; let res = loop { - tries += 1; let mut client = self.pool.clone().get().await?; + if client.handler.is_finished() { + client.destroy(); + continue; + } let res = client.request(method.clone(), params.clone()).await; match &res { - Err(e) if e.code == yajrc::INTERNAL_ERROR.code => { + Err(e) if e.code == rpc_toolkit::yajrc::INTERNAL_ERROR.code => { + let mut e = Error::from(e.clone()); + e.kind = ErrorKind::Filesystem; + tracing::error!("{e}"); + tracing::debug!("{e:?}"); client.destroy(); } _ => break res, } + tries += 1; if tries > MAX_TRIES { tracing::warn!("Max Tries exceeded"); break res; diff --git a/core/startos/src/util/serde.rs b/core/startos/src/util/serde.rs index 4a6f7551b8..2fa992abdb 100644 --- a/core/startos/src/util/serde.rs +++ b/core/startos/src/util/serde.rs @@ -1,15 +1,24 @@ +use std::any::TypeId; +use std::collections::VecDeque; use std::marker::PhantomData; use std::ops::Deref; -use std::process::exit; use std::str::FromStr; -use clap::ArgMatches; +use clap::builder::ValueParserFactory; +use clap::{ArgMatches, CommandFactory, FromArgMatches}; use color_eyre::eyre::eyre; +use imbl::OrdMap; +use openssl::pkey::{PKey, Private}; +use openssl::x509::{X509Ref, X509}; +use rpc_toolkit::{AnyContext, Handler, HandlerArgs, HandlerArgsFor, HandlerTypes, PrintCliResult}; +use serde::de::DeserializeOwned; use serde::ser::{SerializeMap, SerializeSeq}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_json::Value; +use ts_rs::TS; use super::IntoDoubleEndedIterator; +use crate::util::clap::FromStrParser; use crate::{Error, ResultExt}; pub fn deserialize_from_str< @@ -266,7 +275,7 @@ impl<'de> serde::de::Deserialize<'de> for ValuePrimative { } } -#[derive(Clone, Copy, Debug, Deserialize, Serialize)] +#[derive(Clone, Copy, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord)] 
#[serde(rename_all = "kebab-case")] pub enum IoFormat { Json, @@ -425,36 +434,208 @@ impl IoFormat { } } -pub fn display_serializable(t: T, matches: &ArgMatches) { - let format = match matches.value_of("format").map(|f| f.parse()) { - Some(Ok(f)) => f, - Some(Err(_)) => { - eprintln!("unrecognized formatter"); - exit(1) - } - None => IoFormat::default(), - }; +pub fn display_serializable(format: IoFormat, result: T) { format - .to_writer(std::io::stdout(), &t) - .expect("Error serializing result to stdout") + .to_writer(std::io::stdout(), &result) + .expect("Error serializing result to stdout"); + if format == IoFormat::JsonPretty { + println!() + } } -pub fn parse_stdin_deserializable Deserialize<'de>>( - stdin: &mut std::io::Stdin, - matches: &ArgMatches, -) -> Result { - let format = match matches.value_of("format").map(|f| f.parse()) { - Some(Ok(f)) => f, - Some(Err(_)) => { - eprintln!("unrecognized formatter"); - exit(1) +#[derive(Deserialize, Serialize)] +pub struct WithIoFormat { + pub format: Option, + #[serde(flatten)] + pub rest: T, +} +impl FromArgMatches for WithIoFormat { + fn from_arg_matches(matches: &ArgMatches) -> Result { + Ok(Self { + rest: T::from_arg_matches(matches)?, + format: matches.get_one("format").copied(), + }) + } + fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), clap::Error> { + self.rest.update_from_arg_matches(matches)?; + self.format = matches.get_one("format").copied(); + Ok(()) + } +} +impl CommandFactory for WithIoFormat { + fn command() -> clap::Command { + let cmd = T::command(); + if !cmd.get_arguments().any(|a| a.get_id() == "format") { + cmd.arg( + clap::Arg::new("format") + .long("format") + .value_parser(|s: &str| s.parse::().map_err(|e| eyre!("{e}"))), + ) + } else { + cmd } - None => IoFormat::default(), - }; - format.from_reader(stdin) + } + fn command_for_update() -> clap::Command { + let cmd = T::command_for_update(); + if !cmd.get_arguments().any(|a| a.get_id() == "format") { + cmd.arg( + clap::Arg::new("format") + .long("format") + .value_parser(|s: &str| s.parse::().map_err(|e| eyre!("{e}"))), + ) + } else { + cmd + } + } +} + +pub trait HandlerExtSerde: Handler { + fn with_display_serializable(self) -> DisplaySerializable; +} +impl HandlerExtSerde for T { + fn with_display_serializable(self) -> DisplaySerializable { + DisplaySerializable(self) + } +} + +#[derive(Debug, Clone)] +pub struct DisplaySerializable(pub T); +impl HandlerTypes for DisplaySerializable { + type Params = WithIoFormat; + type InheritedParams = T::InheritedParams; + type Ok = T::Ok; + type Err = T::Err; +} +#[async_trait::async_trait] +impl Handler for DisplaySerializable { + type Context = T::Context; + fn handle_sync( + &self, + HandlerArgs { + context, + parent_method, + method, + params, + inherited_params, + raw_params, + }: HandlerArgsFor, + ) -> Result { + self.0.handle_sync(HandlerArgs { + context, + parent_method, + method, + params: params.rest, + inherited_params, + raw_params, + }) + } + async fn handle_async( + &self, + HandlerArgs { + context, + parent_method, + method, + params, + inherited_params, + raw_params, + }: HandlerArgsFor, + ) -> Result { + self.0 + .handle_async(HandlerArgs { + context, + parent_method, + method, + params: params.rest, + inherited_params, + raw_params, + }) + .await + } + fn contexts(&self) -> Option> { + self.0.contexts() + } + fn metadata( + &self, + method: VecDeque<&'static str>, + ctx_ty: TypeId, + ) -> OrdMap<&'static str, imbl_value::Value> { + self.0.metadata(method, ctx_ty) + } + fn 
method_from_dots(&self, method: &str, ctx_ty: TypeId) -> Option> { + self.0.method_from_dots(method, ctx_ty) + } +} +impl PrintCliResult for DisplaySerializable +where + T::Ok: Serialize, +{ + type Context = AnyContext; + fn print( + &self, + HandlerArgs { params, .. }: HandlerArgsFor, + result: Self::Ok, + ) -> Result<(), Self::Err> { + display_serializable(params.format.unwrap_or_default(), result); + Ok(()) + } +} + +#[derive(Deserialize, Serialize)] +pub struct StdinDeserializable(pub T); +impl FromArgMatches for StdinDeserializable +where + T: DeserializeOwned, +{ + fn from_arg_matches(matches: &ArgMatches) -> Result { + let format = matches + .get_one::("format") + .copied() + .unwrap_or_default(); + Ok(Self(format.from_reader(&mut std::io::stdin()).map_err( + |e| clap::Error::raw(clap::error::ErrorKind::ValueValidation, e), + )?)) + } + fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> Result<(), clap::Error> { + let format = matches + .get_one::("format") + .copied() + .unwrap_or_default(); + self.0 = format + .from_reader(&mut std::io::stdin()) + .map_err(|e| clap::Error::raw(clap::error::ErrorKind::ValueValidation, e))?; + Ok(()) + } +} +impl clap::Args for StdinDeserializable +where + T: DeserializeOwned, +{ + fn augment_args(cmd: clap::Command) -> clap::Command { + if !cmd.get_arguments().any(|a| a.get_id() == "format") { + cmd.arg( + clap::Arg::new("format") + .long("format") + .value_parser(|s: &str| s.parse::().map_err(|e| eyre!("{e}"))), + ) + } else { + cmd + } + } + fn augment_args_for_update(cmd: clap::Command) -> clap::Command { + if !cmd.get_arguments().any(|a| a.get_id() == "format") { + cmd.arg( + clap::Arg::new("format") + .long("format") + .value_parser(|s: &str| s.parse::().map_err(|e| eyre!("{e}"))), + ) + } else { + cmd + } + } } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, TS)] +#[ts(export, type = "string")] pub struct Duration(std::time::Duration); impl Deref for Duration { type Target = std::time::Duration; @@ -518,6 +699,12 @@ impl std::str::FromStr for Duration { })) } } +impl ValueParserFactory for Duration { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} impl std::fmt::Display for Duration { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let nanos = self.as_nanos(); @@ -843,3 +1030,228 @@ impl Serialize for Regex { serialize_display(&self.0, serializer) } } + +// TODO: make this not allocate +#[derive(Debug)] +pub struct NoOutput; +impl<'de> Deserialize<'de> for NoOutput { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let _ = Value::deserialize(deserializer); + Ok(NoOutput) + } +} + +pub fn apply_expr(input: jaq_core::Val, expr: &str) -> Result { + let (expr, errs) = jaq_core::parse::parse(expr, jaq_core::parse::main()); + + let Some(expr) = expr else { + return Err(Error::new( + eyre!("Failed to parse expression: {:?}", errs), + crate::ErrorKind::InvalidRequest, + )); + }; + + let mut errs = Vec::new(); + + let mut defs = jaq_core::Definitions::core(); + for def in jaq_std::std() { + defs.insert(def, &mut errs); + } + + let filter = defs.finish(expr, Vec::new(), &mut errs); + + if !errs.is_empty() { + return Err(Error::new( + eyre!("Failed to compile expression: {:?}", errs), + crate::ErrorKind::InvalidRequest, + )); + }; + + let inputs = jaq_core::RcIter::new(std::iter::empty()); + let mut res_iter = filter.run(jaq_core::Ctx::new([], &inputs), input); + + let Some(res) = 
res_iter + .next() + .transpose() + .map_err(|e| eyre!("{e}")) + .with_kind(crate::ErrorKind::Deserialization)? + else { + return Err(Error::new( + eyre!("expr returned no results"), + crate::ErrorKind::InvalidRequest, + )); + }; + + if res_iter.next().is_some() { + return Err(Error::new( + eyre!("expr returned too many results"), + crate::ErrorKind::InvalidRequest, + )); + } + + Ok(res) +} + +pub trait PemEncoding: Sized { + fn from_pem(pem: &str) -> Result; + fn to_pem(&self) -> Result; +} + +impl PemEncoding for X509 { + fn from_pem(pem: &str) -> Result { + X509::from_pem(pem.as_bytes()).map_err(E::custom) + } + fn to_pem(&self) -> Result { + String::from_utf8((&**self).to_pem().map_err(E::custom)?).map_err(E::custom) + } +} + +impl PemEncoding for PKey { + fn from_pem(pem: &str) -> Result { + PKey::::private_key_from_pem(pem.as_bytes()).map_err(E::custom) + } + fn to_pem(&self) -> Result { + String::from_utf8((&**self).private_key_to_pem_pkcs8().map_err(E::custom)?) + .map_err(E::custom) + } +} + +impl PemEncoding for ssh_key::PrivateKey { + fn from_pem(pem: &str) -> Result { + ssh_key::PrivateKey::from_openssh(pem.as_bytes()).map_err(E::custom) + } + fn to_pem(&self) -> Result { + self.to_openssh(ssh_key::LineEnding::LF) + .map_err(E::custom) + .map(|s| (&*s).clone()) + } +} + +impl PemEncoding for ed25519_dalek::VerifyingKey { + fn from_pem(pem: &str) -> Result { + use ed25519_dalek::pkcs8::DecodePublicKey; + ed25519_dalek::VerifyingKey::from_public_key_pem(pem).map_err(E::custom) + } + fn to_pem(&self) -> Result { + use ed25519_dalek::pkcs8::EncodePublicKey; + self.to_public_key_pem(pkcs8::LineEnding::LF) + .map_err(E::custom) + } +} + +pub mod pem { + use serde::{Deserialize, Deserializer, Serializer}; + + use crate::util::serde::PemEncoding; + + pub fn serialize( + value: &T, + serializer: S, + ) -> Result { + serializer.serialize_str(&value.to_pem()?) + } + + pub fn deserialize<'de, T: PemEncoding, D: Deserializer<'de>>( + deserializer: D, + ) -> Result { + let pem = String::deserialize(deserializer)?; + Ok(T::from_pem(&pem)?) 
+ } +} + +#[repr(transparent)] +#[derive(Debug, Deserialize, Serialize)] +pub struct Pem(#[serde(with = "pem")] pub T); +impl Pem { + pub fn new(value: T) -> Self { + Pem(value) + } + pub fn new_ref(value: &T) -> &Self { + unsafe { std::mem::transmute(value) } + } + pub fn new_mut(value: &mut T) -> &mut Self { + unsafe { std::mem::transmute(value) } + } +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, TS)] +#[ts(export, type = "string | number[]")] +pub struct MaybeUtf8String(pub Vec); +impl std::fmt::Debug for MaybeUtf8String { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Ok(s) = std::str::from_utf8(&self.0) { + s.fmt(f) + } else { + self.0.fmt(f) + } + } +} +impl<'de> Deserialize<'de> for MaybeUtf8String { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct Visitor; + impl<'de> serde::de::Visitor<'de> for Visitor { + type Value = Vec; + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "a string or byte array") + } + fn visit_str(self, v: &str) -> Result + where + E: serde::de::Error, + { + Ok(v.as_bytes().to_owned()) + } + fn visit_string(self, v: String) -> Result + where + E: serde::de::Error, + { + Ok(v.into_bytes()) + } + fn visit_bytes(self, v: &[u8]) -> Result + where + E: serde::de::Error, + { + Ok(v.to_owned()) + } + fn visit_byte_buf(self, v: Vec) -> Result + where + E: serde::de::Error, + { + Ok(v) + } + fn visit_unit(self) -> Result + where + E: serde::de::Error, + { + Ok(Vec::new()) + } + fn visit_seq(self, mut seq: A) -> Result + where + A: serde::de::SeqAccess<'de>, + { + std::iter::repeat_with(|| seq.next_element::().transpose()) + .take_while(|a| a.is_some()) + .flatten() + .collect::, _>>() + } + } + deserializer.deserialize_any(Visitor).map(Self) + } +} +impl Serialize for MaybeUtf8String { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + if let Ok(s) = std::str::from_utf8(&self.0) { + serializer.serialize_str(s) + } else { + serializer.serialize_bytes(&self.0) + } + } +} diff --git a/core/startos/src/version/mod.rs b/core/startos/src/version/mod.rs index 4c6f157a57..8ff42522f2 100644 --- a/core/startos/src/version/mod.rs +++ b/core/startos/src/version/mod.rs @@ -1,33 +1,26 @@ use std::cmp::Ordering; -use async_trait::async_trait; use color_eyre::eyre::eyre; -use rpc_toolkit::command; -use sqlx::PgPool; +use futures::future::BoxFuture; +use futures::{Future, FutureExt}; +use imbl_value::InternedString; use crate::prelude::*; use crate::Error; -mod v0_3_4; -mod v0_3_4_1; -mod v0_3_4_2; -mod v0_3_4_3; -mod v0_3_4_4; mod v0_3_5; mod v0_3_5_1; +mod v0_3_6; -pub type Current = v0_3_5_1::Version; +pub type Current = v0_3_6::Version; #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] #[serde(untagged)] enum Version { - V0_3_4(Wrapper), - V0_3_4_1(Wrapper), - V0_3_4_2(Wrapper), - V0_3_4_3(Wrapper), - V0_3_4_4(Wrapper), + LT0_3_5(LTWrapper), V0_3_5(Wrapper), V0_3_5_1(Wrapper), + V0_3_6(Wrapper), Other(emver::Version), } @@ -43,19 +36,15 @@ impl Version { #[cfg(test)] fn as_sem_ver(&self) -> emver::Version { match self { - Version::V0_3_4(Wrapper(x)) => x.semver(), - Version::V0_3_4_1(Wrapper(x)) => x.semver(), - Version::V0_3_4_2(Wrapper(x)) => x.semver(), - Version::V0_3_4_3(Wrapper(x)) => x.semver(), - Version::V0_3_4_4(Wrapper(x)) => x.semver(), + Version::LT0_3_5(LTWrapper(_, x)) => x.clone(), Version::V0_3_5(Wrapper(x)) => x.semver(), Version::V0_3_5_1(Wrapper(x)) => x.semver(), + 
Version::V0_3_6(Wrapper(x)) => x.semver(), Version::Other(x) => x.clone(), } } } -#[async_trait] pub trait VersionT where Self: Sized + Send + Sync, @@ -64,82 +53,117 @@ where fn new() -> Self; fn semver(&self) -> emver::Version; fn compat(&self) -> &'static emver::VersionRange; - async fn up(&self, db: PatchDb, secrets: &PgPool) -> Result<(), Error>; - async fn down(&self, db: PatchDb, secrets: &PgPool) -> Result<(), Error>; - async fn commit(&self, db: PatchDb) -> Result<(), Error> { - let semver = self.semver().into(); - let compat = self.compat().clone(); - db.mutate(|d| { - d.as_server_info_mut().as_version_mut().ser(&semver)?; - d.as_server_info_mut() - .as_eos_version_compat_mut() - .ser(&compat)?; + fn up(&self, db: &PatchDb) -> impl Future> + Send; + fn down(&self, db: &PatchDb) -> impl Future> + Send; + fn commit(&self, db: &PatchDb) -> impl Future> + Send { + async { + let semver = self.semver().into(); + let compat = self.compat().clone(); + db.mutate(|d| { + d.as_public_mut() + .as_server_info_mut() + .as_version_mut() + .ser(&semver)?; + d.as_public_mut() + .as_server_info_mut() + .as_eos_version_compat_mut() + .ser(&compat)?; + Ok(()) + }) + .await?; Ok(()) - }) - .await?; - Ok(()) + } } - async fn migrate_to( + fn migrate_to( &self, version: &V, - db: PatchDb, - secrets: &PgPool, - ) -> Result<(), Error> { - match self.semver().cmp(&version.semver()) { - Ordering::Greater => self.rollback_to_unchecked(version, db, secrets).await, - Ordering::Less => version.migrate_from_unchecked(self, db, secrets).await, - Ordering::Equal => Ok(()), + db: &PatchDb, + ) -> impl Future> + Send { + async { + match self.semver().cmp(&version.semver()) { + Ordering::Greater => self.rollback_to_unchecked(version, db).await, + Ordering::Less => version.migrate_from_unchecked(self, db).await, + Ordering::Equal => Ok(()), + } } } - async fn migrate_from_unchecked( - &self, - version: &V, - db: PatchDb, - secrets: &PgPool, - ) -> Result<(), Error> { - let previous = Self::Previous::new(); - if version.semver() < previous.semver() { - previous - .migrate_from_unchecked(version, db.clone(), secrets) - .await?; - } else if version.semver() > previous.semver() { - return Err(Error::new( - eyre!( - "NO PATH FROM {}, THIS IS LIKELY A MISTAKE IN THE VERSION DEFINITION", - version.semver() - ), - crate::ErrorKind::MigrationFailed, - )); + fn migrate_from_unchecked<'a, V: VersionT>( + &'a self, + version: &'a V, + db: &'a PatchDb, + ) -> BoxFuture<'a, Result<(), Error>> { + async { + let previous = Self::Previous::new(); + if version.semver() < previous.semver() { + previous.migrate_from_unchecked(version, db).await?; + } else if version.semver() > previous.semver() { + return Err(Error::new( + eyre!( + "NO PATH FROM {}, THIS IS LIKELY A MISTAKE IN THE VERSION DEFINITION", + version.semver() + ), + crate::ErrorKind::MigrationFailed, + )); + } + tracing::info!("{} -> {}", previous.semver(), self.semver(),); + self.up(db).await?; + self.commit(db).await?; + Ok(()) } - tracing::info!("{} -> {}", previous.semver(), self.semver(),); - self.up(db.clone(), secrets).await?; - self.commit(db).await?; - Ok(()) + .boxed() } - async fn rollback_to_unchecked( - &self, - version: &V, - db: PatchDb, - secrets: &PgPool, - ) -> Result<(), Error> { - let previous = Self::Previous::new(); - tracing::info!("{} -> {}", self.semver(), previous.semver(),); - self.down(db.clone(), secrets).await?; - previous.commit(db.clone()).await?; - if version.semver() < previous.semver() { - previous.rollback_to_unchecked(version, db, 
secrets).await?; - } else if version.semver() > previous.semver() { - return Err(Error::new( - eyre!( - "NO PATH TO {}, THIS IS LIKELY A MISTAKE IN THE VERSION DEFINITION", - version.semver() - ), - crate::ErrorKind::MigrationFailed, - )); + fn rollback_to_unchecked<'a, V: VersionT>( + &'a self, + version: &'a V, + db: &'a PatchDb, + ) -> BoxFuture<'a, Result<(), Error>> { + async { + let previous = Self::Previous::new(); + tracing::info!("{} -> {}", self.semver(), previous.semver(),); + self.down(db).await?; + previous.commit(db).await?; + if version.semver() < previous.semver() { + previous.rollback_to_unchecked(version, db).await?; + } else if version.semver() > previous.semver() { + return Err(Error::new( + eyre!( + "NO PATH TO {}, THIS IS LIKELY A MISTAKE IN THE VERSION DEFINITION", + version.semver() + ), + crate::ErrorKind::MigrationFailed, + )); + } + Ok(()) } - Ok(()) + .boxed() } } + +#[derive(Debug, Clone)] +struct LTWrapper(T, emver::Version); +impl serde::Serialize for LTWrapper +where + T: VersionT, +{ + fn serialize(&self, serializer: S) -> Result { + self.0.semver().serialize(serializer) + } +} +impl<'de, T> serde::Deserialize<'de> for LTWrapper +where + T: VersionT, +{ + fn deserialize>(deserializer: D) -> Result { + let v = crate::util::Version::deserialize(deserializer)?; + let version = T::new(); + if *v < version.semver() { + Ok(Self(version, v.into_version())) + } else { + Err(serde::de::Error::custom("Mismatched Version")) + } + } +} + #[derive(Debug, Clone)] struct Wrapper(T); impl serde::Serialize for Wrapper @@ -165,17 +189,26 @@ where } } -pub async fn init(db: &PatchDb, secrets: &PgPool) -> Result<(), Error> { - let version = Version::from_util_version(db.peek().await.as_server_info().as_version().de()?); +pub async fn init(db: &PatchDb) -> Result<(), Error> { + let version = Version::from_util_version( + db.peek() + .await + .as_public() + .as_server_info() + .as_version() + .de()?, + ); match version { - Version::V0_3_4(v) => v.0.migrate_to(&Current::new(), db.clone(), secrets).await?, - Version::V0_3_4_1(v) => v.0.migrate_to(&Current::new(), db.clone(), secrets).await?, - Version::V0_3_4_2(v) => v.0.migrate_to(&Current::new(), db.clone(), secrets).await?, - Version::V0_3_4_3(v) => v.0.migrate_to(&Current::new(), db.clone(), secrets).await?, - Version::V0_3_4_4(v) => v.0.migrate_to(&Current::new(), db.clone(), secrets).await?, - Version::V0_3_5(v) => v.0.migrate_to(&Current::new(), db.clone(), secrets).await?, - Version::V0_3_5_1(v) => v.0.migrate_to(&Current::new(), db.clone(), secrets).await?, + Version::LT0_3_5(_) => { + return Err(Error::new( + eyre!("Cannot migrate from pre-0.3.5. 
Please update to v0.3.5 first."), + ErrorKind::MigrationFailed, + )); + } + Version::V0_3_5(v) => v.0.migrate_to(&Current::new(), &db).await?, + Version::V0_3_5_1(v) => v.0.migrate_to(&Current::new(), &db).await?, + Version::V0_3_6(v) => v.0.migrate_to(&Current::new(), &db).await?, Version::Other(_) => { return Err(Error::new( eyre!("Cannot downgrade"), @@ -189,9 +222,8 @@ pub async fn init(db: &PatchDb, secrets: &PgPool) -> Result<(), Error> { pub const COMMIT_HASH: &str = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/../../GIT_HASH.txt")); -#[command(rename = "git-info", local, metadata(authenticated = false))] -pub fn git_info() -> Result<&'static str, Error> { - Ok(COMMIT_HASH) +pub fn git_info() -> Result { + Ok(InternedString::intern(COMMIT_HASH)) } #[cfg(test)] @@ -208,11 +240,6 @@ mod tests { fn versions() -> impl Strategy { prop_oneof![ - Just(Version::V0_3_4(Wrapper(v0_3_4::Version::new()))), - Just(Version::V0_3_4_1(Wrapper(v0_3_4_1::Version::new()))), - Just(Version::V0_3_4_2(Wrapper(v0_3_4_2::Version::new()))), - Just(Version::V0_3_4_3(Wrapper(v0_3_4_3::Version::new()))), - Just(Version::V0_3_4_4(Wrapper(v0_3_4_4::Version::new()))), Just(Version::V0_3_5(Wrapper(v0_3_5::Version::new()))), Just(Version::V0_3_5_1(Wrapper(v0_3_5_1::Version::new()))), em_version().prop_map(Version::Other), diff --git a/core/startos/src/version/v0_3_4.rs b/core/startos/src/version/v0_3_4.rs deleted file mode 100644 index e33dcb931c..0000000000 --- a/core/startos/src/version/v0_3_4.rs +++ /dev/null @@ -1,135 +0,0 @@ -use async_trait::async_trait; -use emver::VersionRange; -use itertools::Itertools; -use openssl::hash::MessageDigest; -use serde_json::json; -use ssh_key::public::Ed25519PublicKey; - -use super::*; -use crate::account::AccountInfo; -use crate::hostname::{sync_hostname, Hostname}; -use crate::prelude::*; - -const V0_3_4: emver::Version = emver::Version::new(0, 3, 4, 0); - -lazy_static::lazy_static! 
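The reworked `VersionT` trait above drives every OS data migration by walking the `Previous` chain: `migrate_from_unchecked` recurses down to the version currently on disk, then applies each version's `up` and `commit` on the way back toward the target. A minimal TypeScript sketch of that walk, conceptual only, with hypothetical `Version`, `Db`, and `compare` stand-ins rather than the Rust types:

```ts
// Conceptual sketch of the recursion encoded by migrate_from_unchecked.
type Db = unknown

interface Version {
  semver: string
  previous: Version | null      // corresponds to the trait's `Previous` associated type
  up(db: Db): Promise<void>     // this version's forward migration
  commit(db: Db): Promise<void> // records the new version + compat range in the db
}

// naive ordering helper for the sketch only
const compare = (a: string, b: string) => a.localeCompare(b, undefined, { numeric: true })

async function migrateFrom(target: Version, source: Version, db: Db): Promise<void> {
  const previous = target.previous
  if (previous && compare(source.semver, previous.semver) < 0) {
    // the data on disk is older than the direct predecessor: bring it up to `previous` first
    await migrateFrom(previous, source, db)
  }
  // (the Rust code instead returns a MigrationFailed error if `source` is newer than `previous`)
  await target.up(db)
  await target.commit(db)
}
```

The `rollback_to_unchecked` path is the mirror image: it applies `down` and re-commits the previous version before recursing further back.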
{ - pub static ref V0_3_0_COMPAT: VersionRange = VersionRange::Conj( - Box::new(VersionRange::Anchor( - emver::GTE, - emver::Version::new(0, 3, 0, 0), - )), - Box::new(VersionRange::Anchor(emver::LTE, Current::new().semver())), - ); -} - -const COMMUNITY_URL: &str = "https://community-registry.start9.com/"; -const MAIN_REGISTRY: &str = "https://registry.start9.com/"; -const COMMUNITY_SERVICES: &[&str] = &[ - "ipfs", - "agora", - "lightning-jet", - "balanceofsatoshis", - "mastodon", - "lndg", - "robosats", - "thunderhub", - "syncthing", - "sphinx-relay", -]; - -#[derive(Clone, Debug)] -pub struct Version; - -#[async_trait] -impl VersionT for Version { - type Previous = Self; - fn new() -> Self { - Version - } - fn semver(&self) -> emver::Version { - V0_3_4 - } - fn compat(&self) -> &'static VersionRange { - &*V0_3_0_COMPAT - } - async fn up(&self, db: PatchDb, secrets: &PgPool) -> Result<(), Error> { - let mut account = AccountInfo::load(secrets).await?; - let account = db - .mutate(|d| { - d.as_server_info_mut().as_pubkey_mut().ser( - &ssh_key::PublicKey::from(Ed25519PublicKey::from(&account.key.ssh_key())) - .to_openssh()?, - )?; - d.as_server_info_mut().as_ca_fingerprint_mut().ser( - &account - .root_ca_cert - .digest(MessageDigest::sha256()) - .unwrap() - .iter() - .map(|x| format!("{x:X}")) - .join(":"), - )?; - let server_info = d.as_server_info(); - account.hostname = server_info.as_hostname().de().map(Hostname)?; - account.server_id = server_info.as_id().de()?; - - Ok(account) - }) - .await?; - account.save(secrets).await?; - sync_hostname(&account.hostname).await?; - - let parsed_url = Some(COMMUNITY_URL.parse().unwrap()); - db.mutate(|d| { - let mut ui = d.as_ui().de()?; - use imbl_value::json; - ui["marketplace"]["known-hosts"][COMMUNITY_URL] = json!({}); - ui["marketplace"]["known-hosts"][MAIN_REGISTRY] = json!({}); - for package_id in d.as_package_data().keys()? { - if !COMMUNITY_SERVICES.contains(&&*package_id.to_string()) { - continue; - } - d.as_package_data_mut() - .as_idx_mut(&package_id) - .or_not_found(&package_id)? - .as_installed_mut() - .or_not_found(&package_id)? - .as_marketplace_url_mut() - .ser(&parsed_url)?; - } - ui["theme"] = json!("Dark".to_string()); - ui["widgets"] = json!([]); - - d.as_ui_mut().ser(&ui) - }) - .await - } - async fn down(&self, db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - db.mutate(|d| { - let mut ui = d.as_ui().de()?; - let parsed_url = Some(MAIN_REGISTRY.parse().unwrap()); - for package_id in d.as_package_data().keys()? { - if !COMMUNITY_SERVICES.contains(&&*package_id.to_string()) { - continue; - } - d.as_package_data_mut() - .as_idx_mut(&package_id) - .or_not_found(&package_id)? - .as_installed_mut() - .or_not_found(&package_id)? 
- .as_marketplace_url_mut() - .ser(&parsed_url)?; - } - - if let imbl_value::Value::Object(ref mut obj) = ui { - obj.remove("theme"); - obj.remove("widgets"); - } - - ui["marketplace"]["known-hosts"][COMMUNITY_URL].take(); - ui["marketplace"]["known-hosts"][MAIN_REGISTRY].take(); - d.as_ui_mut().ser(&ui) - }) - .await - } -} diff --git a/core/startos/src/version/v0_3_4_1.rs b/core/startos/src/version/v0_3_4_1.rs deleted file mode 100644 index 915a47235b..0000000000 --- a/core/startos/src/version/v0_3_4_1.rs +++ /dev/null @@ -1,31 +0,0 @@ -use async_trait::async_trait; -use emver::VersionRange; - -use super::v0_3_4::V0_3_0_COMPAT; -use super::*; -use crate::prelude::*; - -const V0_3_4_1: emver::Version = emver::Version::new(0, 3, 4, 1); - -#[derive(Clone, Debug)] -pub struct Version; - -#[async_trait] -impl VersionT for Version { - type Previous = v0_3_4::Version; - fn new() -> Self { - Version - } - fn semver(&self) -> emver::Version { - V0_3_4_1 - } - fn compat(&self) -> &'static VersionRange { - &*V0_3_0_COMPAT - } - async fn up(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - Ok(()) - } - async fn down(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - Ok(()) - } -} diff --git a/core/startos/src/version/v0_3_4_2.rs b/core/startos/src/version/v0_3_4_2.rs deleted file mode 100644 index 5931b2879a..0000000000 --- a/core/startos/src/version/v0_3_4_2.rs +++ /dev/null @@ -1,31 +0,0 @@ -use async_trait::async_trait; -use emver::VersionRange; - -use super::v0_3_4::V0_3_0_COMPAT; -use super::*; -use crate::prelude::*; - -const V0_3_4_2: emver::Version = emver::Version::new(0, 3, 4, 2); - -#[derive(Clone, Debug)] -pub struct Version; - -#[async_trait] -impl VersionT for Version { - type Previous = v0_3_4_1::Version; - fn new() -> Self { - Version - } - fn semver(&self) -> emver::Version { - V0_3_4_2 - } - fn compat(&self) -> &'static VersionRange { - &*V0_3_0_COMPAT - } - async fn up(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - Ok(()) - } - async fn down(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - Ok(()) - } -} diff --git a/core/startos/src/version/v0_3_4_3.rs b/core/startos/src/version/v0_3_4_3.rs deleted file mode 100644 index d3199e9132..0000000000 --- a/core/startos/src/version/v0_3_4_3.rs +++ /dev/null @@ -1,31 +0,0 @@ -use async_trait::async_trait; -use emver::VersionRange; - -use super::v0_3_4::V0_3_0_COMPAT; -use super::*; -use crate::prelude::*; - -const V0_3_4_3: emver::Version = emver::Version::new(0, 3, 4, 3); - -#[derive(Clone, Debug)] -pub struct Version; - -#[async_trait] -impl VersionT for Version { - type Previous = v0_3_4_2::Version; - fn new() -> Self { - Version - } - fn semver(&self) -> emver::Version { - V0_3_4_3 - } - fn compat(&self) -> &'static VersionRange { - &V0_3_0_COMPAT - } - async fn up(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - Ok(()) - } - async fn down(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - Ok(()) - } -} diff --git a/core/startos/src/version/v0_3_4_4.rs b/core/startos/src/version/v0_3_4_4.rs deleted file mode 100644 index b6345ca4ca..0000000000 --- a/core/startos/src/version/v0_3_4_4.rs +++ /dev/null @@ -1,43 +0,0 @@ -use async_trait::async_trait; -use emver::VersionRange; -use models::ResultExt; -use sqlx::PgPool; - -use super::v0_3_4::V0_3_0_COMPAT; -use super::{v0_3_4_3, VersionT}; -use crate::prelude::*; - -const V0_3_4_4: emver::Version = emver::Version::new(0, 3, 4, 4); - -#[derive(Clone, Debug)] -pub struct Version; - -#[async_trait] 
-impl VersionT for Version { - type Previous = v0_3_4_3::Version; - fn new() -> Self { - Version - } - fn semver(&self) -> emver::Version { - V0_3_4_4 - } - fn compat(&self) -> &'static VersionRange { - &V0_3_0_COMPAT - } - async fn up(&self, db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - db.mutate(|v| { - let tor_address_lens = v.as_server_info_mut().as_tor_address_mut(); - let mut tor_addr = tor_address_lens.de()?; - tor_addr - .set_scheme("https") - .map_err(|_| eyre!("unable to update url scheme to https")) - .with_kind(crate::ErrorKind::ParseUrl)?; - tor_address_lens.ser(&tor_addr) - }) - .await?; - Ok(()) - } - async fn down(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - Ok(()) - } -} diff --git a/core/startos/src/version/v0_3_5.rs b/core/startos/src/version/v0_3_5.rs index ba28cd4682..e5634d2d90 100644 --- a/core/startos/src/version/v0_3_5.rs +++ b/core/startos/src/version/v0_3_5.rs @@ -1,23 +1,26 @@ -use std::collections::BTreeMap; -use std::path::Path; - -use async_trait::async_trait; use emver::VersionRange; -use models::DataUrl; -use sqlx::PgPool; -use super::v0_3_4::V0_3_0_COMPAT; -use super::{v0_3_4_4, VersionT}; +use super::VersionT; use crate::prelude::*; +use crate::version::Current; + +lazy_static::lazy_static! { + pub static ref V0_3_0_COMPAT: VersionRange = VersionRange::Conj( + Box::new(VersionRange::Anchor( + emver::GTE, + emver::Version::new(0, 3, 0, 0), + )), + Box::new(VersionRange::Anchor(emver::LTE, Current::new().semver())), + ); +} const V0_3_5: emver::Version = emver::Version::new(0, 3, 5, 0); #[derive(Clone, Debug)] pub struct Version; -#[async_trait] impl VersionT for Version { - type Previous = v0_3_4_4::Version; + type Previous = Self; fn new() -> Self { Version } @@ -27,83 +30,10 @@ impl VersionT for Version { fn compat(&self) -> &'static VersionRange { &V0_3_0_COMPAT } - async fn up(&self, db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { - let peek = db.peek().await; - let mut url_replacements = BTreeMap::new(); - for (_, pde) in peek.as_package_data().as_entries()? { - for (dependency, info) in pde - .as_installed() - .map(|i| i.as_dependency_info().as_entries()) - .transpose()? - .into_iter() - .flatten() - { - if !url_replacements.contains_key(&dependency) { - url_replacements.insert( - dependency, - DataUrl::from_path( - <&Value>::from(info.as_icon()) - .as_str() - .and_then(|i| i.strip_prefix("/public/package-data/")) - .map(|path| { - Path::new("/embassy-data/package-data/public").join(path) - }) - .unwrap_or_default(), - ) - .await - .unwrap_or_else(|_| { - DataUrl::from_slice( - "image/png", - include_bytes!("../install/package-icon.png"), - ) - }), - ); - } - } - } - let prev_zram = db - .mutate(|v| { - for (_, pde) in v.as_package_data_mut().as_entries_mut()? { - for (dependency, info) in pde - .as_installed_mut() - .map(|i| i.as_dependency_info_mut().as_entries_mut()) - .transpose()? 
- .into_iter() - .flatten() - { - if let Some(url) = url_replacements.get(&dependency) { - info.as_icon_mut().ser(url)?; - } else { - info.as_icon_mut().ser(&DataUrl::from_slice( - "image/png", - include_bytes!("../install/package-icon.png"), - ))?; - } - let manifest = <&mut Value>::from(&mut *info) - .as_object_mut() - .and_then(|o| o.remove("manifest")); - if let Some(title) = manifest - .as_ref() - .and_then(|m| m.as_object()) - .and_then(|m| m.get("title")) - .and_then(|t| t.as_str()) - .map(|s| s.to_owned()) - { - info.as_title_mut().ser(&title)?; - } else { - info.as_title_mut().ser(&dependency.to_string())?; - } - } - } - v.as_server_info_mut().as_zram_mut().replace(&true) - }) - .await?; - if !prev_zram { - crate::system::enable_zram().await?; - } + async fn up(&self, _db: &PatchDb) -> Result<(), Error> { Ok(()) } - async fn down(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { + async fn down(&self, _db: &PatchDb) -> Result<(), Error> { Ok(()) } } diff --git a/core/startos/src/version/v0_3_5_1.rs b/core/startos/src/version/v0_3_5_1.rs index c004dc8b6e..27e77c8ff3 100644 --- a/core/startos/src/version/v0_3_5_1.rs +++ b/core/startos/src/version/v0_3_5_1.rs @@ -1,8 +1,6 @@ -use async_trait::async_trait; use emver::VersionRange; -use sqlx::PgPool; -use super::v0_3_4::V0_3_0_COMPAT; +use super::v0_3_5::V0_3_0_COMPAT; use super::{v0_3_5, VersionT}; use crate::prelude::*; @@ -11,7 +9,6 @@ const V0_3_5_1: emver::Version = emver::Version::new(0, 3, 5, 1); #[derive(Clone, Debug)] pub struct Version; -#[async_trait] impl VersionT for Version { type Previous = v0_3_5::Version; fn new() -> Self { @@ -23,10 +20,10 @@ impl VersionT for Version { fn compat(&self) -> &'static VersionRange { &V0_3_0_COMPAT } - async fn up(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { + async fn up(&self, _db: &PatchDb) -> Result<(), Error> { Ok(()) } - async fn down(&self, _db: PatchDb, _secrets: &PgPool) -> Result<(), Error> { + async fn down(&self, _db: &PatchDb) -> Result<(), Error> { Ok(()) } } diff --git a/core/startos/src/version/v0_3_6.rs b/core/startos/src/version/v0_3_6.rs new file mode 100644 index 0000000000..b75e28b274 --- /dev/null +++ b/core/startos/src/version/v0_3_6.rs @@ -0,0 +1,29 @@ +use emver::VersionRange; + +use super::v0_3_5::V0_3_0_COMPAT; +use super::{v0_3_5_1, VersionT}; +use crate::prelude::*; + +const V0_3_6: emver::Version = emver::Version::new(0, 3, 6, 0); + +#[derive(Clone, Debug)] +pub struct Version; + +impl VersionT for Version { + type Previous = v0_3_5_1::Version; + fn new() -> Self { + Version + } + fn semver(&self) -> emver::Version { + V0_3_6 + } + fn compat(&self) -> &'static VersionRange { + &V0_3_0_COMPAT + } + async fn up(&self, _db: &PatchDb) -> Result<(), Error> { + Err(Error::new(eyre!("unimplemented"), ErrorKind::Unknown)) + } + async fn down(&self, _db: &PatchDb) -> Result<(), Error> { + Ok(()) + } +} diff --git a/core/startos/src/volume.rs b/core/startos/src/volume.rs index 1633b7d188..593422a678 100644 --- a/core/startos/src/volume.rs +++ b/core/startos/src/volume.rs @@ -1,95 +1,16 @@ -use std::collections::BTreeMap; -use std::ops::{Deref, DerefMut}; use std::path::{Path, PathBuf}; pub use helpers::script_dir; pub use models::VolumeId; -use serde::{Deserialize, Serialize}; -use tracing::instrument; +use models::{HostId, PackageId}; -use crate::context::RpcContext; -use crate::net::interface::{InterfaceId, Interfaces}; use crate::net::PACKAGE_CERT_PATH; use crate::prelude::*; -use crate::s9pk::manifest::PackageId; use 
crate::util::Version; -use crate::{Error, ResultExt}; pub const PKG_VOLUME_DIR: &str = "package-data/volumes"; pub const BACKUP_DIR: &str = "/media/embassy/backups"; -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -pub struct Volumes(BTreeMap); -impl Volumes { - #[instrument(skip_all)] - pub fn validate(&self, interfaces: &Interfaces) -> Result<(), Error> { - for (id, volume) in &self.0 { - volume - .validate(interfaces) - .with_ctx(|_| (crate::ErrorKind::ValidateS9pk, format!("Volume {}", id)))?; - if let Volume::Backup { .. } = volume { - return Err(Error::new( - eyre!("Invalid volume type \"backup\""), - ErrorKind::ParseS9pk, - )); // Volume::Backup is for internal use and shouldn't be declared in manifest - } - } - Ok(()) - } - #[instrument(skip_all)] - pub async fn install( - &self, - ctx: &RpcContext, - pkg_id: &PackageId, - version: &Version, - ) -> Result<(), Error> { - for (volume_id, volume) in &self.0 { - volume - .install(&ctx.datadir, pkg_id, version, volume_id) - .await?; // TODO: concurrent? - } - Ok(()) - } - pub fn get_path_for( - &self, - path: &PathBuf, - pkg_id: &PackageId, - version: &Version, - volume_id: &VolumeId, - ) -> Option { - self.0 - .get(volume_id) - .map(|volume| volume.path_for(path, pkg_id, version, volume_id)) - } - pub fn to_readonly(&self) -> Self { - Volumes( - self.0 - .iter() - .map(|(id, volume)| { - let mut volume = volume.clone(); - volume.set_readonly(); - (id.clone(), volume) - }) - .collect(), - ) - } -} -impl Deref for Volumes { - type Target = BTreeMap; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -impl DerefMut for Volumes { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} -impl Map for Volumes { - type Key = VolumeId; - type Value = Volume; -} - pub fn data_dir>(datadir: P, pkg_id: &PackageId, volume_id: &VolumeId) -> PathBuf { datadir .as_ref() @@ -112,129 +33,6 @@ pub fn backup_dir(pkg_id: &PackageId) -> PathBuf { Path::new(BACKUP_DIR).join(pkg_id).join("data") } -pub fn cert_dir(pkg_id: &PackageId, interface_id: &InterfaceId) -> PathBuf { - Path::new(PACKAGE_CERT_PATH).join(pkg_id).join(interface_id) -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(tag = "type")] -#[serde(rename_all = "kebab-case")] -pub enum Volume { - #[serde(rename_all = "kebab-case")] - Data { - #[serde(skip)] - readonly: bool, - }, - #[serde(rename_all = "kebab-case")] - Assets {}, - #[serde(rename_all = "kebab-case")] - Pointer { - package_id: PackageId, - volume_id: VolumeId, - path: PathBuf, - readonly: bool, - }, - #[serde(rename_all = "kebab-case")] - Certificate { interface_id: InterfaceId }, - #[serde(rename_all = "kebab-case")] - Backup { readonly: bool }, -} -impl Volume { - #[instrument(skip_all)] - pub fn validate(&self, interfaces: &Interfaces) -> Result<(), color_eyre::eyre::Report> { - match self { - Volume::Certificate { interface_id } => { - if !interfaces.0.contains_key(interface_id) { - color_eyre::eyre::bail!("unknown interface: {}", interface_id); - } - } - _ => (), - } - Ok(()) - } - pub async fn install( - &self, - path: &PathBuf, - pkg_id: &PackageId, - version: &Version, - volume_id: &VolumeId, - ) -> Result<(), Error> { - match self { - Volume::Data { .. } => { - tokio::fs::create_dir_all(self.path_for(path, pkg_id, version, volume_id)).await?; - } - _ => (), - } - Ok(()) - } - pub fn path_for( - &self, - data_dir_path: impl AsRef, - pkg_id: &PackageId, - version: &Version, - volume_id: &VolumeId, - ) -> PathBuf { - match self { - Volume::Data { .. 
} => data_dir(&data_dir_path, pkg_id, volume_id), - Volume::Assets {} => asset_dir(&data_dir_path, pkg_id, version).join(volume_id), - Volume::Pointer { - package_id, - volume_id, - path, - .. - } => data_dir(&data_dir_path, package_id, volume_id).join(if path.is_absolute() { - path.strip_prefix("/").unwrap() - } else { - path.as_ref() - }), - Volume::Certificate { interface_id } => cert_dir(pkg_id, &interface_id), - Volume::Backup { .. } => backup_dir(pkg_id), - } - } - - pub fn pointer_path(&self, data_dir_path: impl AsRef) -> Option { - if let Volume::Pointer { - path, - package_id, - volume_id, - .. - } = self - { - Some( - data_dir(data_dir_path.as_ref(), package_id, volume_id).join( - if path.is_absolute() { - path.strip_prefix("/").unwrap() - } else { - path.as_ref() - }, - ), - ) - } else { - None - } - } - - pub fn set_readonly(&mut self) { - match self { - Volume::Data { readonly } => { - *readonly = true; - } - Volume::Pointer { readonly, .. } => { - *readonly = true; - } - Volume::Backup { readonly } => { - *readonly = true; - } - _ => (), - } - } - pub fn readonly(&self) -> bool { - match self { - Volume::Data { readonly } => *readonly, - Volume::Assets {} => true, - Volume::Pointer { readonly, .. } => *readonly, - Volume::Certificate { .. } => true, - Volume::Backup { readonly } => *readonly, - } - } +pub fn cert_dir(pkg_id: &PackageId, host_id: &HostId) -> PathBuf { + Path::new(PACKAGE_CERT_PATH).join(pkg_id).join(host_id) } diff --git a/core/startos/startd.service b/core/startos/startd.service index 894298e548..56cf92e225 100644 --- a/core/startos/startd.service +++ b/core/startos/startd.service @@ -1,8 +1,5 @@ [Unit] Description=StartOS Daemon -After=network-online.target -Requires=network-online.target -Wants=avahi-daemon.service [Service] Type=simple diff --git a/debian/postinst b/debian/postinst index 6a65a749d8..731298af9f 100755 --- a/debian/postinst +++ b/debian/postinst @@ -121,3 +121,9 @@ rm -f /etc/motd ln -sf /usr/lib/startos/motd /etc/update-motd.d/00-embassy chmod -x /etc/update-motd.d/* chmod +x /etc/update-motd.d/00-embassy + +# LXC +echo "root:100000:65536" >>/etc/subuid +echo "root:100000:65536" >>/etc/subgid +echo "lxc.idmap = u 0 100000 65536" >>/etc/lxc/default.conf +echo "lxc.idmap = g 0 100000 65536" >>/etc/lxc/default.conf diff --git a/devmode.sh b/devmode.sh new file mode 100755 index 0000000000..19b0651de5 --- /dev/null +++ b/devmode.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +export ENVIRONMENT=dev +export GIT_BRANCH_AS_HASH=1 diff --git a/image-recipe/README.md b/image-recipe/README.md index cbaf8944a7..9eba047270 100644 --- a/image-recipe/README.md +++ b/image-recipe/README.md @@ -8,13 +8,9 @@ official StartOS images, you can use the `run-local-build.sh` helper script: ```bash # Prerequisites -sudo apt-get install -y debspawn +sudo apt-get install -y debspawn binfmt-support sudo mkdir -p /etc/debspawn/ && echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml -# Get dpkg -mkdir -p overlays/startos/root -wget -O overlays/startos/root/startos_0.3.x-1_amd64.deb - # Build image ./run-local-build.sh ``` diff --git a/image-recipe/build.sh b/image-recipe/build.sh index 836fc49edd..7aae39c7f5 100755 --- a/image-recipe/build.sh +++ b/image-recipe/build.sh @@ -18,10 +18,6 @@ echo "Saving results in: $RESULTS_DIR" IMAGE_BASENAME=startos-${VERSION_FULL}_${IB_TARGET_PLATFORM} -mkdir -p $prep_results_dir - -cd $prep_results_dir - QEMU_ARCH=${IB_TARGET_ARCH} BOOTLOADERS=grub-efi,syslinux if [ "$QEMU_ARCH" = 'amd64' ]; then @@ -30,6 +26,19 @@ elif [ 
"$QEMU_ARCH" = 'arm64' ]; then QEMU_ARCH=aarch64 BOOTLOADERS=grub-efi fi + +# TODO: remove when util-linux is released at v2.39 +cd $base_dir +git clone --depth=1 --branch=v2.39.3 https://github.com/util-linux/util-linux.git +cd util-linux +./autogen.sh +CC=$QEMU_ARCH-linux-gnu-gcc ./configure --host=$QEMU_ARCH-linux-gnu --disable-all-programs --enable-mount --enable-libmount --enable-libblkid --enable-libuuid --enable-static-programs +CC=$QEMU_ARCH-linux-gnu-gcc make -j mount.static + +mkdir -p $prep_results_dir + +cd $prep_results_dir + NON_FREE= if [[ "${IB_TARGET_PLATFORM}" =~ -nonfree$ ]] || [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then NON_FREE=1 @@ -64,6 +73,7 @@ elif [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then PLATFORM_CONFIG_EXTRAS="$PLATFORM_CONFIG_EXTRAS --linux-flavours rockchip64" fi + cat > /etc/wgetrc << EOF retry_connrefused = on tries = 100 @@ -91,6 +101,9 @@ lb config \ mkdir -p config/includes.chroot/deb cp $base_dir/deb/${IMAGE_BASENAME}.deb config/includes.chroot/deb/ +mkdir -p config/includes.chroot/usr/local/bin +cp $base_dir/util-linux/mount.static config/includes.chroot/usr/local/bin/mount.next + if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then cp -r $base_dir/raspberrypi/squashfs/* config/includes.chroot/ fi @@ -139,13 +152,11 @@ if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then echo "deb https://archive.raspberrypi.org/debian/ bullseye main" > config/archives/raspi.list fi -if [ "${IB_SUITE}" = "bullseye" ]; then - cat > config/archives/backports.pref <<- EOF - Package: * - Pin: release a=bullseye-backports - Pin-Priority: 500 - EOF -fi +cat > config/archives/backports.pref <<- EOF +Package: * +Pin: release a=stable-backports +Pin-Priority: 500 +EOF if [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then curl -fsSL https://apt.armbian.com/armbian.key | gpg --dearmor -o config/archives/armbian.key @@ -204,6 +215,10 @@ if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then update-initramfs -c -k \$v done ln -sf /usr/bin/pi-beep /usr/local/bin/beep + wget https://archive.raspberrypi.org/debian/pool/main/w/wireless-regdb/wireless-regdb_2018.05.09-0~rpt1_all.deb + echo 1b7b1076257726609535b71d146a5721622d19a0843061ee7568188e836dd10f wireless-regdb_2018.05.09-0~rpt1_all.deb | sha256sum -c + apt-get install -y --allow-downgrades ./wireless-regdb_2018.05.09-0~rpt1_all.deb + rm wireless-regdb_2018.05.09-0~rpt1_all.deb fi useradd --shell /bin/bash -G embassy -m start9 diff --git a/image-recipe/prepare.sh b/image-recipe/prepare.sh index 1c6779608c..8962d84485 100755 --- a/image-recipe/prepare.sh +++ b/image-recipe/prepare.sh @@ -22,3 +22,16 @@ apt-get install -yq \ e2fsprogs \ squashfs-tools \ rsync +# TODO: remove when util-linux is released at v2.39.3 +apt-get install -yq \ + git \ + build-essential \ + crossbuild-essential-arm64 \ + crossbuild-essential-amd64 \ + automake \ + autoconf \ + gettext \ + libtool \ + pkg-config \ + autopoint \ + bison \ No newline at end of file diff --git a/patch-db b/patch-db index 6af2221add..3dc11afd46 160000 --- a/patch-db +++ b/patch-db @@ -1 +1 @@ -Subproject commit 6af2221add56f0a557b37a268ef9fb2299a05255 +Subproject commit 3dc11afd46d93094ac52ae1fef311a91c4561e8c diff --git a/sdk/.gitignore b/sdk/.gitignore new file mode 100644 index 0000000000..a7ca92b2d8 --- /dev/null +++ b/sdk/.gitignore @@ -0,0 +1,5 @@ +.vscode +dist/ +node_modules/ +lib/coverage +lib/test/output.ts \ No newline at end of file diff --git a/sdk/LICENSE b/sdk/LICENSE new file mode 100644 index 0000000000..793257b965 --- /dev/null +++ b/sdk/LICENSE @@ -0,0 
+1,21 @@ +MIT License + +Copyright (c) 2022 Start9 Labs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/sdk/Makefile b/sdk/Makefile new file mode 100644 index 0000000000..8370650b88 --- /dev/null +++ b/sdk/Makefile @@ -0,0 +1,44 @@ +TS_FILES := $(shell find ./**/*.ts ) +version = $(shell git tag --sort=committerdate | tail -1) +test: $(TS_FILES) lib/test/output.ts + npm test + +clean: + rm -rf dist/* | true + +lib/test/output.ts: lib/test/makeOutput.ts scripts/oldSpecToBuilder.ts + npm run buildOutput + +buildOutput: lib/test/output.ts fmt + echo 'done' + + +bundle: $(TS_FILES) package.json .FORCE node_modules test fmt + npx tsc + npx tsc --project tsconfig-cjs.json + cp package.json dist/package.json + cp README.md dist/README.md + cp LICENSE dist/LICENSE + touch dist + +full-bundle: + make clean + make bundle + +check: + npm run check + +fmt: node_modules + npx prettier --write "**/*.ts" + +node_modules: package.json + npm install + +publish: clean bundle package.json README.md LICENSE + cd dist && npm publish --access=public +link: bundle + cp package.json dist/package.json + cp README.md dist/README.md + cp LICENSE dist/LICENSE + cd dist && npm link +.FORCE: diff --git a/sdk/README.md b/sdk/README.md new file mode 100644 index 0000000000..d51b25b589 --- /dev/null +++ b/sdk/README.md @@ -0,0 +1,18 @@ +# Start SDK + +## Config Conversion + +- Copy the old config json (from the getConfig.ts) +- Install the start-sdk with `npm i` +- paste the config into makeOutput.ts::oldSpecToBuilder (second param) +- Make the third param + +```ts + { + StartSdk: "start-sdk/lib", + } +``` + +- run the script `npm run buildOutput` to make the output.ts +- Copy this whole file into startos/procedures/config/spec.ts +- Fix all the TODO diff --git a/sdk/jest.config.js b/sdk/jest.config.js new file mode 100644 index 0000000000..c6aed8f3d2 --- /dev/null +++ b/sdk/jest.config.js @@ -0,0 +1,8 @@ +/** @type {import('ts-jest').JestConfigWithTsJest} */ +module.exports = { + preset: "ts-jest", + automock: false, + testEnvironment: "node", + rootDir: "./lib/", + modulePathIgnorePatterns: ["./dist/"], +}; diff --git a/sdk/lib/Dependency.ts b/sdk/lib/Dependency.ts new file mode 100644 index 0000000000..1e70629da2 --- /dev/null +++ b/sdk/lib/Dependency.ts @@ -0,0 +1,18 @@ +import { Checker } from "./emverLite/mod" + +export class Dependency { + constructor( + readonly data: + | { + type: "running" + versionSpec: Checker + registryUrl: string + healthChecks: string[] + } + | { + type: "exists" + versionSpec: Checker + registryUrl: 
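The conversion flow in `sdk/README.md` above turns an old-style `getConfig` spec into builder code against the new SDK. A rough before/after sketch; the spec entry is hypothetical, the import path is an assumption based on the README's third parameter, and the exact output of `scripts/oldSpecToBuilder.ts` may differ:

```ts
// Hypothetical illustration of what `npm run buildOutput` emits into lib/test/output.ts.
import { Config, Value } from "start-sdk/lib" // assumed import path, per the README's third param

// An old-style spec entry such as
//   "enable-feature": { "type": "boolean", "name": "Enable Feature", "default": false }
// comes out of the generator roughly as:
export const configSpec = Config.of({
  "enable-feature": Value.toggle({
    name: "Enable Feature",
    description: null,
    warning: null,
    default: false,
  }),
})
```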
string + }, + ) {} +} diff --git a/sdk/lib/StartSdk.ts b/sdk/lib/StartSdk.ts new file mode 100644 index 0000000000..43dc3ba5f4 --- /dev/null +++ b/sdk/lib/StartSdk.ts @@ -0,0 +1,774 @@ +import { ManifestVersion, SDKManifest } from "./manifest/ManifestTypes" +import { RequiredDefault, Value } from "./config/builder/value" +import { Config, ExtractConfigType, LazyBuild } from "./config/builder/config" +import { + DefaultString, + ListValueSpecText, + Pattern, + RandomString, + UniqueBy, + ValueSpecDatetime, + ValueSpecText, +} from "./config/configTypes" +import { Variants } from "./config/builder/variants" +import { CreatedAction, createAction } from "./actions/createAction" +import { + ActionMetadata, + Effects, + ActionResult, + BackupOptions, + DeepPartial, + MaybePromise, + ServiceInterfaceId, + PackageId, + ValidIfNoStupidEscape, +} from "./types" +import * as patterns from "./util/patterns" +import { DependencyConfig, Update } from "./dependencyConfig/DependencyConfig" +import { BackupSet, Backups } from "./backup/Backups" +import { smtpConfig } from "./config/configConstants" +import { Daemons } from "./mainFn/Daemons" +import { healthCheck } from "./health/HealthCheck" +import { checkPortListening } from "./health/checkFns/checkPortListening" +import { checkWebUrl, runHealthScript } from "./health/checkFns" +import { List } from "./config/builder/list" +import { Migration } from "./inits/migrations/Migration" +import { Install, InstallFn } from "./inits/setupInstall" +import { setupActions } from "./actions/setupActions" +import { setupDependencyConfig } from "./dependencyConfig/setupDependencyConfig" +import { SetupBackupsParams, setupBackups } from "./backup/setupBackups" +import { setupInit } from "./inits/setupInit" +import { + EnsureUniqueId, + Migrations, + setupMigrations, +} from "./inits/migrations/setupMigrations" +import { Uninstall, UninstallFn, setupUninstall } from "./inits/setupUninstall" +import { setupMain } from "./mainFn" +import { defaultTrigger } from "./trigger/defaultTrigger" +import { changeOnFirstSuccess, cooldownTrigger } from "./trigger" +import setupConfig, { + DependenciesReceipt, + Read, + Save, +} from "./config/setupConfig" +import { + InterfacesReceipt, + SetInterfaces, + setupInterfaces, +} from "./interfaces/setupInterfaces" +import { successFailure } from "./trigger/successFailure" +import { HealthReceipt } from "./health/HealthReceipt" +import { MultiHost, Scheme, SingleHost, StaticHost } from "./interfaces/Host" +import { ServiceInterfaceBuilder } from "./interfaces/ServiceInterfaceBuilder" +import { GetSystemSmtp } from "./util/GetSystemSmtp" +import nullIfEmpty from "./util/nullIfEmpty" +import { + GetServiceInterface, + getServiceInterface, +} from "./util/getServiceInterface" +import { getServiceInterfaces } from "./util/getServiceInterfaces" +import { getStore } from "./store/getStore" +import { CommandOptions, MountOptions, Overlay } from "./util/Overlay" +import { splitCommand } from "./util/splitCommand" +import { Mounts } from "./mainFn/Mounts" +import { Dependency } from "./Dependency" +import * as T from "./types" +import { Checker, EmVer } from "./emverLite/mod" +import { ExposedStorePaths } from "./store/setupExposeStore" +import { PathBuilder, extractJsonPath, pathBuilder } from "./store/PathBuilder" + +// prettier-ignore +type AnyNeverCond = + T extends [] ? Else : + T extends [never, ...Array] ? Then : + T extends [any, ...infer U] ? 
AnyNeverCond : + never + +export type ServiceInterfaceType = "ui" | "p2p" | "api" +export type MainEffects = Effects & { _type: "main" } +export type Signals = NodeJS.Signals +export const SIGTERM: Signals = "SIGTERM" +export const SIGKILL: Signals = "SIGTERM" +export const NO_TIMEOUT = -1 + +function removeConstType() { + return (t: T) => t as T & (E extends MainEffects ? {} : { const: never }) +} + +export class StartSdk { + private constructor(readonly manifest: Manifest) {} + static of() { + return new StartSdk(null as never) + } + withManifest(manifest: Manifest) { + return new StartSdk(manifest) + } + withStore>() { + return new StartSdk(this.manifest) + } + + build(isReady: AnyNeverCond<[Manifest, Store], "Build not ready", true>) { + type DependencyType = { + [K in keyof { + [K in keyof Manifest["dependencies"]]: Manifest["dependencies"][K]["optional"] extends false + ? K + : never + }]: Dependency + } & { + [K in keyof { + [K in keyof Manifest["dependencies"]]: Manifest["dependencies"][K]["optional"] extends true + ? K + : never + }]?: Dependency + } + + return { + serviceInterface: { + getOwn: (effects: E, id: ServiceInterfaceId) => + removeConstType()( + getServiceInterface(effects, { + id, + packageId: null, + }), + ), + get: ( + effects: E, + opts: { id: ServiceInterfaceId; packageId: PackageId }, + ) => removeConstType()(getServiceInterface(effects, opts)), + getAllOwn: (effects: E) => + removeConstType()( + getServiceInterfaces(effects, { + packageId: null, + }), + ), + getAll: ( + effects: E, + opts: { packageId: PackageId }, + ) => removeConstType()(getServiceInterfaces(effects, opts)), + }, + + store: { + get: ( + effects: E, + packageId: string, + path: PathBuilder, + ) => + removeConstType()( + getStore(effects, path, { + packageId, + }), + ), + getOwn: ( + effects: E, + path: PathBuilder, + ) => removeConstType()(getStore(effects, path)), + setOwn: >( + effects: E, + path: Path, + value: Path extends PathBuilder ? 
Value : never, + ) => + effects.store.set({ + value, + path: extractJsonPath(path), + }), + }, + + host: { + static: (effects: Effects, id: string) => + new StaticHost({ id, effects }), + single: (effects: Effects, id: string) => + new SingleHost({ id, effects }), + multi: (effects: Effects, id: string) => new MultiHost({ id, effects }), + }, + nullIfEmpty, + runCommand: async ( + effects: Effects, + imageId: Manifest["images"][number], + command: ValidIfNoStupidEscape | [string, ...string[]], + options: CommandOptions & { + mounts?: { path: string; options: MountOptions }[] + }, + ): Promise<{ stdout: string | Buffer; stderr: string | Buffer }> => { + return runCommand(effects, imageId, command, options) + }, + + createAction: < + ConfigType extends + | Record + | Config + | Config, + Type extends Record = ExtractConfigType, + >( + id: string, + metaData: Omit & { + input: Config | Config + }, + fn: (options: { + effects: Effects + input: Type + }) => Promise, + ) => { + const { input, ...rest } = metaData + return createAction( + id, + rest, + fn, + input, + ) + }, + configConstants: { smtpConfig }, + createInterface: ( + effects: Effects, + options: { + name: string + id: string + description: string + hasPrimary: boolean + disabled: boolean + type: ServiceInterfaceType + username: null | string + path: string + search: Record + schemeOverride: { ssl: Scheme; noSsl: Scheme } | null + masked: boolean + }, + ) => new ServiceInterfaceBuilder({ ...options, effects }), + getSystemSmtp: (effects: E) => + removeConstType()(new GetSystemSmtp(effects)), + + createDynamicAction: < + ConfigType extends + | Record + | Config + | Config, + Type extends Record = ExtractConfigType, + >( + id: string, + metaData: (options: { + effects: Effects + }) => MaybePromise>, + fn: (options: { + effects: Effects + input: Type + }) => Promise, + input: Config | Config, + ) => { + return createAction( + id, + metaData, + fn, + input, + ) + }, + HealthCheck: { + of: healthCheck, + }, + Dependency: { + of(data: Dependency["data"]) { + return new Dependency({ ...data }) + }, + }, + healthCheck: { + checkPortListening, + checkWebUrl, + runHealthScript, + }, + patterns, + setupActions: (...createdActions: CreatedAction[]) => + setupActions(...createdActions), + setupBackups: (...args: SetupBackupsParams) => + setupBackups(...args), + setupConfig: < + ConfigType extends Config | Config, + Type extends Record = ExtractConfigType, + >( + spec: ConfigType, + write: Save, + read: Read, + ) => setupConfig(spec, write, read), + setupConfigRead: < + ConfigSpec extends + | Config, any> + | Config, never>, + >( + _configSpec: ConfigSpec, + fn: Read, + ) => fn, + setupConfigSave: < + ConfigSpec extends + | Config, any> + | Config, never>, + >( + _configSpec: ConfigSpec, + fn: Save, + ) => fn, + setupDependencyConfig: >( + config: Config | Config, + autoConfigs: { + [K in keyof Manifest["dependencies"]]: DependencyConfig< + Manifest, + Store, + Input, + any + > | null + }, + ) => setupDependencyConfig(config, autoConfigs), + setupDependencies: >( + fn: (options: { + effects: Effects + input: Input | null + }) => Promise, + ) => { + return async (options: { effects: Effects; input: Input }) => { + const dependencyType = await fn(options) + return await options.effects.setDependencies({ + dependencies: Object.entries(dependencyType).map( + ([ + id, + { + data: { versionSpec, ...x }, + }, + ]) => ({ + id, + ...x, + ...(x.type === "running" + ? 
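`setupDependencies` above maps each declared dependency to an `effects.setDependencies` call, translating the `Dependency` wrapper from `sdk/lib/Dependency.ts` into `running`/`exists` kinds. A sketch of what a package might export, assuming `sdk` is the toolkit returned by `StartSdk.build()` (see the bootstrap sketch further down) and that `Checker.parse` accepts an emver range string; the dependency id, range, URL, and health check id are hypothetical:

```ts
import { sdk } from "./sdk" // hypothetical module exporting the package's StartSdk instance

export const setDependencies = sdk.setupDependencies(async ({ effects, input }) => ({
  bitcoind: sdk.Dependency.of({
    type: "running",
    versionSpec: sdk.Checker.parse(">=26.0.0"), // assumed: parses an emver range
    registryUrl: "https://registry.start9.com", // hypothetical registry
    healthChecks: ["rpc"],                      // hypothetical health check id
  }),
}))
```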
{ + kind: "running", + healthChecks: x.healthChecks, + } + : { + kind: "exists", + }), + versionSpec: versionSpec.range, + }), + ), + }) + } + }, + setupInit: ( + migrations: Migrations, + install: Install, + uninstall: Uninstall, + setInterfaces: SetInterfaces, + setDependencies: (options: { + effects: Effects + input: any + }) => Promise, + exposedStore: ExposedStorePaths, + ) => + setupInit( + migrations, + install, + uninstall, + setInterfaces, + setDependencies, + exposedStore, + ), + setupInstall: (fn: InstallFn) => Install.of(fn), + setupInterfaces: < + ConfigInput extends Record, + Output extends InterfacesReceipt, + >( + config: Config, + fn: SetInterfaces, + ) => setupInterfaces(config, fn), + setupMain: ( + fn: (o: { + effects: MainEffects + started(onTerm: () => PromiseLike): PromiseLike + }) => Promise>, + ) => setupMain(fn), + setupMigrations: < + Migrations extends Array>, + >( + ...migrations: EnsureUniqueId + ) => + setupMigrations( + this.manifest, + ...migrations, + ), + setupProperties: + ( + fn: (options: { effects: Effects }) => Promise, + ): T.ExpectedExports.Properties => + (options) => + fn(options).then(nullifyProperties), + setupUninstall: (fn: UninstallFn) => + setupUninstall(fn), + trigger: { + defaultTrigger, + cooldownTrigger, + changeOnFirstSuccess, + successFailure, + }, + Mounts: { + of() { + return Mounts.of() + }, + }, + Backups: { + volumes: ( + ...volumeNames: Array + ) => Backups.volumes(...volumeNames), + addSets: ( + ...options: BackupSet[] + ) => Backups.addSets(...options), + withOptions: (options?: Partial) => + Backups.with_options(options), + }, + Config: { + of: < + Spec extends Record | Value>, + >( + spec: Spec, + ) => Config.of(spec), + }, + Checker: { + parse: Checker.parse, + }, + Daemons: { + of(config: { + effects: Effects + started: (onTerm: () => PromiseLike) => PromiseLike + healthReceipts: HealthReceipt[] + }) { + return Daemons.of(config) + }, + }, + DependencyConfig: { + of< + LocalConfig extends Record, + RemoteConfig extends Record, + >({ + localConfigSpec, + remoteConfigSpec, + dependencyConfig, + update, + }: { + localConfigSpec: + | Config + | Config + remoteConfigSpec: + | Config + | Config + dependencyConfig: (options: { + effects: Effects + localConfig: LocalConfig + }) => Promise> + update?: Update, RemoteConfig> + }) { + return new DependencyConfig< + Manifest, + Store, + LocalConfig, + RemoteConfig + >(dependencyConfig, update) + }, + }, + EmVer: { + from: EmVer.from, + parse: EmVer.parse, + }, + List: { + text: List.text, + number: List.number, + obj: >( + a: { + name: string + description?: string | null + warning?: string | null + /** Default [] */ + default?: [] + minLength?: number | null + maxLength?: number | null + }, + aSpec: { + spec: Config + displayAs?: null | string + uniqueBy?: null | UniqueBy + }, + ) => List.obj(a, aSpec), + dynamicText: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + /** Default = [] */ + default?: string[] + minLength?: number | null + maxLength?: number | null + disabled?: false | string + generate?: null | RandomString + spec: { + /** Default = false */ + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns: Pattern[] + /** Default = "text" */ + inputmode?: ListValueSpecText["inputmode"] + } + } + >, + ) => List.dynamicText(getA), + dynamicNumber: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + /** Default = 
[] */ + default?: string[] + minLength?: number | null + maxLength?: number | null + disabled?: false | string + spec: { + integer: boolean + min?: number | null + max?: number | null + step?: number | null + units?: string | null + placeholder?: string | null + } + } + >, + ) => List.dynamicNumber(getA), + }, + Migration: { + of: (options: { + version: Version + up: (opts: { effects: Effects }) => Promise + down: (opts: { effects: Effects }) => Promise + }) => Migration.of(options), + }, + StorePath: pathBuilder(), + Value: { + toggle: Value.toggle, + text: Value.text, + textarea: Value.textarea, + number: Value.number, + color: Value.color, + datetime: Value.datetime, + select: Value.select, + multiselect: Value.multiselect, + object: Value.object, + union: Value.union, + list: Value.list, + dynamicToggle: ( + a: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + default: boolean + disabled?: false | string + } + >, + ) => Value.dynamicToggle(a), + dynamicText: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + + /** Default = false */ + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + /** Default = 'text' */ + inputmode?: ValueSpecText["inputmode"] + generate?: null | RandomString + } + >, + ) => Value.dynamicText(getA), + dynamicTextarea: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: boolean + minLength?: number | null + maxLength?: number | null + placeholder?: string | null + disabled?: false | string + generate?: null | RandomString + } + >, + ) => Value.dynamicTextarea(getA), + dynamicNumber: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + min?: number | null + max?: number | null + /** Default = '1' */ + step?: number | null + integer: boolean + units?: string | null + placeholder?: string | null + disabled?: false | string + } + >, + ) => Value.dynamicNumber(getA), + dynamicColor: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + + disabled?: false | string + } + >, + ) => Value.dynamicColor(getA), + dynamicDatetime: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + /** Default = 'datetime-local' */ + inputmode?: ValueSpecDatetime["inputmode"] + min?: string | null + max?: string | null + disabled?: false | string + } + >, + ) => Value.dynamicDatetime(getA), + dynamicSelect: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + values: Record + disabled?: false | string + } + >, + ) => Value.dynamicSelect(getA), + dynamicMultiselect: ( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + default: string[] + values: Record + minLength?: number | null + maxLength?: number | null + disabled?: false | string + } + >, + ) => Value.dynamicMultiselect(getA), + filteredUnion: < + Required extends RequiredDefault, + Type extends Record, + >( + getDisabledFn: LazyBuild, + a: { + name: string + description?: string | null + warning?: string | null + required: Required + }, + aVariants: Variants | Variants, + ) => + 
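`Migration.of` and `setupMigrations` above give packages a per-version hook analogous to the OS-level `VersionT` chain. A sketch under the assumption that `sdk` is the built toolkit and that `version` takes the package's manifest version string; the version number and the body of the hooks are hypothetical:

```ts
import { sdk } from "./sdk" // hypothetical module exporting the package's StartSdk instance

export const migrations = sdk.setupMigrations(
  sdk.Migration.of({
    version: "1.0.1", // hypothetical package version
    up: async ({ effects }) => {
      // assumed: adjust persisted values for the new package version here
    },
    down: async ({ effects }) => {
      // assumed: undo the change when rolling back
    },
  }),
)
```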
Value.filteredUnion( + getDisabledFn, + a, + aVariants, + ), + + dynamicUnion: < + Required extends RequiredDefault, + Type extends Record, + >( + getA: LazyBuild< + Store, + { + disabled: string[] | false | string + name: string + description?: string | null + warning?: string | null + required: Required + } + >, + aVariants: Variants | Variants, + ) => Value.dynamicUnion(getA, aVariants), + }, + Variants: { + of: < + VariantValues extends { + [K in string]: { + name: string + spec: Config + } + }, + >( + a: VariantValues, + ) => Variants.of(a), + }, + } + } +} + +export async function runCommand( + effects: Effects, + imageId: Manifest["images"][number], + command: string | [string, ...string[]], + options: CommandOptions & { + mounts?: { path: string; options: MountOptions }[] + }, +): Promise<{ stdout: string | Buffer; stderr: string | Buffer }> { + const commands = splitCommand(command) + const overlay = await Overlay.of(effects, imageId) + try { + for (let mount of options.mounts || []) { + await overlay.mount(mount.options, mount.path) + } + return await overlay.exec(commands) + } finally { + await overlay.destroy() + } +} +function nullifyProperties(value: T.SdkPropertiesReturn): T.PropertiesReturn { + return Object.fromEntries( + Object.entries(value).map(([k, v]) => [k, nullifyProperties_(v)]), + ) +} +function nullifyProperties_(value: T.SdkPropertiesValue): T.PropertiesValue { + if (value.type === "string") { + return { description: null, copyable: null, qr: null, ...value } + } + return { + description: null, + ...value, + value: Object.fromEntries( + Object.entries(value.value).map(([k, v]) => [k, nullifyProperties_(v)]), + ), + } +} diff --git a/sdk/lib/actions/createAction.ts b/sdk/lib/actions/createAction.ts new file mode 100644 index 0000000000..2fe4dfa747 --- /dev/null +++ b/sdk/lib/actions/createAction.ts @@ -0,0 +1,88 @@ +import { Config, ExtractConfigType } from "../config/builder/config" +import { SDKManifest } from "../manifest/ManifestTypes" +import { ActionMetadata, ActionResult, Effects, ExportedAction } from "../types" + +export type MaybeFn = + | Value + | ((options: { effects: Effects }) => Promise | Value) +export class CreatedAction< + Manifest extends SDKManifest, + Store, + ConfigType extends + | Record + | Config + | Config, + Type extends Record = ExtractConfigType, +> { + private constructor( + public readonly id: string, + public readonly myMetadata: MaybeFn< + Manifest, + Store, + Omit + >, + readonly fn: (options: { + effects: Effects + input: Type + }) => Promise, + readonly input: Config, + public validator = input.validator, + ) {} + + static of< + Manifest extends SDKManifest, + Store, + ConfigType extends + | Record + | Config + | Config, + Type extends Record = ExtractConfigType, + >( + id: string, + metadata: MaybeFn>, + fn: (options: { effects: Effects; input: Type }) => Promise, + inputConfig: Config | Config, + ) { + return new CreatedAction( + id, + metadata, + fn, + inputConfig as Config, + ) + } + + exportedAction: ExportedAction = ({ effects, input }) => { + return this.fn({ + effects, + input: this.validator.unsafeCast(input), + }) + } + + run = async ({ effects, input }: { effects: Effects; input?: Type }) => { + return this.fn({ + effects, + input: this.validator.unsafeCast(input), + }) + } + + async metadata(options: { effects: Effects }) { + if (this.myMetadata instanceof Function) + return await this.myMetadata(options) + return this.myMetadata + } + + async ActionMetadata(options: { effects: Effects }): Promise { + return { + 
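The `StartSdk` class above is the single entry point a package uses to obtain the typed toolkit (`sdk.Value`, `sdk.setupBackups`, `sdk.runCommand`, and so on). A minimal bootstrap sketch; the manifest module, store shape, and import path are assumptions:

```ts
import { StartSdk } from "start-sdk/lib/StartSdk" // assumed package path
import { manifest } from "./manifest"             // hypothetical SDKManifest for the package

export type Store = { lastRunAt: string | null }  // hypothetical persisted store shape

export const sdk = StartSdk.of()
  .withManifest(manifest)
  .withStore<Store>()
  .build(true) // `true` satisfies the AnyNeverCond check once Manifest and Store are both set
```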
...(await this.metadata(options)), + input: await this.input.build(options), + } + } + + async getConfig({ effects }: { effects: Effects }) { + return this.input.build({ + effects, + }) + } +} + +export const createAction = CreatedAction.of diff --git a/sdk/lib/actions/index.ts b/sdk/lib/actions/index.ts new file mode 100644 index 0000000000..603684b67b --- /dev/null +++ b/sdk/lib/actions/index.ts @@ -0,0 +1,3 @@ +import "./createAction" + +import "./setupActions" diff --git a/sdk/lib/actions/setupActions.ts b/sdk/lib/actions/setupActions.ts new file mode 100644 index 0000000000..9dd9937b4d --- /dev/null +++ b/sdk/lib/actions/setupActions.ts @@ -0,0 +1,29 @@ +import { SDKManifest } from "../manifest/ManifestTypes" +import { Effects, ExpectedExports } from "../types" +import { CreatedAction } from "./createAction" + +export function setupActions( + ...createdActions: CreatedAction[] +) { + const myActions = async (options: { effects: Effects }) => { + const actions: Record> = {} + for (const action of createdActions) { + actions[action.id] = action + } + return actions + } + const answer: { + actions: ExpectedExports.actions + actionsMetadata: ExpectedExports.actionsMetadata + } = { + actions(options: { effects: Effects }) { + return myActions(options) + }, + async actionsMetadata({ effects }: { effects: Effects }) { + return Promise.all( + createdActions.map((x) => x.ActionMetadata({ effects })), + ) + }, + } + return answer +} diff --git a/sdk/lib/backup/Backups.ts b/sdk/lib/backup/Backups.ts new file mode 100644 index 0000000000..20099c86de --- /dev/null +++ b/sdk/lib/backup/Backups.ts @@ -0,0 +1,181 @@ +import { SDKManifest } from "../manifest/ManifestTypes" +import * as T from "../types" + +export type BACKUP = "BACKUP" +export const DEFAULT_OPTIONS: T.BackupOptions = { + delete: true, + force: true, + ignoreExisting: false, + exclude: [], +} +export type BackupSet = { + srcPath: string + srcVolume: Volumes | BACKUP + dstPath: string + dstVolume: Volumes | BACKUP + options?: Partial +} +/** + * This utility simplifies the volume backup process. 
+ * ```ts + * export const { createBackup, restoreBackup } = Backups.volumes("main").build(); + * ``` + * + * Changing the options of the rsync, (ie exludes) use either + * ```ts + * Backups.volumes("main").set_options({exclude: ['bigdata/']}).volumes('excludedVolume').build() + * // or + * Backups.with_options({exclude: ['bigdata/']}).volumes('excludedVolume').build() + * ``` + * + * Using the more fine control, using the addSets for more control + * ```ts + * Backups.addSets({ + * srcVolume: 'main', srcPath:'smallData/', dstPath: 'main/smallData/', dstVolume: : Backups.BACKUP + * }, { + * srcVolume: 'main', srcPath:'bigData/', dstPath: 'main/bigData/', dstVolume: : Backups.BACKUP, options: {exclude:['bigData/excludeThis']}} + * ).build()q + * ``` + */ +export class Backups { + static BACKUP: BACKUP = "BACKUP" + + private constructor( + private options = DEFAULT_OPTIONS, + private backupSet = [] as BackupSet[], + ) {} + static volumes( + ...volumeNames: Array + ): Backups { + return new Backups().addSets( + ...volumeNames.map((srcVolume) => ({ + srcVolume, + srcPath: "./", + dstPath: `./${srcVolume}/`, + dstVolume: Backups.BACKUP, + })), + ) + } + static addSets( + ...options: BackupSet[] + ) { + return new Backups().addSets(...options) + } + static with_options( + options?: Partial, + ) { + return new Backups({ ...DEFAULT_OPTIONS, ...options }) + } + + static withOptions = Backups.with_options + setOptions(options?: Partial) { + this.options = { + ...this.options, + ...options, + } + return this + } + volumes(...volumeNames: Array) { + return this.addSets( + ...volumeNames.map((srcVolume) => ({ + srcVolume, + srcPath: "./", + dstPath: `./${srcVolume}/`, + dstVolume: Backups.BACKUP, + })), + ) + } + addSets(...options: BackupSet[]) { + options.forEach((x) => + this.backupSet.push({ ...x, options: { ...this.options, ...x.options } }), + ) + return this + } + build() { + const createBackup: T.ExpectedExports.createBackup = async ({ + effects, + }) => { + // const previousItems = ( + // await effects + // .readDir({ + // volumeId: Backups.BACKUP, + // path: ".", + // }) + // .catch(() => []) + // ).map((x) => `${x}`) + // const backupPaths = this.backupSet + // .filter((x) => x.dstVolume === Backups.BACKUP) + // .map((x) => x.dstPath) + // .map((x) => x.replace(/\.\/([^]*)\//, "$1")) + // const filteredItems = previousItems.filter( + // (x) => backupPaths.indexOf(x) === -1, + // ) + // for (const itemToRemove of filteredItems) { + // effects.console.error(`Trying to remove ${itemToRemove}`) + // await effects + // .removeDir({ + // volumeId: Backups.BACKUP, + // path: itemToRemove, + // }) + // .catch(() => + // effects.removeFile({ + // volumeId: Backups.BACKUP, + // path: itemToRemove, + // }), + // ) + // .catch(() => { + // console.warn(`Failed to remove ${itemToRemove} from backup volume`) + // }) + // } + for (const item of this.backupSet) { + // if (notEmptyPath(item.dstPath)) { + // await effects.createDir({ + // volumeId: item.dstVolume, + // path: item.dstPath, + // }) + // } + // await effects + // .runRsync({ + // ...item, + // options: { + // ...this.options, + // ...item.options, + // }, + // }) + // .wait() + } + return + } + const restoreBackup: T.ExpectedExports.restoreBackup = async ({ + effects, + }) => { + for (const item of this.backupSet) { + // if (notEmptyPath(item.srcPath)) { + // await new Promise((resolve, reject) => fs.mkdir(items.src)).createDir( + // { + // volumeId: item.srcVolume, + // path: item.srcPath, + // }, + // ) + // } + // await effects + // 
.runRsync({ + // options: { + // ...this.options, + // ...item.options, + // }, + // srcVolume: item.dstVolume, + // dstVolume: item.srcVolume, + // srcPath: item.dstPath, + // dstPath: item.srcPath, + // }) + // .wait() + } + return + } + return { createBackup, restoreBackup } + } +} +function notEmptyPath(file: string) { + return ["", ".", "./"].indexOf(file) === -1 +} diff --git a/sdk/lib/backup/index.ts b/sdk/lib/backup/index.ts new file mode 100644 index 0000000000..fe9cd8569b --- /dev/null +++ b/sdk/lib/backup/index.ts @@ -0,0 +1,3 @@ +import "./Backups" + +import "./setupBackups" diff --git a/sdk/lib/backup/setupBackups.ts b/sdk/lib/backup/setupBackups.ts new file mode 100644 index 0000000000..af2d084100 --- /dev/null +++ b/sdk/lib/backup/setupBackups.ts @@ -0,0 +1,43 @@ +import { Backups } from "./Backups" +import { SDKManifest } from "../manifest/ManifestTypes" +import { ExpectedExports } from "../types" +import { _ } from "../util" + +export type SetupBackupsParams = Array< + M["volumes"][number] | Backups +> + +export function setupBackups( + ...args: _> +) { + const backups = Array>() + const volumes = new Set() + for (const arg of args) { + if (arg instanceof Backups) { + backups.push(arg) + } else { + volumes.add(arg) + } + } + backups.push(Backups.volumes(...volumes)) + const answer: { + createBackup: ExpectedExports.createBackup + restoreBackup: ExpectedExports.restoreBackup + } = { + get createBackup() { + return (async (options) => { + for (const backup of backups) { + await backup.build().createBackup(options) + } + }) as ExpectedExports.createBackup + }, + get restoreBackup() { + return (async (options) => { + for (const backup of backups) { + await backup.build().restoreBackup(options) + } + }) as ExpectedExports.restoreBackup + }, + } + return answer +} diff --git a/sdk/lib/config/builder/config.ts b/sdk/lib/config/builder/config.ts new file mode 100644 index 0000000000..c30f37890b --- /dev/null +++ b/sdk/lib/config/builder/config.ts @@ -0,0 +1,137 @@ +import { ValueSpec } from "../configTypes" +import { Value } from "./value" +import { _ } from "../../util" +import { Effects } from "../../types" +import { Parser, object } from "ts-matches" + +export type LazyBuildOptions = { + effects: Effects +} +export type LazyBuild = ( + options: LazyBuildOptions, +) => Promise | ExpectedOut + +// prettier-ignore +export type ExtractConfigType | Config, any> | Config, never>> = + A extends Config | Config ? B : + A + +export type ConfigSpecOf, Store = never> = { + [K in keyof A]: Value +} + +export type MaybeLazyValues = LazyBuild | A +/** + * Configs are the specs that are used by the os configuration form for this service. + * Here is an example of a simple configuration + ```ts + const smallConfig = Config.of({ + test: Value.boolean({ + name: "Test", + description: "This is the description for the test", + warning: null, + default: false, + }), + }); + ``` + + The idea of a config is that now the form is going to ask for + Test: [ ] and the value is going to be checked as a boolean. + There are more complex values like selects, lists, and objects. See {@link Value} + + Also, there is the ability to get a validator/parser from this config spec. + ```ts + const matchSmallConfig = smallConfig.validator(); + type SmallConfig = typeof matchSmallConfig._TYPE; + ``` + + Here is an example of a more complex configuration which came from a configuration for a service + that works with bitcoin, like c-lightning. 
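`setupBackups` in `sdk/lib/backup/setupBackups.ts` above accepts plain volume ids and/or prebuilt `Backups` sets and returns the `createBackup`/`restoreBackup` exports StartOS expects. A short sketch, with `"main"` as a hypothetical volume id from the package manifest:

```ts
import { sdk } from "./sdk" // hypothetical module exporting the package's StartSdk instance

// Everything in the "main" volume is synced into the backup target by default.
export const { createBackup, restoreBackup } = sdk.setupBackups("main")
```

Finer-grained sets can be layered in by passing `Backups.addSets(...)` alongside the volume ids, as shown in the class doc comment above.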
+ ```ts + + export const hostname = Value.string({ + name: "Hostname", + default: null, + description: "Domain or IP address of bitcoin peer", + warning: null, + required: true, + masked: false, + placeholder: null, + pattern: + "(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))", + patternDescription: + "Must be either a domain name, or an IPv4 or IPv6 address. Do not include protocol scheme (eg 'http://') or port.", +}); +export const port = Value.number({ + name: "Port", + default: null, + description: "Port that peer is listening on for inbound p2p connections", + warning: null, + required: false, + range: "[0,65535]", + integral: true, + units: null, + placeholder: null, +}); +export const addNodesSpec = Config.of({ hostname: hostname, port: port }); + + ``` + */ +export class Config, Store = never> { + private constructor( + private readonly spec: { + [K in keyof Type]: Value | Value + }, + public validator: Parser, + ) {} + async build(options: LazyBuildOptions) { + const answer = {} as { + [K in keyof Type]: ValueSpec + } + for (const k in this.spec) { + answer[k] = await this.spec[k].build(options as any) + } + return answer + } + + static of< + Spec extends Record | Value>, + Store = never, + >(spec: Spec) { + const validatorObj = {} as { + [K in keyof Spec]: Parser + } + for (const key in spec) { + validatorObj[key] = spec[key].validator + } + const validator = object(validatorObj) + return new Config< + { + [K in keyof Spec]: Spec[K] extends + | Value + | Value + ? T + : never + }, + Store + >(spec, validator as any) + } + + /** + * Use this during the times that the input needs a more specific type. + * Used in types that the value/ variant/ list/ config is constructed somewhere else. 
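Since `validator` is exposed as a property on the built `Config` (and `setupConfig` later in this diff calls `validator.test(input)`), here is a small sketch of consuming it outside the config form; `smallConfig` refers to the earlier example and `someInput` is an assumed name:

```ts
// Hypothetical: reuse the spec's parser to check untyped input.
const matchSmallConfig = smallConfig.validator
type SmallConfig = typeof matchSmallConfig._TYPE // { test: boolean } per the example above
declare const someInput: unknown
matchSmallConfig.test(someInput) // boolean: does the input satisfy the spec?
```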
+ ```ts + const a = Config.text({ + name: "a", + required: false, + }) + + return Config.of()({ + myValue: a.withStore(), + }) + ``` + */ + withStore() { + return this as any as Config + } +} diff --git a/sdk/lib/config/builder/index.ts b/sdk/lib/config/builder/index.ts new file mode 100644 index 0000000000..6b6ddc7307 --- /dev/null +++ b/sdk/lib/config/builder/index.ts @@ -0,0 +1,6 @@ +import { Config } from "./config" +import { List } from "./list" +import { Value } from "./value" +import { Variants } from "./variants" + +export { Config, List, Value, Variants } diff --git a/sdk/lib/config/builder/list.ts b/sdk/lib/config/builder/list.ts new file mode 100644 index 0000000000..8a251069d5 --- /dev/null +++ b/sdk/lib/config/builder/list.ts @@ -0,0 +1,278 @@ +import { Config, LazyBuild } from "./config" +import { + ListValueSpecText, + Pattern, + RandomString, + UniqueBy, + ValueSpecList, + ValueSpecListOf, +} from "../configTypes" +import { Parser, arrayOf, number, string } from "ts-matches" +/** + * Used as a subtype of Value.list +```ts +export const authorizationList = List.string({ + "name": "Authorization", + "range": "[0,*)", + "default": [], + "description": "Username and hashed password for JSON-RPC connections. RPC clients connect using the usual http basic authentication.", + "warning": null +}, {"masked":false,"placeholder":null,"pattern":"^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$","patternDescription":"Each item must be of the form \":$\"."}); +export const auth = Value.list(authorizationList); +``` +*/ +export class List { + private constructor( + public build: LazyBuild, + public validator: Parser, + ) {} + static text( + a: { + name: string + description?: string | null + warning?: string | null + /** Default = [] */ + default?: string[] + minLength?: number | null + maxLength?: number | null + }, + aSpec: { + /** Default = false */ + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns: Pattern[] + /** Default = "text" */ + inputmode?: ListValueSpecText["inputmode"] + generate?: null | RandomString + }, + ) { + return new List(() => { + const spec = { + type: "text" as const, + placeholder: null, + minLength: null, + maxLength: null, + masked: false, + inputmode: "text" as const, + generate: null, + ...aSpec, + } + const built: ValueSpecListOf<"text"> = { + description: null, + warning: null, + default: [], + type: "list" as const, + minLength: null, + maxLength: null, + disabled: false, + ...a, + spec, + } + return built + }, arrayOf(string)) + } + static dynamicText( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + /** Default = [] */ + default?: string[] + minLength?: number | null + maxLength?: number | null + disabled?: false | string + generate?: null | RandomString + spec: { + /** Default = false */ + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns: Pattern[] + /** Default = "text" */ + inputmode?: ListValueSpecText["inputmode"] + } + } + >, + ) { + return new List(async (options) => { + const { spec: aSpec, ...a } = await getA(options) + const spec = { + type: "text" as const, + placeholder: null, + minLength: null, + maxLength: null, + masked: false, + inputmode: "text" as const, + generate: null, + ...aSpec, + } + const built: ValueSpecListOf<"text"> = { + description: null, + warning: null, + default: [], + type: "list" as const, + minLength: null, + maxLength: null, + 
disabled: false, + ...a, + spec, + } + return built + }, arrayOf(string)) + } + static number( + a: { + name: string + description?: string | null + warning?: string | null + /** Default = [] */ + default?: string[] + minLength?: number | null + maxLength?: number | null + }, + aSpec: { + integer: boolean + min?: number | null + max?: number | null + step?: number | null + units?: string | null + placeholder?: string | null + }, + ) { + return new List(() => { + const spec = { + type: "number" as const, + placeholder: null, + min: null, + max: null, + step: null, + units: null, + ...aSpec, + } + const built: ValueSpecListOf<"number"> = { + description: null, + warning: null, + minLength: null, + maxLength: null, + default: [], + type: "list" as const, + disabled: false, + ...a, + spec, + } + return built + }, arrayOf(number)) + } + static dynamicNumber( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + /** Default = [] */ + default?: string[] + minLength?: number | null + maxLength?: number | null + disabled?: false | string + spec: { + integer: boolean + min?: number | null + max?: number | null + step?: number | null + units?: string | null + placeholder?: string | null + } + } + >, + ) { + return new List(async (options) => { + const { spec: aSpec, ...a } = await getA(options) + const spec = { + type: "number" as const, + placeholder: null, + min: null, + max: null, + step: null, + units: null, + ...aSpec, + } + return { + description: null, + warning: null, + minLength: null, + maxLength: null, + default: [], + type: "list" as const, + disabled: false, + ...a, + spec, + } + }, arrayOf(number)) + } + static obj, Store>( + a: { + name: string + description?: string | null + warning?: string | null + /** Default [] */ + default?: [] + minLength?: number | null + maxLength?: number | null + }, + aSpec: { + spec: Config + displayAs?: null | string + uniqueBy?: null | UniqueBy + }, + ) { + return new List(async (options) => { + const { spec: previousSpecSpec, ...restSpec } = aSpec + const specSpec = await previousSpecSpec.build(options) + const spec = { + type: "object" as const, + displayAs: null, + uniqueBy: null, + ...restSpec, + spec: specSpec, + } + const value = { + spec, + default: [], + ...a, + } + return { + description: null, + warning: null, + minLength: null, + maxLength: null, + type: "list" as const, + disabled: false, + ...value, + } + }, arrayOf(aSpec.spec.validator)) + } + + /** + * Use this during the times that the input needs a more specific type. + * Used in types that the value/ variant/ list/ config is constructed somewhere else. 
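As a sketch of the object-list case, the following reuses `hostname` and `port` from the earlier Config example; the list name, length bounds, handlebars template, and `uniqueBy` key are assumptions for illustration:

```ts
// Hypothetical list of { hostname, port } peer entries, rendered by hostname.
const peerList = List.obj(
  { name: "Peers", minLength: 0, maxLength: 10 },
  {
    spec: Config.of({ hostname, port }),
    displayAs: "{{hostname}}",
    uniqueBy: "hostname",
  },
)
export const peers = Value.list(peerList)
```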
+ ```ts + const a = Config.text({ + name: "a", + required: false, + }) + + return Config.of()({ + myValue: a.withStore(), + }) + ``` + */ + withStore() { + return this as any as List + } +} diff --git a/sdk/lib/config/builder/value.ts b/sdk/lib/config/builder/value.ts new file mode 100644 index 0000000000..01673a6df9 --- /dev/null +++ b/sdk/lib/config/builder/value.ts @@ -0,0 +1,783 @@ +import { Config, LazyBuild, LazyBuildOptions } from "./config" +import { List } from "./list" +import { Variants } from "./variants" +import { + FilePath, + Pattern, + RandomString, + ValueSpec, + ValueSpecDatetime, + ValueSpecText, + ValueSpecTextarea, +} from "../configTypes" +import { DefaultString } from "../configTypes" +import { _ } from "../../util" +import { + Parser, + anyOf, + arrayOf, + boolean, + literal, + literals, + number, + object, + string, + unknown, +} from "ts-matches" +import { once } from "../../util/once" + +export type RequiredDefault = + | false + | { + default: A | null + } + +function requiredLikeToAbove, A>( + requiredLike: Input, +) { + // prettier-ignore + return { + required: (typeof requiredLike === 'object' ? true : requiredLike) as ( + Input extends { default: unknown} ? true: + Input extends true ? true : + false + ), + default:(typeof requiredLike === 'object' ? requiredLike.default : null) as ( + Input extends { default: infer Default } ? Default : + null + ) + }; +} +type AsRequired = MaybeRequiredType extends + | { default: unknown } + | never + ? Type + : Type | null | undefined + +type InputAsRequired = A extends + | { required: { default: any } | never } + | never + ? Type + : Type | null | undefined +const testForAsRequiredParser = once( + () => object({ required: object({ default: unknown }) }).test, +) +function asRequiredParser< + Type, + Input, + Return extends + | Parser + | Parser, +>(parser: Parser, input: Input): Return { + if (testForAsRequiredParser()(input)) return parser as any + return parser.optional() as any +} + +/** + * A value is going to be part of the form in the FE of the OS. + * Something like a boolean, a string, a number, etc. + * in the fe it will ask for the name of value, and use the rest of the value to determine how to render it. + * While writing with a value, you will start with `Value.` then let the IDE suggest the rest. + * for things like string, the options are going to be in {}. + * Keep an eye out for another config builder types as params. + * Note, usually this is going to be used in a `Config` {@link Config} builder. + ```ts +const username = Value.string({ + name: "Username", + default: "bitcoin", + description: "The username for connecting to Bitcoin over RPC.", + warning: null, + required: true, + masked: true, + placeholder: null, + pattern: "^[a-zA-Z0-9_]+$", + patternDescription: "Must be alphanumeric (can contain underscore).", +}); + ``` + */ +export class Value { + protected constructor( + public build: LazyBuild, + public validator: Parser, + ) {} + static toggle(a: { + name: string + description?: string | null + warning?: string | null + default: boolean + /** Immutable means it can only be configed at the first config then never again + Default is false */ + immutable?: boolean + }) { + return new Value( + async () => ({ + description: null, + warning: null, + type: "toggle" as const, + disabled: false, + immutable: a.immutable ?? 
false, + ...a, + }), + boolean, + ) + } + static dynamicToggle( + a: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + default: boolean + disabled?: false | string + } + >, + ) { + return new Value( + async (options) => ({ + description: null, + warning: null, + type: "toggle" as const, + disabled: false, + immutable: false, + ...(await a(options)), + }), + boolean, + ) + } + static text>(a: { + name: string + description?: string | null + warning?: string | null + required: Required + + /** Default = false */ + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + /** Default = 'text' */ + inputmode?: ValueSpecText["inputmode"] + /** Immutable means it can only be configured at the first config then never again + * Default is false + */ + immutable?: boolean + generate?: null | RandomString + }) { + return new Value, never>( + async () => ({ + type: "text" as const, + description: null, + warning: null, + masked: false, + placeholder: null, + minLength: null, + maxLength: null, + patterns: [], + inputmode: "text", + disabled: false, + immutable: a.immutable ?? false, + generate: a.generate ?? null, + ...a, + ...requiredLikeToAbove(a.required), + }), + asRequiredParser(string, a), + ) + } + static dynamicText( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + + /** Default = false */ + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + /** Default = 'text' */ + inputmode?: ValueSpecText["inputmode"] + disabled?: string | false + /** Immutable means it can only be configured at the first config then never again + * Default is false + */ + generate?: null | RandomString + } + >, + ) { + return new Value(async (options) => { + const a = await getA(options) + return { + type: "text" as const, + description: null, + warning: null, + masked: false, + placeholder: null, + minLength: null, + maxLength: null, + patterns: [], + inputmode: "text", + disabled: false, + immutable: false, + generate: a.generate ?? null, + ...a, + ...requiredLikeToAbove(a.required), + } + }, string.optional()) + } + static textarea(a: { + name: string + description?: string | null + warning?: string | null + required: boolean + minLength?: number | null + maxLength?: number | null + placeholder?: string | null + /** Immutable means it can only be configed at the first config then never again + Default is false */ + immutable?: boolean + }) { + return new Value(async () => { + const built: ValueSpecTextarea = { + description: null, + warning: null, + minLength: null, + maxLength: null, + placeholder: null, + type: "textarea" as const, + disabled: false, + immutable: a.immutable ?? 
false, + ...a, + } + return built + }, string) + } + static dynamicTextarea( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: boolean + minLength?: number | null + maxLength?: number | null + placeholder?: string | null + disabled?: false | string + } + >, + ) { + return new Value(async (options) => { + const a = await getA(options) + return { + description: null, + warning: null, + minLength: null, + maxLength: null, + placeholder: null, + type: "textarea" as const, + disabled: false, + immutable: false, + ...a, + } + }, string) + } + static number>(a: { + name: string + description?: string | null + warning?: string | null + required: Required + min?: number | null + max?: number | null + /** Default = '1' */ + step?: number | null + integer: boolean + units?: string | null + placeholder?: string | null + /** Immutable means it can only be configed at the first config then never again + Default is false */ + immutable?: boolean + }) { + return new Value, never>( + () => ({ + type: "number" as const, + description: null, + warning: null, + min: null, + max: null, + step: null, + units: null, + placeholder: null, + disabled: false, + immutable: a.immutable ?? false, + ...a, + ...requiredLikeToAbove(a.required), + }), + asRequiredParser(number, a), + ) + } + static dynamicNumber( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + min?: number | null + max?: number | null + /** Default = '1' */ + step?: number | null + integer: boolean + units?: string | null + placeholder?: string | null + disabled?: false | string + } + >, + ) { + return new Value(async (options) => { + const a = await getA(options) + return { + type: "number" as const, + description: null, + warning: null, + min: null, + max: null, + step: null, + units: null, + placeholder: null, + disabled: false, + immutable: false, + ...a, + ...requiredLikeToAbove(a.required), + } + }, number.optional()) + } + static color>(a: { + name: string + description?: string | null + warning?: string | null + required: Required + /** Immutable means it can only be configed at the first config then never again + Default is false */ + immutable?: boolean + }) { + return new Value, never>( + () => ({ + type: "color" as const, + description: null, + warning: null, + disabled: false, + immutable: a.immutable ?? 
false, + ...a, + ...requiredLikeToAbove(a.required), + }), + + asRequiredParser(string, a), + ) + } + + static dynamicColor( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + disabled?: false | string + } + >, + ) { + return new Value(async (options) => { + const a = await getA(options) + return { + type: "color" as const, + description: null, + warning: null, + disabled: false, + immutable: false, + ...a, + ...requiredLikeToAbove(a.required), + } + }, string.optional()) + } + static datetime>(a: { + name: string + description?: string | null + warning?: string | null + required: Required + /** Default = 'datetime-local' */ + inputmode?: ValueSpecDatetime["inputmode"] + min?: string | null + max?: string | null + /** Immutable means it can only be configed at the first config then never again + Default is false */ + immutable?: boolean + }) { + return new Value, never>( + () => ({ + type: "datetime" as const, + description: null, + warning: null, + inputmode: "datetime-local", + min: null, + max: null, + step: null, + disabled: false, + immutable: a.immutable ?? false, + ...a, + ...requiredLikeToAbove(a.required), + }), + asRequiredParser(string, a), + ) + } + static dynamicDatetime( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + /** Default = 'datetime-local' */ + inputmode?: ValueSpecDatetime["inputmode"] + min?: string | null + max?: string | null + disabled?: false | string + } + >, + ) { + return new Value(async (options) => { + const a = await getA(options) + return { + type: "datetime" as const, + description: null, + warning: null, + inputmode: "datetime-local", + min: null, + max: null, + disabled: false, + immutable: false, + ...a, + ...requiredLikeToAbove(a.required), + } + }, string.optional()) + } + static select< + Required extends RequiredDefault, + B extends Record, + >(a: { + name: string + description?: string | null + warning?: string | null + required: Required + values: B + /** + * Disabled: false means that there is nothing disabled, good to modify + * string means that this is the message displayed and the whole thing is disabled + * string[] means that the options are disabled + */ + disabled?: false | string | (string & keyof B)[] + /** Immutable means it can only be configed at the first config then never again + Default is false */ + immutable?: boolean + }) { + return new Value, never>( + () => ({ + description: null, + warning: null, + type: "select" as const, + disabled: false, + immutable: a.immutable ?? 
false, + ...a, + ...requiredLikeToAbove(a.required), + }), + asRequiredParser( + anyOf( + ...Object.keys(a.values).map((x: keyof B & string) => literal(x)), + ), + a, + ) as any, + ) + } + static dynamicSelect( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + required: RequiredDefault + values: Record + /** + * Disabled: false means that there is nothing disabled, good to modify + * string means that this is the message displayed and the whole thing is disabled + * string[] means that the options are disabled + */ + disabled?: false | string | string[] + } + >, + ) { + return new Value(async (options) => { + const a = await getA(options) + return { + description: null, + warning: null, + type: "select" as const, + disabled: false, + immutable: false, + ...a, + ...requiredLikeToAbove(a.required), + } + }, string.optional()) + } + static multiselect>(a: { + name: string + description?: string | null + warning?: string | null + default: string[] + values: Values + minLength?: number | null + maxLength?: number | null + /** Immutable means it can only be configed at the first config then never again + Default is false */ + immutable?: boolean + /** + * Disabled: false means that there is nothing disabled, good to modify + * string means that this is the message displayed and the whole thing is disabled + * string[] means that the options are disabled + */ + disabled?: false | string | (string & keyof Values)[] + }) { + return new Value<(keyof Values)[], never>( + () => ({ + type: "multiselect" as const, + minLength: null, + maxLength: null, + warning: null, + description: null, + disabled: false, + immutable: a.immutable ?? false, + ...a, + }), + arrayOf( + literals(...(Object.keys(a.values) as any as [keyof Values & string])), + ), + ) + } + static dynamicMultiselect( + getA: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + default: string[] + values: Record + minLength?: number | null + maxLength?: number | null + /** + * Disabled: false means that there is nothing disabled, good to modify + * string means that this is the message displayed and the whole thing is disabled + * string[] means that the options are disabled + */ + disabled?: false | string | string[] + } + >, + ) { + return new Value(async (options) => { + const a = await getA(options) + return { + type: "multiselect" as const, + minLength: null, + maxLength: null, + warning: null, + description: null, + disabled: false, + immutable: false, + ...a, + } + }, arrayOf(string)) + } + static object, Store>( + a: { + name: string + description?: string | null + warning?: string | null + }, + spec: Config, + ) { + return new Value(async (options) => { + const built = await spec.build(options as any) + return { + type: "object" as const, + description: null, + warning: null, + ...a, + spec: built, + } + }, spec.validator) + } + static file, Store>(a: { + name: string + description?: string | null + warning?: string | null + extensions: string[] + required: Required + }) { + const buildValue = { + type: "file" as const, + description: null, + warning: null, + ...a, + } + return new Value, Store>( + () => ({ + ...buildValue, + + ...requiredLikeToAbove(a.required), + }), + asRequiredParser(object({ filePath: string }), a), + ) + } + static dynamicFile( + a: LazyBuild< + Store, + { + name: string + description?: string | null + warning?: string | null + extensions: string[] + required: Required + } + >, + ) { + return new Value( + async 
(options) => ({ + type: "file" as const, + description: null, + warning: null, + ...(await a(options)), + }), + string.optional(), + ) + } + static union, Type, Store>( + a: { + name: string + description?: string | null + warning?: string | null + required: Required + /** Immutable means it can only be configed at the first config then never again + Default is false */ + immutable?: boolean + /** + * Disabled: false means that there is nothing disabled, good to modify + * string means that this is the message displayed and the whole thing is disabled + * string[] means that the options are disabled + */ + disabled?: false | string | string[] + }, + aVariants: Variants, + ) { + return new Value, Store>( + async (options) => ({ + type: "union" as const, + description: null, + warning: null, + disabled: false, + ...a, + variants: await aVariants.build(options as any), + ...requiredLikeToAbove(a.required), + immutable: a.immutable ?? false, + }), + asRequiredParser(aVariants.validator, a), + ) + } + static filteredUnion< + Required extends RequiredDefault, + Type extends Record, + Store = never, + >( + getDisabledFn: LazyBuild, + a: { + name: string + description?: string | null + warning?: string | null + required: Required + }, + aVariants: Variants | Variants, + ) { + return new Value, Store>( + async (options) => ({ + type: "union" as const, + description: null, + warning: null, + ...a, + variants: await aVariants.build(options as any), + ...requiredLikeToAbove(a.required), + disabled: (await getDisabledFn(options)) || false, + immutable: false, + }), + asRequiredParser(aVariants.validator, a), + ) + } + static dynamicUnion< + Required extends RequiredDefault, + Type extends Record, + Store = never, + >( + getA: LazyBuild< + Store, + { + disabled: string[] | false | string + name: string + description?: string | null + warning?: string | null + required: Required + } + >, + aVariants: Variants | Variants, + ) { + return new Value(async (options) => { + const newValues = await getA(options) + return { + type: "union" as const, + description: null, + warning: null, + ...newValues, + variants: await aVariants.build(options as any), + ...requiredLikeToAbove(newValues.required), + immutable: false, + } + }, aVariants.validator.optional()) + } + + static list(a: List) { + return new Value((options) => a.build(options), a.validator) + } + + /** + * Use this during the times that the input needs a more specific type. + * Used in types that the value/ variant/ list/ config is constructed somewhere else. + ```ts + const a = Config.text({ + name: "a", + required: false, + }) + + return Config.of()({ + myValue: a.withStore(), + }) + ``` + */ + withStore() { + return this as any as Value + } +} diff --git a/sdk/lib/config/builder/variants.ts b/sdk/lib/config/builder/variants.ts new file mode 100644 index 0000000000..1e7a2a3841 --- /dev/null +++ b/sdk/lib/config/builder/variants.ts @@ -0,0 +1,120 @@ +import { InputSpec, ValueSpecUnion } from "../configTypes" +import { LazyBuild, Config } from "./config" +import { Parser, anyOf, literals, object } from "ts-matches" + +/** + * Used in the the Value.select { @link './value.ts' } + * to indicate the type of select variants that are available. 
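Because `required` accepts either `false` or `{ default }` (see `RequiredDefault` and `requiredLikeToAbove` above), here is a short sketch of how that choice surfaces in the built spec and in the parsed type; the field names are made up:

```ts
// Hypothetical fields showing the two RequiredDefault shapes.
const apiKey = Value.text({
  name: "API Key",
  required: { default: "changeme" }, // builds to required: true, default: "changeme"; parsed type: string
  masked: true,
})
const nickname = Value.text({
  name: "Nickname",
  required: false, // builds to required: false, default: null; parsed type: string | null | undefined
})
```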
The key for the record passed in will be the + * key to the tag.id in the Value.select +```ts + +export const disabled = Config.of({}); +export const size = Value.number({ + name: "Max Chain Size", + default: 550, + description: "Limit of blockchain size on disk.", + warning: "Increasing this value will require re-syncing your node.", + required: true, + range: "[550,1000000)", + integral: true, + units: "MiB", + placeholder: null, +}); +export const automatic = Config.of({ size: size }); +export const size1 = Value.number({ + name: "Failsafe Chain Size", + default: 65536, + description: "Prune blockchain if size expands beyond this.", + warning: null, + required: true, + range: "[550,1000000)", + integral: true, + units: "MiB", + placeholder: null, +}); +export const manual = Config.of({ size: size1 }); +export const pruningSettingsVariants = Variants.of({ + disabled: { name: "Disabled", spec: disabled }, + automatic: { name: "Automatic", spec: automatic }, + manual: { name: "Manual", spec: manual }, +}); +export const pruning = Value.union( + { + name: "Pruning Settings", + description: + '- Disabled: Disable pruning\n- Automatic: Limit blockchain size on disk to a certain number of megabytes\n- Manual: Prune blockchain with the "pruneblockchain" RPC\n', + warning: null, + required: true, + default: "disabled", + }, + pruningSettingsVariants +); +``` + */ +export class Variants { + static text: any + private constructor( + public build: LazyBuild, + public validator: Parser, + ) {} + static of< + VariantValues extends { + [K in string]: { + name: string + spec: Config | Config + } + }, + Store = never, + >(a: VariantValues) { + const validator = anyOf( + ...Object.entries(a).map(([name, { spec }]) => + object({ + unionSelectKey: literals(name), + unionValueKey: spec.validator, + }), + ), + ) as Parser + + return new Variants< + { + [K in keyof VariantValues]: { + unionSelectKey: K + // prettier-ignore + unionValueKey: + VariantValues[K]["spec"] extends (Config | Config) ? B : + never + } + }[keyof VariantValues], + Store + >(async (options) => { + const variants = {} as { + [K in keyof VariantValues]: { name: string; spec: InputSpec } + } + for (const key in a) { + const value = a[key] + variants[key] = { + name: value.name, + spec: await value.spec.build(options as any), + } + } + return variants + }, validator) + } + /** + * Use this during the times that the input needs a more specific type. + * Used in types that the value/ variant/ list/ config is constructed somewhere else. 
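The validator built by `Variants.of` means a parsed union value is always a tagged pair keyed by `unionSelectKey` / `unionValueKey` (the constants defined in `configTypes`). A sketch using the `pruning` example above, with illustrative values:

```ts
// Shape of one possible parsed value for the pruning union.
const pruningValue = {
  unionSelectKey: "manual", // which variant was selected
  unionValueKey: { size: 65536 }, // that variant's own config values
}
// pruningSettingsVariants.validator accepts exactly this tagged shape.
```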
+ ```ts + const a = Config.text({ + name: "a", + required: false, + }) + + return Config.of()({ + myValue: a.withStore(), + }) + ``` + */ + withStore() { + return this as any as Variants + } +} diff --git a/sdk/lib/config/configConstants.ts b/sdk/lib/config/configConstants.ts new file mode 100644 index 0000000000..aa0e024c92 --- /dev/null +++ b/sdk/lib/config/configConstants.ts @@ -0,0 +1,81 @@ +import { SmtpValue } from "../types" +import { GetSystemSmtp } from "../util/GetSystemSmtp" +import { email } from "../util/patterns" +import { Config, ConfigSpecOf } from "./builder/config" +import { Value } from "./builder/value" +import { Variants } from "./builder/variants" + +/** + * Base SMTP settings, to be used by StartOS for system wide SMTP + */ +export const customSmtp = Config.of, never>({ + server: Value.text({ + name: "SMTP Server", + required: { + default: null, + }, + }), + port: Value.number({ + name: "Port", + required: { default: 587 }, + min: 1, + max: 65535, + integer: true, + }), + from: Value.text({ + name: "From Address", + required: { + default: null, + }, + placeholder: "test@example.com", + inputmode: "email", + patterns: [email], + }), + login: Value.text({ + name: "Login", + required: { + default: null, + }, + }), + password: Value.text({ + name: "Password", + required: false, + masked: true, + }), +}) + +/** + * For service config. Gives users 3 options for SMTP: (1) disabled, (2) use system SMTP settings, (3) use custom SMTP settings + */ +export const smtpConfig = Value.filteredUnion( + async ({ effects }) => { + const smtp = await new GetSystemSmtp(effects).once() + return smtp ? [] : ["system"] + }, + { + name: "SMTP", + description: "Optionally provide an SMTP server for sending emails", + required: { default: "disabled" }, + }, + Variants.of({ + disabled: { name: "Disabled", spec: Config.of({}) }, + system: { + name: "System Credentials", + spec: Config.of({ + customFrom: Value.text({ + name: "Custom From Address", + description: + "A custom from address for this service. 
If not provided, the system from address will be used.", + required: false, + placeholder: "test@example.com", + inputmode: "email", + patterns: [email], + }), + }), + }, + custom: { + name: "Custom Credentials", + spec: customSmtp, + }, + }), +) diff --git a/sdk/lib/config/configDependencies.ts b/sdk/lib/config/configDependencies.ts new file mode 100644 index 0000000000..be0475b0f7 --- /dev/null +++ b/sdk/lib/config/configDependencies.ts @@ -0,0 +1,26 @@ +import { SDKManifest } from "../manifest/ManifestTypes" +import { Dependency } from "../types" + +export type ConfigDependencies = { + exists(id: keyof T["dependencies"]): Dependency + running(id: keyof T["dependencies"], healthChecks: string[]): Dependency +} + +export const configDependenciesSet = < + T extends SDKManifest, +>(): ConfigDependencies => ({ + exists(id: keyof T["dependencies"]) { + return { + id, + kind: "exists", + } as Dependency + }, + + running(id: keyof T["dependencies"], healthChecks: string[]) { + return { + id, + kind: "running", + healthChecks, + } as Dependency + }, +}) diff --git a/sdk/lib/config/configTypes.ts b/sdk/lib/config/configTypes.ts new file mode 100644 index 0000000000..14e0e1d1df --- /dev/null +++ b/sdk/lib/config/configTypes.ts @@ -0,0 +1,249 @@ +export type InputSpec = Record +export type ValueType = + | "text" + | "textarea" + | "number" + | "color" + | "datetime" + | "toggle" + | "select" + | "multiselect" + | "list" + | "object" + | "file" + | "union" +export type ValueSpec = ValueSpecOf +/** core spec types. These types provide the metadata for performing validations */ +// prettier-ignore +export type ValueSpecOf = T extends "text" + ? ValueSpecText + : T extends "textarea" + ? ValueSpecTextarea + : T extends "number" + ? ValueSpecNumber + : T extends "color" + ? ValueSpecColor + : T extends "datetime" + ? ValueSpecDatetime + : T extends "toggle" + ? ValueSpecToggle + : T extends "select" + ? ValueSpecSelect + : T extends "multiselect" + ? ValueSpecMultiselect + : T extends "list" + ? ValueSpecList + : T extends "object" + ? ValueSpecObject + : T extends "file" + ? ValueSpecFile + : T extends "union" + ? 
ValueSpecUnion + : never + +export interface ValueSpecText extends ListValueSpecText, WithStandalone { + required: boolean + default: DefaultString | null + disabled: false | string + generate: null | RandomString + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} +export interface ValueSpecTextarea extends WithStandalone { + type: "textarea" + placeholder: string | null + minLength: number | null + maxLength: number | null + required: boolean + disabled: false | string + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} + +export type FilePath = { + filePath: string +} +export interface ValueSpecNumber extends ListValueSpecNumber, WithStandalone { + required: boolean + default: number | null + disabled: false | string + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} +export interface ValueSpecColor extends WithStandalone { + type: "color" + required: boolean + default: string | null + disabled: false | string + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} +export interface ValueSpecDatetime extends WithStandalone { + type: "datetime" + required: boolean + inputmode: "date" | "time" | "datetime-local" + min: string | null + max: string | null + default: string | null + disabled: false | string + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} +export interface ValueSpecSelect extends SelectBase, WithStandalone { + type: "select" + required: boolean + default: string | null + /** + * Disabled: false means that there is nothing disabled, good to modify + * string means that this is the message displayed and the whole thing is disabled + * string[] means that the options are disabled + */ + disabled: false | string | string[] + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} +export interface ValueSpecMultiselect extends SelectBase, WithStandalone { + type: "multiselect" + minLength: number | null + maxLength: number | null + /** + * Disabled: false means that there is nothing disabled, good to modify + * string means that this is the message displayed and the whole thing is disabled + * string[] means that the options are disabled + */ + disabled: false | string | string[] + default: string[] + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} +export interface ValueSpecToggle extends WithStandalone { + type: "toggle" + default: boolean | null + disabled: false | string + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} +export interface ValueSpecUnion extends WithStandalone { + type: "union" + variants: Record< + string, + { + name: string + spec: InputSpec + } + > + /** + * Disabled: false means that there is nothing disabled, good to modify + * string means that this is the message displayed and the whole thing is disabled + * string[] means that the options are disabled + */ + disabled: false | string | string[] + required: boolean + default: string | null + /** Immutable means it can only be configed at the first config then never again */ + immutable: boolean +} +export interface ValueSpecFile extends WithStandalone { + type: "file" + extensions: string[] + required: boolean +} +export interface ValueSpecObject 
extends WithStandalone { + type: "object" + spec: InputSpec +} +export interface WithStandalone { + name: string + description: string | null + warning: string | null +} +export interface SelectBase { + values: Record +} +export type ListValueSpecType = "text" | "number" | "object" +/** represents a spec for the values of a list */ +export type ListValueSpecOf = T extends "text" + ? ListValueSpecText + : T extends "number" + ? ListValueSpecNumber + : T extends "object" + ? ListValueSpecObject + : never +/** represents a spec for a list */ +export type ValueSpecList = ValueSpecListOf +export interface ValueSpecListOf + extends WithStandalone { + type: "list" + spec: ListValueSpecOf + minLength: number | null + maxLength: number | null + disabled: false | string + default: + | string[] + | number[] + | DefaultString[] + | Record[] + | readonly string[] + | readonly number[] + | readonly DefaultString[] + | readonly Record[] +} +export interface Pattern { + regex: string + description: string +} +export interface ListValueSpecText { + type: "text" + patterns: Pattern[] + minLength: number | null + maxLength: number | null + masked: boolean + + generate: null | RandomString + inputmode: "text" | "email" | "tel" | "url" + placeholder: string | null +} +export interface ListValueSpecNumber { + type: "number" + min: number | null + max: number | null + integer: boolean + step: number | null + units: string | null + placeholder: string | null +} +export interface ListValueSpecObject { + type: "object" + /** this is a mapped type of the config object at this level, replacing the object's values with specs on those values */ + spec: InputSpec + /** indicates whether duplicates can be permitted in the list */ + uniqueBy: UniqueBy + /** this should be a handlebars template which can make use of the entire config which corresponds to 'spec' */ + displayAs: string | null +} +export type UniqueBy = + | null + | string + | { + any: readonly UniqueBy[] | UniqueBy[] + } + | { + all: readonly UniqueBy[] | UniqueBy[] + } +export type DefaultString = string | RandomString +export type RandomString = { + charset: string + len: number +} +// sometimes the type checker needs just a little bit of help +export function isValueSpecListOf( + t: ValueSpec, + s: S, +): t is ValueSpecListOf & { spec: ListValueSpecOf } { + return "spec" in t && t.spec.type === s +} +export const unionSelectKey = "unionSelectKey" as const +export type UnionSelectKey = typeof unionSelectKey + +export const unionValueKey = "unionValueKey" as const +export type UnionValueKey = typeof unionValueKey diff --git a/sdk/lib/config/index.ts b/sdk/lib/config/index.ts new file mode 100644 index 0000000000..35c3e274eb --- /dev/null +++ b/sdk/lib/config/index.ts @@ -0,0 +1,3 @@ +export * as constants from "./configConstants" +export * as types from "./configTypes" +export * as builder from "./builder" diff --git a/sdk/lib/config/setupConfig.ts b/sdk/lib/config/setupConfig.ts new file mode 100644 index 0000000000..a49e545a87 --- /dev/null +++ b/sdk/lib/config/setupConfig.ts @@ -0,0 +1,89 @@ +import { Effects, ExpectedExports } from "../types" +import { SDKManifest } from "../manifest/ManifestTypes" +import * as D from "./configDependencies" +import { Config, ExtractConfigType } from "./builder/config" +import nullIfEmpty from "../util/nullIfEmpty" +import { InterfacesReceipt as InterfacesReceipt } from "../interfaces/setupInterfaces" + +declare const dependencyProof: unique symbol +export type DependenciesReceipt = void & { + [dependencyProof]: never +} 
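A brief sketch of the `isValueSpecListOf` type guard in use; the `placeholdersOf` helper is hypothetical:

```ts
// Hypothetical helper: pull the placeholder out of a text-list spec, if that is what we have.
function placeholdersOf(spec: ValueSpec): string | null {
  if (isValueSpecListOf(spec, "text")) {
    // Narrowed: spec.spec is ListValueSpecText here.
    return spec.spec.placeholder
  }
  return null
}
```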
+ +export type Save< + Store, + A extends + | Record + | Config, any> + | Config, never>, + Manifest extends SDKManifest, +> = (options: { + effects: Effects + input: ExtractConfigType & Record + dependencies: D.ConfigDependencies +}) => Promise<{ + dependenciesReceipt: DependenciesReceipt + interfacesReceipt: InterfacesReceipt + restart: boolean +}> +export type Read< + Manifest extends SDKManifest, + Store, + A extends + | Record + | Config, any> + | Config, never>, +> = (options: { + effects: Effects +}) => Promise & Record)> +/** + * We want to setup a config export with a get and set, this + * is going to be the default helper to setup config, because it will help + * enforce that we have a spec, write, and reading. + * @param options + * @returns + */ +export function setupConfig< + Store, + ConfigType extends + | Record + | Config + | Config, + Manifest extends SDKManifest, + Type extends Record = ExtractConfigType, +>( + spec: Config | Config, + write: Save, + read: Read, +) { + const validator = spec.validator + return { + setConfig: (async ({ effects, input }) => { + if (!validator.test(input)) { + await console.error(String(validator.errorMessage(input))) + return { error: "Set config type error for config" } + } + await effects.clearBindings() + await effects.clearServiceInterfaces() + const { restart } = await write({ + input: JSON.parse(JSON.stringify(input)), + effects, + dependencies: D.configDependenciesSet(), + }) + if (restart) { + await effects.restart() + } + }) as ExpectedExports.setConfig, + getConfig: (async ({ effects }) => { + const configValue = nullIfEmpty((await read({ effects })) || null) + return { + spec: await spec.build({ + effects, + }), + config: configValue, + } + }) as ExpectedExports.getConfig, + } +} + +export default setupConfig diff --git a/sdk/lib/dependencyConfig/DependencyConfig.ts b/sdk/lib/dependencyConfig/DependencyConfig.ts new file mode 100644 index 0000000000..d7ce435adb --- /dev/null +++ b/sdk/lib/dependencyConfig/DependencyConfig.ts @@ -0,0 +1,44 @@ +import { + DependencyConfig as DependencyConfigType, + DeepPartial, + Effects, +} from "../types" +import { deepEqual } from "../util/deepEqual" +import { deepMerge } from "../util/deepMerge" +import { SDKManifest } from "../manifest/ManifestTypes" + +export type Update = (options: { + remoteConfig: RemoteConfig + queryResults: QueryResults +}) => Promise + +export class DependencyConfig< + Manifest extends SDKManifest, + Store, + Input extends Record, + RemoteConfig extends Record, +> { + static defaultUpdate = async (options: { + queryResults: unknown + remoteConfig: unknown + }): Promise => { + return deepMerge({}, options.remoteConfig, options.queryResults || {}) + } + constructor( + readonly dependencyConfig: (options: { + effects: Effects + localConfig: Input + }) => Promise>, + readonly update: Update< + void | DeepPartial, + RemoteConfig + > = DependencyConfig.defaultUpdate as any, + ) {} + + async query(options: { effects: Effects; localConfig: unknown }) { + return this.dependencyConfig({ + localConfig: options.localConfig as Input, + effects: options.effects, + }) + } +} diff --git a/sdk/lib/dependencyConfig/index.ts b/sdk/lib/dependencyConfig/index.ts new file mode 100644 index 0000000000..3fe78b4f3d --- /dev/null +++ b/sdk/lib/dependencyConfig/index.ts @@ -0,0 +1,9 @@ +// prettier-ignore +export type ReadonlyDeep = + A extends Function ? A : + A extends {} ? 
{ readonly [K in keyof A]: ReadonlyDeep } : A; +export type MaybePromise = Promise | A +export type Message = string + +import "./DependencyConfig" +import "./setupDependencyConfig" diff --git a/sdk/lib/dependencyConfig/setupDependencyConfig.ts b/sdk/lib/dependencyConfig/setupDependencyConfig.ts new file mode 100644 index 0000000000..c67c46a44a --- /dev/null +++ b/sdk/lib/dependencyConfig/setupDependencyConfig.ts @@ -0,0 +1,22 @@ +import { Config } from "../config/builder/config" +import { SDKManifest } from "../manifest/ManifestTypes" +import { ExpectedExports } from "../types" +import { DependencyConfig } from "./DependencyConfig" + +export function setupDependencyConfig< + Store, + Input extends Record, + Manifest extends SDKManifest, +>( + _config: Config | Config, + autoConfigs: { + [key in keyof Manifest["dependencies"] & string]: DependencyConfig< + Manifest, + Store, + Input, + any + > | null + }, +): ExpectedExports.dependencyConfig { + return autoConfigs +} diff --git a/sdk/lib/emverLite/mod.ts b/sdk/lib/emverLite/mod.ts new file mode 100644 index 0000000000..f69d4f35de --- /dev/null +++ b/sdk/lib/emverLite/mod.ts @@ -0,0 +1,323 @@ +import * as matches from "ts-matches" + +const starSub = /((\d+\.)*\d+)\.\*/ +// prettier-ignore +export type ValidEmVer = `${number}${`.${number}` | ""}${`.${number}` | ""}${`-${string}` | ""}`; +// prettier-ignore +export type ValidEmVerRange = `${'>=' | '<='| '<' | '>' | ''}${'^' | '~' | ''}${number | '*'}${`.${number | '*'}` | ""}${`.${number | '*'}` | ""}${`-${string}` | ""}`; + +function incrementLastNumber(list: number[]) { + const newList = [...list] + newList[newList.length - 1]++ + return newList +} +/** + * Will take in a range, like `>1.2` or `<1.2.3.4` or `=1.2` or `1.*` + * and return a checker, that has the check function for checking that a version is in the valid + * @param range + * @returns + */ +export function rangeOf(range: string | Checker): Checker { + return Checker.parse(range) +} + +/** + * Used to create a checker that will `and` all the ranges passed in + * @param ranges + * @returns + */ +export function rangeAnd(...ranges: (string | Checker)[]): Checker { + if (ranges.length === 0) { + throw new Error("No ranges given") + } + const [firstCheck, ...rest] = ranges + return Checker.parse(firstCheck).and(...rest) +} + +/** + * Used to create a checker that will `or` all the ranges passed in + * @param ranges + * @returns + */ +export function rangeOr(...ranges: (string | Checker)[]): Checker { + if (ranges.length === 0) { + throw new Error("No ranges given") + } + const [firstCheck, ...rest] = ranges + return Checker.parse(firstCheck).or(...rest) +} + +/** + * This will negate the checker, so given a checker that checks for >= 1.0.0, it will check for < 1.0.0 + * @param range + * @returns + */ +export function notRange(range: string | Checker): Checker { + return rangeOf(range).not() +} + +/** + * EmVer is a set of versioning of any pattern like 1 or 1.2 or 1.2.3 or 1.2.3.4 or .. 
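A sketch of the range helpers defined above; the version strings are arbitrary examples:

```ts
// Hypothetical version gates built from the combinators above.
const supported = rangeAnd(">=0.21", "<26.0") // 0.21 <= v < 26.0
supported.check("25.0.1") // true
supported.check("26.0")   // false

rangeOr("=1.2.3", ">2.0").check("1.2.3") // true
notRange("1.2.*").check("1.3.0")         // true (outside the 1.2.x window)
```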
+ */ +export class EmVer { + /** + * Convert the range, should be 1.2.* or * into a emver + * Or an already made emver + * IsUnsafe + */ + static from(range: string | EmVer): EmVer { + if (range instanceof EmVer) { + return range + } + return EmVer.parse(range) + } + /** + * Convert the range, should be 1.2.* or * into a emver + * IsUnsafe + */ + static parse(rangeExtra: string): EmVer { + const [range, extra] = rangeExtra.split("-") + const values = range.split(".").map((x) => parseInt(x)) + for (const value of values) { + if (isNaN(value)) { + throw new Error(`Couldn't parse range: ${range}`) + } + } + return new EmVer(values, extra) + } + private constructor( + public readonly values: number[], + readonly extra: string | null, + ) {} + + /** + * Used when we need a new emver that has the last number incremented, used in the 1.* like things + */ + public withLastIncremented() { + return new EmVer(incrementLastNumber(this.values), null) + } + + public greaterThan(other: EmVer): boolean { + for (const i in this.values) { + if (other.values[i] == null) { + return true + } + if (this.values[i] > other.values[i]) { + return true + } + + if (this.values[i] < other.values[i]) { + return false + } + } + return false + } + + public equals(other: EmVer): boolean { + if (other.values.length !== this.values.length) { + return false + } + for (const i in this.values) { + if (this.values[i] !== other.values[i]) { + return false + } + } + return true + } + public greaterThanOrEqual(other: EmVer): boolean { + return this.greaterThan(other) || this.equals(other) + } + public lessThanOrEqual(other: EmVer): boolean { + return !this.greaterThan(other) + } + public lessThan(other: EmVer): boolean { + return !this.greaterThanOrEqual(other) + } + /** + * Return a enum string that describes (used for switching/iffs) + * to know comparison + * @param other + * @returns + */ + public compare(other: EmVer) { + if (this.equals(other)) { + return "equal" as const + } else if (this.greaterThan(other)) { + return "greater" as const + } else { + return "less" as const + } + } + /** + * Used when sorting emver's in a list using the sort method + * @param other + * @returns + */ + public compareForSort(other: EmVer) { + return matches + .matches(this.compare(other)) + .when("equal", () => 0 as const) + .when("greater", () => 1 as const) + .when("less", () => -1 as const) + .unwrap() + } + + toString() { + return `${this.values.join(".")}${this.extra ? `-${this.extra}` : ""}` as ValidEmVer + } +} + +/** + * A checker is a function that takes a version and returns true if the version matches the checker. 
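Since versions are compared numerically segment by segment, here is a small sketch of `EmVer` comparison and sorting; the version strings are illustrative:

```ts
// Numeric (not lexical) comparison: 1.10.0 is newer than 1.2.3.
const a = EmVer.parse("1.2.3")
const b = EmVer.from("1.10.0")
a.compare(b)     // "less"
b.greaterThan(a) // true

const sorted = ["2.0", "1.2.3", "1.10.0"]
  .map((v) => EmVer.parse(v))
  .sort((x, y) => x.compareForSort(y))
  .map((v) => v.toString()) // ["1.2.3", "1.10.0", "2.0"]
```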
+ * Used when we are doing range checking, like saying ">=1.0.0".check("1.2.3") will be true + */ +export class Checker { + /** + * Will take in a range, like `>1.2` or `<1.2.3.4` or `=1.2` or `1.*` + * and return a checker, that has the check function for checking that a version is in the valid + * @param range + * @returns + */ + static parse(range: string | Checker): Checker { + if (range instanceof Checker) { + return range + } + range = range.trim() + if (range.indexOf("||") !== -1) { + return rangeOr(...range.split("||").map((x) => Checker.parse(x))) + } + if (range.indexOf("&&") !== -1) { + return rangeAnd(...range.split("&&").map((x) => Checker.parse(x))) + } + if (range === "*") { + return new Checker((version) => { + EmVer.from(version) + return true + }, range) + } + if (range.startsWith("!!")) return Checker.parse(range.substring(2)) + if (range.startsWith("!")) { + const tempValue = Checker.parse(range.substring(1)) + return new Checker((x) => !tempValue.check(x), range) + } + const starSubMatches = starSub.exec(range) + if (starSubMatches != null) { + const emVarLower = EmVer.parse(starSubMatches[1]) + const emVarUpper = emVarLower.withLastIncremented() + + return new Checker((version) => { + const v = EmVer.from(version) + return ( + (v.greaterThan(emVarLower) || v.equals(emVarLower)) && + !v.greaterThan(emVarUpper) && + !v.equals(emVarUpper) + ) + }, range) + } + + switch (range.substring(0, 2)) { + case ">=": { + const emVar = EmVer.parse(range.substring(2)) + return new Checker((version) => { + const v = EmVer.from(version) + return v.greaterThanOrEqual(emVar) + }, range) + } + case "<=": { + const emVar = EmVer.parse(range.substring(2)) + return new Checker((version) => { + const v = EmVer.from(version) + return v.lessThanOrEqual(emVar) + }, range) + } + } + + switch (range.substring(0, 1)) { + case ">": { + const emVar = EmVer.parse(range.substring(1)) + return new Checker((version) => { + const v = EmVer.from(version) + return v.greaterThan(emVar) + }, range) + } + case "<": { + const emVar = EmVer.parse(range.substring(1)) + return new Checker((version) => { + const v = EmVer.from(version) + return v.lessThan(emVar) + }, range) + } + case "=": { + const emVar = EmVer.parse(range.substring(1)) + return new Checker((version) => { + const v = EmVer.from(version) + return v.equals(emVar) + }, `=${emVar.toString()}`) + } + } + throw new Error("Couldn't parse range: " + range) + } + constructor( + /** + * Check is the function that will be given a emver or unparsed emver and should give if it follows + * a pattern + */ + public readonly check: (value: ValidEmVer | EmVer) => boolean, + private readonly _range: string, + ) {} + + get range() { + return this._range as ValidEmVerRange + } + + /** + * Used when we want the `and` condition with another checker + */ + public and(...others: (Checker | string)[]): Checker { + const othersCheck = others.map(Checker.parse) + return new Checker( + (value) => { + if (!this.check(value)) { + return false + } + for (const other of othersCheck) { + if (!other.check(value)) { + return false + } + } + return true + }, + othersCheck.map((x) => x._range).join(" && "), + ) + } + + /** + * Used when we want the `or` condition with another checker + */ + public or(...others: (Checker | string)[]): Checker { + const othersCheck = others.map(Checker.parse) + return new Checker( + (value) => { + if (this.check(value)) { + return true + } + for (const other of othersCheck) { + if (other.check(value)) { + return true + } + } + return false + }, + 
othersCheck.map((x) => x._range).join(" || "), + ) + } + + /** + * A useful example is making sure we don't match an exact version, like !=1.2.3 + * @returns + */ + public not(): Checker { + let newRange = `!${this._range}` + return Checker.parse(newRange) + } +} diff --git a/sdk/lib/health/HealthCheck.ts b/sdk/lib/health/HealthCheck.ts new file mode 100644 index 0000000000..e1fbee97b6 --- /dev/null +++ b/sdk/lib/health/HealthCheck.ts @@ -0,0 +1,74 @@ +import { InterfaceReceipt } from "../interfaces/interfaceReceipt" +import { Daemon, Effects } from "../types" +import { CheckResult } from "./checkFns/CheckResult" +import { HealthReceipt } from "./HealthReceipt" +import { Trigger } from "../trigger" +import { TriggerInput } from "../trigger/TriggerInput" +import { defaultTrigger } from "../trigger/defaultTrigger" +import { once } from "../util/once" +import { Overlay } from "../util/Overlay" + +export function healthCheck(o: { + effects: Effects + name: string + imageId: string + trigger?: Trigger + fn(overlay: Overlay): Promise | CheckResult + onFirstSuccess?: () => unknown | Promise +}) { + new Promise(async () => { + const overlay = await Overlay.of(o.effects, o.imageId) + try { + let currentValue: TriggerInput = { + hadSuccess: false, + } + const getCurrentValue = () => currentValue + const trigger = (o.trigger ?? defaultTrigger)(getCurrentValue) + const triggerFirstSuccess = once(() => + Promise.resolve( + "onFirstSuccess" in o && o.onFirstSuccess + ? o.onFirstSuccess() + : undefined, + ), + ) + for ( + let res = await trigger.next(); + !res.done; + res = await trigger.next() + ) { + try { + const { status, message } = await o.fn(overlay) + await o.effects.setHealth({ + name: o.name, + id: o.name, + result: status, + message: message || "", + }) + currentValue.hadSuccess = true + currentValue.lastResult = "success" + await triggerFirstSuccess().catch((err) => { + console.error(err) + }) + } catch (e) { + await o.effects.setHealth({ + name: o.name, + id: o.name, + result: "failure", + message: asMessage(e) || "", + }) + currentValue.lastResult = "failure" + } + } + } finally { + await overlay.destroy() + } + }) + return {} as HealthReceipt +} +function asMessage(e: unknown) { + if (typeof e === "object" && e != null && "message" in e) + return String(e.message) + const value = String(e) + if (value.length == null) return null + return value +} diff --git a/sdk/lib/health/HealthReceipt.ts b/sdk/lib/health/HealthReceipt.ts new file mode 100644 index 0000000000..a0995ba0ae --- /dev/null +++ b/sdk/lib/health/HealthReceipt.ts @@ -0,0 +1,4 @@ +declare const HealthProof: unique symbol +export type HealthReceipt = { + [HealthProof]: never +} diff --git a/sdk/lib/health/checkFns/CheckResult.ts b/sdk/lib/health/checkFns/CheckResult.ts new file mode 100644 index 0000000000..8b46ee5c43 --- /dev/null +++ b/sdk/lib/health/checkFns/CheckResult.ts @@ -0,0 +1,6 @@ +import { HealthStatus } from "../../types" + +export type CheckResult = { + status: HealthStatus + message: string | null +} diff --git a/sdk/lib/health/checkFns/checkPortListening.ts b/sdk/lib/health/checkFns/checkPortListening.ts new file mode 100644 index 0000000000..4cc0738da5 --- /dev/null +++ b/sdk/lib/health/checkFns/checkPortListening.ts @@ -0,0 +1,67 @@ +import { Effects } from "../../types" +import { stringFromStdErrOut } from "../../util/stringFromStdErrOut" +import { CheckResult } from "./CheckResult" + +import { promisify } from "node:util" +import * as CP from "node:child_process" + +const cpExec = promisify(CP.exec) +const 
cpExecFile = promisify(CP.execFile) +export function containsAddress(x: string, port: number) { + const readPorts = x + .split("\n") + .filter(Boolean) + .splice(1) + .map((x) => x.split(" ").filter(Boolean)[1]?.split(":")?.[1]) + .filter(Boolean) + .map((x) => Number.parseInt(x, 16)) + .filter(Number.isFinite) + return readPorts.indexOf(port) >= 0 +} + +/** + * This is used to check if a port is listening on the system. + * Used during the health check fn or the check main fn. + */ +export async function checkPortListening( + effects: Effects, + port: number, + options: { + errorMessage: string + successMessage: string + timeoutMessage?: string + timeout?: number + }, +): Promise { + return Promise.race([ + Promise.resolve().then(async () => { + const hasAddress = + containsAddress( + await cpExec(`cat /proc/net/tcp`, {}).then(stringFromStdErrOut), + port, + ) || + containsAddress( + await cpExec("cat /proc/net/udp", {}).then(stringFromStdErrOut), + port, + ) + if (hasAddress) { + return { status: "success", message: options.successMessage } + } + return { + status: "failure", + message: options.errorMessage, + } + }), + new Promise((resolve) => { + setTimeout( + () => + resolve({ + status: "failure", + message: + options.timeoutMessage || `Timeout trying to check port ${port}`, + }), + options.timeout ?? 1_000, + ) + }), + ]) +} diff --git a/sdk/lib/health/checkFns/checkWebUrl.ts b/sdk/lib/health/checkFns/checkWebUrl.ts new file mode 100644 index 0000000000..8f61ae2efa --- /dev/null +++ b/sdk/lib/health/checkFns/checkWebUrl.ts @@ -0,0 +1,35 @@ +import { Effects } from "../../types" +import { CheckResult } from "./CheckResult" +import { timeoutPromise } from "./index" +import "isomorphic-fetch" + +/** + * This is a helper function to check if a web url is reachable. 
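To make the hex parsing in `containsAddress` concrete, here is a sketch against a fabricated `/proc/net/tcp` excerpt (0x1F90 is 8080):

```ts
// Fabricated /proc/net/tcp excerpt: header row plus one socket on 127.0.0.1:8080.
const sample = [
  "  sl  local_address rem_address   st tx_queue rx_queue",
  "   0: 0100007F:1F90 00000000:0000 0A 00000000:00000000",
].join("\n")
containsAddress(sample, 8080) // true  ("1F90" parsed as hex)
containsAddress(sample, 80)   // false
```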
+ * @param url + * @param createSuccess + * @returns + */ +export const checkWebUrl = async ( + effects: Effects, + url: string, + { + timeout = 1000, + successMessage = `Reached ${url}`, + errorMessage = `Error while fetching URL: ${url}`, + } = {}, +): Promise => { + return Promise.race([fetch(url), timeoutPromise(timeout)]) + .then( + (x) => + ({ + status: "success", + message: successMessage, + }) as const, + ) + .catch((e) => { + console.warn(`Error while fetching URL: ${url}`) + console.error(JSON.stringify(e)) + console.error(e.toString()) + return { status: "failure" as const, message: errorMessage } + }) +} diff --git a/sdk/lib/health/checkFns/index.ts b/sdk/lib/health/checkFns/index.ts new file mode 100644 index 0000000000..d33d5ad0df --- /dev/null +++ b/sdk/lib/health/checkFns/index.ts @@ -0,0 +1,11 @@ +import { runHealthScript } from "./runHealthScript" +export { checkPortListening } from "./checkPortListening" +export { CheckResult } from "./CheckResult" +export { checkWebUrl } from "./checkWebUrl" + +export function timeoutPromise(ms: number, { message = "Timed out" } = {}) { + return new Promise((resolve, reject) => + setTimeout(() => reject(new Error(message)), ms), + ) +} +export { runHealthScript } diff --git a/sdk/lib/health/checkFns/runHealthScript.ts b/sdk/lib/health/checkFns/runHealthScript.ts new file mode 100644 index 0000000000..f0f41ee91a --- /dev/null +++ b/sdk/lib/health/checkFns/runHealthScript.ts @@ -0,0 +1,38 @@ +import { Effects } from "../../types" +import { Overlay } from "../../util/Overlay" +import { stringFromStdErrOut } from "../../util/stringFromStdErrOut" +import { CheckResult } from "./CheckResult" +import { timeoutPromise } from "./index" + +/** + * Running a health script, is used when we want to have a simple + * script in bash or something like that. 
The script's stdout is returned + * as the success message; a failure or timeout is reported as an error. + * @param param0 + * @returns + */ +export const runHealthScript = async ( + effects: Effects, + runCommand: string[], + overlay: Overlay, + { + timeout = 30000, + errorMessage = `Error while running command: ${runCommand}`, + message = (res: string) => + `Ran script ${runCommand} with result: ${res}`, + } = {}, +): Promise => { + const res = await Promise.race([ + overlay.exec(runCommand), + timeoutPromise(timeout), + ]).catch((e) => { + console.warn(errorMessage) + console.warn(JSON.stringify(e)) + console.warn(e.toString()) + throw { status: "failure", message: errorMessage } as CheckResult + }) + return { + status: "success", + message: message(res.stdout.toString()), + } as CheckResult +} diff --git a/sdk/lib/health/index.ts b/sdk/lib/health/index.ts new file mode 100644 index 0000000000..b6e1d26f55 --- /dev/null +++ b/sdk/lib/health/index.ts @@ -0,0 +1,3 @@ +import "./checkFns" + +import "./HealthReceipt" diff --git a/sdk/lib/index.browser.ts b/sdk/lib/index.browser.ts new file mode 100644 index 0000000000..ff422a7f23 --- /dev/null +++ b/sdk/lib/index.browser.ts @@ -0,0 +1,14 @@ +export { EmVer } from "./emverLite/mod" +export { setupManifest } from "./manifest/setupManifest" +export { setupExposeStore } from "./store/setupExposeStore" +export * as config from "./config" +export * as CB from "./config/builder" +export * as CT from "./config/configTypes" +export * as dependencyConfig from "./dependencyConfig" +export * as manifest from "./manifest" +export * as types from "./types" +export * as T from "./types" +export * as yaml from "yaml" +export * as matches from "ts-matches" + +export * as util from "./util/index.browser" diff --git a/sdk/lib/index.ts b/sdk/lib/index.ts new file mode 100644 index 0000000000..2fef6a4fa4 --- /dev/null +++ b/sdk/lib/index.ts @@ -0,0 +1,29 @@ +export { Daemons } from "./mainFn/Daemons" +export { EmVer } from "./emverLite/mod" +export { Overlay } from "./util/Overlay" +export { StartSdk } from "./StartSdk" +export { setupManifest } from "./manifest/setupManifest" +export { FileHelper } from "./util/fileHelper" +export { setupExposeStore } from "./store/setupExposeStore" +export { pathBuilder } from "./store/PathBuilder" +export * as actions from "./actions" +export * as backup from "./backup" +export * as config from "./config" +export * as CB from "./config/builder" +export * as CT from "./config/configTypes" +export * as dependencyConfig from "./dependencyConfig" +export * as daemons from "./mainFn/Daemons" +export * as health from "./health" +export * as healthFns from "./health/checkFns" +export * as inits from "./inits" +export * as mainFn from "./mainFn" +export * as manifest from "./manifest" +export * as toml from "@iarna/toml" +export * as types from "./types" +export * as T from "./types" +export * as yaml from "yaml" +export * as startSdk from "./StartSdk" +export * as utils from "./util" +export * as matches from "ts-matches" +export * as YAML from "yaml" +export * as TOML from "@iarna/toml" diff --git a/sdk/lib/inits/index.ts b/sdk/lib/inits/index.ts new file mode 100644 index 0000000000..0a326a61e9 --- /dev/null +++ b/sdk/lib/inits/index.ts @@ -0,0 +1,3 @@ +import "./setupInit" +import "./setupUninstall" +import "./setupInstall" diff --git a/sdk/lib/inits/migrations/Migration.ts b/sdk/lib/inits/migrations/Migration.ts new file mode 100644 index 0000000000..119271aea9 --- /dev/null +++ b/sdk/lib/inits/migrations/Migration.ts @@ -0,0 
+1,35 @@ +import { ManifestVersion, SDKManifest } from "../../manifest/ManifestTypes" +import { Effects } from "../../types" + +export class Migration< + Manifest extends SDKManifest, + Store, + Version extends ManifestVersion, +> { + constructor( + readonly options: { + version: Version + up: (opts: { effects: Effects }) => Promise + down: (opts: { effects: Effects }) => Promise + }, + ) {} + static of< + Manifest extends SDKManifest, + Store, + Version extends ManifestVersion, + >(options: { + version: Version + up: (opts: { effects: Effects }) => Promise + down: (opts: { effects: Effects }) => Promise + }) { + return new Migration(options) + } + + async up(opts: { effects: Effects }) { + return this.options.up(opts) + } + + async down(opts: { effects: Effects }) { + return this.options.down(opts) + } +} diff --git a/sdk/lib/inits/migrations/setupMigrations.ts b/sdk/lib/inits/migrations/setupMigrations.ts new file mode 100644 index 0000000000..288b2b9d7b --- /dev/null +++ b/sdk/lib/inits/migrations/setupMigrations.ts @@ -0,0 +1,73 @@ +import { EmVer } from "../../emverLite/mod" +import { SDKManifest } from "../../manifest/ManifestTypes" +import { ExpectedExports } from "../../types" +import { once } from "../../util/once" +import { Migration } from "./Migration" + +export class Migrations { + private constructor( + readonly manifest: SDKManifest, + readonly migrations: Array>, + ) {} + private sortedMigrations = once(() => { + const migrationsAsVersions = ( + this.migrations as Array> + ).map((x) => [EmVer.parse(x.options.version), x] as const) + migrationsAsVersions.sort((a, b) => a[0].compareForSort(b[0])) + return migrationsAsVersions + }) + private currentVersion = once(() => EmVer.parse(this.manifest.version)) + static of< + Manifest extends SDKManifest, + Store, + Migrations extends Array>, + >(manifest: SDKManifest, ...migrations: EnsureUniqueId) { + return new Migrations( + manifest, + migrations as Array>, + ) + } + async init({ + effects, + previousVersion, + }: Parameters[0]) { + if (!!previousVersion) { + const previousVersionEmVer = EmVer.parse(previousVersion) + for (const [_, migration] of this.sortedMigrations() + .filter((x) => x[0].greaterThan(previousVersionEmVer)) + .filter((x) => x[0].lessThanOrEqual(this.currentVersion()))) { + await migration.up({ effects }) + } + } + } + async uninit({ + effects, + nextVersion, + }: Parameters[0]) { + if (!!nextVersion) { + const nextVersionEmVer = EmVer.parse(nextVersion) + const reversed = [...this.sortedMigrations()].reverse() + for (const [_, migration] of reversed + .filter((x) => x[0].greaterThan(nextVersionEmVer)) + .filter((x) => x[0].lessThanOrEqual(this.currentVersion()))) { + await migration.down({ effects }) + } + } + } +} + +export function setupMigrations< + Manifest extends SDKManifest, + Store, + Migrations extends Array>, +>(manifest: SDKManifest, ...migrations: EnsureUniqueId) { + return Migrations.of(manifest, ...migrations) +} + +// prettier-ignore +export type EnsureUniqueId = + B extends [] ? A : + B extends [Migration, ...infer Rest] ? ( + id extends ids ? 
"One of the ids are not unique"[] : + EnsureUniqueId + ) : "There exists a migration that is not a Migration"[] diff --git a/sdk/lib/inits/setupInit.ts b/sdk/lib/inits/setupInit.ts new file mode 100644 index 0000000000..03a7085c56 --- /dev/null +++ b/sdk/lib/inits/setupInit.ts @@ -0,0 +1,40 @@ +import { DependenciesReceipt } from "../config/setupConfig" +import { SetInterfaces } from "../interfaces/setupInterfaces" +import { SDKManifest } from "../manifest/ManifestTypes" +import { ExposedStorePaths } from "../store/setupExposeStore" +import { Effects, ExpectedExports } from "../types" +import { Migrations } from "./migrations/setupMigrations" +import { Install } from "./setupInstall" +import { Uninstall } from "./setupUninstall" + +export function setupInit( + migrations: Migrations, + install: Install, + uninstall: Uninstall, + setInterfaces: SetInterfaces, + setDependencies: (options: { + effects: Effects + input: any + }) => Promise, + exposedStore: ExposedStorePaths, +): { + init: ExpectedExports.init + uninit: ExpectedExports.uninit +} { + return { + init: async (opts) => { + await migrations.init(opts) + await install.init(opts) + await setInterfaces({ + ...opts, + input: null, + }) + await opts.effects.exposeForDependents({ paths: exposedStore }) + await setDependencies({ effects: opts.effects, input: null }) + }, + uninit: async (opts) => { + await migrations.uninit(opts) + await uninstall.uninit(opts) + }, + } +} diff --git a/sdk/lib/inits/setupInstall.ts b/sdk/lib/inits/setupInstall.ts new file mode 100644 index 0000000000..3990be0ca0 --- /dev/null +++ b/sdk/lib/inits/setupInstall.ts @@ -0,0 +1,30 @@ +import { SDKManifest } from "../manifest/ManifestTypes" +import { Effects, ExpectedExports } from "../types" + +export type InstallFn = (opts: { + effects: Effects +}) => Promise +export class Install { + private constructor(readonly fn: InstallFn) {} + static of( + fn: InstallFn, + ) { + return new Install(fn) + } + + async init({ + effects, + previousVersion, + }: Parameters[0]) { + if (!previousVersion) + await this.fn({ + effects, + }) + } +} + +export function setupInstall( + fn: InstallFn, +) { + return Install.of(fn) +} diff --git a/sdk/lib/inits/setupUninstall.ts b/sdk/lib/inits/setupUninstall.ts new file mode 100644 index 0000000000..812848c8f7 --- /dev/null +++ b/sdk/lib/inits/setupUninstall.ts @@ -0,0 +1,30 @@ +import { SDKManifest } from "../manifest/ManifestTypes" +import { Effects, ExpectedExports } from "../types" + +export type UninstallFn = (opts: { + effects: Effects +}) => Promise +export class Uninstall { + private constructor(readonly fn: UninstallFn) {} + static of( + fn: UninstallFn, + ) { + return new Uninstall(fn) + } + + async uninit({ + effects, + nextVersion, + }: Parameters[0]) { + if (!nextVersion) + await this.fn({ + effects, + }) + } +} + +export function setupUninstall( + fn: UninstallFn, +) { + return Uninstall.of(fn) +} diff --git a/sdk/lib/interfaces/AddressReceipt.ts b/sdk/lib/interfaces/AddressReceipt.ts new file mode 100644 index 0000000000..d57d856858 --- /dev/null +++ b/sdk/lib/interfaces/AddressReceipt.ts @@ -0,0 +1,4 @@ +declare const AddressProof: unique symbol +export type AddressReceipt = { + [AddressProof]: never +} diff --git a/sdk/lib/interfaces/Host.ts b/sdk/lib/interfaces/Host.ts new file mode 100644 index 0000000000..de54f7dc01 --- /dev/null +++ b/sdk/lib/interfaces/Host.ts @@ -0,0 +1,193 @@ +import { object, string } from "ts-matches" +import { Effects } from "../types" +import { Origin } from "./Origin" +import { AddSslOptions } 
from "../../../core/startos/bindings/AddSslOptions" +import { Security } from "../../../core/startos/bindings/Security" +import { BindOptions } from "../../../core/startos/bindings/BindOptions" +import { AlpnInfo } from "../../../core/startos/bindings/AlpnInfo" + +export { AddSslOptions, Security, BindOptions } + +const knownProtocols = { + http: { + secure: null, + defaultPort: 80, + withSsl: "https", + alpn: { specified: ["http/1.1"] } as AlpnInfo, + }, + https: { + secure: { ssl: true }, + defaultPort: 443, + }, + ws: { + secure: null, + defaultPort: 80, + withSsl: "wss", + alpn: { specified: ["http/1.1"] } as AlpnInfo, + }, + wss: { + secure: { ssl: true }, + defaultPort: 443, + }, + ssh: { + secure: { ssl: false }, + defaultPort: 22, + }, + bitcoin: { + secure: { ssl: false }, + defaultPort: 8333, + }, + lightning: { + secure: { ssl: true }, + defaultPort: 9735, + }, + grpc: { + secure: { ssl: true }, + defaultPort: 50051, + }, + dns: { + secure: { ssl: false }, + defaultPort: 53, + }, +} as const + +export type Scheme = string | null + +type KnownProtocols = typeof knownProtocols +type ProtocolsWithSslVariants = { + [K in keyof KnownProtocols]: KnownProtocols[K] extends { + withSsl: string + } + ? K + : never +}[keyof KnownProtocols] +type NotProtocolsWithSslVariants = Exclude< + keyof KnownProtocols, + ProtocolsWithSslVariants +> + +type BindOptionsByKnownProtocol = + | { + protocol: ProtocolsWithSslVariants + preferredExternalPort?: number + scheme?: Scheme + addSsl?: Partial + } + | { + protocol: NotProtocolsWithSslVariants + preferredExternalPort?: number + scheme?: Scheme + addSsl?: AddSslOptions + } +type BindOptionsByProtocol = BindOptionsByKnownProtocol | BindOptions + +export type HostKind = "static" | "single" | "multi" + +const hasStringProtocol = object({ + protocol: string, +}).test + +export class Host { + constructor( + readonly options: { + effects: Effects + kind: HostKind + id: string + }, + ) {} + + async bindPort( + internalPort: number, + options: BindOptionsByProtocol, + ): Promise> { + if (hasStringProtocol(options)) { + return await this.bindPortForKnown(options, internalPort) + } else { + return await this.bindPortForUnknown(internalPort, options) + } + } + + private async bindPortForUnknown( + internalPort: number, + options: { + scheme: Scheme + preferredExternalPort: number + addSsl: AddSslOptions | null + secure: { ssl: boolean } | null + }, + ) { + await this.options.effects.bind({ + kind: this.options.kind, + id: this.options.id, + internalPort: internalPort, + ...options, + }) + + return new Origin(this, options) + } + + private async bindPortForKnown( + options: BindOptionsByKnownProtocol, + internalPort: number, + ) { + const scheme = + options.scheme === undefined ? options.protocol : options.scheme + const protoInfo = knownProtocols[options.protocol] + const preferredExternalPort = + options.preferredExternalPort || + knownProtocols[options.protocol].defaultPort + const addSsl = this.getAddSsl(options, protoInfo) + + const secure: Security | null = !protoInfo.secure ? 
null : { ssl: false } + + const newOptions = { + scheme, + preferredExternalPort, + addSsl, + secure, + } + + await this.options.effects.bind({ + kind: this.options.kind, + id: this.options.id, + internalPort, + ...newOptions, + }) + + return new Origin(this, newOptions) + } + + private getAddSsl( + options: BindOptionsByKnownProtocol, + protoInfo: KnownProtocols[keyof KnownProtocols], + ): AddSslOptions | null { + if ("noAddSsl" in options && options.noAddSsl) return null + if ("withSsl" in protoInfo && protoInfo.withSsl) + return { + // addXForwardedHeaders: null, + preferredExternalPort: knownProtocols[protoInfo.withSsl].defaultPort, + scheme: protoInfo.withSsl, + alpn: protoInfo.alpn, + ...("addSsl" in options ? options.addSsl : null), + } + return null + } +} + +export class StaticHost extends Host { + constructor(options: { effects: Effects; id: string }) { + super({ ...options, kind: "static" }) + } +} + +export class SingleHost extends Host { + constructor(options: { effects: Effects; id: string }) { + super({ ...options, kind: "single" }) + } +} + +export class MultiHost extends Host { + constructor(options: { effects: Effects; id: string }) { + super({ ...options, kind: "multi" }) + } +} diff --git a/sdk/lib/interfaces/Origin.ts b/sdk/lib/interfaces/Origin.ts new file mode 100644 index 0000000000..aaadbea507 --- /dev/null +++ b/sdk/lib/interfaces/Origin.ts @@ -0,0 +1,97 @@ +import { AddressInfo } from "../types" +import { AddressReceipt } from "./AddressReceipt" +import { Host, BindOptions, Scheme } from "./Host" +import { ServiceInterfaceBuilder } from "./ServiceInterfaceBuilder" + +export class Origin { + constructor( + readonly host: T, + readonly options: BindOptions, + ) {} + + build({ username, path, search, schemeOverride }: BuildOptions): AddressInfo { + const qpEntries = Object.entries(search) + .map( + ([key, val]) => `${encodeURIComponent(key)}=${encodeURIComponent(val)}`, + ) + .join("&") + + const qp = qpEntries.length ? `?${qpEntries}` : "" + + return { + hostId: this.host.options.id, + bindOptions: { + ...this.options, + scheme: schemeOverride ? schemeOverride.noSsl : this.options.scheme, + addSsl: this.options.addSsl + ? { + ...this.options.addSsl, + scheme: schemeOverride + ? 
schemeOverride.ssl + : this.options.addSsl.scheme, + } + : null, + }, + suffix: `${path}${qp}`, + username, + } + } + + /** + * A function to register a group of origins ( :// : ) with StartOS + * + * The returned addressReceipt serves as proof that the addresses were registered + * + * @param addressInfo + * @returns + */ + async export( + serviceInterfaces: ServiceInterfaceBuilder[], + ): Promise { + const addressesInfo = [] + for (let serviceInterface of serviceInterfaces) { + const { + name, + description, + hasPrimary, + disabled, + id, + type, + username, + path, + search, + schemeOverride, + masked, + } = serviceInterface.options + + const addressInfo = this.build({ + username, + path, + search, + schemeOverride, + }) + + await serviceInterface.options.effects.exportServiceInterface({ + id, + name, + description, + hasPrimary, + disabled, + addressInfo, + type, + masked, + }) + + addressesInfo.push(addressInfo) + } + + return addressesInfo as AddressInfo[] & AddressReceipt + } +} + +type BuildOptions = { + schemeOverride: { ssl: Scheme; noSsl: Scheme } | null + username: string | null + path: string + search: Record +} diff --git a/sdk/lib/interfaces/ServiceInterfaceBuilder.ts b/sdk/lib/interfaces/ServiceInterfaceBuilder.ts new file mode 100644 index 0000000000..14eaee1d35 --- /dev/null +++ b/sdk/lib/interfaces/ServiceInterfaceBuilder.ts @@ -0,0 +1,33 @@ +import { ServiceInterfaceType } from "../StartSdk" +import { Effects } from "../types" +import { Scheme } from "./Host" + +/** + * A helper class for creating a Network Interface + * + * Network Interfaces are collections of web addresses that expose the same API or other resource, + * display to the user with under a common name and description. + * + * All URIs on an interface inherit the same ui: bool, basic auth credentials, path, and search (query) params + * + * @param options + * @returns + */ +export class ServiceInterfaceBuilder { + constructor( + readonly options: { + effects: Effects + name: string + id: string + description: string + hasPrimary: boolean + disabled: boolean + type: ServiceInterfaceType + username: string | null + path: string + search: Record + schemeOverride: { ssl: Scheme; noSsl: Scheme } | null + masked: boolean + }, + ) {} +} diff --git a/sdk/lib/interfaces/interfaceReceipt.ts b/sdk/lib/interfaces/interfaceReceipt.ts new file mode 100644 index 0000000000..24873e67e2 --- /dev/null +++ b/sdk/lib/interfaces/interfaceReceipt.ts @@ -0,0 +1,4 @@ +declare const InterfaceProof: unique symbol +export type InterfaceReceipt = { + [InterfaceProof]: never +} diff --git a/sdk/lib/interfaces/setupInterfaces.ts b/sdk/lib/interfaces/setupInterfaces.ts new file mode 100644 index 0000000000..5ad8d8a7db --- /dev/null +++ b/sdk/lib/interfaces/setupInterfaces.ts @@ -0,0 +1,23 @@ +import { Config } from "../config/builder/config" +import { SDKManifest } from "../manifest/ManifestTypes" +import { AddressInfo, Effects } from "../types" +import { AddressReceipt } from "./AddressReceipt" + +export type InterfacesReceipt = Array +export type SetInterfaces< + Manifest extends SDKManifest, + Store, + ConfigInput extends Record, + Output extends InterfacesReceipt, +> = (opts: { effects: Effects; input: null | ConfigInput }) => Promise +export type SetupInterfaces = < + Manifest extends SDKManifest, + Store, + ConfigInput extends Record, + Output extends InterfacesReceipt, +>( + config: Config, + fn: SetInterfaces, +) => SetInterfaces +export const NO_INTERFACE_CHANGES = [] as InterfacesReceipt +export const setupInterfaces: 
SetupInterfaces = (_config, fn) => fn diff --git a/sdk/lib/mainFn/Daemons.ts b/sdk/lib/mainFn/Daemons.ts new file mode 100644 index 0000000000..640ab8c4bd --- /dev/null +++ b/sdk/lib/mainFn/Daemons.ts @@ -0,0 +1,256 @@ +import { NO_TIMEOUT, SIGKILL, SIGTERM, Signals } from "../StartSdk" +import { HealthReceipt } from "../health/HealthReceipt" +import { CheckResult } from "../health/checkFns" +import { SDKManifest } from "../manifest/ManifestTypes" +import { Trigger } from "../trigger" +import { TriggerInput } from "../trigger/TriggerInput" +import { defaultTrigger } from "../trigger/defaultTrigger" +import { DaemonReturned, Effects, ValidIfNoStupidEscape } from "../types" +import { Mounts } from "./Mounts" +import { CommandOptions, MountOptions, Overlay } from "../util/Overlay" +import { splitCommand } from "../util/splitCommand" + +import { promisify } from "node:util" +import * as CP from "node:child_process" + +const cpExec = promisify(CP.exec) +const cpExecFile = promisify(CP.execFile) +async function psTree(pid: number, overlay: Overlay): Promise { + const { stdout } = await cpExec(`pstree -p ${pid}`) + const regex: RegExp = /\((\d+)\)/g + return [...stdout.toString().matchAll(regex)].map(([_all, pid]) => + parseInt(pid), + ) +} +type Daemon< + Manifest extends SDKManifest, + Ids extends string, + Command extends string, + Id extends string, +> = { + id: "" extends Id ? never : Id + command: ValidIfNoStupidEscape | [string, ...string[]] + imageId: Manifest["images"][number] + mounts: Mounts + env?: Record + ready: { + display: string | null + fn: () => Promise | CheckResult + trigger?: Trigger + } + requires: Exclude[] +} + +type ErrorDuplicateId = `The id '${Id}' is already used` + +export const runDaemon = + () => + async ( + effects: Effects, + imageId: Manifest["images"][number], + command: ValidIfNoStupidEscape | [string, ...string[]], + options: CommandOptions & { + mounts?: { path: string; options: MountOptions }[] + overlay?: Overlay + }, + ): Promise => { + const commands = splitCommand(command) + const overlay = options.overlay || (await Overlay.of(effects, imageId)) + for (let mount of options.mounts || []) { + await overlay.mount(mount.options, mount.path) + } + const childProcess = await overlay.spawn(commands, { + env: options.env, + }) + const answer = new Promise((resolve, reject) => { + childProcess.stdout.on("data", (data: any) => { + console.log(data.toString()) + }) + childProcess.stderr.on("data", (data: any) => { + console.error(data.toString()) + }) + + childProcess.on("exit", (code: any) => { + if (code === 0) { + return resolve(null) + } + return reject(new Error(`${commands[0]} exited with code ${code}`)) + }) + }) + + const pid = childProcess.pid + return { + async wait() { + const pids = pid ? await psTree(pid, overlay) : [] + try { + return await answer + } finally { + for (const process of pids) { + cpExecFile("kill", [`-9`, String(process)]).catch((_) => {}) + } + } + }, + async term({ signal = SIGTERM, timeout = NO_TIMEOUT } = {}) { + const pids = pid ? 
await psTree(pid, overlay) : [] + try { + childProcess.kill(signal) + + if (timeout > NO_TIMEOUT) { + const didTimeout = await Promise.race([ + new Promise((resolve) => setTimeout(resolve, timeout)).then( + () => true, + ), + answer.then(() => false), + ]) + if (didTimeout) { + childProcess.kill(SIGKILL) + } + } else { + await answer + } + } finally { + await overlay.destroy() + } + + try { + for (const process of pids) { + await cpExecFile("kill", [`-${signal}`, String(process)]) + } + } finally { + for (const process of pids) { + cpExecFile("kill", [`-9`, String(process)]).catch((_) => {}) + } + } + }, + } + } + +/** + * A class for defining and controlling the service daemons +```ts +Daemons.of({ + effects, + started, + interfaceReceipt, // Provide the interfaceReceipt to prove it was completed + healthReceipts, // Provide the healthReceipts or [] to prove they were at least considered +}).addDaemon('webui', { + command: 'hello-world', // The command to start the daemon + ready: { + display: 'Web Interface', + // The function to run to determine the health status of the daemon + fn: () => + checkPortListening(effects, 80, { + successMessage: 'The web interface is ready', + errorMessage: 'The web interface is not ready', + }), + }, + requires: [], +}) +``` + */ +export class Daemons { + private constructor( + readonly effects: Effects, + readonly started: (onTerm: () => PromiseLike) => PromiseLike, + readonly daemons?: Daemon[], + ) {} + /** + * Returns an empty new Daemons class with the provided config. + * + * Call .addDaemon() on the returned class to add a daemon. + * + * Daemons run in the order they are defined, with latter daemons being capable of + * depending on prior daemons + * @param config + * @returns + */ + static of(config: { + effects: Effects + started: (onTerm: () => PromiseLike) => PromiseLike + healthReceipts: HealthReceipt[] + }) { + return new Daemons(config.effects, config.started) + } + /** + * Returns the complete list of daemons, including the one defined here + * @param id + * @param newDaemon + * @returns + */ + addDaemon( + // prettier-ignore + id: + "" extends Id ? never : + ErrorDuplicateId extends Id ? never : + Id extends Ids ? ErrorDuplicateId : + Id, + newDaemon: Omit, "id">, + ) { + const daemons = ((this?.daemons ?? []) as any[]).concat({ + ...newDaemon, + id, + }) + return new Daemons(this.effects, this.started, daemons) + } + + async build() { + const daemonsStarted = {} as Record> + const { effects } = this + const daemons = this.daemons ?? [] + for (const daemon of daemons) { + const requiredPromise = Promise.all( + daemon.requires?.map((id) => daemonsStarted[id]) ?? [], + ) + daemonsStarted[daemon.id] = requiredPromise.then(async () => { + const { command, imageId } = daemon + + const child = runDaemon()(effects, imageId, command, { + env: daemon.env, + mounts: daemon.mounts.build(), + }) + let currentInput: TriggerInput = {} + const getCurrentInput = () => currentInput + const trigger = (daemon.ready.trigger ?? defaultTrigger)( + getCurrentInput, + ) + return new Promise(async (resolve) => { + for ( + let res = await trigger.next(); + !res.done; + res = await trigger.next() + ) { + const response = await Promise.resolve(daemon.ready.fn()).catch( + (err) => + ({ + status: "failure", + message: "message" in err ? 
err.message : String(err), + }) as CheckResult, + ) + currentInput.lastResult = response.status || null + if (!currentInput.hadSuccess && response.status === "success") { + currentInput.hadSuccess = true + resolve(child) + } + } + resolve(child) + }) + }) + } + return { + async term(options?: { signal?: Signals; timeout?: number }) { + await Promise.all( + Object.values>(daemonsStarted).map((x) => + x.then((x) => x.term(options)), + ), + ) + }, + async wait() { + await Promise.all( + Object.values>(daemonsStarted).map((x) => + x.then((x) => x.wait()), + ), + ) + }, + } + } +} diff --git a/sdk/lib/mainFn/Mounts.ts b/sdk/lib/mainFn/Mounts.ts new file mode 100644 index 0000000000..eeedc79c6d --- /dev/null +++ b/sdk/lib/mainFn/Mounts.ts @@ -0,0 +1,126 @@ +import { SDKManifest } from "../manifest/ManifestTypes" +import { Effects } from "../types" +import { MountOptions } from "../util/Overlay" + +type MountArray = { path: string; options: MountOptions }[] + +export class Mounts { + private constructor( + readonly volumes: { + id: Manifest["volumes"][number] + subpath: string | null + mountpoint: string + readonly: boolean + }[], + readonly assets: { + id: Manifest["assets"][number] + subpath: string | null + mountpoint: string + }[], + readonly dependencies: { + dependencyId: string + volumeId: string + subpath: string | null + mountpoint: string + readonly: boolean + }[], + ) {} + + static of() { + return new Mounts([], [], []) + } + + addVolume( + id: Manifest["volumes"][number], + subpath: string | null, + mountpoint: string, + readonly: boolean, + ) { + this.volumes.push({ + id, + subpath, + mountpoint, + readonly, + }) + return this + } + + addAssets( + id: Manifest["assets"][number], + subpath: string | null, + mountpoint: string, + ) { + this.assets.push({ + id, + subpath, + mountpoint, + }) + return this + } + + addDependency( + dependencyId: keyof Manifest["dependencies"] & string, + volumeId: DependencyManifest["volumes"][number], + subpath: string | null, + mountpoint: string, + readonly: boolean, + ) { + this.dependencies.push({ + dependencyId, + volumeId, + subpath, + mountpoint, + readonly, + }) + return this + } + + build(): MountArray { + const mountpoints = new Set() + for (let mountpoint of this.volumes + .map((v) => v.mountpoint) + .concat(this.assets.map((a) => a.mountpoint)) + .concat(this.dependencies.map((d) => d.mountpoint))) { + if (mountpoints.has(mountpoint)) { + throw new Error( + `cannot mount more than once to mountpoint ${mountpoint}`, + ) + } + mountpoints.add(mountpoint) + } + return ([] as MountArray) + .concat( + this.volumes.map((v) => ({ + path: v.mountpoint, + options: { + type: "volume", + id: v.id, + subpath: v.subpath, + readonly: v.readonly, + }, + })), + ) + .concat( + this.assets.map((a) => ({ + path: a.mountpoint, + options: { + type: "assets", + id: a.id, + subpath: a.subpath, + }, + })), + ) + .concat( + this.dependencies.map((d) => ({ + path: d.mountpoint, + options: { + type: "pointer", + packageId: d.dependencyId, + volumeId: d.volumeId, + subpath: d.subpath, + readonly: d.readonly, + }, + })), + ) + } +} diff --git a/sdk/lib/mainFn/index.ts b/sdk/lib/mainFn/index.ts new file mode 100644 index 0000000000..3da57d32f6 --- /dev/null +++ b/sdk/lib/mainFn/index.ts @@ -0,0 +1,30 @@ +import { ExpectedExports } from "../types" +import { Daemons } from "./Daemons" +import "../interfaces/ServiceInterfaceBuilder" +import "../interfaces/Origin" + +import "./Daemons" +import { SDKManifest } from "../manifest/ManifestTypes" +import { MainEffects } from 
"../StartSdk" + +/** + * Used to ensure that the main function is running with the valid proofs. + * We first do the folowing order of things + * 1. We get the interfaces + * 2. We setup all the commands to setup the system + * 3. We create the health checks + * 4. We setup the daemons init system + * @param fn + * @returns + */ +export const setupMain = ( + fn: (o: { + effects: MainEffects + started(onTerm: () => PromiseLike): PromiseLike + }) => Promise>, +): ExpectedExports.main => { + return async (options) => { + const result = await fn(options) + return result + } +} diff --git a/sdk/lib/manifest/ManifestTypes.ts b/sdk/lib/manifest/ManifestTypes.ts new file mode 100644 index 0000000000..ed8703bf2a --- /dev/null +++ b/sdk/lib/manifest/ManifestTypes.ts @@ -0,0 +1,87 @@ +import { ValidEmVer } from "../emverLite/mod" +import { ActionMetadata } from "../types" + +export interface Container { + /** This should be pointing to a docker container name */ + image: string + /** These should match the manifest data volumes */ + mounts: Record + /** Default is 64mb */ + shmSizeMb?: `${number}${"mb" | "gb" | "b" | "kb"}` + /** if more than 30s to shutdown */ + sigtermTimeout?: `${number}${"s" | "m" | "h"}` +} + +export type ManifestVersion = ValidEmVer + +export type SDKManifest = { + /** The package identifier used by the OS. This must be unique amongst all other known packages */ + readonly id: string + /** A human readable service title */ + readonly title: string + /** Service version - accepts up to four digits, where the last confirms to revisions necessary for StartOs + * - see documentation: https://github.com/Start9Labs/emver-rs. This value will change with each release of + * the service + */ + readonly version: ManifestVersion + /** Release notes for the update - can be a string, paragraph or URL */ + readonly releaseNotes: string + /** The type of license for the project. Include the LICENSE in the root of the project directory. A license is required for a Start9 package.*/ + readonly license: string // name of license + /** A list of normie (hosted, SaaS, custodial, etc) services this services intends to replace */ + readonly replaces: Readonly + /** The Start9 wrapper repository URL for the package. This repo contains the manifest file (this), + * any scripts necessary for configuration, backups, actions, or health checks (more below). This key + * must exist. But could be embedded into the source repository + */ + readonly wrapperRepo: string + /** The original project repository URL. There is no upstream repo in this example */ + readonly upstreamRepo: string + /** URL to the support site / channel for the project. This key can be omitted if none exists, or it can link to the original project repository issues */ + readonly supportSite: string + /** URL to the marketing site for the project. If there is no marketing site, it can link to the original project repository */ + readonly marketingSite: string + /** URL where users can donate to the upstream project */ + readonly donationUrl: string | null + /**Human readable descriptions for the service. These are used throughout the StartOS user interface, primarily in the marketplace. 
*/ + readonly description: { + /**This is the first description visible to the user in the marketplace */ + readonly short: string + /** This description will display with additional details in the service's individual marketplace page */ + readonly long: string + } + + /** Defines the os images needed to run the container processes */ + readonly images: string[] + /** This denotes readonly asset directories that should be available to mount to the container. + * Assuming that there will be three files with names along the lines: + * icon.* : the icon that will be this packages icon on the ui + * LICENSE : What the license is for this service + * Instructions : to be seen in the ui section of the package + * */ + readonly assets: string[] + /** This denotes any data volumes that should be available to mount to the container */ + readonly volumes: string[] + + readonly alerts: { + readonly install: string | null + readonly update: string | null + readonly uninstall: string | null + readonly restore: string | null + readonly start: string | null + readonly stop: string | null + } + readonly dependencies: Readonly> +} + +export interface ManifestDependency { + /** + * A human readable explanation on what the dependency is used for + */ + description: string | null + /** + * Determines if the dependency is optional or not. Times that optional that are good include such situations + * such as being able to toggle other services or to use a different service for the same purpose. + */ + optional: boolean +} diff --git a/sdk/lib/manifest/index.ts b/sdk/lib/manifest/index.ts new file mode 100644 index 0000000000..806ef5e61f --- /dev/null +++ b/sdk/lib/manifest/index.ts @@ -0,0 +1,2 @@ +import "./setupManifest" +import "./ManifestTypes" diff --git a/sdk/lib/manifest/setupManifest.ts b/sdk/lib/manifest/setupManifest.ts new file mode 100644 index 0000000000..41c74baa05 --- /dev/null +++ b/sdk/lib/manifest/setupManifest.ts @@ -0,0 +1,20 @@ +import { SDKManifest, ManifestVersion } from "./ManifestTypes" + +export function setupManifest< + Id extends string, + Version extends ManifestVersion, + Dependencies extends Record, + VolumesTypes extends string, + AssetTypes extends string, + ImagesTypes extends string, + Manifest extends SDKManifest & { + dependencies: Dependencies + id: Id + version: Version + assets: AssetTypes[] + images: ImagesTypes[] + volumes: VolumesTypes[] + }, +>(manifest: Manifest): Manifest { + return manifest +} diff --git a/sdk/lib/store/PathBuilder.ts b/sdk/lib/store/PathBuilder.ts new file mode 100644 index 0000000000..038fa5ac28 --- /dev/null +++ b/sdk/lib/store/PathBuilder.ts @@ -0,0 +1,38 @@ +import { Affine } from "../util" + +const pathValue = Symbol("pathValue") +export type PathValue = typeof pathValue + +export type PathBuilderStored = { + [K in PathValue]: [AllStore, Store] +} + +export type PathBuilder = (Store extends Record< + string, + unknown +> + ? 
{ + [K in keyof Store]: PathBuilder + } + : {}) & + PathBuilderStored + +export type StorePath = string & Affine<"StorePath"> +const privateSymbol = Symbol("jsonPath") +export const extractJsonPath = (builder: PathBuilder) => { + return (builder as any)[privateSymbol] as StorePath +} + +export const pathBuilder = ( + paths: string[] = [], +): PathBuilder => { + return new Proxy({} as PathBuilder, { + get(target, prop) { + if (prop === privateSymbol) { + if (paths.length === 0) return "" + return `/${paths.join("/")}` + } + return pathBuilder([...paths, prop as string]) + }, + }) as PathBuilder +} diff --git a/sdk/lib/store/getStore.ts b/sdk/lib/store/getStore.ts new file mode 100644 index 0000000000..38265c7fdf --- /dev/null +++ b/sdk/lib/store/getStore.ts @@ -0,0 +1,62 @@ +import { Effects } from "../types" +import { PathBuilder, extractJsonPath } from "./PathBuilder" + +export class GetStore { + constructor( + readonly effects: Effects, + readonly path: PathBuilder, + readonly options: { + /** Defaults to what ever the package currently in */ + packageId?: string | undefined + } = {}, + ) {} + + /** + * Returns the value of Store at the provided path. Restart the service if the value changes + */ + const() { + return this.effects.store.get({ + ...this.options, + path: extractJsonPath(this.path), + callback: this.effects.restart, + }) + } + /** + * Returns the value of Store at the provided path. Does nothing if the value changes + */ + once() { + return this.effects.store.get({ + ...this.options, + path: extractJsonPath(this.path), + callback: () => {}, + }) + } + + /** + * Watches the value of Store at the provided path. Takes a custom callback function to run whenever the value changes + */ + async *watch() { + while (true) { + let callback: () => void + const waitForNext = new Promise((resolve) => { + callback = resolve + }) + yield await this.effects.store.get({ + ...this.options, + path: extractJsonPath(this.path), + callback: () => callback(), + }) + await waitForNext + } + } +} +export function getStore( + effects: Effects, + path: PathBuilder, + options: { + /** Defaults to what ever the package currently in */ + packageId?: string | undefined + } = {}, +) { + return new GetStore(effects, path, options) +} diff --git a/sdk/lib/store/setupExposeStore.ts b/sdk/lib/store/setupExposeStore.ts new file mode 100644 index 0000000000..9272a9a6b7 --- /dev/null +++ b/sdk/lib/store/setupExposeStore.ts @@ -0,0 +1,12 @@ +import { Affine, _ } from "../util" +import { PathBuilder, extractJsonPath, pathBuilder } from "./PathBuilder" + +export type ExposedStorePaths = string[] & Affine<"ExposedStorePaths"> + +export const setupExposeStore = >( + fn: (pathBuilder: PathBuilder) => PathBuilder[], +) => { + return fn(pathBuilder()).map( + (x) => extractJsonPath(x) as string, + ) as ExposedStorePaths +} diff --git a/sdk/lib/test/configBuilder.test.ts b/sdk/lib/test/configBuilder.test.ts new file mode 100644 index 0000000000..2df40b95c9 --- /dev/null +++ b/sdk/lib/test/configBuilder.test.ts @@ -0,0 +1,865 @@ +import { testOutput } from "./output.test" +import { Config } from "../config/builder/config" +import { List } from "../config/builder/list" +import { Value } from "../config/builder/value" +import { Variants } from "../config/builder/variants" +import { ValueSpec } from "../config/configTypes" +import { setupManifest } from "../manifest/setupManifest" +import { StartSdk } from "../StartSdk" + +describe("builder tests", () => { + test("text", async () => { + const bitcoinPropertiesBuilt: { + 
"peer-tor-address": ValueSpec + } = await Config.of({ + "peer-tor-address": Value.text({ + name: "Peer tor address", + description: "The Tor address of the peer interface", + required: { default: null }, + }), + }).build({} as any) + expect(bitcoinPropertiesBuilt).toMatchObject({ + "peer-tor-address": { + type: "text", + description: "The Tor address of the peer interface", + warning: null, + masked: false, + placeholder: null, + minLength: null, + maxLength: null, + patterns: [], + disabled: false, + inputmode: "text", + name: "Peer tor address", + required: true, + default: null, + }, + }) + }) +}) + +describe("values", () => { + test("toggle", async () => { + const value = Value.toggle({ + name: "Testing", + description: null, + warning: null, + default: false, + }) + const validator = value.validator + validator.unsafeCast(false) + testOutput()(null) + }) + test("text", async () => { + const value = Value.text({ + name: "Testing", + required: { default: null }, + }) + const validator = value.validator + const rawIs = await value.build({} as any) + validator.unsafeCast("test text") + expect(() => validator.unsafeCast(null)).toThrowError() + testOutput()(null) + }) + test("text with default", async () => { + const value = Value.text({ + name: "Testing", + required: { default: "this is a default value" }, + }) + const validator = value.validator + const rawIs = await value.build({} as any) + validator.unsafeCast("test text") + expect(() => validator.unsafeCast(null)).toThrowError() + testOutput()(null) + }) + test("optional text", async () => { + const value = Value.text({ + name: "Testing", + required: false, + }) + const validator = value.validator + const rawIs = await value.build({} as any) + validator.unsafeCast("test text") + validator.unsafeCast(null) + testOutput()(null) + }) + test("color", async () => { + const value = Value.color({ + name: "Testing", + required: false, + description: null, + warning: null, + }) + const validator = value.validator + validator.unsafeCast("#000000") + testOutput()(null) + }) + test("datetime", async () => { + const value = Value.datetime({ + name: "Testing", + required: { default: null }, + description: null, + warning: null, + inputmode: "date", + min: null, + max: null, + }) + const validator = value.validator + validator.unsafeCast("2021-01-01") + testOutput()(null) + }) + test("optional datetime", async () => { + const value = Value.datetime({ + name: "Testing", + required: false, + description: null, + warning: null, + inputmode: "date", + min: null, + max: null, + }) + const validator = value.validator + validator.unsafeCast("2021-01-01") + testOutput()(null) + }) + test("textarea", async () => { + const value = Value.textarea({ + name: "Testing", + required: false, + description: null, + warning: null, + minLength: null, + maxLength: null, + placeholder: null, + }) + const validator = value.validator + validator.unsafeCast("test text") + testOutput()(null) + }) + test("number", async () => { + const value = Value.number({ + name: "Testing", + required: { default: null }, + integer: false, + description: null, + warning: null, + min: null, + max: null, + step: null, + units: null, + placeholder: null, + }) + const validator = value.validator + validator.unsafeCast(2) + testOutput()(null) + }) + test("optional number", async () => { + const value = Value.number({ + name: "Testing", + required: false, + integer: false, + description: null, + warning: null, + min: null, + max: null, + step: null, + units: null, + placeholder: null, + }) + const 
validator = value.validator + validator.unsafeCast(2) + testOutput()(null) + }) + test("select", async () => { + const value = Value.select({ + name: "Testing", + required: { default: null }, + values: { + a: "A", + b: "B", + }, + description: null, + warning: null, + }) + const validator = value.validator + validator.unsafeCast("a") + validator.unsafeCast("b") + expect(() => validator.unsafeCast("c")).toThrowError() + testOutput()(null) + }) + test("nullable select", async () => { + const value = Value.select({ + name: "Testing", + required: false, + values: { + a: "A", + b: "B", + }, + description: null, + warning: null, + }) + const validator = value.validator + validator.unsafeCast("a") + validator.unsafeCast("b") + validator.unsafeCast(null) + testOutput()(null) + }) + test("multiselect", async () => { + const value = Value.multiselect({ + name: "Testing", + values: { + a: "A", + b: "B", + }, + default: [], + description: null, + warning: null, + minLength: null, + maxLength: null, + }) + const validator = value.validator + validator.unsafeCast([]) + validator.unsafeCast(["a", "b"]) + + expect(() => validator.unsafeCast(["e"])).toThrowError() + expect(() => validator.unsafeCast([4])).toThrowError() + testOutput>()(null) + }) + test("object", async () => { + const value = Value.object( + { + name: "Testing", + description: null, + warning: null, + }, + Config.of({ + a: Value.toggle({ + name: "test", + description: null, + warning: null, + default: false, + }), + }), + ) + const validator = value.validator + validator.unsafeCast({ a: true }) + testOutput()(null) + }) + test("union", async () => { + const value = Value.union( + { + name: "Testing", + required: { default: null }, + description: null, + warning: null, + }, + Variants.of({ + a: { + name: "a", + spec: Config.of({ + b: Value.toggle({ + name: "b", + description: null, + warning: null, + default: false, + }), + }), + }, + }), + ) + const validator = value.validator + validator.unsafeCast({ unionSelectKey: "a", unionValueKey: { b: false } }) + type Test = typeof validator._TYPE + testOutput()( + null, + ) + }) + test("list", async () => { + const value = Value.list( + List.number( + { + name: "test", + }, + { + integer: false, + }, + ), + ) + const validator = value.validator + validator.unsafeCast([1, 2, 3]) + testOutput()(null) + }) + + describe("dynamic", () => { + const fakeOptions = { + config: "config", + effects: "effects", + utils: "utils", + } as any + test("toggle", async () => { + const value = Value.dynamicToggle(async () => ({ + name: "Testing", + description: null, + warning: null, + default: false, + })) + const validator = value.validator + validator.unsafeCast(false) + expect(() => validator.unsafeCast(null)).toThrowError() + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + description: null, + warning: null, + default: false, + }) + }) + test("text", async () => { + const value = Value.dynamicText(async () => ({ + name: "Testing", + required: { default: null }, + })) + const validator = value.validator + const rawIs = await value.build({} as any) + validator.unsafeCast("test text") + validator.unsafeCast(null) + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + required: true, + default: null, + }) + }) + test("text with default", async () => { + const value = Value.dynamicText(async () => ({ + name: "Testing", + required: { default: "this is a default value" }, + })) + const validator = value.validator + 
validator.unsafeCast("test text") + validator.unsafeCast(null) + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + required: true, + default: "this is a default value", + }) + }) + test("optional text", async () => { + const value = Value.dynamicText(async () => ({ + name: "Testing", + required: false, + })) + const validator = value.validator + const rawIs = await value.build({} as any) + validator.unsafeCast("test text") + validator.unsafeCast(null) + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + required: false, + default: null, + }) + }) + test("color", async () => { + const value = Value.dynamicColor(async () => ({ + name: "Testing", + required: false, + description: null, + warning: null, + })) + const validator = value.validator + validator.unsafeCast("#000000") + validator.unsafeCast(null) + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + required: false, + default: null, + description: null, + warning: null, + }) + }) + test("datetime", async () => { + const sdk = StartSdk.of() + .withManifest( + setupManifest({ + id: "testOutput", + title: "", + version: "1.0", + releaseNotes: "", + license: "", + replaces: [], + wrapperRepo: "", + upstreamRepo: "", + supportSite: "", + marketingSite: "", + donationUrl: null, + description: { + short: "", + long: "", + }, + containers: {}, + images: [], + volumes: [], + assets: [], + alerts: { + install: null, + update: null, + uninstall: null, + restore: null, + start: null, + stop: null, + }, + dependencies: { + remoteTest: { + description: "", + optional: true, + }, + }, + }), + ) + .withStore<{ test: "a" }>() + .build(true) + + const value = Value.dynamicDatetime<{ test: "a" }>( + async ({ effects }) => { + ;async () => { + ;(await sdk.store + .getOwn(effects, sdk.StorePath.test) + .once()) satisfies "a" + } + + return { + name: "Testing", + required: { default: null }, + inputmode: "date", + } + }, + ) + const validator = value.validator + validator.unsafeCast("2021-01-01") + validator.unsafeCast(null) + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + required: true, + default: null, + description: null, + warning: null, + inputmode: "date", + }) + }) + test("textarea", async () => { + const value = Value.dynamicTextarea(async () => ({ + name: "Testing", + required: false, + description: null, + warning: null, + minLength: null, + maxLength: null, + placeholder: null, + })) + const validator = value.validator + validator.unsafeCast("test text") + expect(() => validator.unsafeCast(null)).toThrowError() + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + required: false, + }) + }) + test("number", async () => { + const value = Value.dynamicNumber(() => ({ + name: "Testing", + required: { default: null }, + integer: false, + description: null, + warning: null, + min: null, + max: null, + step: null, + units: null, + placeholder: null, + })) + const validator = value.validator + validator.unsafeCast(2) + validator.unsafeCast(null) + expect(() => validator.unsafeCast("null")).toThrowError() + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + required: true, + }) + }) + test("select", async () => { + const value = Value.dynamicSelect(() => ({ + name: "Testing", + required: { default: null }, + values: { + a: "A", + b: "B", + }, + description: null, + warning: 
null, + })) + const validator = value.validator + validator.unsafeCast("a") + validator.unsafeCast("b") + validator.unsafeCast("c") + validator.unsafeCast(null) + testOutput()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + required: true, + }) + }) + test("multiselect", async () => { + const value = Value.dynamicMultiselect(() => ({ + name: "Testing", + values: { + a: "A", + b: "B", + }, + default: [], + description: null, + warning: null, + minLength: null, + maxLength: null, + })) + const validator = value.validator + validator.unsafeCast([]) + validator.unsafeCast(["a", "b"]) + validator.unsafeCast(["c"]) + + expect(() => validator.unsafeCast([4])).toThrowError() + expect(() => validator.unsafeCast(null)).toThrowError() + testOutput>()(null) + expect(await value.build(fakeOptions)).toMatchObject({ + name: "Testing", + default: [], + }) + }) + }) + describe("filtering", () => { + test("union", async () => { + const value = Value.filteredUnion( + () => ["a", "c"], + { + name: "Testing", + required: { default: null }, + description: null, + warning: null, + }, + Variants.of({ + a: { + name: "a", + spec: Config.of({ + b: Value.toggle({ + name: "b", + description: null, + warning: null, + default: false, + }), + }), + }, + b: { + name: "b", + spec: Config.of({ + b: Value.toggle({ + name: "b", + description: null, + warning: null, + default: false, + }), + }), + }, + }), + ) + const validator = value.validator + validator.unsafeCast({ unionSelectKey: "a", unionValueKey: { b: false } }) + type Test = typeof validator._TYPE + testOutput< + Test, + | { unionSelectKey: "a"; unionValueKey: { b: boolean } } + | { unionSelectKey: "b"; unionValueKey: { b: boolean } } + >()(null) + + const built = await value.build({} as any) + expect(built).toMatchObject({ + name: "Testing", + variants: { + b: {}, + }, + }) + expect(built).toMatchObject({ + name: "Testing", + variants: { + a: {}, + b: {}, + }, + }) + expect(built).toMatchObject({ + name: "Testing", + variants: { + a: {}, + b: {}, + }, + disabled: ["a", "c"], + }) + }) + }) + test("dynamic union", async () => { + const value = Value.dynamicUnion( + () => ({ + disabled: ["a", "c"], + name: "Testing", + required: { default: null }, + description: null, + warning: null, + }), + Variants.of({ + a: { + name: "a", + spec: Config.of({ + b: Value.toggle({ + name: "b", + description: null, + warning: null, + default: false, + }), + }), + }, + b: { + name: "b", + spec: Config.of({ + b: Value.toggle({ + name: "b", + description: null, + warning: null, + default: false, + }), + }), + }, + }), + ) + const validator = value.validator + validator.unsafeCast({ unionSelectKey: "a", unionValueKey: { b: false } }) + type Test = typeof validator._TYPE + testOutput< + Test, + | { unionSelectKey: "a"; unionValueKey: { b: boolean } } + | { unionSelectKey: "b"; unionValueKey: { b: boolean } } + | null + | undefined + >()(null) + + const built = await value.build({} as any) + expect(built).toMatchObject({ + name: "Testing", + variants: { + b: {}, + }, + }) + expect(built).toMatchObject({ + name: "Testing", + variants: { + a: {}, + b: {}, + }, + }) + expect(built).toMatchObject({ + name: "Testing", + variants: { + a: {}, + b: {}, + }, + disabled: ["a", "c"], + }) + }) +}) + +describe("Builder List", () => { + test("obj", async () => { + const value = Value.list( + List.obj( + { + name: "test", + }, + { + spec: Config.of({ + test: Value.toggle({ + name: "test", + description: null, + warning: null, + default: false, + }), + }), + }, + ), + ) 
+ const validator = value.validator + validator.unsafeCast([{ test: true }]) + testOutput()(null) + }) + test("text", async () => { + const value = Value.list( + List.text( + { + name: "test", + }, + { + patterns: [], + }, + ), + ) + const validator = value.validator + validator.unsafeCast(["test", "text"]) + testOutput()(null) + }) + describe("dynamic", () => { + test("text", async () => { + const value = Value.list( + List.dynamicText(() => ({ + name: "test", + spec: { patterns: [] }, + })), + ) + const validator = value.validator + validator.unsafeCast(["test", "text"]) + expect(() => validator.unsafeCast([3, 4])).toThrowError() + expect(() => validator.unsafeCast(null)).toThrowError() + testOutput()(null) + expect(await value.build({} as any)).toMatchObject({ + name: "test", + spec: { patterns: [] }, + }) + }) + }) + test("number", async () => { + const value = Value.list( + List.dynamicNumber(() => ({ + name: "test", + spec: { integer: true }, + })), + ) + const validator = value.validator + expect(() => validator.unsafeCast(["test", "text"])).toThrowError() + validator.unsafeCast([4, 2]) + expect(() => validator.unsafeCast(null)).toThrowError() + validator.unsafeCast([]) + testOutput()(null) + expect(await value.build({} as any)).toMatchObject({ + name: "test", + spec: { integer: true }, + }) + }) +}) + +describe("Nested nullable values", () => { + test("Testing text", async () => { + const value = Config.of({ + a: Value.text({ + name: "Temp Name", + description: + "If no name is provided, the name from config will be used", + required: false, + }), + }) + const validator = value.validator + validator.unsafeCast({ a: null }) + validator.unsafeCast({ a: "test" }) + expect(() => validator.unsafeCast({ a: 4 })).toThrowError() + testOutput()(null) + }) + test("Testing number", async () => { + const value = Config.of({ + a: Value.number({ + name: "Temp Name", + description: + "If no name is provided, the name from config will be used", + required: false, + warning: null, + placeholder: null, + integer: false, + min: null, + max: null, + step: null, + units: null, + }), + }) + const validator = value.validator + validator.unsafeCast({ a: null }) + validator.unsafeCast({ a: 5 }) + expect(() => validator.unsafeCast({ a: "4" })).toThrowError() + testOutput()(null) + }) + test("Testing color", async () => { + const value = Config.of({ + a: Value.color({ + name: "Temp Name", + description: + "If no name is provided, the name from config will be used", + required: false, + warning: null, + }), + }) + const validator = value.validator + validator.unsafeCast({ a: null }) + validator.unsafeCast({ a: "5" }) + expect(() => validator.unsafeCast({ a: 4 })).toThrowError() + testOutput()(null) + }) + test("Testing select", async () => { + const value = Config.of({ + a: Value.select({ + name: "Temp Name", + description: + "If no name is provided, the name from config will be used", + required: false, + warning: null, + values: { + a: "A", + }, + }), + }) + const higher = await Value.select({ + name: "Temp Name", + description: "If no name is provided, the name from config will be used", + required: false, + warning: null, + values: { + a: "A", + }, + }).build({} as any) + + const validator = value.validator + validator.unsafeCast({ a: null }) + validator.unsafeCast({ a: "a" }) + expect(() => validator.unsafeCast({ a: "4" })).toThrowError() + testOutput()(null) + }) + test("Testing multiselect", async () => { + const value = Config.of({ + a: Value.multiselect({ + name: "Temp Name", + description: + "If no 
name is provided, the name from config will be used", + + warning: null, + default: [], + values: { + a: "A", + }, + minLength: null, + maxLength: null, + }), + }) + const validator = value.validator + validator.unsafeCast({ a: [] }) + validator.unsafeCast({ a: ["a"] }) + expect(() => validator.unsafeCast({ a: ["4"] })).toThrowError() + expect(() => validator.unsafeCast({ a: "4" })).toThrowError() + testOutput()(null) + }) +}) diff --git a/sdk/lib/test/configTypes.test.ts b/sdk/lib/test/configTypes.test.ts new file mode 100644 index 0000000000..7e3ff5ca63 --- /dev/null +++ b/sdk/lib/test/configTypes.test.ts @@ -0,0 +1,32 @@ +import { + ListValueSpecOf, + ValueSpec, + isValueSpecListOf, +} from "../config/configTypes" +import { Config } from "../config/builder/config" +import { List } from "../config/builder/list" +import { Value } from "../config/builder/value" + +describe("Config Types", () => { + test("isValueSpecListOf", async () => { + const options = [List.obj, List.text, List.number] + for (const option of options) { + const test = (option as any)( + {} as any, + { spec: Config.of({}) } as any, + ) as any + const someList = await Value.list(test).build({} as any) + if (isValueSpecListOf(someList, "text")) { + someList.spec satisfies ListValueSpecOf<"text"> + } else if (isValueSpecListOf(someList, "number")) { + someList.spec satisfies ListValueSpecOf<"number"> + } else if (isValueSpecListOf(someList, "object")) { + someList.spec satisfies ListValueSpecOf<"object"> + } else { + throw new Error( + "Failed to figure out the type: " + JSON.stringify(someList), + ) + } + } + }) +}) diff --git a/sdk/lib/test/emverList.test.ts b/sdk/lib/test/emverList.test.ts new file mode 100644 index 0000000000..43919aa83e --- /dev/null +++ b/sdk/lib/test/emverList.test.ts @@ -0,0 +1,262 @@ +import { EmVer, notRange, rangeAnd, rangeOf, rangeOr } from "../emverLite/mod" +describe("EmVer", () => { + { + { + const checker = rangeOf("*") + test("rangeOf('*')", () => { + checker.check("1") + checker.check("1.2") + checker.check("1.2.3") + checker.check("1.2.3.4") + // @ts-expect-error + checker.check("1.2.3.4.5") + // @ts-expect-error + checker.check("1.2.3.4.5.6") + expect(checker.check("1")).toEqual(true) + expect(checker.check("1.2")).toEqual(true) + expect(checker.check("1.2.3.4")).toEqual(true) + }) + test("rangeOf('*') invalid", () => { + // @ts-expect-error + expect(() => checker.check("a")).toThrow() + // @ts-expect-error + expect(() => checker.check("")).toThrow() + expect(() => checker.check("1..3")).toThrow() + }) + } + + { + const checker = rangeOf(">1.2.3.4") + test(`rangeOf(">1.2.3.4") valid`, () => { + expect(checker.check("2-beta123")).toEqual(true) + expect(checker.check("2")).toEqual(true) + expect(checker.check("1.2.3.5")).toEqual(true) + // @ts-expect-error + expect(checker.check("1.2.3.4.1")).toEqual(true) + }) + + test(`rangeOf(">1.2.3.4") invalid`, () => { + expect(checker.check("1.2.3.4")).toEqual(false) + expect(checker.check("1.2.3")).toEqual(false) + expect(checker.check("1")).toEqual(false) + }) + } + { + const checker = rangeOf("=1.2.3") + test(`rangeOf("=1.2.3") valid`, () => { + expect(checker.check("1.2.3")).toEqual(true) + }) + + test(`rangeOf("=1.2.3") invalid`, () => { + expect(checker.check("2")).toEqual(false) + expect(checker.check("1.2.3.1")).toEqual(false) + expect(checker.check("1.2")).toEqual(false) + }) + } + { + const checker = rangeOf(">=1.2.3.4") + test(`rangeOf(">=1.2.3.4") valid`, () => { + expect(checker.check("2")).toEqual(true) + 
expect(checker.check("1.2.3.5")).toEqual(true) + // @ts-expect-error + expect(checker.check("1.2.3.4.1")).toEqual(true) + expect(checker.check("1.2.3.4")).toEqual(true) + }) + + test(`rangeOf(">=1.2.3.4") invalid`, () => { + expect(checker.check("1.2.3")).toEqual(false) + expect(checker.check("1")).toEqual(false) + }) + } + { + const checker = rangeOf("<1.2.3.4") + test(`rangeOf("<1.2.3.4") invalid`, () => { + expect(checker.check("2")).toEqual(false) + expect(checker.check("1.2.3.5")).toEqual(false) + // @ts-expect-error + expect(checker.check("1.2.3.4.1")).toEqual(false) + expect(checker.check("1.2.3.4")).toEqual(false) + }) + + test(`rangeOf("<1.2.3.4") valid`, () => { + expect(checker.check("1.2.3")).toEqual(true) + expect(checker.check("1")).toEqual(true) + }) + } + { + const checker = rangeOf("<=1.2.3.4") + test(`rangeOf("<=1.2.3.4") invalid`, () => { + expect(checker.check("2")).toEqual(false) + expect(checker.check("1.2.3.5")).toEqual(false) + // @ts-expect-error + expect(checker.check("1.2.3.4.1")).toEqual(false) + }) + + test(`rangeOf("<=1.2.3.4") valid`, () => { + expect(checker.check("1.2.3")).toEqual(true) + expect(checker.check("1")).toEqual(true) + expect(checker.check("1.2.3.4")).toEqual(true) + }) + } + + { + const checkA = rangeOf(">1") + const checkB = rangeOf("<=2") + + const checker = rangeAnd(checkA, checkB) + test(`simple and(checkers) valid`, () => { + expect(checker.check("2")).toEqual(true) + + expect(checker.check("1.1")).toEqual(true) + }) + test(`simple and(checkers) invalid`, () => { + expect(checker.check("2.1")).toEqual(false) + expect(checker.check("1")).toEqual(false) + expect(checker.check("0")).toEqual(false) + }) + } + { + const checkA = rangeOf("<1") + const checkB = rangeOf("=2") + + const checker = rangeOr(checkA, checkB) + test(`simple or(checkers) valid`, () => { + expect(checker.check("2")).toEqual(true) + expect(checker.check("0.1")).toEqual(true) + }) + test(`simple or(checkers) invalid`, () => { + expect(checker.check("2.1")).toEqual(false) + expect(checker.check("1")).toEqual(false) + expect(checker.check("1.1")).toEqual(false) + }) + } + + { + const checker = rangeOf("1.2.*") + test(`rangeOf(1.2.*) valid`, () => { + expect(checker.check("1.2")).toEqual(true) + expect(checker.check("1.2.1")).toEqual(true) + }) + test(`rangeOf(1.2.*) invalid`, () => { + expect(checker.check("1.3")).toEqual(false) + expect(checker.check("1.3.1")).toEqual(false) + + expect(checker.check("1.1.1")).toEqual(false) + expect(checker.check("1.1")).toEqual(false) + expect(checker.check("1")).toEqual(false) + + expect(checker.check("2")).toEqual(false) + }) + } + + { + const checker = notRange(rangeOf("1.2.*")) + test(`notRange(rangeOf(1.2.*)) valid`, () => { + expect(checker.check("1.3")).toEqual(true) + expect(checker.check("1.3.1")).toEqual(true) + + expect(checker.check("1.1.1")).toEqual(true) + expect(checker.check("1.1")).toEqual(true) + expect(checker.check("1")).toEqual(true) + + expect(checker.check("2")).toEqual(true) + }) + test(`notRange(rangeOf(1.2.*)) invalid `, () => { + expect(checker.check("1.2")).toEqual(false) + expect(checker.check("1.2.1")).toEqual(false) + }) + } + { + const checker = rangeOf("!1.2.*") + test(`!(rangeOf(1.2.*)) valid`, () => { + expect(checker.check("1.3")).toEqual(true) + expect(checker.check("1.3.1")).toEqual(true) + + expect(checker.check("1.1.1")).toEqual(true) + expect(checker.check("1.1")).toEqual(true) + expect(checker.check("1")).toEqual(true) + + expect(checker.check("2")).toEqual(true) + }) + test(`!(rangeOf(1.2.*)) 
invalid `, () => { + expect(checker.check("1.2")).toEqual(false) + expect(checker.check("1.2.1")).toEqual(false) + }) + } + { + test(`no and ranges`, () => { + expect(() => rangeAnd()).toThrow() + }) + test(`no or ranges`, () => { + expect(() => rangeOr()).toThrow() + }) + } + { + const checker = rangeOf("!>1.2.3.4") + test(`rangeOf("!>1.2.3.4") invalid`, () => { + expect(checker.check("2")).toEqual(false) + expect(checker.check("1.2.3.5")).toEqual(false) + // @ts-expect-error + expect(checker.check("1.2.3.4.1")).toEqual(false) + }) + + test(`rangeOf("!>1.2.3.4") valid`, () => { + expect(checker.check("1.2.3.4")).toEqual(true) + expect(checker.check("1.2.3")).toEqual(true) + expect(checker.check("1")).toEqual(true) + }) + } + + { + test(">1 && =1.2", () => { + const checker = rangeOf(">1 && =1.2") + + expect(checker.check("1.2")).toEqual(true) + expect(checker.check("1.2.1")).toEqual(false) + }) + test("=1 || =2", () => { + const checker = rangeOf("=1 || =2") + + expect(checker.check("1")).toEqual(true) + expect(checker.check("2")).toEqual(true) + expect(checker.check("3")).toEqual(false) + }) + + test(">1 && =1.2 || =2", () => { + const checker = rangeOf(">1 && =1.2 || =2") + + expect(checker.check("1.2")).toEqual(true) + expect(checker.check("1")).toEqual(false) + expect(checker.check("2")).toEqual(true) + expect(checker.check("3")).toEqual(false) + }) + + test("&& before || order of operationns: <1.5 && >1 || >1.5 && <3", () => { + const checker = rangeOf("<1.5 && >1 || >1.5 && <3") + expect(checker.check("1.1")).toEqual(true) + expect(checker.check("2")).toEqual(true) + + expect(checker.check("1.5")).toEqual(false) + expect(checker.check("1")).toEqual(false) + expect(checker.check("3")).toEqual(false) + }) + + test("Compare function on the emver", () => { + const a = EmVer.from("1.2.3") + const b = EmVer.from("1.2.4") + + expect(a.compare(b)).toEqual("less") + expect(b.compare(a)).toEqual("greater") + expect(a.compare(a)).toEqual("equal") + }) + test("Compare for sort function on the emver", () => { + const a = EmVer.from("1.2.3") + const b = EmVer.from("1.2.4") + + expect(a.compareForSort(b)).toEqual(-1) + expect(b.compareForSort(a)).toEqual(1) + expect(a.compareForSort(a)).toEqual(0) + }) + } + } +}) diff --git a/sdk/lib/test/health.readyCheck.test.ts b/sdk/lib/test/health.readyCheck.test.ts new file mode 100644 index 0000000000..49efcc7599 --- /dev/null +++ b/sdk/lib/test/health.readyCheck.test.ts @@ -0,0 +1,17 @@ +import { containsAddress } from "../health/checkFns/checkPortListening" + +describe("Health ready check", () => { + it("Should be able to parse an example information", () => { + let input = ` + + sl local_address rem_address st tx_queue rx_queue tr tm->when retrnsmt uid timeout inode + 0: 00000000:1F90 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 21634478 1 0000000000000000 100 0 0 10 0 + 1: 00000000:0050 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 21634477 1 0000000000000000 100 0 0 10 0 + 2: 0B00007F:9671 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 21635458 1 0000000000000000 100 0 0 10 0 + 3: 00000000:0D73 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 21634479 1 0000000000000000 100 0 0 10 0 + ` + + expect(containsAddress(input, 80)).toBe(true) + expect(containsAddress(input, 1234)).toBe(false) + }) +}) diff --git a/sdk/lib/test/host.test.ts b/sdk/lib/test/host.test.ts new file mode 100644 index 0000000000..82372f61b4 --- /dev/null +++ b/sdk/lib/test/host.test.ts @@ -0,0 +1,30 @@ +import { 
ServiceInterfaceBuilder } from "../interfaces/ServiceInterfaceBuilder" +import { Effects } from "../types" +import { sdk } from "./output.sdk" + +describe("host", () => { + test("Testing that the types work", () => { + async function test(effects: Effects) { + const foo = sdk.host.multi(effects, "foo") + const fooOrigin = await foo.bindPort(80, { + protocol: "http" as const, + }) + const fooInterface = new ServiceInterfaceBuilder({ + effects, + name: "Foo", + id: "foo", + description: "A Foo", + hasPrimary: false, + disabled: false, + type: "ui", + username: "bar", + path: "/baz", + search: { qux: "yes" }, + schemeOverride: null, + masked: false, + }) + + await fooOrigin.export([fooInterface]) + } + }) +}) diff --git a/sdk/lib/test/makeOutput.ts b/sdk/lib/test/makeOutput.ts new file mode 100644 index 0000000000..cef17a7e86 --- /dev/null +++ b/sdk/lib/test/makeOutput.ts @@ -0,0 +1,428 @@ +import { oldSpecToBuilder } from "../../scripts/oldSpecToBuilder" + +oldSpecToBuilder( + // Make the location + "./lib/test/output.ts", + // Put the config here + { + mediasources: { + type: "list", + subtype: "enum", + name: "Media Sources", + description: "List of Media Sources to use with Jellyfin", + range: "[1,*)", + default: ["nextcloud"], + spec: { + values: ["nextcloud", "filebrowser"], + "value-names": { + nextcloud: "NextCloud", + filebrowser: "File Browser", + }, + }, + }, + testListUnion: { + type: "list", + subtype: "union", + name: "Lightning Nodes", + description: "List of Lightning Network node instances to manage", + range: "[1,*)", + default: ["lnd"], + spec: { + type: "string", + "display-as": "{{name}}", + "unique-by": "name", + name: "Node Implementation", + tag: { + id: "type", + name: "Type", + description: + "- LND: Lightning Network Daemon from Lightning Labs\n- CLN: Core Lightning from Blockstream\n", + "variant-names": { + lnd: "Lightning Network Daemon (LND)", + "c-lightning": "Core Lightning (CLN)", + }, + }, + default: "lnd", + variants: { + lnd: { + name: { + type: "string", + name: "Node Name", + description: "Name of this node in the list", + default: "LND Wrapper", + nullable: false, + }, + }, + }, + }, + }, + rpc: { + type: "object", + name: "RPC Settings", + description: "RPC configuration options.", + spec: { + enable: { + type: "boolean", + name: "Enable", + description: "Allow remote RPC requests.", + default: true, + }, + username: { + type: "string", + nullable: false, + name: "Username", + description: "The username for connecting to Bitcoin over RPC.", + default: "bitcoin", + masked: true, + pattern: "^[a-zA-Z0-9_]+$", + "pattern-description": + "Must be alphanumeric (can contain underscore).", + }, + password: { + type: "string", + nullable: false, + name: "RPC Password", + description: "The password for connecting to Bitcoin over RPC.", + default: { + charset: "a-z,2-7", + len: 20, + }, + pattern: '^[^\\n"]*$', + "pattern-description": + "Must not contain newline or quote characters.", + copyable: true, + masked: true, + }, + bio: { + type: "string", + nullable: false, + name: "Username", + description: "The username for connecting to Bitcoin over RPC.", + default: "bitcoin", + masked: true, + pattern: "^[a-zA-Z0-9_]+$", + "pattern-description": + "Must be alphanumeric (can contain underscore).", + textarea: true, + }, + advanced: { + type: "object", + name: "Advanced", + description: "Advanced RPC Settings", + spec: { + auth: { + name: "Authorization", + description: + "Username and hashed password for JSON-RPC connections. 
RPC clients connect using the usual http basic authentication.", + type: "list", + subtype: "string", + default: [], + spec: { + pattern: + "^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$", + "pattern-description": + 'Each item must be of the form ":$".', + masked: false, + }, + range: "[0,*)", + }, + serialversion: { + name: "Serialization Version", + description: + "Return raw transaction or block hex with Segwit or non-SegWit serialization.", + type: "enum", + values: ["non-segwit", "segwit"], + "value-names": {}, + default: "segwit", + }, + servertimeout: { + name: "Rpc Server Timeout", + description: + "Number of seconds after which an uncompleted RPC call will time out.", + type: "number", + nullable: false, + range: "[5,300]", + integral: true, + units: "seconds", + default: 30, + }, + threads: { + name: "Threads", + description: + "Set the number of threads for handling RPC calls. You may wish to increase this if you are making lots of calls via an integration.", + type: "number", + nullable: false, + default: 16, + range: "[1,64]", + integral: true, + }, + workqueue: { + name: "Work Queue", + description: + "Set the depth of the work queue to service RPC calls. Determines how long the backlog of RPC requests can get before it just rejects new ones.", + type: "number", + nullable: false, + default: 128, + range: "[8,256]", + integral: true, + units: "requests", + }, + }, + }, + }, + }, + "zmq-enabled": { + type: "boolean", + name: "ZeroMQ Enabled", + description: "Enable the ZeroMQ interface", + default: true, + }, + txindex: { + type: "boolean", + name: "Transaction Index", + description: "Enable the Transaction Index (txindex)", + default: true, + }, + wallet: { + type: "object", + name: "Wallet", + description: "Wallet Settings", + spec: { + enable: { + name: "Enable Wallet", + description: "Load the wallet and enable wallet RPC calls.", + type: "boolean", + default: true, + }, + avoidpartialspends: { + name: "Avoid Partial Spends", + description: + "Group outputs by address, selecting all or none, instead of selecting on a per-output basis. This improves privacy at the expense of higher transaction fees.", + type: "boolean", + default: true, + }, + discardfee: { + name: "Discard Change Tolerance", + description: + "The fee rate (in BTC/kB) that indicates your tolerance for discarding change by adding it to the fee.", + type: "number", + nullable: false, + default: 0.0001, + range: "[0,.01]", + integral: false, + units: "BTC/kB", + }, + }, + }, + advanced: { + type: "object", + name: "Advanced", + description: "Advanced Settings", + spec: { + mempool: { + type: "object", + name: "Mempool", + description: "Mempool Settings", + spec: { + mempoolfullrbf: { + name: "Enable Full RBF", + description: + "Policy for your node to use for relaying and mining unconfirmed transactions. 
For details, see https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-24.0.md#notice-of-new-option-for-transaction-replacement-policies", + type: "boolean", + default: false, + }, + persistmempool: { + type: "boolean", + name: "Persist Mempool", + description: "Save the mempool on shutdown and load on restart.", + default: true, + }, + maxmempool: { + type: "number", + nullable: false, + name: "Max Mempool Size", + description: + "Keep the transaction memory pool below megabytes.", + range: "[1,*)", + integral: true, + units: "MiB", + default: 300, + }, + mempoolexpiry: { + type: "number", + nullable: false, + name: "Mempool Expiration", + description: + "Do not keep transactions in the mempool longer than hours.", + range: "[1,*)", + integral: true, + units: "Hr", + default: 336, + }, + }, + }, + peers: { + type: "object", + name: "Peers", + description: "Peer Connection Settings", + spec: { + listen: { + type: "boolean", + name: "Make Public", + description: + "Allow other nodes to find your server on the network.", + default: true, + }, + onlyconnect: { + type: "boolean", + name: "Disable Peer Discovery", + description: "Only connect to specified peers.", + default: false, + }, + onlyonion: { + type: "boolean", + name: "Disable Clearnet", + description: "Only connect to peers over Tor.", + default: false, + }, + addnode: { + name: "Add Nodes", + description: "Add addresses of nodes to connect to.", + type: "list", + subtype: "object", + range: "[0,*)", + default: [], + spec: { + "unique-by": null, + spec: { + hostname: { + type: "string", + nullable: true, + name: "Hostname", + description: "Domain or IP address of bitcoin peer", + pattern: + "(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))", + "pattern-description": + "Must be either a domain name, or an IPv4 or IPv6 address. Do not include protocol scheme (eg 'http://') or port.", + masked: false, + }, + port: { + type: "number", + nullable: true, + name: "Port", + description: + "Port that peer is listening on for inbound p2p connections", + range: "[0,65535]", + integral: true, + }, + }, + }, + }, + }, + }, + dbcache: { + type: "number", + nullable: true, + name: "Database Cache", + description: + "How much RAM to allocate for caching the TXO set. Higher values improve syncing performance, but increase your chance of using up all your system's memory or corrupting your database in the event of an ungraceful shutdown. 
Set this high but comfortably below your system's total RAM during IBD, then turn down to 450 (or leave blank) once the sync completes.", + warning: + "WARNING: Increasing this value results in a higher chance of ungraceful shutdowns, which can leave your node unusable if it happens during the initial block download. Use this setting with caution. Be sure to set this back to the default (450 or leave blank) once your node is synced. DO NOT press the STOP button if your dbcache is large. Instead, set this number back to the default, hit save, and wait for bitcoind to restart on its own.", + range: "(0,*)", + integral: true, + units: "MiB", + }, + pruning: { + type: "union", + name: "Pruning Settings", + description: + "Blockchain Pruning Options\nReduce the blockchain size on disk\n", + warning: + "If you set pruning to Manual and your disk is smaller than the total size of the blockchain, you MUST have something running that prunes these blocks or you may overfill your disk!\nDisabling pruning will convert your node into a full archival node. This requires a resync of the entire blockchain, a process that may take several days. Make sure you have enough free disk space or you may fill up your disk.\n", + tag: { + id: "mode", + name: "Pruning Mode", + description: + '- Disabled: Disable pruning\n- Automatic: Limit blockchain size on disk to a certain number of megabytes\n- Manual: Prune blockchain with the "pruneblockchain" RPC\n', + "variant-names": { + disabled: "Disabled", + automatic: "Automatic", + manual: "Manual", + }, + }, + variants: { + disabled: {}, + automatic: { + size: { + type: "number", + nullable: false, + name: "Max Chain Size", + description: "Limit of blockchain size on disk.", + warning: + "Increasing this value will require re-syncing your node.", + default: 550, + range: "[550,1000000)", + integral: true, + units: "MiB", + }, + }, + manual: { + size: { + type: "number", + nullable: false, + name: "Failsafe Chain Size", + description: "Prune blockchain if size expands beyond this.", + default: 65536, + range: "[550,1000000)", + integral: true, + units: "MiB", + }, + }, + }, + default: "disabled", + }, + blockfilters: { + type: "object", + name: "Block Filters", + description: "Settings for storing and serving compact block filters", + spec: { + blockfilterindex: { + type: "boolean", + name: "Compute Compact Block Filters (BIP158)", + description: + "Generate Compact Block Filters during initial sync (IBD) to enable 'getblockfilter' RPC. This is useful if dependent services need block filters to efficiently scan for addresses/transactions etc.", + default: true, + }, + peerblockfilters: { + type: "boolean", + name: "Serve Compact Block Filters to Peers (BIP157)", + description: + "Serve Compact Block Filters as a peer service to other nodes on the network. This is useful if you wish to connect an SPV client to your node to make it efficient to scan transactions without having to download all block data. 'Compute Compact Block Filters (BIP158)' is required.", + default: false, + }, + }, + }, + bloomfilters: { + type: "object", + name: "Bloom Filters (BIP37)", + description: "Setting for serving Bloom Filters", + spec: { + peerbloomfilters: { + type: "boolean", + name: "Serve Bloom Filters to Peers", + description: + "Peers have the option of setting filters on each connection they make after the version handshake has completed. 
Bloom filters are for clients implementing SPV (Simplified Payment Verification) that want to check that block headers connect together correctly, without needing to verify the full blockchain. The client must trust that the transactions in the chain are in fact valid. It is highly recommended AGAINST using for anything except Bisq integration.", + warning: + "This is ONLY for use with Bisq integration, please use Block Filters for all other applications.", + default: false, + }, + }, + }, + }, + }, + }, + { + // convert this to `start-sdk/lib` for conversions + StartSdk: "./output.sdk", + }, +) diff --git a/sdk/lib/test/output.sdk.ts b/sdk/lib/test/output.sdk.ts new file mode 100644 index 0000000000..a0bab1f6e2 --- /dev/null +++ b/sdk/lib/test/output.sdk.ts @@ -0,0 +1,44 @@ +import { StartSdk } from "../StartSdk" +import { setupManifest } from "../manifest/setupManifest" + +export type Manifest = any +export const sdk = StartSdk.of() + .withManifest( + setupManifest({ + id: "testOutput", + title: "", + version: "1.0", + releaseNotes: "", + license: "", + replaces: [], + wrapperRepo: "", + upstreamRepo: "", + supportSite: "", + marketingSite: "", + donationUrl: null, + description: { + short: "", + long: "", + }, + containers: {}, + images: [], + volumes: [], + assets: [], + alerts: { + install: null, + update: null, + uninstall: null, + restore: null, + start: null, + stop: null, + }, + dependencies: { + remoteTest: { + description: "", + optional: false, + }, + }, + }), + ) + .withStore<{ storeRoot: { storeLeaf: "value" } }>() + .build(true) diff --git a/sdk/lib/test/output.test.ts b/sdk/lib/test/output.test.ts new file mode 100644 index 0000000000..2b3afb5de0 --- /dev/null +++ b/sdk/lib/test/output.test.ts @@ -0,0 +1,152 @@ +import { + UnionSelectKey, + unionSelectKey, + UnionValueKey, + unionValueKey, +} from "../config/configTypes" +import { ConfigSpec, matchConfigSpec } from "./output" +import * as _I from "../index" +import { camelCase } from "../../scripts/oldSpecToBuilder" +import { deepMerge } from "../util/deepMerge" + +export type IfEquals = + (() => G extends T ? 1 : 2) extends () => G extends U ? 1 : 2 ? 
Y : N +export function testOutput(): (c: IfEquals) => null { + return () => null +} + +/// Testing the types of the input spec +testOutput()(null) +testOutput()(null) +testOutput()(null) + +testOutput()(null) +testOutput< + ConfigSpec["rpc"]["advanced"]["serialversion"], + "segwit" | "non-segwit" +>()(null) +testOutput()(null) +testOutput< + ConfigSpec["advanced"]["peers"]["addnode"][0]["hostname"], + string | null | undefined +>()(null) +testOutput< + ConfigSpec["testListUnion"][0]["union"][UnionValueKey]["name"], + string +>()(null) +testOutput()( + null, +) +testOutput>()( + null, +) + +// @ts-expect-error Because enable should be a boolean +testOutput()(null) +// prettier-ignore +// @ts-expect-error Expect that the string is the one above +testOutput()(null); + +/// Here we test the output of the matchConfigSpec function +describe("Inputs", () => { + const validInput: ConfigSpec = { + mediasources: ["filebrowser"], + testListUnion: [ + { + union: { [unionSelectKey]: "lnd", [unionValueKey]: { name: "string" } }, + }, + ], + rpc: { + enable: true, + bio: "This is a bio", + username: "test", + password: "test", + advanced: { + auth: ["test"], + serialversion: "segwit", + servertimeout: 6, + threads: 3, + workqueue: 9, + }, + }, + "zmq-enabled": false, + txindex: false, + wallet: { enable: false, avoidpartialspends: false, discardfee: 0.0001 }, + advanced: { + mempool: { + maxmempool: 1, + persistmempool: true, + mempoolexpiry: 23, + mempoolfullrbf: true, + }, + peers: { + listen: true, + onlyconnect: true, + onlyonion: true, + addnode: [ + { + hostname: "test", + port: 1, + }, + ], + }, + dbcache: 5, + pruning: { + unionSelectKey: "disabled", + unionValueKey: {}, + }, + blockfilters: { + blockfilterindex: false, + peerblockfilters: false, + }, + bloomfilters: { peerbloomfilters: false }, + }, + } + + test("test valid input", () => { + const output = matchConfigSpec.unsafeCast(validInput) + expect(output).toEqual(validInput) + }) + test("test no longer care about the conversion of min/max and validating", () => { + matchConfigSpec.unsafeCast( + deepMerge({}, validInput, { rpc: { advanced: { threads: 0 } } }), + ) + }) + test("test errors should throw for number in string", () => { + expect(() => + matchConfigSpec.unsafeCast( + deepMerge({}, validInput, { rpc: { enable: 2 } }), + ), + ).toThrowError() + }) + test("Test that we set serialversion to something not segwit or non-segwit", () => { + expect(() => + matchConfigSpec.unsafeCast( + deepMerge({}, validInput, { + rpc: { advanced: { serialversion: "testing" } }, + }), + ), + ).toThrowError() + }) +}) + +describe("camelCase", () => { + test("'EquipmentClass name'", () => { + expect(camelCase("EquipmentClass name")).toEqual("equipmentClassName") + }) + test("'Equipment className'", () => { + expect(camelCase("Equipment className")).toEqual("equipmentClassName") + }) + test("'equipment class name'", () => { + expect(camelCase("equipment class name")).toEqual("equipmentClassName") + }) + test("'Equipment Class Name'", () => { + expect(camelCase("Equipment Class Name")).toEqual("equipmentClassName") + }) + test("'hyphen-name-format'", () => { + expect(camelCase("hyphen-name-format")).toEqual("hyphenNameFormat") + }) + test("'underscore_name_format'", () => { + expect(camelCase("underscore_name_format")).toEqual("underscoreNameFormat") + }) +}) diff --git a/sdk/lib/test/setupDependencyConfig.test.ts b/sdk/lib/test/setupDependencyConfig.test.ts new file mode 100644 index 0000000000..5fa4a0ddf7 --- /dev/null +++ 
b/sdk/lib/test/setupDependencyConfig.test.ts @@ -0,0 +1,27 @@ +import { sdk } from "./output.sdk" + +describe("setupDependencyConfig", () => { + test("test", () => { + const testConfig = sdk.Config.of({ + test: sdk.Value.text({ + name: "testValue", + required: false, + }), + }) + + const testConfig2 = sdk.Config.of({ + test2: sdk.Value.text({ + name: "testValue2", + required: false, + }), + }) + const remoteTest = sdk.DependencyConfig.of({ + localConfigSpec: testConfig, + remoteConfigSpec: testConfig2, + dependencyConfig: async ({}) => {}, + }) + sdk.setupDependencyConfig(testConfig, { + remoteTest, + }) + }) +}) diff --git a/sdk/lib/test/startosTypeValidation.test.ts b/sdk/lib/test/startosTypeValidation.test.ts new file mode 100644 index 0000000000..5fa3f74b38 --- /dev/null +++ b/sdk/lib/test/startosTypeValidation.test.ts @@ -0,0 +1,72 @@ +import { Effects } from "../types" +import { ExecuteAction } from "../../../core/startos/bindings/ExecuteAction" +import { CreateOverlayedImageParams } from "../../../core/startos/bindings/CreateOverlayedImageParams" +import { DestroyOverlayedImageParams } from "../../../core/startos/bindings/DestroyOverlayedImageParams" +import { BindParams } from "../../../core/startos/bindings/BindParams" +import { GetHostInfoParams } from "../../../core/startos/bindings/GetHostInfoParams" +import { ParamsPackageId } from "../../../core/startos/bindings/ParamsPackageId" +import { ParamsMaybePackageId } from "../../../core/startos/bindings/ParamsMaybePackageId" +import { SetConfigured } from "../../../core/startos/bindings/SetConfigured" +import { SetHealth } from "../../../core/startos/bindings/SetHealth" +import { ExposeForDependentsParams } from "../../../core/startos/bindings/ExposeForDependentsParams" +import { GetSslCertificateParams } from "../../../core/startos/bindings/GetSslCertificateParams" +import { GetSslKeyParams } from "../../../core/startos/bindings/GetSslKeyParams" +import { GetServiceInterfaceParams } from "../../../core/startos/bindings/GetServiceInterfaceParams" +import { SetDependenciesParams } from "../../../core/startos/bindings/SetDependenciesParams" +import { GetSystemSmtpParams } from "../../../core/startos/bindings/GetSystemSmtpParams" +import { GetServicePortForwardParams } from "../../../core/startos/bindings/GetServicePortForwardParams" +import { ExportServiceInterfaceParams } from "../../../core/startos/bindings/ExportServiceInterfaceParams" +import { GetPrimaryUrlParams } from "../../../core/startos/bindings/GetPrimaryUrlParams" +import { ListServiceInterfacesParams } from "../../../core/startos/bindings/ListServiceInterfacesParams" +import { RemoveAddressParams } from "../../../core/startos/bindings/RemoveAddressParams" +import { ExportActionParams } from "../../../core/startos/bindings/ExportActionParams" +import { RemoveActionParams } from "../../../core/startos/bindings/RemoveActionParams" +import { ReverseProxyParams } from "../../../core/startos/bindings/ReverseProxyParams" +import { MountParams } from "../../../core/startos/bindings/MountParams" +function typeEquality(_a: ExpectedType) {} +describe("startosTypeValidation ", () => { + test(`checking the params match`, () => { + const testInput: any = {} + typeEquality<{ + [K in keyof Effects]: Effects[K] extends (args: infer A) => any + ? 
A + : never + }>({ + executeAction: {} as ExecuteAction, + createOverlayedImage: {} as CreateOverlayedImageParams, + destroyOverlayedImage: {} as DestroyOverlayedImageParams, + clearBindings: undefined, + bind: {} as BindParams, + getHostInfo: {} as GetHostInfoParams, + exists: {} as ParamsPackageId, + getConfigured: undefined, + stopped: {} as ParamsMaybePackageId, + running: {} as ParamsPackageId, + restart: undefined, + shutdown: undefined, + setConfigured: {} as SetConfigured, + setHealth: {} as SetHealth, + exposeForDependents: {} as ExposeForDependentsParams, + getSslCertificate: {} as GetSslCertificateParams, + getSslKey: {} as GetSslKeyParams, + getServiceInterface: {} as GetServiceInterfaceParams, + setDependencies: {} as SetDependenciesParams, + store: {} as never, + getSystemSmtp: {} as GetSystemSmtpParams, + getContainerIp: undefined, + getServicePortForward: {} as GetServicePortForwardParams, + clearServiceInterfaces: undefined, + exportServiceInterface: {} as ExportServiceInterfaceParams, + getPrimaryUrl: {} as GetPrimaryUrlParams, + listServiceInterfaces: {} as ListServiceInterfacesParams, + removeAddress: {} as RemoveAddressParams, + exportAction: {} as ExportActionParams, + removeAction: {} as RemoveActionParams, + reverseProxy: {} as ReverseProxyParams, + mount: {} as MountParams, + }) + typeEquality[0]>( + testInput as ExecuteAction, + ) + }) +}) diff --git a/sdk/lib/test/store.test.ts b/sdk/lib/test/store.test.ts new file mode 100644 index 0000000000..fa7bc4a4c2 --- /dev/null +++ b/sdk/lib/test/store.test.ts @@ -0,0 +1,114 @@ +import { MainEffects, StartSdk } from "../StartSdk" +import { extractJsonPath } from "../store/PathBuilder" +import { Effects } from "../types" + +type Store = { + config: { + someValue: "a" | "b" + } +} +type Manifest = any +const todo = (): A => { + throw new Error("not implemented") +} +const noop = () => {} + +const sdk = StartSdk.of() + .withManifest({} as Manifest) + .withStore() + .build(true) + +const storePath = sdk.StorePath + +describe("Store", () => { + test("types", async () => { + ;async () => { + sdk.store.setOwn(todo(), storePath.config, { + someValue: "a", + }) + sdk.store.setOwn(todo(), storePath.config.someValue, "b") + sdk.store.setOwn(todo(), storePath, { + config: { someValue: "b" }, + }) + sdk.store.setOwn( + todo(), + storePath.config.someValue, + + // @ts-expect-error Type is wrong for the setting value + 5, + ) + sdk.store.setOwn( + todo(), + // @ts-expect-error Path is wrong + "/config/someVae3lue", + "someValue", + ) + + todo().store.set({ + path: extractJsonPath(storePath.config.someValue), + value: "b", + }) + todo().store.set({ + path: extractJsonPath(storePath.config.someValue), + //@ts-expect-error Path is wrong + value: "someValueIn", + }) + ;(await sdk.store + .getOwn(todo(), storePath.config.someValue) + .const()) satisfies string + ;(await sdk.store + .getOwn(todo(), storePath.config) + .const()) satisfies Store["config"] + await sdk.store // @ts-expect-error Path is wrong + .getOwn(todo(), "/config/somdsfeValue") + .const() + /// ----------------- ERRORS ----------------- + + sdk.store.setOwn(todo(), storePath, { + // @ts-expect-error Type is wrong for the setting value + config: { someValue: "notInAOrB" }, + }) + sdk.store.setOwn( + todo(), + sdk.StorePath.config.someValue, + // @ts-expect-error Type is wrong for the setting value + "notInAOrB", + ) + ;(await sdk.store + .getOwn(todo(), storePath.config.someValue) + // @ts-expect-error Const should normally not be callable + .const()) satisfies string + 
;(await sdk.store + .getOwn(todo(), storePath.config) + // @ts-expect-error Const should normally not be callable + .const()) satisfies Store["config"] + await sdk.store // @ts-expect-error Path is wrong + .getOwn("/config/somdsfeValue") + // @ts-expect-error Const should normally not be callable + .const() + + /// + ;(await sdk.store + .getOwn(todo(), storePath.config.someValue) + // @ts-expect-error satisfies type is wrong + .const()) satisfies number + await sdk.store // @ts-expect-error Path is wrong + .getOwn(todo(), extractJsonPath(storePath.config)) + .const() + ;(await todo().store.get({ + path: extractJsonPath(storePath.config.someValue), + callback: noop, + })) satisfies string + await todo().store.get({ + // @ts-expect-error Path is wrong as in it doesn't match above + path: "/config/someV2alue", + callback: noop, + }) + await todo().store.get({ + // @ts-expect-error Path is wrong as in it doesn't exists in wrapper type + path: "/config/someV2alue", + callback: noop, + }) + } + }) +}) diff --git a/sdk/lib/test/util.deepMerge.test.ts b/sdk/lib/test/util.deepMerge.test.ts new file mode 100644 index 0000000000..25a4a7d226 --- /dev/null +++ b/sdk/lib/test/util.deepMerge.test.ts @@ -0,0 +1,26 @@ +import { deepEqual } from "../util/deepEqual" +import { deepMerge } from "../util/deepMerge" + +describe("deepMerge", () => { + test("deepMerge({}, {a: 1}, {b: 2}) should return {a: 1, b: 2}", () => { + expect(deepMerge({}, { a: 1 }, { b: 2 })).toEqual({ a: 1, b: 2 }) + }) + test("deepMerge(null, [1,2,3]) should equal [1,2,3]", () => { + expect(deepMerge(null, [1, 2, 3])).toEqual([1, 2, 3]) + }) + test("deepMerge({a: {b: 1, c:2}}, {a: {b: 3}}) should equal {a: {b: 3, c: 2}}", () => { + expect(deepMerge({ a: { b: 1, c: 2 } }, { a: { b: 3 } })).toEqual({ + a: { b: 3, c: 2 }, + }) + }) + test("deepMerge({a: {b: 1, c:2}}, {a: {b: 3}}) should equal {a: {b: 3, c: 2}} with deep equal", () => { + expect( + deepEqual(deepMerge({ a: { b: 1, c: 2 } }, { a: { b: 3 } }), { + a: { b: 3, c: 2 }, + }), + ).toBeTruthy() + }) + test("deepMerge([1,2,3], [2,3,4]) should equal [2,3,4]", () => { + expect(deepMerge([1, 2, 3], [2, 3, 4])).toEqual([2, 3, 4]) + }) +}) diff --git a/sdk/lib/test/util.getNetworkInterface.test.ts b/sdk/lib/test/util.getNetworkInterface.test.ts new file mode 100644 index 0000000000..df7ac73c6e --- /dev/null +++ b/sdk/lib/test/util.getNetworkInterface.test.ts @@ -0,0 +1,20 @@ +import { getHostname } from "../util/getServiceInterface" + +describe("getHostname ", () => { + const inputToExpected = [ + ["http://localhost:3000", "localhost"], + ["http://localhost", "localhost"], + ["localhost", "localhost"], + ["http://127.0.0.1/", "127.0.0.1"], + ["http://127.0.0.1/testing/1234?314345", "127.0.0.1"], + ["127.0.0.1/", "127.0.0.1"], + ["http://mail.google.com/", "mail.google.com"], + ["mail.google.com/", "mail.google.com"], + ] + + for (const [input, expectValue] of inputToExpected) { + test(`should return ${expectValue} for ${input}`, () => { + expect(getHostname(input)).toEqual(expectValue) + }) + } +}) diff --git a/sdk/lib/test/utils.splitCommand.test.ts b/sdk/lib/test/utils.splitCommand.test.ts new file mode 100644 index 0000000000..aafddb177a --- /dev/null +++ b/sdk/lib/test/utils.splitCommand.test.ts @@ -0,0 +1,42 @@ +import { getHostname } from "../util/getServiceInterface" +import { splitCommand } from "../util/splitCommand" + +describe("splitCommand ", () => { + const inputToExpected = [ + ["cat", ["cat"]], + [["cat"], ["cat"]], + [ + ["cat", "hello all my homies"], + ["cat", "hello all 
my homies"], + ], + ["cat hello world", ["cat", "hello", "world"]], + ["cat hello 'big world'", ["cat", "hello", "big world"]], + [`cat hello "big world"`, ["cat", "hello", "big world"]], + [ + `cat hello "big world's are the greatest"`, + ["cat", "hello", "big world's are the greatest"], + ], + // Too many spaces + ["cat ", ["cat"]], + [["cat "], ["cat "]], + [ + ["cat ", "hello all my homies "], + ["cat ", "hello all my homies "], + ], + ["cat hello world ", ["cat", "hello", "world"]], + [ + " cat hello 'big world' ", + ["cat", "hello", "big world"], + ], + [ + ` cat hello "big world" `, + ["cat", "hello", "big world"], + ], + ] + + for (const [input, expectValue] of inputToExpected) { + test(`should return ${expectValue} for ${input}`, () => { + expect(splitCommand(input as any)).toEqual(expectValue) + }) + } +}) diff --git a/sdk/lib/trigger/TriggerInput.ts b/sdk/lib/trigger/TriggerInput.ts new file mode 100644 index 0000000000..9a52d8ca52 --- /dev/null +++ b/sdk/lib/trigger/TriggerInput.ts @@ -0,0 +1,6 @@ +import { HealthStatus } from "../types" + +export type TriggerInput = { + lastResult?: HealthStatus + hadSuccess?: boolean +} diff --git a/sdk/lib/trigger/changeOnFirstSuccess.ts b/sdk/lib/trigger/changeOnFirstSuccess.ts new file mode 100644 index 0000000000..4c45afe311 --- /dev/null +++ b/sdk/lib/trigger/changeOnFirstSuccess.ts @@ -0,0 +1,30 @@ +import { Trigger } from "./index" + +export function changeOnFirstSuccess(o: { + beforeFirstSuccess: Trigger + afterFirstSuccess: Trigger +}): Trigger { + return async function* (getInput) { + const beforeFirstSuccess = o.beforeFirstSuccess(getInput) + yield + let currentValue = getInput() + beforeFirstSuccess.next() + for ( + let res = await beforeFirstSuccess.next(); + currentValue?.lastResult !== "success" && !res.done; + res = await beforeFirstSuccess.next() + ) { + yield + currentValue = getInput() + } + const afterFirstSuccess = o.afterFirstSuccess(getInput) + for ( + let res = await afterFirstSuccess.next(); + !res.done; + res = await afterFirstSuccess.next() + ) { + yield + currentValue = getInput() + } + } +} diff --git a/sdk/lib/trigger/cooldownTrigger.ts b/sdk/lib/trigger/cooldownTrigger.ts new file mode 100644 index 0000000000..991e81054d --- /dev/null +++ b/sdk/lib/trigger/cooldownTrigger.ts @@ -0,0 +1,8 @@ +export function cooldownTrigger(timeMs: number) { + return async function* () { + while (true) { + await new Promise((resolve) => setTimeout(resolve, timeMs)) + yield + } + } +} diff --git a/sdk/lib/trigger/defaultTrigger.ts b/sdk/lib/trigger/defaultTrigger.ts new file mode 100644 index 0000000000..bd52dc7ccc --- /dev/null +++ b/sdk/lib/trigger/defaultTrigger.ts @@ -0,0 +1,8 @@ +import { cooldownTrigger } from "./cooldownTrigger" +import { changeOnFirstSuccess } from "./changeOnFirstSuccess" +import { successFailure } from "./successFailure" + +export const defaultTrigger = successFailure({ + duringSuccess: cooldownTrigger(0), + duringError: cooldownTrigger(30000), +}) diff --git a/sdk/lib/trigger/index.ts b/sdk/lib/trigger/index.ts new file mode 100644 index 0000000000..6da034262b --- /dev/null +++ b/sdk/lib/trigger/index.ts @@ -0,0 +1,7 @@ +import { TriggerInput } from "./TriggerInput" +export { changeOnFirstSuccess } from "./changeOnFirstSuccess" +export { cooldownTrigger } from "./cooldownTrigger" + +export type Trigger = ( + getInput: () => TriggerInput, +) => AsyncIterator diff --git a/sdk/lib/trigger/successFailure.ts b/sdk/lib/trigger/successFailure.ts new file mode 100644 index 0000000000..1bab27289b --- /dev/null 
+++ b/sdk/lib/trigger/successFailure.ts @@ -0,0 +1,32 @@ +import { Trigger } from "." + +export function successFailure(o: { + duringSuccess: Trigger + duringError: Trigger +}): Trigger { + return async function* (getInput) { + while (true) { + const beforeSuccess = o.duringSuccess(getInput) + yield + let currentValue = getInput() + beforeSuccess.next() + for ( + let res = await beforeSuccess.next(); + currentValue?.lastResult !== "success" && !res.done; + res = await beforeSuccess.next() + ) { + yield + currentValue = getInput() + } + const duringError = o.duringError(getInput) + for ( + let res = await duringError.next(); + currentValue?.lastResult === "success" && !res.done; + res = await duringError.next() + ) { + yield + currentValue = getInput() + } + } + } +} diff --git a/sdk/lib/types.ts b/sdk/lib/types.ts new file mode 100644 index 0000000000..3ddf79a365 --- /dev/null +++ b/sdk/lib/types.ts @@ -0,0 +1,591 @@ +export * as configTypes from "./config/configTypes" +import { HealthCheckId } from "../../core/startos/bindings/HealthCheckId" +import { HealthCheckResult } from "../../core/startos/bindings/HealthCheckResult" +import { MainEffects, ServiceInterfaceType, Signals } from "./StartSdk" +import { InputSpec } from "./config/configTypes" +import { DependenciesReceipt } from "./config/setupConfig" +import { BindOptions, Scheme } from "./interfaces/Host" +import { Daemons } from "./mainFn/Daemons" +import { PathBuilder, StorePath } from "./store/PathBuilder" +import { ExposedStorePaths } from "./store/setupExposeStore" +import { UrlString } from "./util/getServiceInterface" + +export { SDKManifest } from "./manifest/ManifestTypes" + +export type ExportedAction = (options: { + effects: Effects + input?: Record +}) => Promise +export type MaybePromise = Promise | A +export namespace ExpectedExports { version: 1 + /** Set configuration is called after we have modified and saved the configuration in the start9 ui. Use this to make a file for the docker to read from for configuration. */ + export type setConfig = (options: { + effects: Effects + input: Record + }) => Promise + /** Get configuration returns a shape that describes the format that the start9 ui will generate, and later send to the set config */ + export type getConfig = (options: { effects: Effects }) => Promise + // /** These are how we make sure that our dependency configurations are valid and, if not, how to fix them. */ + // export type dependencies = Dependencies; + /** For backing up service data through the startOS UI */ + export type createBackup = (options: { effects: Effects }) => Promise + /** For restoring service data that was previously backed up using the startOS UI create backup flow. Backup restores are also triggered via the startOS UI, or doing a system restore flow during setup. */ + export type restoreBackup = (options: { + effects: Effects + }) => Promise + + // /** Health checks are used to determine if the service is working properly after starting + // * A good use case is if we are using a web server, seeing if we can get to the web server. + // */ + // export type health = { + // /** Should be the health check id */ + // [id: string]: (options: { effects: Effects; input: TimeMs }) => Promise; + // }; + + /** + * Actions are used so we can affect the service, like deleting a directory. + * One old use case is to add an action where we add a file, that will then be run during + * service startup, and that file would indicate that it should rescan all the data. + */ + export type actions = (options: { effects: Effects }) => MaybePromise<{ + [id: string]: { + run: ExportedAction + getConfig: (options: { effects: Effects }) => Promise + } + }> + + export type actionsMetadata = (options: { + effects: Effects + }) => Promise> + + /** + * This is the entrypoint for the main container. Used to start up something like the service that the + * package represents, like running a bitcoind in a bitcoind-wrapper. + */ + export type main = (options: { + effects: MainEffects + started(onTerm: () => PromiseLike): PromiseLike + }) => Promise> + + /** + * After a shutdown, if we want to do any operations to clean things up, like + * setting the action as unavailable or something similar. + */ + export type afterShutdown = (options: { + effects: Effects + }) => Promise + + /** + * Every time a package completes an install, this function is called before the main. + * Can be used to do migration-like things. + */ + export type init = (options: { + effects: Effects + previousVersion: null | string + }) => Promise + /** This will be run any time a package is uninstalled; for example, during an update + * this will be called. + */ + export type uninit = (options: { + effects: Effects + nextVersion: null | string + }) => Promise + + /** Auto configure is used to make sure that other dependencies have the values + * that this service could use. + */ + export type dependencyConfig = Record + + export type Properties = (options: { + effects: Effects + }) => Promise +} +export type TimeMs = number +export type VersionString = string + +/** + * AutoConfigure is used as the value to the key of package id, + * this is used to make sure that other dependencies have the values that this service could use. + */ +export type DependencyConfig = { + /** During autoconfigure, we have access to effects and local data. We are going to figure out all the data that we need and send it to update. For the sdk it is the desired delta */ + query(options: { effects: Effects; localConfig: unknown }): Promise + /** This is the second part. Given the query results of the previous function, we will determine what to change the remote config to. In our sdk we normally use the previous as a deep merge. */ + update(options: { + queryResults: unknown + remoteConfig: unknown + }): Promise +} + +export type ValidIfNoStupidEscape = A extends + | `${string}'"'"'${string}` + | `${string}\\"${string}` + ? never + : "" extends A & "" + ?
never + : A + +export type ConfigRes = { + /** This should be the previous config, that way during set config we start with the previous */ + config?: null | Record + /** Shape that is describing the form in the ui */ + spec: InputSpec +} + +declare const DaemonProof: unique symbol +export type DaemonReceipt = { + [DaemonProof]: never +} +export type Daemon = { + wait(): Promise + term(): Promise + [DaemonProof]: never +} + +export type HealthStatus = HealthCheckResult["result"] +export type SmtpValue = { + server: string + port: number + from: string + login: string + password: string | null | undefined +} + +export type CommandType = + | ValidIfNoStupidEscape + | [string, ...string[]] + +export type DaemonReturned = { + wait(): Promise + term(options?: { signal?: Signals; timeout?: number }): Promise +} + +export type ActionMetadata = { + name: string + description: string + warning: string | null + input: InputSpec + disabled: boolean + allowedStatuses: "onlyRunning" | "onlyStopped" | "any" + /** + * So the ordering of the actions is by alphabetical order of the group, then followed by the alphabetical of the actions + */ + group: string | null +} +export declare const hostName: unique symbol +// asdflkjadsf.onion | 1.2.3.4 +export type Hostname = string & { [hostName]: never } + +/** ${scheme}://${username}@${host}:${externalPort}${suffix} */ +export type AddressInfo = { + username: string | null + hostId: string + bindOptions: BindOptions + suffix: string +} + +export type HostnameInfoIp = { + kind: "ip" + networkInterfaceId: string + public: boolean + hostname: + | { + kind: "ipv4" | "ipv6" | "local" + value: string + port: number | null + sslPort: number | null + } + | { + kind: "domain" + domain: string + subdomain: string | null + port: number | null + sslPort: number | null + } +} + +export type HostnameInfoOnion = { + kind: "onion" + hostname: { value: string; port: number | null; sslPort: number | null } +} + +export type HostnameInfo = HostnameInfoIp | HostnameInfoOnion + +export type SingleHost = { + id: string + kind: "single" | "static" + hostname: HostnameInfo | null +} + +export type MultiHost = { + id: string + kind: "multi" + hostnames: HostnameInfo[] +} + +export type HostInfo = SingleHost | MultiHost + +export type ServiceInterfaceId = string + +export type ServiceInterface = { + id: ServiceInterfaceId + /** The title of this field to be displayed */ + name: string + /** Human readable description, used as tooltip usually */ + description: string + /** Whether or not one address must be the primary address */ + hasPrimary: boolean + /** Disabled interfaces do not serve, but they retain their metadata and addresses */ + disabled: boolean + /** Whether or not to mask the URIs for this interface. Useful if the URIs contain sensitive information, such as a password, macaroon, or API key */ + masked: boolean + /** URI Information */ + addressInfo: AddressInfo + /** The network interface could be several types, something like ui, p2p, or network */ + type: ServiceInterfaceType +} + +export type ServiceInterfaceWithHostInfo = ServiceInterface & { + hostInfo: HostInfo +} + +export type ExposeServicePaths = { + /** The path to the value in the Store. 
[JsonPath](https://jsonpath.com/) */ + paths: ExposedStorePaths +} + +export type SdkPropertiesValue = + | { + type: "object" + value: { [k: string]: SdkPropertiesValue } + description?: string + } + | { + type: "string" + /** Value */ + value: string + /** A human readable description or explanation of the value */ + description?: string + /** (string/number only) Whether or not to mask the value, for example, when displaying a password */ + masked: boolean + /** (string/number only) Whether or not to include a button for copying the value to clipboard */ + copyable?: boolean + /** (string/number only) Whether or not to include a button for displaying the value as a QR code */ + qr?: boolean + } + +export type SdkPropertiesReturn = { + [key: string]: SdkPropertiesValue +} + +export type PropertiesValue = + | { + type: "object" + value: { [k: string]: PropertiesValue } + description: string | null + } + | { + type: "string" + /** Value */ + value: string + /** A human readable description or explanation of the value */ + description: string | null + /** (string/number only) Whether or not to mask the value, for example, when displaying a password */ + masked: boolean + /** (string/number only) Whether or not to include a button for copying the value to clipboard */ + copyable: boolean | null + /** (string/number only) Whether or not to include a button for displaying the value as a QR code */ + qr: boolean | null + } + +export type PropertiesReturn = { + [key: string]: PropertiesValue +} + +/** Used to reach out from the pure js runtime */ +export type Effects = { + executeAction(opts: { + serviceId: string | null + input: Input + }): Promise + + /** A low level api used by makeOverlay */ + createOverlayedImage(options: { imageId: string }): Promise<[string, string]> + + /** A low level api used by destroyOverlay + makeOverlay:destroy */ + destroyOverlayedImage(options: { guid: string }): Promise + + /** Removes all network bindings */ + clearBindings(): Promise + /** Creates a host connected to the specified port with the provided options */ + bind( + options: { + kind: "static" | "single" | "multi" + id: string + internalPort: number + } & BindOptions, + ): Promise + /** Retrieves the current hostname(s) associated with a host id */ + // getHostInfo(options: { + // kind: "static" | "single" + // serviceInterfaceId: string + // packageId: string | null + // callback: () => void + // }): Promise + getHostInfo(options: { + kind: "multi" | null + serviceInterfaceId: string + packageId: string | null + callback: () => void + }): Promise + + // /** + // * Run rsync between two volumes. This is used to backup data between volumes. + // * This is a long running process, and a structure that we can either wait for, or get the progress of. 
+ // */ + // runRsync(options: { + // srcVolume: string + // dstVolume: string + // srcPath: string + // dstPath: string + // // rsync options: https://linux.die.net/man/1/rsync + // options: BackupOptions + // }): { + // id: () => Promise + // wait: () => Promise + // progress: () => Promise + // } + + store: { + /** Get a value from the JSON-like data; it can be observed and subscribed to */ + get(options: { + /** If there is no packageId it is assumed the current package */ + packageId?: string + /** The path defaults to root level, using the [JsonPath](https://jsonpath.com/) */ + path: StorePath + callback: (config: unknown, previousConfig: unknown) => void + }): Promise + /** Used to store values that can be accessed and subscribed to */ + set(options: { + /** Sets the value for the wrapper at the path; it will override, using the [JsonPath](https://jsonpath.com/) */ + path: StorePath + value: ExtractStore + }): Promise + } + + getSystemSmtp(input: { + callback: (config: unknown, previousConfig: unknown) => void + }): Promise + + /** Get the IP address of the container */ + getContainerIp(): Promise + /** + * Get the port address for another service + */ + getServicePortForward(options: { + internalPort: number + packageId: string | null + }): Promise + + /** Removes all network interfaces */ + clearServiceInterfaces(): Promise + /** When we want to create a link in the front-end interfaces; an example is + * exposing a url to view a web service + */ + exportServiceInterface(options: ServiceInterface): Promise + + exposeForDependents(options: { paths: string[] }): Promise + + /** + * There are times that we want to see the addresses that were exported + * @param options.addressId If we want to filter the address id + * + * Note: any auth should be filtered out already + */ + getServiceInterface(options: { + packageId: PackageId | null + serviceInterfaceId: ServiceInterfaceId + callback: () => void + }): Promise + + /** + * The user sets the primary url for an interface + * @param options + */ + getPrimaryUrl(options: { + packageId: PackageId | null + serviceInterfaceId: ServiceInterfaceId + callback: () => void + }): Promise + + /** + * There are times that we want to see the addresses that were exported + * @param options.addressId If we want to filter the address id + * + * Note: any auth should be filtered out already + */ + listServiceInterfaces(options: { + packageId: PackageId | null + callback: () => void + }): Promise + + /** + * Remove an address that was exported. Probably used during main or during setConfig. + * @param options + */ + removeAddress(options: { id: string }): Promise + + /** + * + * @param options + */ + exportAction(options: { id: string; metadata: ActionMetadata }): Promise + /** + * Remove an action that was exported. Probably used during main or during setConfig. + */ + removeAction(options: { id: string }): Promise + + getConfigured(): Promise + /** + * This is called after a valid set config as well as during init.
+ * @param configured + */ + setConfigured(options: { configured: boolean }): Promise + + /** + * + * @returns PEM encoded fullchain (ecdsa) + */ + getSslCertificate: (options: { + packageId: string | null + hostId: string + algorithm: "ecdsa" | "ed25519" | null + }) => Promise<[string, string, string]> + /** + * @returns PEM encoded ssl key (ecdsa) + */ + getSslKey: (options: { + packageId: string | null + hostId: string + algorithm: "ecdsa" | "ed25519" | null + }) => Promise + + setHealth( + o: HealthCheckResult & { + id: HealthCheckId + }, + ): Promise + + /** Set the dependencies that the service needs; as a best practice this is usually run during setConfig */ + setDependencies(options: { + dependencies: Dependencies + }): Promise + /** Useful at runtime to know whether some service, such as an optional dependency, exists */ + exists(options: { packageId: PackageId }): Promise + /** Useful at runtime to know whether some service, such as an optional dependency, is running */ + running(options: { packageId: PackageId }): Promise + + /** Instead of creating proxies with nginx, we have a utility to create and maintain a proxy for the lifetime of the running service. */ + reverseProxy(options: { + bind: { + /** Optional, default is 0.0.0.0 */ + ip: string | null + port: number + ssl: boolean + } + dst: { + /** Optional: default is 127.0.0.1 */ + ip: string | null // optional, default 127.0.0.1 + port: number + ssl: boolean + } + http: { + // optional, will do TCP layer proxy only if not present + headers: Record | null + } | null + }): Promise<{ stop(): Promise }> + restart(): void + shutdown(): void + + mount(options: { + location: string + target: { + packageId: string + volumeId: string + subpath: string | null + readonly: boolean + } + }): Promise + + stopped(options: { packageId: string | null }): Promise +} + +/** rsync options: https://linux.die.net/man/1/rsync + */ +export type BackupOptions = { + delete: boolean + force: boolean + ignoreExisting: boolean + exclude: string[] +} +/** + * This is the metadata that is returned from the metadata call. + */ +export type Metadata = { + fileType: string + isDir: boolean + isFile: boolean + isSymlink: boolean + len: number + modified?: Date + accessed?: Date + created?: Date + readonly: boolean + uid: number + gid: number + mode: number +} + +export type MigrationRes = { + configured: boolean +} + +export type ActionResult = { + message: string + value: null | { + value: string + copyable: boolean + qr: boolean + } +} +export type SetResult = { + /** These are the unix process signals */ + signal: Signals + "depends-on": DependsOn +} + +export type PackageId = string +export type Message = string +export type DependencyKind = "running" | "exists" + +export type DependsOn = { + [packageId: string]: string[] +} + +export type KnownError = + | { error: string } + | { + "error-code": [number, string] | readonly [number, string] + } + +export type Dependency = { + id: PackageId + versionSpec: string + registryUrl: string +} & ({ kind: "exists" } | { kind: "running"; healthChecks: string[] }) +export type Dependencies = Array + +export type DeepPartial = T extends {} + ? { [P in keyof T]?: DeepPartial } + : T diff --git a/sdk/lib/util/GetSystemSmtp.ts b/sdk/lib/util/GetSystemSmtp.ts new file mode 100644 index 0000000000..1853afd78e --- /dev/null +++ b/sdk/lib/util/GetSystemSmtp.ts @@ -0,0 +1,37 @@ +import { Effects } from "../types" + +export class GetSystemSmtp { + constructor(readonly effects: Effects) {} + + /** + * Returns the system SMTP credentials. 
Restarts the service if the credentials change + */ + const() { + return this.effects.getSystemSmtp({ + callback: this.effects.restart, + }) + } + /** + * Returns the system SMTP credentials. Does nothing if the credentials change + */ + once() { + return this.effects.getSystemSmtp({ + callback: () => {}, + }) + } + /** + * Watches the system SMTP credentials. Takes a custom callback function to run whenever the credentials change + */ + async *watch() { + while (true) { + let callback: () => void + const waitForNext = new Promise((resolve) => { + callback = resolve + }) + yield await this.effects.getSystemSmtp({ + callback: () => callback(), + }) + await waitForNext + } + } +} diff --git a/sdk/lib/util/Overlay.ts b/sdk/lib/util/Overlay.ts new file mode 100644 index 0000000000..f5ff0e0d14 --- /dev/null +++ b/sdk/lib/util/Overlay.ts @@ -0,0 +1,169 @@ +import * as fs from "fs/promises" +import * as T from "../types" +import * as cp from "child_process" +import { promisify } from "util" +import { Buffer } from "node:buffer" +export const execFile = promisify(cp.execFile) +const WORKDIR = (imageId: string) => `/media/startos/images/${imageId}/` +export class Overlay { + private constructor( + readonly effects: T.Effects, + readonly imageId: string, + readonly rootfs: string, + readonly guid: string, + ) {} + static async of(effects: T.Effects, imageId: string) { + const [rootfs, guid] = await effects.createOverlayedImage({ imageId }) + + for (const dirPart of ["dev", "sys", "proc", "run"] as const) { + await fs.mkdir(`${rootfs}/${dirPart}`, { recursive: true }) + await execFile("mount", [ + "--rbind", + `/${dirPart}`, + `${rootfs}/${dirPart}`, + ]) + } + + return new Overlay(effects, imageId, rootfs, guid) + } + + async mount(options: MountOptions, path: string): Promise { + path = path.startsWith("/") + ? `${this.rootfs}${path}` + : `${this.rootfs}/${path}` + if (options.type === "volume") { + const subpath = options.subpath + ? options.subpath.startsWith("/") + ? options.subpath + : `/${options.subpath}` + : "/" + await execFile("mount", [ + "--bind", + `/media/startos/volumes/${options.id}${subpath}`, + path, + ]) + } else if (options.type === "assets") { + const subpath = options.subpath + ? options.subpath.startsWith("/") + ? 
options.subpath + : `/${options.subpath}` + : "/" + await execFile("mount", [ + "--bind", + `/media/startos/assets/${options.id}${subpath}`, + path, + ]) + } else if (options.type === "pointer") { + await this.effects.mount({ location: path, target: options }) + } else { + throw new Error(`unknown type ${(options as any).type}`) + } + return this + } + + async destroy() { + const imageId = this.imageId + const guid = this.guid + await this.effects.destroyOverlayedImage({ guid }) + } + + async exec( + command: string[], + options?: CommandOptions, + ): Promise<{ stdout: string | Buffer; stderr: string | Buffer }> { + const imageMeta = await fs + .readFile(`/media/startos/images/${this.imageId}.json`, { + encoding: "utf8", + }) + .catch(() => "{}") + .then(JSON.parse) + let extra: string[] = [] + if (options?.user) { + extra.push(`--user=${options.user}`) + delete options.user + } + let workdir = imageMeta.workdir || "/" + if (options?.cwd) { + workdir = options.cwd + delete options.cwd + } + return await execFile( + "start-cli", + [ + "chroot", + `--env=/media/startos/images/${this.imageId}.env`, + `--workdir=${workdir}`, + ...extra, + this.rootfs, + ...command, + ], + options, + ) + } + + async spawn( + command: string[], + options?: CommandOptions, + ): Promise { + const imageMeta = await fs + .readFile(`/media/startos/images/${this.imageId}.json`, { + encoding: "utf8", + }) + .catch(() => "{}") + .then(JSON.parse) + let extra: string[] = [] + if (options?.user) { + extra.push(`--user=${options.user}`) + delete options.user + } + let workdir = imageMeta.workdir || "/" + if (options?.cwd) { + workdir = options.cwd + delete options.cwd + } + return cp.spawn( + "start-cli", + [ + "chroot", + `--env=/media/startos/images/${this.imageId}.env`, + `--workdir=${workdir}`, + ...extra, + this.rootfs, + ...command, + ], + options, + ) + } +} + +export type CommandOptions = { + env?: { [variable: string]: string } + cwd?: string + user?: string +} + +export type MountOptions = + | MountOptionsVolume + | MountOptionsAssets + | MountOptionsPointer + +export type MountOptionsVolume = { + type: "volume" + id: string + subpath: string | null + readonly: boolean +} + +export type MountOptionsAssets = { + type: "assets" + id: string + subpath: string | null +} + +export type MountOptionsPointer = { + type: "pointer" + packageId: string + volumeId: string + subpath: string | null + readonly: boolean +} diff --git a/sdk/lib/util/deepEqual.ts b/sdk/lib/util/deepEqual.ts new file mode 100644 index 0000000000..8e6ba4b65c --- /dev/null +++ b/sdk/lib/util/deepEqual.ts @@ -0,0 +1,19 @@ +import { object } from "ts-matches" + +export function deepEqual(...args: unknown[]) { + if (!object.test(args[args.length - 1])) return args[args.length - 1] + const objects = args.filter(object.test) + if (objects.length === 0) { + for (const x of args) if (x !== args[0]) return false + return true + } + if (objects.length !== args.length) return false + const allKeys = new Set(objects.flatMap((x) => Object.keys(x))) + for (const key of allKeys) { + for (const x of objects) { + if (!(key in x)) return false + if (!deepEqual((objects[0] as any)[key], (x as any)[key])) return false + } + } + return true +} diff --git a/sdk/lib/util/deepMerge.ts b/sdk/lib/util/deepMerge.ts new file mode 100644 index 0000000000..ae68c242fb --- /dev/null +++ b/sdk/lib/util/deepMerge.ts @@ -0,0 +1,17 @@ +import { object } from "ts-matches" + +export function deepMerge(...args: unknown[]): unknown { + const lastItem = (args as any)[args.length - 1] + if 
(!object.test(lastItem)) return lastItem + const objects = args.filter(object.test).filter((x) => !Array.isArray(x)) + if (objects.length === 0) return lastItem as any + if (objects.length === 1) objects.unshift({}) + const allKeys = new Set(objects.flatMap((x) => Object.keys(x))) + for (const key of allKeys) { + const filteredValues = objects.flatMap((x) => + key in x ? [(x as any)[key]] : [], + ) + ;(objects as any)[0][key] = deepMerge(...filteredValues) + } + return objects[0] as any +} diff --git a/sdk/lib/util/fileHelper.ts b/sdk/lib/util/fileHelper.ts new file mode 100644 index 0000000000..56706f95ab --- /dev/null +++ b/sdk/lib/util/fileHelper.ts @@ -0,0 +1,147 @@ +import * as matches from "ts-matches" +import * as YAML from "yaml" +import * as TOML from "@iarna/toml" +import * as T from "../types" +import * as fs from "fs" + +const previousPath = /(.+?)\/([^/]*)$/ + +/** + * Used in the get config and the set config exported functions. + * The idea is that we are going to be reading/writing to a file, or multiple files, and we use this tool + * to keep the same path on the read and write, along with methods for helping with structured data. + * If we are not using structured data, we can use the raw method, which forces the construction of a BiMap + * ```ts + import {InputSpec} from './InputSpec.ts' + import {matches, T} from '../deps.ts'; + const { object, string, number, boolean, arrayOf, array, anyOf, allOf } = matches + const someValidator = object({ + data: string + }) + const jsonFile = FileHelper.json({ + path: 'data.json', + validator: someValidator, + volume: 'main' + }) + const tomlFile = FileHelper.toml({ + path: 'data.toml', + validator: someValidator, + volume: 'main' + }) + const rawFile = FileHelper.raw({ + path: 'data.amazingSettings', + volume: 'main' + fromData(dataIn: Data): string { + return `myDatais ///- ${dataIn.data}` + }, + toData(rawData: string): Data { + const [,data] = /myDatais \/\/\/- (.*)/.match(rawData) + return {data} + } + }) + + export const setConfig : T.ExpectedExports.setConfig= async (effects, config) => { + await jsonFile.write({ data: 'here lies data'}, effects) + } + + export const getConfig: T.ExpectedExports.getConfig = async (effects, config) => ({ + spec: InputSpec, + config: nullIfEmpty({ + ...jsonFile.get(effects) + }) + ``` + */ +export class FileHelper { + protected constructor( + readonly path: string, + readonly writeData: (dataIn: A) => string, + readonly readData: (stringValue: string) => A, + ) {} + async write(data: A, effects: T.Effects) { + if (previousPath.exec(this.path)) { + await new Promise((resolve, reject) => + fs.mkdir(this.path, (err: any) => (!err ? resolve(null) : reject(err))), + ) + } + + await new Promise((resolve, reject) => + fs.writeFile(this.path, this.writeData(data), (err: any) => + !err ? resolve(null) : reject(err), + ), + ) + } + async read(effects: T.Effects) { + if (!fs.existsSync(this.path)) { + return null + } + return this.readData( + await new Promise((resolve, reject) => + fs.readFile(this.path, (err: any, data: any) => + !err ? resolve(data.toString("utf-8")) : reject(err), + ), + ), + ) + } + /** + * Create a File Helper for an arbitrary file type. + * + * Provide custom functions for translating data to the file format and vice versa. 
+ */ + static raw( + path: string, + toFile: (dataIn: A) => string, + fromFile: (rawData: string) => A, + ) { + return new FileHelper(path, toFile, fromFile) + } + /** + * Create a File Helper for a .json file + */ + static json(path: string, shape: matches.Validator) { + return new FileHelper( + path, + (inData) => { + return JSON.stringify(inData, null, 2) + }, + (inString) => { + return shape.unsafeCast(JSON.parse(inString)) + }, + ) + } + /** + * Create a File Helper for a .toml file + */ + static toml>( + path: string, + shape: matches.Validator, + ) { + return new FileHelper( + path, + (inData) => { + return TOML.stringify(inData as any) + }, + (inString) => { + return shape.unsafeCast(TOML.parse(inString)) + }, + ) + } + /** + * Create a File Helper for a .yaml file + */ + static yaml>( + path: string, + shape: matches.Validator, + ) { + return new FileHelper( + path, + (inData) => { + return JSON.stringify(inData, null, 2) + }, + (inString) => { + return shape.unsafeCast(YAML.parse(inString)) + }, + ) + } +} + +export default FileHelper diff --git a/sdk/lib/util/getDefaultString.ts b/sdk/lib/util/getDefaultString.ts new file mode 100644 index 0000000000..fa35b4e66e --- /dev/null +++ b/sdk/lib/util/getDefaultString.ts @@ -0,0 +1,10 @@ +import { DefaultString } from "../config/configTypes" +import { getRandomString } from "./getRandomString" + +export function getDefaultString(defaultSpec: DefaultString): string { + if (typeof defaultSpec === "string") { + return defaultSpec + } else { + return getRandomString(defaultSpec) + } +} diff --git a/sdk/lib/util/getRandomCharInSet.ts b/sdk/lib/util/getRandomCharInSet.ts new file mode 100644 index 0000000000..b26eef648d --- /dev/null +++ b/sdk/lib/util/getRandomCharInSet.ts @@ -0,0 +1,98 @@ +// a,g,h,A-Z,,,,- + +import * as crypto from "crypto" +export function getRandomCharInSet(charset: string): string { + const set = stringToCharSet(charset) + let charIdx = crypto.randomInt(0, set.len) + for (let range of set.ranges) { + if (range.len > charIdx) { + return String.fromCharCode(range.start.charCodeAt(0) + charIdx) + } + charIdx -= range.len + } + throw new Error("unreachable") +} +function stringToCharSet(charset: string): CharSet { + let set: CharSet = { ranges: [], len: 0 } + let start: string | null = null + let end: string | null = null + let in_range = false + for (let char of charset) { + switch (char) { + case ",": + if (start !== null && end !== null) { + if (start!.charCodeAt(0) > end!.charCodeAt(0)) { + throw new Error("start > end of charset") + } + const len = end.charCodeAt(0) - start.charCodeAt(0) + 1 + set.ranges.push({ + start, + end, + len, + }) + set.len += len + start = null + end = null + in_range = false + } else if (start !== null && !in_range) { + set.len += 1 + set.ranges.push({ start, end: start, len: 1 }) + start = null + } else if (start !== null && in_range) { + end = "," + } else if (start === null && end === null && !in_range) { + start = "," + } else { + throw new Error('unexpected ","') + } + break + case "-": + if (start === null) { + start = "-" + } else if (!in_range) { + in_range = true + } else if (in_range && end === null) { + end = "-" + } else { + throw new Error('unexpected "-"') + } + break + default: + if (start === null) { + start = char + } else if (in_range && end === null) { + end = char + } else { + throw new Error(`unexpected "${char}"`) + } + } + } + if (start !== null && end !== null) { + if (start!.charCodeAt(0) > end!.charCodeAt(0)) { + throw new Error("start > end of charset") + } + 
const len = end.charCodeAt(0) - start.charCodeAt(0) + 1 + set.ranges.push({ + start, + end, + len, + }) + set.len += len + } else if (start !== null) { + set.len += 1 + set.ranges.push({ + start, + end: start, + len: 1, + }) + } + return set +} +type CharSet = { + ranges: { + start: string + end: string + len: number + }[] + len: number +} diff --git a/sdk/lib/util/getRandomString.ts b/sdk/lib/util/getRandomString.ts new file mode 100644 index 0000000000..ea0989bcd9 --- /dev/null +++ b/sdk/lib/util/getRandomString.ts @@ -0,0 +1,11 @@ +import { RandomString } from "../config/configTypes" +import { getRandomCharInSet } from "./getRandomCharInSet" + +export function getRandomString(generator: RandomString): string { + let s = "" + for (let i = 0; i < generator.len; i++) { + s = s + getRandomCharInSet(generator.charset) + } + + return s +} diff --git a/sdk/lib/util/getServiceInterface.ts b/sdk/lib/util/getServiceInterface.ts new file mode 100644 index 0000000000..3b7af1c41f --- /dev/null +++ b/sdk/lib/util/getServiceInterface.ts @@ -0,0 +1,283 @@ +import { ServiceInterfaceType } from "../StartSdk" +import { + AddressInfo, + Effects, + HostInfo, + Hostname, + HostnameInfo, +} from "../types" + +export type UrlString = string +export type HostId = string + +const getHostnameRegex = /^(\w+:\/\/)?([^\/\:]+)(:\d{1,3})?(\/)?/ +export const getHostname = (url: string): Hostname | null => { + const founds = url.match(getHostnameRegex)?.[2] + if (!founds) return null + const parts = founds.split("@") + const last = parts[parts.length - 1] as Hostname | null + return last +} + +export type Filled = { + hostnames: Hostname[] + onionHostnames: Hostname[] + localHostnames: Hostname[] + ipHostnames: Hostname[] + ipv4Hostnames: Hostname[] + ipv6Hostnames: Hostname[] + nonIpHostnames: Hostname[] + + urls: UrlString[] + onionUrls: UrlString[] + localUrls: UrlString[] + ipUrls: UrlString[] + ipv4Urls: UrlString[] + ipv6Urls: UrlString[] + nonIpUrls: UrlString[] +} +export type FilledAddressInfo = AddressInfo & Filled +export type ServiceInterfaceFilled = { + id: string + /** The title of this field to be displayed */ + name: string + /** Human readable description, used as tooltip usually */ + description: string + /** Whether or not the interface has a primary URL */ + hasPrimary: boolean + /** Whether or not the interface is disabled */ + disabled: boolean + /** Whether or not to mask the URIs for this interface. Useful if the URIs contain sensitive information, such as a password, macaroon, or API key */ + masked: boolean + /** Information about the host for this binding */ + hostInfo: HostInfo + /** URI information */ + addressInfo: FilledAddressInfo + /** Indicates the kind of interface this is representing (ui/p2p/api) */ + type: ServiceInterfaceType + /** The primary hostname for the service, as chosen by the user */ + primaryHostname: Hostname | null + /** The primary URL for the service, as chosen by the user */ + primaryUrl: UrlString | null +} +const either = + (...args: ((a: A) => boolean)[]) => + (a: A) => + args.some((x) => x(a)) +const negate = + (fn: (a: A) => boolean) => + (a: A) => + !fn(a) +const unique = (values: A[]) => Array.from(new Set(values)) +function stringifyHostname(info: HostnameInfo): Hostname { + let base: string + if ("kind" in info.hostname && info.hostname.kind === "domain") { + base = info.hostname.subdomain + ? 
`${info.hostname.subdomain}.${info.hostname.domain}` + : info.hostname.domain + } else { + base = info.hostname.value + } + if (info.hostname.port && info.hostname.sslPort) { + return `${base}:${info.hostname.port}` as Hostname + } else if (info.hostname.sslPort) { + return `${base}:${info.hostname.sslPort}` as Hostname + } else if (info.hostname.port) { + return `${base}:${info.hostname.port}` as Hostname + } + return base as Hostname +} +const addressHostToUrl = ( + { bindOptions, username, suffix }: AddressInfo, + host: Hostname, +): UrlString => { + const scheme = host.endsWith(".onion") + ? bindOptions.scheme + : bindOptions.addSsl + ? bindOptions.addSsl.scheme + : bindOptions.scheme // TODO: encode whether hostname transport is "secure"? + return `${scheme ? `${scheme}//` : ""}${ + username ? `${username}@` : "" + }${host}${suffix}` +} +export const filledAddress = ( + hostInfo: HostInfo, + addressInfo: AddressInfo, +): FilledAddressInfo => { + const toUrl = addressHostToUrl.bind(null, addressInfo) + const hostnameInfo = + hostInfo.kind == "multi" + ? hostInfo.hostnames + : hostInfo.hostname + ? [hostInfo.hostname] + : [] + return { + ...addressInfo, + hostnames: hostnameInfo.flatMap((h) => stringifyHostname(h)), + get onionHostnames() { + return hostnameInfo + .filter((h) => h.kind === "onion") + .map((h) => stringifyHostname(h)) + }, + get localHostnames() { + return hostnameInfo + .filter((h) => h.kind === "ip" && h.hostname.kind === "local") + .map((h) => stringifyHostname(h)) + }, + get ipHostnames() { + return hostnameInfo + .filter( + (h) => + h.kind === "ip" && + (h.hostname.kind === "ipv4" || h.hostname.kind === "ipv6"), + ) + .map((h) => stringifyHostname(h)) + }, + get ipv4Hostnames() { + return hostnameInfo + .filter((h) => h.kind === "ip" && h.hostname.kind === "ipv4") + .map((h) => stringifyHostname(h)) + }, + get ipv6Hostnames() { + return hostnameInfo + .filter((h) => h.kind === "ip" && h.hostname.kind === "ipv6") + .map((h) => stringifyHostname(h)) + }, + get nonIpHostnames() { + return hostnameInfo + .filter( + (h) => + h.kind === "ip" && + h.hostname.kind !== "ipv4" && + h.hostname.kind !== "ipv6", + ) + .map((h) => stringifyHostname(h)) + }, + get urls() { + return this.hostnames.map(toUrl) + }, + get onionUrls() { + return this.onionHostnames.map(toUrl) + }, + get localUrls() { + return this.localHostnames.map(toUrl) + }, + get ipUrls() { + return this.ipHostnames.map(toUrl) + }, + get ipv4Urls() { + return this.ipv4Hostnames.map(toUrl) + }, + get ipv6Urls() { + return this.ipv6Hostnames.map(toUrl) + }, + get nonIpUrls() { + return this.nonIpHostnames.map(toUrl) + }, + } +} + +const makeInterfaceFilled = async ({ + effects, + id, + packageId, + callback, +}: { + effects: Effects + id: string + packageId: string | null + callback: () => void +}) => { + const serviceInterfaceValue = await effects.getServiceInterface({ + serviceInterfaceId: id, + packageId, + callback, + }) + const hostInfo = await effects.getHostInfo({ + packageId, + kind: null, + serviceInterfaceId: serviceInterfaceValue.id, + callback, + }) + const primaryUrl = await effects.getPrimaryUrl({ + serviceInterfaceId: id, + packageId, + callback, + }) + + const interfaceFilled: ServiceInterfaceFilled = { + ...serviceInterfaceValue, + primaryUrl: primaryUrl, + hostInfo, + addressInfo: filledAddress(hostInfo, serviceInterfaceValue.addressInfo), + get primaryHostname() { + if (primaryUrl == null) return null + return getHostname(primaryUrl) + }, + } + return interfaceFilled +} + +export class 
GetServiceInterface { + constructor( + readonly effects: Effects, + readonly opts: { id: string; packageId: string | null }, + ) {} + + /** + * Returns the requested service interface. Restarts the service if the value changes + */ + async const() { + const { id, packageId } = this.opts + const callback = this.effects.restart + const interfaceFilled: ServiceInterfaceFilled = await makeInterfaceFilled({ + effects: this.effects, + id, + packageId, + callback, + }) + + return interfaceFilled + } + /** + * Returns the value of ServiceInterfacesFilled at the provided path. Does nothing if the value changes + */ + async once() { + const { id, packageId } = this.opts + const callback = () => {} + const interfaceFilled: ServiceInterfaceFilled = await makeInterfaceFilled({ + effects: this.effects, + id, + packageId, + callback, + }) + + return interfaceFilled + } + + /** + * Watches the value of ServiceInterfacesFilled at the provided path. Takes a custom callback function to run whenever the value changes + */ + async *watch() { + const { id, packageId } = this.opts + while (true) { + let callback: () => void = () => {} + const waitForNext = new Promise((resolve) => { + callback = resolve + }) + yield await makeInterfaceFilled({ + effects: this.effects, + id, + packageId, + callback, + }) + await waitForNext + } + } +} +export function getServiceInterface( + effects: Effects, + opts: { id: string; packageId: string | null }, +) { + return new GetServiceInterface(effects, opts) +} diff --git a/sdk/lib/util/getServiceInterfaces.ts b/sdk/lib/util/getServiceInterfaces.ts new file mode 100644 index 0000000000..a7106568b3 --- /dev/null +++ b/sdk/lib/util/getServiceInterfaces.ts @@ -0,0 +1,127 @@ +import { Effects } from "../types" +import { + ServiceInterfaceFilled, + filledAddress, + getHostname, +} from "./getServiceInterface" + +const makeManyInterfaceFilled = async ({ + effects, + packageId, + callback, +}: { + effects: Effects + packageId: string | null + callback: () => void +}) => { + const serviceInterfaceValues = await effects.listServiceInterfaces({ + packageId, + callback, + }) + const hostIdsRecord = Object.fromEntries( + await Promise.all( + Array.from(new Set(serviceInterfaceValues.map((x) => x.id))).map( + async (id) => + [ + id, + await effects.getHostInfo({ + kind: null, + packageId, + serviceInterfaceId: id, + callback, + }), + ] as const, + ), + ), + ) + + const serviceInterfacesFilled: ServiceInterfaceFilled[] = await Promise.all( + serviceInterfaceValues.map(async (serviceInterfaceValue) => { + const hostInfo = await effects.getHostInfo({ + kind: null, + packageId, + serviceInterfaceId: serviceInterfaceValue.id, + callback, + }) + const primaryUrl = await effects.getPrimaryUrl({ + serviceInterfaceId: serviceInterfaceValue.id, + packageId, + callback, + }) + return { + ...serviceInterfaceValue, + primaryUrl: primaryUrl, + hostInfo, + addressInfo: filledAddress(hostInfo, serviceInterfaceValue.addressInfo), + get primaryHostname() { + if (primaryUrl == null) return null + return getHostname(primaryUrl) + }, + } + }), + ) + return serviceInterfacesFilled +} + +export class GetServiceInterfaces { + constructor( + readonly effects: Effects, + readonly opts: { packageId: string | null }, + ) {} + + /** + * Returns the requested service interfaces. 
Restart the service if the value changes + */ + async const() { + const { packageId } = this.opts + const callback = this.effects.restart + const interfaceFilled: ServiceInterfaceFilled[] = + await makeManyInterfaceFilled({ + effects: this.effects, + packageId, + callback, + }) + + return interfaceFilled + } + /** + * Returns the value of ServiceInterfacesFilled at the provided path. Does nothing if the value changes + */ + async once() { + const { packageId } = this.opts + const callback = () => {} + const interfaceFilled: ServiceInterfaceFilled[] = + await makeManyInterfaceFilled({ + effects: this.effects, + packageId, + callback, + }) + + return interfaceFilled + } + + /** + * Watches the value of ServiceInterfacesFilled at the provided path. Takes a custom callback function to run whenever the value changes + */ + async *watch() { + const { packageId } = this.opts + while (true) { + let callback: () => void = () => {} + const waitForNext = new Promise((resolve) => { + callback = resolve + }) + yield await makeManyInterfaceFilled({ + effects: this.effects, + packageId, + callback, + }) + await waitForNext + } + } +} +export function getServiceInterfaces( + effects: Effects, + opts: { packageId: string | null }, +) { + return new GetServiceInterfaces(effects, opts) +} diff --git a/sdk/lib/util/index.browser.ts b/sdk/lib/util/index.browser.ts new file mode 100644 index 0000000000..6ff7ed01cb --- /dev/null +++ b/sdk/lib/util/index.browser.ts @@ -0,0 +1,25 @@ +import * as T from "../types" + +export { GetServiceInterface, getServiceInterface } from "./getServiceInterface" +export { getServiceInterfaces } from "./getServiceInterfaces" +// prettier-ignore +export type FlattenIntersection = +T extends ArrayLike ? T : +T extends object ? {} & {[P in keyof T]: T[P]} : + T; + +export type _ = FlattenIntersection + +export const isKnownError = (e: unknown): e is T.KnownError => + e instanceof Object && ("error" in e || "error-code" in e) + +declare const affine: unique symbol + +export type Affine = { [affine]: A } + +type NeverPossible = { [affine]: string } +export type NoAny = NeverPossible extends A + ? keyof NeverPossible extends keyof A + ? never + : A + : A diff --git a/sdk/lib/util/index.ts b/sdk/lib/util/index.ts new file mode 100644 index 0000000000..bd144f35ab --- /dev/null +++ b/sdk/lib/util/index.ts @@ -0,0 +1,35 @@ +import * as T from "../types" + +import "./nullIfEmpty" +import "./fileHelper" +import "../store/getStore" +import "./deepEqual" +import "./deepMerge" +import "./Overlay" +import "./once" + +export { GetServiceInterface, getServiceInterface } from "./getServiceInterface" +export { getServiceInterfaces } from "./getServiceInterfaces" +// prettier-ignore +export type FlattenIntersection = +T extends ArrayLike ? T : +T extends object ? {} & {[P in keyof T]: T[P]} : + T; + +export type _ = FlattenIntersection + +export const isKnownError = (e: unknown): e is T.KnownError => + e instanceof Object && ("error" in e || "error-code" in e) + +declare const affine: unique symbol + +export type Affine = { [affine]: A } + +type NeverPossible = { [affine]: string } +export type NoAny = NeverPossible extends A + ? keyof NeverPossible extends keyof A + ? never + : A + : A + +export { getDefaultString } from "./getDefaultString" diff --git a/sdk/lib/util/nullIfEmpty.ts b/sdk/lib/util/nullIfEmpty.ts new file mode 100644 index 0000000000..337b9098f3 --- /dev/null +++ b/sdk/lib/util/nullIfEmpty.ts @@ -0,0 +1,12 @@ +/** + * A useful tool when doing a getConfig. 
+ * Look into the config {@link FileHelper} for an example of the use. + * @param s + * @returns + */ +export default function nullIfEmpty>( + s: null | A, +) { + if (s === null) return null + return Object.keys(s).length === 0 ? null : s +} diff --git a/sdk/lib/util/once.ts b/sdk/lib/util/once.ts new file mode 100644 index 0000000000..5f689b0e16 --- /dev/null +++ b/sdk/lib/util/once.ts @@ -0,0 +1,9 @@ +export function once(fn: () => B): () => B { + let result: [B] | [] = [] + return () => { + if (!result.length) { + result = [fn()] + } + return result[0] + } +} diff --git a/sdk/lib/util/patterns.ts b/sdk/lib/util/patterns.ts new file mode 100644 index 0000000000..ac281b0811 --- /dev/null +++ b/sdk/lib/util/patterns.ts @@ -0,0 +1,59 @@ +import { Pattern } from "../config/configTypes" +import * as regexes from "./regexes" + +export const ipv6: Pattern = { + regex: regexes.ipv6.toString(), + description: "Must be a valid IPv6 address", +} + +export const ipv4: Pattern = { + regex: regexes.ipv4.toString(), + description: "Must be a valid IPv4 address", +} + +export const hostname: Pattern = { + regex: regexes.hostname.toString(), + description: "Must be a valid hostname", +} + +export const localHostname: Pattern = { + regex: regexes.localHostname.toString(), + description: 'Must be a valid ".local" hostname', +} + +export const torHostname: Pattern = { + regex: regexes.torHostname.toString(), + description: 'Must be a valid Tor (".onion") hostname', +} + +export const url: Pattern = { + regex: regexes.url.toString(), + description: "Must be a valid URL", +} + +export const localUrl: Pattern = { + regex: regexes.localUrl.toString(), + description: 'Must be a valid ".local" URL', +} + +export const torUrl: Pattern = { + regex: regexes.torUrl.toString(), + description: 'Must be a valid Tor (".onion") URL', +} + +export const ascii: Pattern = { + regex: regexes.ascii.toString(), + description: + "May only contain ASCII characters. See https://www.w3schools.com/charsets/ref_html_ascii.asp", +} + +export const email: Pattern = { + regex: regexes.email.toString(), + description: "Must be a valid email address", +} + +export const base64: Pattern = { + regex: regexes.base64.toString(), + description: + "May only contain base64 characters. 
See https://base64.guru/learn/base64-characters", +} diff --git a/sdk/lib/util/regexes.ts b/sdk/lib/util/regexes.ts new file mode 100644 index 0000000000..f261963816 --- /dev/null +++ b/sdk/lib/util/regexes.ts @@ -0,0 +1,34 @@ +// https://ihateregex.io/expr/ipv6/ +export const ipv6 = + /(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))/ + +// https://ihateregex.io/expr/ipv4/ +export const ipv4 = + /(\b25[0-5]|\b2[0-4][0-9]|\b[01]?[0-9][0-9]?)(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}/ + +export const hostname = + /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/ + +export const localHostname = /[-a-zA-Z0-9@:%._\+~#=]{1,256}\.local/ + +export const torHostname = /[-a-zA-Z0-9@:%._\+~#=]{1,256}\.onion/ + +// https://ihateregex.io/expr/url/ +export const url = + /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)/ + +export const localUrl = + /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.local\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)/ + +export const torUrl = + /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.onion\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)/ + +// https://ihateregex.io/expr/ascii/ +export const ascii = /^[ -~]*$/ + +//https://ihateregex.io/expr/email/ +export const email = /[^@ \t\r\n]+@[^@ \t\r\n]+\.[^@ \t\r\n]+/ + +//https://rgxdb.com/r/1NUN74O6 +export const base64 = + /^(?:[a-zA-Z0-9+\/]{4})*(?:|(?:[a-zA-Z0-9+\/]{3}=)|(?:[a-zA-Z0-9+\/]{2}==)|(?:[a-zA-Z0-9+\/]{1}===))$/ diff --git a/sdk/lib/util/splitCommand.ts b/sdk/lib/util/splitCommand.ts new file mode 100644 index 0000000000..69f00a5a74 --- /dev/null +++ b/sdk/lib/util/splitCommand.ts @@ -0,0 +1,17 @@ +import { arrayOf, string } from "ts-matches" +import { ValidIfNoStupidEscape } from "../types" + +export const splitCommand = ( + command: string | [string, ...string[]], +): string[] => { + if (arrayOf(string).test(command)) return command + return String(command) + .split('"') + .flatMap((x, i) => + i % 2 !== 0 + ? [x] + : x.split("'").flatMap((x, i) => (i % 2 !== 0 ? [x] : x.split(" "))), + ) + .map((x) => x.trim()) + .filter(Boolean) +} diff --git a/sdk/lib/util/stringFromStdErrOut.ts b/sdk/lib/util/stringFromStdErrOut.ts new file mode 100644 index 0000000000..452aaa029a --- /dev/null +++ b/sdk/lib/util/stringFromStdErrOut.ts @@ -0,0 +1,6 @@ +export async function stringFromStdErrOut(x: { + stdout: string + stderr: string +}) { + return x?.stderr ? 
Promise.reject(x.stderr) : x.stdout +} diff --git a/sdk/package-lock.json b/sdk/package-lock.json new file mode 100644 index 0000000000..f0d61ea4dc --- /dev/null +++ b/sdk/package-lock.json @@ -0,0 +1,4322 @@ +{ + "name": "@start9labs/start-sdk", + "version": "0.4.0-rev0.lib0.rc8.beta10", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@start9labs/start-sdk", + "version": "0.4.0-rev0.lib0.rc8.beta10", + "license": "MIT", + "dependencies": { + "isomorphic-fetch": "^3.0.0", + "ts-matches": "^5.4.1" + }, + "devDependencies": { + "@iarna/toml": "^2.2.5", + "@types/jest": "^29.4.0", + "jest": "^29.4.3", + "prettier": "^3.2.5", + "ts-jest": "^29.0.5", + "ts-node": "^10.9.1", + "tsx": "^4.7.1", + "typescript": "^5.0.4", + "yaml": "^2.2.2" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", + "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.1.0", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.21.0.tgz", + "integrity": "sha512-gMuZsmsgxk/ENC3O/fRw5QY8A9/uxQbbCEypnLIiYYc/qVJtEV7ouxC3EllIIwNzMqAQee5tanFabWsUOutS7g==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.21.3", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.21.3.tgz", + "integrity": "sha512-qIJONzoa/qiHghnm0l1n4i/6IIziDpzqc36FBs4pzMhDUraHqponwJLiAKm1hGLP3OSB/TVNz6rMwVGpwxxySw==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.21.3", + "@babel/helper-compilation-targets": "^7.20.7", + "@babel/helper-module-transforms": "^7.21.2", + "@babel/helpers": "^7.21.0", + "@babel/parser": "^7.21.3", + "@babel/template": "^7.20.7", + "@babel/traverse": "^7.21.3", + "@babel/types": "^7.21.3", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.2", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "dev": true + }, + "node_modules/@babel/generator": { + "version": "7.21.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.21.3.tgz", + "integrity": "sha512-QS3iR1GYC/YGUnW7IdggFeN5c1poPUurnGttOV/bZgPGV+izC/D8HnD6DLwod0fsatNyVn1G3EVWMYIF0nHbeA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.21.3", + "@jridgewell/gen-mapping": "^0.3.2", + "@jridgewell/trace-mapping": "^0.3.17", + "jsesc": "^2.5.1" + }, 
+ "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/generator/node_modules/@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.7.tgz", + "integrity": "sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.20.5", + "@babel/helper-validator-option": "^7.18.6", + "browserslist": "^4.21.3", + "lru-cache": "^5.1.1", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", + "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-function-name": { + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz", + "integrity": "sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==", + "dev": true, + "dependencies": { + "@babel/template": "^7.20.7", + "@babel/types": "^7.21.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-hoist-variables": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", + "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", + "dev": true, + "dependencies": { + "@babel/types": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", + "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz", + "integrity": "sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==", + "dev": true, + "dependencies": { + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-simple-access": "^7.20.2", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/helper-validator-identifier": "^7.19.1", + "@babel/template": "^7.20.7", + "@babel/traverse": "^7.21.2", + "@babel/types": "^7.21.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + 
"version": "7.20.2", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz", + "integrity": "sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.20.2", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz", + "integrity": "sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.20.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", + "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.19.4", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz", + "integrity": "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz", + "integrity": "sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.0.tgz", + "integrity": "sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==", + "dev": true, + "dependencies": { + "@babel/template": "^7.20.7", + "@babel/traverse": "^7.21.0", + "@babel/types": "^7.21.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.18.6", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + 
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/parser": { + "version": "7.21.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.21.3.tgz", + "integrity": "sha512-lobG0d7aOfQRXh8AyklEAgZGvA4FShxo6xQbUrrT/cNBPUdIDojlokwJsQyCC/eKia7ifqM0yP+2DRZ4WKw2RQ==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": 
"sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz", + "integrity": "sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.20.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.20.0.tgz", + "integrity": "sha512-rd9TkG+u1CExzS4SM1BlMEhMXwFLKVjOAFFCDx9PbX5ycJWDoWMcwdJH9RhkPu1dOgn5TrxLot/Gx6lWFuAUNQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.19.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.20.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz", + "integrity": "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.18.6", + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.21.3", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.3.tgz", + "integrity": "sha512-XLyopNeaTancVitYZe2MlUEvgKb6YVVPXzofHgqHijCImG33b/uTurMS488ht/Hbsb2XK3U2BnSTxKVNGV3nGQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.21.3", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.21.0", + "@babel/helper-hoist-variables": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/parser": "^7.21.3", + "@babel/types": "^7.21.3", + "debug": "^4.1.0", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.21.3", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.3.tgz", + "integrity": "sha512-sBGdETxC+/M4o/zKC0sl6sjWv62WFR/uzxrJ6uYyMLZOUlPnwzw0tKgVHOXxaAd5l2g8pEDM5RZ495GPQI77kg==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.19.4", + "@babel/helper-validator-identifier": "^7.19.1", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", + "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "0.3.9" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@cspotcode/source-map-support/node_modules/@jridgewell/trace-mapping": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", + "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz", + "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@iarna/toml": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz", + "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==", + "dev": true + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.5.0.tgz", + "integrity": "sha512-NEpkObxPwyw/XxZVLPmAGKE89IQRp4puc6IQRPru6JKd1M3fW9v1xM1AnzIJE65hbCkzQAdnL8P47e9hzhiYLQ==", + "dev": true, + "dependencies": { + "@jest/types": "^29.5.0", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.5.0", + "jest-util": "^29.5.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.5.0.tgz", + "integrity": "sha512-28UzQc7ulUrOQw1IsN/kv1QES3q2kkbl/wGslyhAclqZ/8cMdB5M68BffkIdSJgKBUt50d3hbwJ92XESlE7LiQ==", + "dev": true, + "dependencies": { + "@jest/console": "^29.5.0", + "@jest/reporters": "^29.5.0", + "@jest/test-result": "^29.5.0", + "@jest/transform": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/node": "*", 
+ "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.5.0", + "jest-config": "^29.5.0", + "jest-haste-map": "^29.5.0", + "jest-message-util": "^29.5.0", + "jest-regex-util": "^29.4.3", + "jest-resolve": "^29.5.0", + "jest-resolve-dependencies": "^29.5.0", + "jest-runner": "^29.5.0", + "jest-runtime": "^29.5.0", + "jest-snapshot": "^29.5.0", + "jest-util": "^29.5.0", + "jest-validate": "^29.5.0", + "jest-watcher": "^29.5.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.5.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.5.0.tgz", + "integrity": "sha512-5FXw2+wD29YU1d4I2htpRX7jYnAyTRjP2CsXQdo9SAM8g3ifxWPSV0HnClSn71xwctr0U3oZIIH+dtbfmnbXVQ==", + "dev": true, + "dependencies": { + "@jest/fake-timers": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/node": "*", + "jest-mock": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.5.0.tgz", + "integrity": "sha512-PueDR2HGihN3ciUNGr4uelropW7rqUfTiOn+8u0leg/42UhblPxHkfoh0Ruu3I9Y1962P3u2DY4+h7GVTSVU6g==", + "dev": true, + "dependencies": { + "expect": "^29.5.0", + "jest-snapshot": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.5.0.tgz", + "integrity": "sha512-fmKzsidoXQT2KwnrwE0SQq3uj8Z763vzR8LnLBwC2qYWEFpjX8daRsk6rHUM1QvNlEW/UJXNXm59ztmJJWs2Mg==", + "dev": true, + "dependencies": { + "jest-get-type": "^29.4.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.5.0.tgz", + "integrity": "sha512-9ARvuAAQcBwDAqOnglWq2zwNIRUDtk/SCkp/ToGEhFv5r86K21l+VEs0qNTaXtyiY0lEePl3kylijSYJQqdbDg==", + "dev": true, + "dependencies": { + "@jest/types": "^29.5.0", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.5.0", + "jest-mock": "^29.5.0", + "jest-util": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.5.0.tgz", + "integrity": "sha512-S02y0qMWGihdzNbUiqSAiKSpSozSuHX5UYc7QbnHP+D9Lyw8DgGGCinrN9uSuHPeKgSSzvPom2q1nAtBvUsvPQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.5.0", + "@jest/expect": "^29.5.0", + "@jest/types": "^29.5.0", + "jest-mock": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.5.0.tgz", + "integrity": "sha512-D05STXqj/M8bP9hQNSICtPqz97u7ffGzZu+9XLucXhkOFBqKcXe04JLZOgIekOxdb73MAoBUFnqvf7MCpKk5OA==", + "dev": true, + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.5.0", + "@jest/test-result": "^29.5.0", + 
"@jest/transform": "^29.5.0", + "@jest/types": "^29.5.0", + "@jridgewell/trace-mapping": "^0.3.15", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^5.1.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.5.0", + "jest-util": "^29.5.0", + "jest-worker": "^29.5.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/schemas": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.4.3.tgz", + "integrity": "sha512-VLYKXQmtmuEz6IxJsrZwzG9NvtkQsWNnWMsKxqWNu3+CnfzJQhp0WDDKWLVV9hLKr0l3SLLFRqcYHjhtyuDVxg==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.25.16" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.4.3.tgz", + "integrity": "sha512-qyt/mb6rLyd9j1jUts4EQncvS6Yy3PM9HghnNv86QBlV+zdL2inCdK1tuVlL+J+lpiw2BI67qXOrX3UurBqQ1w==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.15", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.5.0.tgz", + "integrity": "sha512-fGl4rfitnbfLsrfx1uUpDEESS7zM8JdgZgOCQuxQvL1Sn/I6ijeAVQWGfXI9zb1i9Mzo495cIpVZhA0yr60PkQ==", + "dev": true, + "dependencies": { + "@jest/console": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.5.0.tgz", + "integrity": "sha512-yPafQEcKjkSfDXyvtgiV4pevSeyuA6MQr6ZIdVkWJly9vkqjnFfcfhRQqpD5whjoU8EORki752xQmjaqoFjzMQ==", + "dev": true, + "dependencies": { + "@jest/test-result": "^29.5.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.5.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.5.0.tgz", + "integrity": "sha512-8vbeZWqLJOvHaDfeMuoHITGKSz5qWc9u04lnWrQE3VyuSw604PzQM824ZeX9XSjUCeDiE3GuxZe5UKa8J61NQw==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.5.0", + "@jridgewell/trace-mapping": "^0.3.15", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.5.0", + "jest-regex-util": "^29.4.3", + "jest-util": "^29.5.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + 
"version": "29.5.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.5.0.tgz", + "integrity": "sha512-qbu7kN6czmVRc3xWFQcAN03RAUamgppVUdXrvl1Wr3jlNF93o9mJbGcDWrwGB6ht44u7efB1qCFgVQmca24Uog==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.4.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", + "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.0", + "@jridgewell/sourcemap-codec": "^1.4.10" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.17", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz", + "integrity": "sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "3.1.0", + "@jridgewell/sourcemap-codec": "1.4.14" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.25.24", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.25.24.tgz", + "integrity": "sha512-XJfwUVUKDHF5ugKwIcxEgc9k8b7HbznCp6eUfWgu710hMPNIO4aw4/zB5RogDQz8nd6gyCDpU9O/m6qYEWY6yQ==", + "dev": true + }, + "node_modules/@sinonjs/commons": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-2.0.0.tgz", + "integrity": "sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.0.2.tgz", + "integrity": "sha512-SwUDyjWnah1AaNl7kxsa7cfLhlTYoiyhDAIgyh+El30YvXs/o7OLXpYH88Zdhyx9JExKrmHDJ+10bwIcY80Jmw==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^2.0.0" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", + "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", + "dev": true + }, + 
"node_modules/@tsconfig/node12": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", + "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", + "dev": true + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", + "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", + "dev": true + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.3.tgz", + "integrity": "sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==", + "dev": true + }, + "node_modules/@types/babel__core": { + "version": "7.20.0", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.0.tgz", + "integrity": "sha512-+n8dL/9GWblDO0iU6eZAwEIJVr5DWigtle+Q6HLOrh/pdbXOhOtqzq8VPPE2zvNJzSKY4vH/z3iT3tn0A3ypiQ==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.6.4", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz", + "integrity": "sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.1.tgz", + "integrity": "sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.18.3.tgz", + "integrity": "sha512-1kbcJ40lLB7MHsj39U4Sh1uTd2E7rLEa79kmDpI6cy+XiXsteB3POdQomoq4FxszMrO3ZYchkhYJw7A2862b3w==", + "dev": true, + "dependencies": { + "@babel/types": "^7.3.0" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.6.tgz", + "integrity": "sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": 
"sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.0.tgz", + "integrity": "sha512-3Emr5VOl/aoBwnWcH/EFQvlSAmjV+XtV9GGu5mwdYew5vhQh0IUZx/60x0TzHDu09Bi7HMx10t/namdJw5QIcg==", + "dev": true, + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/node": { + "version": "18.15.10", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.10.tgz", + "integrity": "sha512-9avDaQJczATcXgfmMAW3MIWArOO7A+m90vuCFLr8AotWf8igO/mRoYukrk2cqZVtv38tHs33retzHEilM7FpeQ==", + "dev": true + }, + "node_modules/@types/prettier": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.2.tgz", + "integrity": "sha512-KufADq8uQqo1pYKVIYzfKbJfBAc0sOeXqGbFaSpv8MRmC/zXgowNZmFcbngndGk922QDmOASEXUZCaY48gs4cg==", + "dev": true + }, + "node_modules/@types/stack-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "17.0.24", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz", + "integrity": "sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.0", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz", + "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==", + "dev": true + }, + "node_modules/acorn": { + "version": "8.8.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", + "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + 
"dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/babel-jest": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.5.0.tgz", + "integrity": "sha512-mA4eCDh5mSo2EcA9xQjVTpmbbNk32Zb3Q3QFQsNhaK56Q+yoXowzFodLux30HRgyOho5rsQ6B0P9QpMkvvnJ0Q==", + "dev": true, + "dependencies": { + "@jest/transform": "^29.5.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.5.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.5.0.tgz", + "integrity": "sha512-zSuuuAlTMT4mzLj2nPnUm6fsE6270vdOfnpbJ+RmruU75UhLFvL0N2NgI7xpeS7NaB6hGqmd5pVpGTDYvi4Q3w==", + "dev": true, + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", + "integrity": "sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==", + "dev": true, + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.8.3", + "@babel/plugin-syntax-import-meta": "^7.8.3", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.8.3", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + 
"@babel/plugin-syntax-numeric-separator": "^7.8.3", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-top-level-await": "^7.8.3" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.5.0.tgz", + "integrity": "sha512-JOMloxOqdiBSxMAzjRaH023/vvcaSaec49zvg+2LmNsktC7ei39LTJGw02J+9uUtTZUq6xbLyJ4dxe9sSmIuAg==", + "dev": true, + "dependencies": { + "babel-plugin-jest-hoist": "^29.5.0", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.21.5", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.5.tgz", + "integrity": "sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001449", + "electron-to-chromium": "^1.4.284", + "node-releases": "^2.0.8", + "update-browserslist-db": "^1.0.10" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, + "node_modules/callsites": { + 
"version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001470", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001470.tgz", + "integrity": "sha512-065uNwY6QtHCBOExzbV6m236DDhYCCtPmQUCoQtwkVqzud8v5QPidoMr6CoMkC2nfp6nksjttqWQRRh75LqUmA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + } + ] + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/ci-info": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz", + "integrity": "sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz", + "integrity": "sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==", + "dev": true + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/cliui/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/string-width": { + 
"version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz", + "integrity": "sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==", + "dev": true + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dedent": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + "integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==", + "dev": true + }, + "node_modules/deepmerge": { + "version": "4.3.1", + 
"resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/diff-sequences": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.4.3.tgz", + "integrity": "sha512-ofrBgwpPhCD85kMKtE9RYFFq6OC1A89oW2vvgWZNCwxrUpRUILopY7lsYyMDSjc8g6U6aiO0Qubg6r4Wgt5ZnA==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.4.341", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.341.tgz", + "integrity": "sha512-R4A8VfUBQY9WmAhuqY5tjHRf5fH2AAf6vqitBOE0y6u2PgHgqHSrhZmu78dIX3fVZtjqlwJNX1i2zwC3VpHtQQ==", + "dev": true + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + 
"merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.5.0.tgz", + "integrity": "sha512-yM7xqUrCO2JdpFo4XpM82t+PJBFybdqoQuJLDGeDX2ij8NZzqRHyu3Hp188/JX7SWqud+7t4MUdvcgGBICMHZg==", + "dev": true, + "dependencies": { + "@jest/expect-utils": "^29.5.0", + "jest-get-type": "^29.4.3", + "jest-matcher-utils": "^29.5.0", + "jest-message-util": "^29.5.0", + "jest-util": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": 
"sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-tsconfig": { + "version": "4.7.2", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.2.tgz", + "integrity": "sha512-wuMsz4leaj5hbGgg4IvDU0bqJagpftG5l5cXIAvo8uZrqn0NJqwtfupTN00VnkQJPcIRrxYrm1Ue24btpCha2A==", + "dev": true, + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": 
"sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/import-local": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "dev": true, + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/isomorphic-fetch": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz", + "integrity": "sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==", + "dependencies": { + "node-fetch": "^2.6.1", + "whatwg-fetch": "^3.4.1" + } + }, + "node_modules/isomorphic-fetch/node_modules/node-fetch": { + "version": "2.6.11", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.11.tgz", + "integrity": "sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", + "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^3.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz", + "integrity": "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.5.0.tgz", + "integrity": "sha512-juMg3he2uru1QoXX078zTa7pO85QyB9xajZc6bU+d9yEGwrKX6+vGmJQ3UdVZsvTEUARIdObzH68QItim6OSSQ==", + "dev": true, + "dependencies": { + "@jest/core": "^29.5.0", + "@jest/types": "^29.5.0", + "import-local": "^3.0.2", + "jest-cli": "^29.5.0" + }, 
+ "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.5.0.tgz", + "integrity": "sha512-IFG34IUMUaNBIxjQXF/iu7g6EcdMrGRRxaUSw92I/2g2YC6vCdTltl4nHvt7Ci5nSJwXIkCu8Ka1DKF+X7Z1Ag==", + "dev": true, + "dependencies": { + "execa": "^5.0.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.5.0.tgz", + "integrity": "sha512-gq/ongqeQKAplVxqJmbeUOJJKkW3dDNPY8PjhJ5G0lBRvu0e3EWGxGy5cI4LAGA7gV2UHCtWBI4EMXK8c9nQKA==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.5.0", + "@jest/expect": "^29.5.0", + "@jest/test-result": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^0.7.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.5.0", + "jest-matcher-utils": "^29.5.0", + "jest-message-util": "^29.5.0", + "jest-runtime": "^29.5.0", + "jest-snapshot": "^29.5.0", + "jest-util": "^29.5.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.5.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.5.0.tgz", + "integrity": "sha512-L1KcP1l4HtfwdxXNFCL5bmUbLQiKrakMUriBEcc1Vfz6gx31ORKdreuWvmQVBit+1ss9NNR3yxjwfwzZNdQXJw==", + "dev": true, + "dependencies": { + "@jest/core": "^29.5.0", + "@jest/test-result": "^29.5.0", + "@jest/types": "^29.5.0", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "import-local": "^3.0.2", + "jest-config": "^29.5.0", + "jest-util": "^29.5.0", + "jest-validate": "^29.5.0", + "prompts": "^2.0.1", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.5.0.tgz", + "integrity": "sha512-kvDUKBnNJPNBmFFOhDbm59iu1Fii1Q6SxyhXfvylq3UTHbg6o7j/g8k2dZyXWLvfdKB1vAPxNZnMgtKJcmu3kA==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.5.0", + "@jest/types": "^29.5.0", + "babel-jest": "^29.5.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.5.0", + "jest-environment-node": "^29.5.0", + "jest-get-type": "^29.4.3", + "jest-regex-util": "^29.4.3", + "jest-resolve": "^29.5.0", + "jest-runner": "^29.5.0", + "jest-util": "^29.5.0", + "jest-validate": "^29.5.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.5.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": 
true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.5.0.tgz", + "integrity": "sha512-LtxijLLZBduXnHSniy0WMdaHjmQnt3g5sa16W4p0HqukYTTsyTW3GD1q41TyGl5YFXj/5B2U6dlh5FM1LIMgxw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.4.3", + "jest-get-type": "^29.4.3", + "pretty-format": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.4.3.tgz", + "integrity": "sha512-fzdTftThczeSD9nZ3fzA/4KkHtnmllawWrXO69vtI+L9WjEIuXWs4AmyME7lN5hU7dB0sHhuPfcKofRsUb/2Fg==", + "dev": true, + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.5.0.tgz", + "integrity": "sha512-HM5kIJ1BTnVt+DQZ2ALp3rzXEl+g726csObrW/jpEGl+CDSSQpOJJX2KE/vEg8cxcMXdyEPu6U4QX5eruQv5hA==", + "dev": true, + "dependencies": { + "@jest/types": "^29.5.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.4.3", + "jest-util": "^29.5.0", + "pretty-format": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.5.0.tgz", + "integrity": "sha512-ExxuIK/+yQ+6PRGaHkKewYtg6hto2uGCgvKdb2nfJfKXgZ17DfXjvbZ+jA1Qt9A8EQSfPnt5FKIfnOO3u1h9qw==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.5.0", + "@jest/fake-timers": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/node": "*", + "jest-mock": "^29.5.0", + "jest-util": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.4.3.tgz", + "integrity": "sha512-J5Xez4nRRMjk8emnTpWrlkyb9pfRQQanDrvWHhsR1+VUfbwxi30eVcZFlcdGInRibU4G5LwHXpI7IRHU0CY+gg==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.5.0.tgz", + "integrity": "sha512-IspOPnnBro8YfVYSw6yDRKh/TiCdRngjxeacCps1cQ9cgVN6+10JUcuJ1EabrgYLOATsIAigxA0rLR9x/YlrSA==", + "dev": true, + "dependencies": { + "@jest/types": "^29.5.0", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.4.3", + "jest-util": "^29.5.0", + "jest-worker": "^29.5.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.5.0.tgz", + "integrity": "sha512-u9YdeeVnghBUtpN5mVxjID7KbkKE1QU4f6uUwuxiY0vYRi9BUCLKlPEZfDGR67ofdFmDz9oPAy2G92Ujrntmow==", + "dev": true, + "dependencies": { + "jest-get-type": "^29.4.3", + "pretty-format": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.5.0", + "resolved": 
"https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.5.0.tgz", + "integrity": "sha512-lecRtgm/rjIK0CQ7LPQwzCs2VwW6WAahA55YBuI+xqmhm7LAaxokSB8C97yJeYyT+HvQkH741StzpU41wohhWw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.5.0", + "jest-get-type": "^29.4.3", + "pretty-format": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.5.0.tgz", + "integrity": "sha512-Kijeg9Dag6CKtIDA7O21zNTACqD5MD/8HfIV8pdD94vFyFuer52SigdC3IQMhab3vACxXMiFk+yMHNdbqtyTGA==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.5.0", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.5.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.5.0.tgz", + "integrity": "sha512-GqOzvdWDE4fAV2bWQLQCkujxYWL7RxjCnj71b5VhDAGOevB3qj3Ovg26A5NI84ZpODxyzaozXLOh2NCgkbvyaw==", + "dev": true, + "dependencies": { + "@jest/types": "^29.5.0", + "@types/node": "*", + "jest-util": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.4.3.tgz", + "integrity": "sha512-O4FglZaMmWXbGHSQInfXewIsd1LMn9p3ZXB/6r4FOkyhX2/iP/soMG98jGvk/A3HAN78+5VWcBGO0BJAPRh4kg==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.5.0.tgz", + "integrity": "sha512-1TzxJ37FQq7J10jPtQjcc+MkCkE3GBpBecsSUWJ0qZNJpmg6m0D9/7II03yJulm3H/fvVjgqLh/k2eYg+ui52w==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.5.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.5.0", + "jest-validate": "^29.5.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.5.0.tgz", + "integrity": "sha512-sjV3GFr0hDJMBpYeUuGduP+YeCRbd7S/ck6IvL3kQ9cpySYKqcqhdLLC2rFwrcL7tz5vYibomBrsFYWkIGGjOg==", + "dev": true, + "dependencies": { + "jest-regex-util": "^29.4.3", + "jest-snapshot": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.5.0.tgz", + "integrity": 
"sha512-m7b6ypERhFghJsslMLhydaXBiLf7+jXy8FwGRHO3BGV1mcQpPbwiqiKUR2zU2NJuNeMenJmlFZCsIqzJCTeGLQ==", + "dev": true, + "dependencies": { + "@jest/console": "^29.5.0", + "@jest/environment": "^29.5.0", + "@jest/test-result": "^29.5.0", + "@jest/transform": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.4.3", + "jest-environment-node": "^29.5.0", + "jest-haste-map": "^29.5.0", + "jest-leak-detector": "^29.5.0", + "jest-message-util": "^29.5.0", + "jest-resolve": "^29.5.0", + "jest-runtime": "^29.5.0", + "jest-util": "^29.5.0", + "jest-watcher": "^29.5.0", + "jest-worker": "^29.5.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.5.0.tgz", + "integrity": "sha512-1Hr6Hh7bAgXQP+pln3homOiEZtCDZFqwmle7Ew2j8OlbkIu6uE3Y/etJQG8MLQs3Zy90xrp2C0BRrtPHG4zryw==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.5.0", + "@jest/fake-timers": "^29.5.0", + "@jest/globals": "^29.5.0", + "@jest/source-map": "^29.4.3", + "@jest/test-result": "^29.5.0", + "@jest/transform": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.5.0", + "jest-message-util": "^29.5.0", + "jest-mock": "^29.5.0", + "jest-regex-util": "^29.4.3", + "jest-resolve": "^29.5.0", + "jest-snapshot": "^29.5.0", + "jest-util": "^29.5.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.5.0.tgz", + "integrity": "sha512-x7Wolra5V0tt3wRs3/ts3S6ciSQVypgGQlJpz2rsdQYoUKxMxPNaoHMGJN6qAuPJqS+2iQ1ZUn5kl7HCyls84g==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/traverse": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.5.0", + "@jest/transform": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/babel__traverse": "^7.0.6", + "@types/prettier": "^2.1.5", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.5.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.5.0", + "jest-get-type": "^29.4.3", + "jest-matcher-utils": "^29.5.0", + "jest-message-util": "^29.5.0", + "jest-util": "^29.5.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.5.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-snapshot/node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dev": 
true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-snapshot/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/jest-util": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.5.0.tgz", + "integrity": "sha512-RYMgG/MTadOr5t8KdhejfvUU82MxsCu5MF6KuDUHl+NuwzUt+Sm6jJWxTJVrDR1j5M/gJVCPKQEpWXY+yIQ6lQ==", + "dev": true, + "dependencies": { + "@jest/types": "^29.5.0", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.5.0.tgz", + "integrity": "sha512-pC26etNIi+y3HV8A+tUGr/lph9B18GnzSRAkPaaZJIE1eFdiYm6/CewuiJQ8/RlfHd1u/8Ioi8/sJ+CmbA+zAQ==", + "dev": true, + "dependencies": { + "@jest/types": "^29.5.0", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.4.3", + "leven": "^3.1.0", + "pretty-format": "^29.5.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.5.0.tgz", + "integrity": "sha512-KmTojKcapuqYrKDpRwfqcQ3zjMlwu27SYext9pt4GlF5FUgB+7XE1mcCnSm6a4uUpFyQIkb6ZhzZvHl+jiBCiA==", + "dev": true, + "dependencies": { + "@jest/test-result": "^29.5.0", + "@jest/types": "^29.5.0", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.5.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.5.0.tgz", + "integrity": "sha512-NcrQnevGoSp4b5kg+akIpthoAFHxPBcb5P6mYPY0fUNT+sSvmtu6jlkEle3anczUKIKEbMxFimk9oTP/tpIPgA==", + "dev": true, + "dependencies": { + "@types/node": "*", + "jest-util": "^29.5.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": 
"sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "dependencies": { + "semver": "^6.0.0" + }, + 
"engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true + }, + "node_modules/node-releases": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.10.tgz", + "integrity": "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==", + "dev": true + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": 
"sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": 
{ + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/prettier": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", + "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", + "dev": true, + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/pretty-format": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.5.0.tgz", + "integrity": "sha512-V2mGkI31qdttvTFX7Mt4efOqHXqJWMu4/r66Xh3Z3BwZaPfPJgp6/gbwoujRpPUtfEF6AUUWx3Jim3GCw5g/Qw==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.4.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/pure-rand": { + "version": "6.0.1", + "resolved": 
"https://registry.npmjs.org/pure-rand/-/pure-rand-6.0.1.tgz", + "integrity": "sha512-t+x1zEHDjBwkDGY5v5ApnZ/utcd4XYDiJsaQQoptTXgUXX95sDg1elCdJghzicm7n2mbCBJ3uYWr6M22SO19rg==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ] + }, + "node_modules/react-is": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==", + "dev": true + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.2.tgz", + "integrity": "sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + 
"integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, + "node_modules/ts-jest": { + "version": "29.0.5", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.0.5.tgz", + "integrity": "sha512-PL3UciSgIpQ7f6XjVOmbi96vmDHUqAyqDr8YxzopDqX3kfgYtX1cuNeBjP+L9sFXi6nzsGGA6R3fP3DDDJyrxA==", + "dev": true, + "dependencies": { + "bs-logger": "0.x", + "fast-json-stable-stringify": "2.x", + "jest-util": "^29.0.0", + "json5": "^2.2.3", + "lodash.memoize": "4.x", + "make-error": "1.x", + "semver": "7.x", + "yargs-parser": "^21.0.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + 
"@babel/core": ">=7.0.0-beta.0 <8", + "@jest/types": "^29.0.0", + "babel-jest": "^29.0.0", + "jest": "^29.0.0", + "typescript": ">=4.3" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ts-jest/node_modules/semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ts-jest/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/ts-matches": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-5.4.1.tgz", + "integrity": "sha512-kXrY75F0s0WD15N2bWKDScKlKgwnusN6dTRzGs1N7LlxQRnazrsBISC1HL4sy2adsyk65Zbx3Ui3IGN8leAFOQ==" + }, + "node_modules/ts-node": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz", + "integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==", + "dev": true, + "dependencies": { + "@cspotcode/source-map-support": "^0.8.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "v8-compile-cache-lib": "^3.0.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-esm": "dist/bin-esm.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/tsx": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.7.1.tgz", + "integrity": "sha512-8d6VuibXHtlN5E3zFkgY8u4DX7Y3Z27zvvPKVmLon/D4AjuKzarkUBTLDBgj9iTQ0hg5xM7c/mYiRVM+HETf0g==", + "dev": true, + "dependencies": { + "esbuild": "~0.19.10", + "get-tsconfig": "^4.7.2" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz", + "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==", + "cpu": [ + "arm" + 
], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz", + "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz", + "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz", + "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz", + "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz", + "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz", + "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz", + "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz", + "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ia32": { + "version": "0.19.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz", + "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-loong64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz", + "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-mips64el": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz", + "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ppc64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz", + "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-riscv64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz", + "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-s390x": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz", + "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz", + "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz", + "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz", + "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==", + "cpu": [ + "x64" + ], + "dev": true, 
+ "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/sunos-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz", + "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-arm64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz", + "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-ia32": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz", + "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-x64": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz", + "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/tsx/node_modules/esbuild": { + "version": "0.19.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz", + "integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.19.12", + "@esbuild/android-arm": "0.19.12", + "@esbuild/android-arm64": "0.19.12", + "@esbuild/android-x64": "0.19.12", + "@esbuild/darwin-arm64": "0.19.12", + "@esbuild/darwin-x64": "0.19.12", + "@esbuild/freebsd-arm64": "0.19.12", + "@esbuild/freebsd-x64": "0.19.12", + "@esbuild/linux-arm": "0.19.12", + "@esbuild/linux-arm64": "0.19.12", + "@esbuild/linux-ia32": "0.19.12", + "@esbuild/linux-loong64": "0.19.12", + "@esbuild/linux-mips64el": "0.19.12", + "@esbuild/linux-ppc64": "0.19.12", + "@esbuild/linux-riscv64": "0.19.12", + "@esbuild/linux-s390x": "0.19.12", + "@esbuild/linux-x64": "0.19.12", + "@esbuild/netbsd-x64": "0.19.12", + "@esbuild/openbsd-x64": "0.19.12", + "@esbuild/sunos-x64": "0.19.12", + "@esbuild/win32-arm64": "0.19.12", + "@esbuild/win32-ia32": "0.19.12", + "@esbuild/win32-x64": "0.19.12" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + 
"engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", + "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=12.20" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz", + "integrity": "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + } + ], + "dependencies": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + }, + "bin": { + "browserslist-lint": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/v8-compile-cache-lib": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", + "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", + "dev": true + }, + "node_modules/v8-to-istanbul": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz", + "integrity": "sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^1.6.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/v8-to-istanbul/node_modules/convert-source-map": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "dev": true + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "node_modules/whatwg-fetch": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz", + "integrity": "sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA==" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "node_modules/yaml": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.2.2.tgz", + "integrity": "sha512-CBKFWExMn46Foo4cldiChEzn7S7SRV+wqiluAb6xmueD/fGyRHIhX8m14vVGgeFWjN540nKCNVj6P21eQjgTuA==", + "dev": true, + "engines": { + "node": ">= 14" + } + }, + "node_modules/yargs": { + "version": "17.7.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz", + "integrity": 
"sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/yargs/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/sdk/package.json b/sdk/package.json new file mode 100644 index 0000000000..6176f4d8e1 --- /dev/null +++ b/sdk/package.json @@ -0,0 +1,53 @@ +{ + "name": "@start9labs/start-sdk", + "version": "0.4.0-rev0.lib0.rc8.beta10", + "description": "Software development kit to facilitate packaging services for StartOS", + "main": "./cjs/sdk/lib/index.js", + "types": "./cjs/sdk/lib/index.d.ts", + "module": "./mjs/sdk/lib/index.js", + "browser": "./mjs/sdk/lib/index.browser.js", + "sideEffects": true, + "typesVersion": { + ">=3.1": { + "*": [ + "cjs/lib/*" + ] + } + }, + "scripts": { + "test": "jest -c ./jest.config.js --coverage", + "buildOutput": "ts-node --project ./tsconfig-cjs.json ./lib/test/makeOutput.ts && npx prettier --write '**/*.ts'", + "check": "tsc --noEmit" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Start9Labs/start-sdk.git" + }, + "author": "Start9 Labs", + "license": "MIT", + "bugs": { + "url": "https://github.com/Start9Labs/start-sdk/issues" + }, + "homepage": "https://github.com/Start9Labs/start-sdk#readme", + "dependencies": { + "isomorphic-fetch": "^3.0.0", + 
"ts-matches": "^5.4.1" + }, + "prettier": { + "trailingComma": "all", + "tabWidth": 2, + "semi": false, + "singleQuote": false + }, + "devDependencies": { + "@iarna/toml": "^2.2.5", + "yaml": "^2.2.2", + "@types/jest": "^29.4.0", + "jest": "^29.4.3", + "prettier": "^3.2.5", + "ts-jest": "^29.0.5", + "ts-node": "^10.9.1", + "tsx": "^4.7.1", + "typescript": "^5.0.4" + } +} diff --git a/sdk/scripts/oldSpecToBuilder.ts b/sdk/scripts/oldSpecToBuilder.ts new file mode 100644 index 0000000000..ce8ea4e5ff --- /dev/null +++ b/sdk/scripts/oldSpecToBuilder.ts @@ -0,0 +1,413 @@ +import * as fs from "fs" + +// https://stackoverflow.com/questions/2970525/converting-any-string-into-camel-case +export function camelCase(value: string) { + return value + .replace(/([\(\)\[\]])/g, "") + .replace(/^([A-Z])|[\s-_](\w)/g, function (match, p1, p2, offset) { + if (p2) return p2.toUpperCase() + return p1.toLowerCase() + }) +} + +export async function oldSpecToBuilder( + file: string, + inputData: Promise | any, + options?: Parameters[1], +) { + await fs.writeFile( + file, + await makeFileContentFromOld(inputData, options), + (err) => console.error(err), + ) +} + +function isString(x: unknown): x is string { + return typeof x === "string" +} + +export default async function makeFileContentFromOld( + inputData: Promise | any, + { StartSdk = "start-sdk", nested = true } = {}, +) { + const outputLines: string[] = [] + outputLines.push(` +import { sdk } from "${StartSdk}" +const {Config, List, Value, Variants} = sdk +`) + const data = await inputData + + const namedConsts = new Set(["Config", "Value", "List"]) + const configName = newConst("configSpec", convertInputSpec(data)) + const configMatcherName = newConst( + "matchConfigSpec", + `${configName}.validator`, + ) + outputLines.push( + `export type ConfigSpec = typeof ${configMatcherName}._TYPE;`, + ) + + return outputLines.join("\n") + + function newConst(key: string, data: string, type?: string) { + const variableName = getNextConstName(camelCase(key)) + outputLines.push( + `export const ${variableName}${!type ? "" : `: ${type}`} = ${data};`, + ) + return variableName + } + function maybeNewConst(key: string, data: string) { + if (nested) return data + return newConst(key, data) + } + function convertInputSpecInner(data: any) { + let answer = "{" + for (const [key, value] of Object.entries(data)) { + const variableName = maybeNewConst(key, convertValueSpec(value)) + + answer += `${JSON.stringify(key)}: ${variableName},` + } + return `${answer}}` + } + + function convertInputSpec(data: any) { + return `Config.of(${convertInputSpecInner(data)})` + } + function convertValueSpec(value: any): string { + switch (value.type) { + case "string": { + if (value.textarea) { + return `${rangeToTodoComment( + value?.range, + )}Value.textarea(${JSON.stringify( + { + name: value.name || null, + description: value.description || null, + warning: value.warning || null, + required: !(value.nullable || false), + placeholder: value.placeholder || null, + maxLength: null, + minLength: null, + }, + null, + 2, + )})` + } + return `${rangeToTodoComment(value?.range)}Value.text(${JSON.stringify( + { + name: value.name || null, + // prettier-ignore + required: ( + value.default != null ? {default: value.default} : + value.nullable === false ? {default: null} : + !value.nullable + ), + description: value.description || null, + warning: value.warning || null, + masked: value.masked || false, + placeholder: value.placeholder || null, + inputmode: "text", + patterns: value.pattern + ? 
[ + { + regex: value.pattern, + description: value["pattern-description"], + }, + ] + : [], + minLength: null, + maxLength: null, + }, + null, + 2, + )})` + } + case "number": { + return `${rangeToTodoComment( + value?.range, + )}Value.number(${JSON.stringify( + { + name: value.name || null, + description: value.description || null, + warning: value.warning || null, + // prettier-ignore + required: ( + value.default != null ? {default: value.default} : + value.nullable === false ? {default: null} : + !value.nullable + ), + min: null, + max: null, + step: null, + integer: value.integral || false, + units: value.units || null, + placeholder: value.placeholder || null, + }, + null, + 2, + )})` + } + case "boolean": { + return `Value.toggle(${JSON.stringify( + { + name: value.name || null, + default: value.default || false, + description: value.description || null, + warning: value.warning || null, + }, + null, + 2, + )})` + } + case "enum": { + const allValueNames = new Set([ + ...(value?.["values"] || []), + ...Object.keys(value?.["value-names"] || {}), + ]) + const values = Object.fromEntries( + Array.from(allValueNames) + .filter(isString) + .map((key) => [key, value?.spec?.["value-names"]?.[key] || key]), + ) + return `Value.select(${JSON.stringify( + { + name: value.name || null, + description: value.description || null, + warning: value.warning || null, + + // prettier-ignore + required:( + value.default != null ? {default: value.default} : + value.nullable === false ? {default: null} : + !value.nullable + ), + values, + }, + null, + 2, + )} as const)` + } + case "object": { + const specName = maybeNewConst( + value.name + "_spec", + convertInputSpec(value.spec), + ) + return `Value.object({ + name: ${JSON.stringify(value.name || null)}, + description: ${JSON.stringify(value.description || null)}, + warning: ${JSON.stringify(value.warning || null)}, + }, ${specName})` + } + case "union": { + const variants = maybeNewConst( + value.name + "_variants", + convertVariants(value.variants, value.tag["variant-names"] || {}), + ) + + return `Value.union({ + name: ${JSON.stringify(value.name || null)}, + description: ${JSON.stringify(value.tag.description || null)}, + warning: ${JSON.stringify(value.tag.warning || null)}, + + // prettier-ignore + required: ${JSON.stringify( + // prettier-ignore + value.default != null ? {default: value.default} : + value.nullable === false ? {default: null} : + !value.nullable, + )}, + }, ${variants})` + } + case "list": { + if (value.subtype === "enum") { + const allValueNames = new Set([ + ...(value?.spec?.["values"] || []), + ...Object.keys(value?.spec?.["value-names"] || {}), + ]) + const values = Object.fromEntries( + Array.from(allValueNames) + .filter(isString) + .map((key: string) => [ + key, + value?.spec?.["value-names"]?.[key] ?? key, + ]), + ) + return `Value.multiselect(${JSON.stringify( + { + name: value.name || null, + minLength: null, + maxLength: null, + default: value.default ?? 
null, + description: value.description || null, + warning: value.warning || null, + values, + }, + null, + 2, + )})` + } + const list = maybeNewConst(value.name + "_list", convertList(value)) + return `Value.list(${list})` + } + case "pointer": { + return `/* TODO deal with point removed point "${value.name}" */null as any` + } + } + throw Error(`Unknown type "${value.type}"`) + } + + function convertList(value: any) { + switch (value.subtype) { + case "string": { + return `${rangeToTodoComment(value?.range)}List.text(${JSON.stringify( + { + name: value.name || null, + minLength: null, + maxLength: null, + default: value.default || null, + description: value.description || null, + warning: value.warning || null, + }, + null, + 2, + )}, ${JSON.stringify({ + masked: value?.spec?.masked || false, + placeholder: value?.spec?.placeholder || null, + patterns: value?.spec?.pattern + ? [ + { + regex: value.spec.pattern, + description: value?.spec?.["pattern-description"], + }, + ] + : [], + minLength: null, + maxLength: null, + })})` + } + case "number": { + return `${rangeToTodoComment(value?.range)}List.number(${JSON.stringify( + { + name: value.name || null, + minLength: null, + maxLength: null, + default: value.default || null, + description: value.description || null, + warning: value.warning || null, + }, + null, + 2, + )}, ${JSON.stringify({ + integer: value?.spec?.integral || false, + min: null, + max: null, + units: value?.spec?.units || null, + placeholder: value?.spec?.placeholder || null, + })})` + } + case "enum": { + return "/* error!! list.enum */" + } + case "object": { + const specName = maybeNewConst( + value.name + "_spec", + convertInputSpec(value.spec.spec), + ) + return `${rangeToTodoComment(value?.range)}List.obj({ + name: ${JSON.stringify(value.name || null)}, + minLength: ${JSON.stringify(null)}, + maxLength: ${JSON.stringify(null)}, + default: ${JSON.stringify(value.default || null)}, + description: ${JSON.stringify(value.description || null)}, + warning: ${JSON.stringify(value.warning || null)}, + }, { + spec: ${specName}, + displayAs: ${JSON.stringify(value?.spec?.["display-as"] || null)}, + uniqueBy: ${JSON.stringify(value?.spec?.["unique-by"] || null)}, + })` + } + case "union": { + const variants = maybeNewConst( + value.name + "_variants", + convertVariants( + value.spec.variants, + value.spec["variant-names"] || {}, + ), + ) + const unionValueName = maybeNewConst( + value.name + "_union", + `${rangeToTodoComment(value?.range)} + Value.union({ + name: ${JSON.stringify(value?.spec?.tag?.name || null)}, + description: ${JSON.stringify( + value?.spec?.tag?.description || null, + )}, + warning: ${JSON.stringify(value?.spec?.tag?.warning || null)}, + required: ${JSON.stringify( + // prettier-ignore + 'default' in value?.spec ? {default: value?.spec?.default} : + !!value?.spec?.tag?.nullable || false ? 
{default: null} : + false, + )}, + }, ${variants}) + `, + ) + const listConfig = maybeNewConst( + value.name + "_list_config", + ` + Config.of({ + "union": ${unionValueName} + }) + `, + ) + return `${rangeToTodoComment(value?.range)}List.obj({ + name:${JSON.stringify(value.name || null)}, + minLength:${JSON.stringify(null)}, + maxLength:${JSON.stringify(null)}, + default: [], + description: ${JSON.stringify(value.description || null)}, + warning: ${JSON.stringify(value.warning || null)}, + }, { + spec: ${listConfig}, + displayAs: ${JSON.stringify(value?.spec?.["display-as"] || null)}, + uniqueBy: ${JSON.stringify(value?.spec?.["unique-by"] || null)}, + })` + } + } + throw new Error(`Unknown subtype "${value.subtype}"`) + } + + function convertVariants( + variants: Record<string, unknown>, + variantNames: Record<string, string>, + ): string { + let answer = "Variants.of({" + for (const [key, value] of Object.entries(variants)) { + const variantSpec = maybeNewConst(key, convertInputSpec(value)) + answer += `"${key}": {name: "${ + variantNames[key] || key + }", spec: ${variantSpec}},` + } + return `${answer}})` + } + + function getNextConstName(name: string, i = 0): string { + const newName = !i ? name : name + i + if (namedConsts.has(newName)) { + return getNextConstName(name, i + 1) + } + namedConsts.add(newName) + return newName + } +} + +function rangeToTodoComment(range: string | undefined) { + if (!range) return "" + return `/* TODO: Convert range for this value (${range})*/` +} + +// oldSpecToBuilder( +// "./config.ts", +// // Put config here +// {}, +// ) diff --git a/sdk/tsconfig-base.json b/sdk/tsconfig-base.json new file mode 100644 index 0000000000..cc14a817cd --- /dev/null +++ b/sdk/tsconfig-base.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "module": "esnext", + "strict": true, + "outDir": "dist", + "preserveConstEnums": true, + "sourceMap": true, + "target": "es2017", + "pretty": true, + "declaration": true, + "noImplicitAny": true, + "esModuleInterop": true, + "types": ["node", "jest"], + "moduleResolution": "node", + "skipLibCheck": true + }, + "include": ["lib/**/*"], + "exclude": ["lib/**/*.spec.ts", "lib/**/*.gen.ts", "list", "node_modules"] +} diff --git a/sdk/tsconfig-cjs.json b/sdk/tsconfig-cjs.json new file mode 100644 index 0000000000..8413cf2486 --- /dev/null +++ b/sdk/tsconfig-cjs.json @@ -0,0 +1,8 @@ +{ + "extends": "./tsconfig-base.json", + "compilerOptions": { + "module": "commonjs", + "outDir": "dist/cjs", + "target": "es2018" + } +} diff --git a/sdk/tsconfig.json b/sdk/tsconfig.json new file mode 100644 index 0000000000..8ae7d62a88 --- /dev/null +++ b/sdk/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "./tsconfig-base.json", + "compilerOptions": { + "module": "esnext", + "outDir": "dist/mjs", + "target": "esnext" + } +} diff --git a/system-images/compat/Cargo.lock b/system-images/compat/Cargo.lock index c1e0959fb6..0f82176f48 100644 --- a/system-images/compat/Cargo.lock +++ b/system-images/compat/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.19.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ "gimli", ] @@ -32,25 +32,26 @@ [[package]] name = "ahash" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom 0.2.8", + "getrandom 0.2.12", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" dependencies = [ "cfg-if", - "getrandom 0.2.8", + "getrandom 0.2.12", "once_cell", "version_check", + "zerocopy", ] [[package]] @@ -77,6 +78,12 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + [[package]] name = "android-tzdata" version = "0.1.1" @@ -101,26 +108,74 @@ dependencies = [ "winapi", ] +[[package]] +name = "anstream" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" + +[[package]] +name = "anstyle-parse" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + [[package]] name = "anyhow" -version = "1.0.68" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" dependencies = [ "backtrace", ] [[package]] name = "arrayref" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" [[package]] name = "arrayvec" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "ascii-canvas" @@ -133,9 +188,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833" +checksum = 
"81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" dependencies = [ "concurrent-queue", "event-listener", @@ -144,9 +199,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f658e2baef915ba0f26f1f7c42bfb8e12f532a01f449a090ded75ae7a07e9ba2" +checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c" dependencies = [ "brotli", "flate2", @@ -175,18 +230,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] name = "async-trait" -version = "0.1.74" +version = "0.1.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] @@ -198,6 +253,16 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atomic-write-file" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edcdbedc2236483ab103a53415653d6b4442ea6141baf1ffa85df29635e88436" +dependencies = [ + "nix 0.27.1", + "rand 0.8.5", +] + [[package]] name = "atty" version = "0.2.14" @@ -215,11 +280,88 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "axum" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1236b4b292f6c4d6dc34604bb5120d85c3fe1d1aa596bd5cc52ca054d13e7b9e" +dependencies = [ + "async-trait", + "axum-core", + "base64 0.21.7", + "bytes", + "futures-util", + "http 1.0.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.1.0", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper", + "tokio", + "tokio-tungstenite", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 1.0.0", + "http-body 1.0.0", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-server" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1ad46c3ec4e12f4a4b6835e173ba21c25e484c9d02b49770bf006ce5367c036" +dependencies = [ + "bytes", + "futures-util", + "http 1.0.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.1.0", + "hyper-util", + "pin-project-lite", + "tokio", + "tower", + "tower-service", +] + [[package]] name = "backtrace" -version = "0.3.67" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ "addr2line", "cc", @@ -250,9 +392,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" 
[[package]] name = "base64" -version = "0.21.4" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64ct" @@ -262,9 +404,9 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "basic-cookies" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb53b6b315f924c7f113b162e53b3901c05fc9966baf84d201dfcc7432a4bb38" +checksum = "67bd8fd42c16bdb08688243dc5f0cc117a3ca9efeeaba3a345a18a6159ad96f7" dependencies = [ "lalrpop", "lalrpop-util", @@ -313,9 +455,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" dependencies = [ "serde", ] @@ -340,13 +482,26 @@ dependencies = [ [[package]] name = "blake2b_simd" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72936ee4afc7f8f736d1c38383b56480b5497b4617b4a77bdbf1d2ababc76127" +checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" dependencies = [ "arrayref", "arrayvec", - "constant_time_eq 0.1.5", + "constant_time_eq", +] + +[[package]] +name = "blake3" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", ] [[package]] @@ -361,9 +516,9 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] @@ -376,9 +531,9 @@ checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" [[package]] name = "brotli" -version = "3.3.4" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -387,43 +542,31 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.3.4" +version = "2.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +checksum = "4e2e4afe60d7dd600fdd3de8d0f08c2b7ec039712e3b6137ff98b7004e82de4f" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", ] -[[package]] -name = "bstr" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" -dependencies = [ - "lazy_static", - "memchr", - "regex-automata 0.1.10", - "serde", -] - [[package]] name = "bumpalo" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.3.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] name = "cc" @@ -442,9 +585,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" dependencies = [ "android-tzdata", "iana-time-zone", @@ -452,23 +595,23 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.48.5", + "windows-targets 0.52.0", ] [[package]] name = "chumsky" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23170228b96236b5a7299057ac284a321457700bc8c41a4476052f0f4ba5349d" +checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" dependencies = [ - "hashbrown 0.12.3", + "hashbrown 0.14.3", ] [[package]] name = "ciborium" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" dependencies = [ "ciborium-io", "ciborium-ll", @@ -477,18 +620,18 @@ dependencies = [ [[package]] name = "ciborium-io" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" [[package]] name = "ciborium-ll" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" dependencies = [ "ciborium-io", - "half", + "half 2.3.1", ] [[package]] @@ -533,13 +676,47 @@ checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ "atty", "bitflags 1.3.2", - "clap_lex", - "indexmap 1.9.2", + "clap_lex 0.2.4", + "indexmap 1.9.3", "strsim 0.10.0", "termcolor", "textwrap 0.16.0", ] +[[package]] +name = "clap" +version = "4.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" +dependencies = [ + "anstream", + "anstyle", + "clap_lex 0.6.0", + "strsim 0.10.0", +] + +[[package]] +name = 
"clap_derive" +version = "4.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = "clap_lex" version = "0.2.4" @@ -550,14 +727,10 @@ dependencies = [ ] [[package]] -name = "codespan-reporting" -version = "0.11.1" +name = "clap_lex" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" -dependencies = [ - "termcolor", - "unicode-width", -] +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "color-eyre" @@ -576,9 +749,9 @@ dependencies = [ [[package]] name = "color-spantrace" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" +checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" dependencies = [ "once_cell", "owo-colors", @@ -586,6 +759,12 @@ dependencies = [ "tracing-error", ] +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + [[package]] name = "compat" version = "0.1.0" @@ -597,7 +776,7 @@ dependencies = [ "emver", "failure", "imbl-value", - "indexmap 1.9.2", + "indexmap 1.9.3", "itertools 0.10.5", "lazy_static", "linear-map", @@ -607,7 +786,7 @@ dependencies = [ "pest_derive", "rand 0.8.5", "regex", - "rust-argon2 1.0.0", + "rust-argon2 1.0.1", "serde", "serde_json", "serde_yaml 0.8.26", @@ -616,86 +795,58 @@ dependencies = [ [[package]] name = "concurrent-queue" -version = "2.1.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e" +checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363" dependencies = [ "crossbeam-utils", ] [[package]] name = "console" -version = "0.15.7" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ "encode_unicode 0.3.6", "lazy_static", "libc", "unicode-width", - "windows-sys 0.45.0", + "windows-sys 0.52.0", ] [[package]] name = "const-oid" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const_format" -version = "0.2.31" +version = "0.2.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c990efc7a285731f9a4378d81aff2f0e85a2c8781a05ef0f8baa8dac54d0ff48" +checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" dependencies = [ "const_format_proc_macros", ] [[package]] name = "const_format_proc_macros" -version = "0.2.31" +version = "0.2.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e026b6ce194a874cb9cf32cd5772d1ef9767cc8fcb5765948d74f37a9d8b2bf6" +checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" dependencies = [ "proc-macro2", "quote", 
"unicode-xid", ] -[[package]] -name = "constant_time_eq" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" - [[package]] name = "constant_time_eq" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" -[[package]] -name = "container-init" -version = "0.1.0" -dependencies = [ - "async-stream", - "color-eyre", - "futures", - "helpers", - "imbl", - "nix 0.27.1", - "procfs", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tracing", - "tracing-error", - "tracing-futures", - "tracing-subscriber", - "yajrc 0.1.0 (git+https://github.com/dr-bonez/yajrc.git?branch=develop)", -] - [[package]] name = "convert_case" version = "0.4.0" @@ -745,15 +896,16 @@ dependencies = [ [[package]] name = "cookie_store" -version = "0.16.1" +version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e4b6aa369f41f5faa04bb80c9b1f4216ea81646ed6124d76ba5c49a7aafd9cd" +checksum = "d606d0fba62e13cf04db20536c05cb7f13673c161cb47a47a82b9b9e7d3f1daa" dependencies = [ "cookie 0.16.2", "idna 0.2.3", "log", "publicsuffix", "serde", + "serde_derive", "serde_json", "time", "url", @@ -778,9 +930,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -788,33 +940,33 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] [[package]] name = "crc" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53757d12b596c16c78b83458d732a5d1a17ab3f53f2f7412f6fb57cc8a140ab3" +checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" dependencies = [ "crc-catalog", ] [[package]] name = "crc-catalog" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" @@ -827,21 +979,43 @@ dependencies = [ [[package]] name = "crossbeam-queue" -version = "0.3.8" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "cfg-if", "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.14" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + +[[package]] +name = "crossterm" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "cfg-if", + "bitflags 2.4.2", + "crossterm_winapi", + "futures-core", + "libc", + "mio", + "parking_lot", + "signal-hook", + "signal-hook-mio", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", ] [[package]] @@ -852,9 +1026,9 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] name = "crypto-bigint" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "740fe28e594155f10cfc383984cbefd529d7396050557148f79cb0f621204124" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -874,9 +1048,9 @@ dependencies = [ [[package]] name = "crypto-mac" -version = "0.11.1" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" +checksum = "25fab6889090c8133f3deb8f73ba3c65a7f456f66436fc012a1b1e272b1e103e" dependencies = [ "generic-array", "subtle", @@ -884,22 +1058,21 @@ dependencies = [ [[package]] name = "csv" -version = "1.1.6" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1" +checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" dependencies = [ - "bstr", "csv-core", - "itoa 0.4.8", + "itoa", "ryu", "serde", ] [[package]] name = "csv-core" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" dependencies = [ "memchr", ] @@ -951,57 +1124,13 @@ dependencies = [ [[package]] name = "curve25519-dalek-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.37", -] - -[[package]] -name = "cxx" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d1075c37807dcf850c379432f0df05ba52cc30f279c5cfc43cc221ce7f8579" -dependencies = [ - "cc", - "cxxbridge-flags", - "cxxbridge-macro", - "link-cplusplus", -] - -[[package]] -name = "cxx-build" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5044281f61b27bc598f2f6647d480aed48d2bf52d6eb0b627d84c0361b17aa70" -dependencies = [ - "cc", - "codespan-reporting", - "once_cell", - "proc-macro2", - "quote", - "scratch", - "syn 1.0.107", -] - -[[package]] -name = "cxxbridge-flags" -version = "1.0.86" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61b50bc93ba22c27b0d31128d2d130a0a6b3d267ae27ef7e4fae2167dfe8781c" - -[[package]] -name = "cxxbridge-macro" -version = "1.0.86" +version = "0.1.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e61fda7e62115119469c7b3591fd913ecca96fb766cfd3f2e2502ab7bc87a5" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", + "syn 2.0.48", ] [[package]] @@ -1025,7 +1154,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] @@ -1036,17 +1165,17 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] name = "dashmap" -version = "5.4.0" +version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown 0.12.3", + "hashbrown 0.14.3", "lock_api", "once_cell", "parking_lot_core", @@ -1054,9 +1183,9 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.3.3" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d8666cb01533c39dde32bcbab8e227b4ed6679b2c925eba05feabea39508fb" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" [[package]] name = "der" @@ -1069,6 +1198,16 @@ dependencies = [ "zeroize", ] +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + [[package]] name = "derive_more" version = "0.99.17" @@ -1079,7 +1218,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -1103,7 +1242,7 @@ version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ - "block-buffer 0.10.3", + "block-buffer 0.10.4", "const-oid", "crypto-common", "subtle", @@ -1138,9 +1277,9 @@ checksum = "69dde51e8fef5e12c1d65e0929b03d66e4c0c18282bc30ed2ca050ad6f44dd82" [[package]] name = "dotenvy" -version = "0.15.6" +version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03d8c417d7a8cb362e0c37e5d815f5eb7c37f79ff93707329d5a194e42e54ca0" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "drain" @@ -1153,21 +1292,21 @@ dependencies = [ [[package]] name = "dyn-clone" -version = "1.0.12" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "304e6508efa593091e97a9abbc10f90aa7ca635b6d2784feff3c89d41dd12272" +checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" [[package]] name = "ecdsa" -version = "0.16.8" +version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der", "digest 0.10.7", "elliptic-curve", "rfc6979", - "signature 2.0.0", + "signature 2.2.0", "spki", ] @@ -1188,7 +1327,7 @@ checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ "pkcs8", "serde", - "signature 2.0.0", + "signature 2.2.0", ] [[package]] @@ -1207,33 +1346,34 @@ dependencies = [ 
[[package]] name = "ed25519-dalek" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" +checksum = "1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0" dependencies = [ "curve25519-dalek 4.1.1", "ed25519 2.2.3", "rand_core 0.6.4", "serde", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", + "subtle", "zeroize", ] [[package]] name = "either" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" dependencies = [ "serde", ] [[package]] name = "elliptic-curve" -version = "0.13.6" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97ca172ae9dc9f9b779a6e3a65d308f2af74e5b8c921299075bdb4a0370e914" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", @@ -1262,9 +1402,9 @@ dependencies = [ [[package]] name = "ena" -version = "0.14.0" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3" +checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" dependencies = [ "log", ] @@ -1283,9 +1423,9 @@ checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "encoding_rs" -version = "0.8.31" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ "cfg-if", ] @@ -1299,20 +1439,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.37", -] - -[[package]] -name = "env_logger" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" -dependencies = [ - "humantime", - "is-terminal", - "log", - "regex", - "termcolor", + "syn 2.0.48", ] [[package]] @@ -1323,23 +1450,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" -dependencies = [ - "errno-dragonfly", - "libc", - "winapi", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ - "cc", "libc", + "windows-sys 0.52.0", ] [[package]] @@ -1361,9 +1477,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "eyre" -version = "0.6.8" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +checksum = "b6267a1fa6f59179ea4afc8e50fd8612a3cc60bc858f786ff877a4a8cb042799" dependencies = [ "indenter", "once_cell", @@ -1387,18 +1503,15 @@ checksum = 
"aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", + "syn 1.0.109", "synstructure", ] [[package]] name = "fastrand" -version = "1.8.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" -dependencies = [ - "instant", -] +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "fd-lock-rs" @@ -1421,22 +1534,28 @@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.1" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0870c84016d4b481be5c9f323c24f65e31e901ae618f0e80f4308fb00de1d2d" +checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" [[package]] name = "filetime" -version = "0.2.19" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e884668cd0c7480504233e951174ddc3b382f7c2666e3b7310b5c4e7b0c37f9" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", - "windows-sys 0.42.0", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", ] +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + [[package]] name = "fixedbitset" version = "0.4.2" @@ -1445,9 +1564,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.25" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "miniz_oxide", @@ -1487,9 +1606,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -1511,9 +1630,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -1526,9 +1645,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -1536,15 +1655,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +checksum = 
"dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -1564,38 +1683,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-macro" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] name = "futures-sink" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-util" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures-channel", "futures-core", @@ -1633,9 +1752,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.8" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", "libc", @@ -1644,9 +1763,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.27.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "gpt" @@ -1654,7 +1773,7 @@ version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8283e7331b8c93b9756e0cfdbcfb90312852f953c6faf9bf741e684cc3b6ad69" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "crc", "log", "uuid", @@ -1673,17 +1792,36 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", - "http", - "indexmap 1.9.2", + "http 0.2.11", + "indexmap 2.1.0", + "slab", + "tokio", + 
"tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 1.0.0", + "indexmap 2.1.0", "slab", "tokio", "tokio-util", @@ -1696,14 +1834,21 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +[[package]] +name = "half" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc52e53916c08643f1b56ec082790d1e86a32e58dc5268f897f313fbae7b4872" +dependencies = [ + "cfg-if", + "crunchy", +] + [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -dependencies = [ - "ahash 0.7.6", -] [[package]] name = "hashbrown" @@ -1711,22 +1856,26 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.7", ] [[package]] name = "hashbrown" -version = "0.14.1" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +dependencies = [ + "ahash 0.8.7", + "allocator-api2", +] [[package]] name = "hashlink" -version = "0.8.1" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69fe1fcf8b4278d860ad0548329f892a3631fb63f82574df68275f34cdbe0ffa" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.12.3", + "hashbrown 0.14.3", ] [[package]] @@ -1747,12 +1896,12 @@ dependencies = [ "lazy_async_pool", "models", "pin-project", + "rpc-toolkit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", "tokio", "tokio-stream", "tracing", - "yajrc 0.1.0 (git+https://github.com/dr-bonez/yajrc.git?branch=develop)", ] [[package]] @@ -1766,18 +1915,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] - -[[package]] -name = "hermit-abi" -version = "0.3.1" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" +checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f" [[package]] name = "hex" @@ -1793,9 +1933,9 @@ checksum = "85ef6b41c333e6dd2a4aaa59125a19b633cd17e7aaf372b2260809777bcdef4a" [[package]] name = "hkdf" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac 0.12.1", ] @@ -1821,32 +1961,66 @@ dependencies = [ [[package]] name = "home" -version = "0.5.5" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "http" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +dependencies = [ + "bytes", + "fnv", + "itoa", ] [[package]] name = "http" -version = "0.2.9" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea" dependencies = [ "bytes", "fnv", - "itoa 1.0.5", + "itoa", ] [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", + "http 0.2.11", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes", + "http 1.0.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" +dependencies = [ + "bytes", + "futures-util", + "http 1.0.0", + "http-body 1.0.0", "pin-project-lite", ] @@ -1858,40 +2032,53 @@ checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" - -[[package]] -name = "humantime" -version = "2.1.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.27" +version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2", - "http", - "http-body", + "h2 0.3.24", + "http 0.2.11", + "http-body 0.4.6", "httparse", "httpdate", - "itoa 1.0.5", + "itoa", "pin-project-lite", - "socket2 0.4.7", + "socket2", "tokio", "tower-service", "tracing", "want", ] +[[package]] +name = "hyper" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5aa53871fc917b1a9ed87b683a5d86db645e23acb32c2e0785a353e522fb75" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.2", + "http 1.0.0", + "http-body 1.0.0", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "tokio", +] + [[package]] name = "hyper-tls" version = "0.5.0" @@ -1899,51 +2086,51 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ 
"bytes", - "hyper", + "hyper 0.14.28", "native-tls", "tokio", "tokio-native-tls", ] [[package]] -name = "hyper-ws-listener" -version = "0.3.0" +name = "hyper-util" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcbfe4981e45b0a7403a55d4af12f8d30e173e722409658c3857243990e72180" +checksum = "bdea9aac0dbe5a9240d68cfd9501e2db94222c6dc06843e06640b9e07f0fdc67" dependencies = [ - "anyhow", - "base64 0.21.4", - "env_logger", - "futures", - "hyper", - "log", - "sha-1", + "bytes", + "futures-channel", + "futures-util", + "http 1.0.0", + "http-body 1.0.0", + "hyper 1.1.0", + "pin-project-lite", + "socket2", "tokio", - "tokio-tungstenite", + "tracing", ] [[package]] name = "iana-time-zone" -version = "0.1.53" +version = "0.1.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" +checksum = "b6a67363e2aa4443928ce15e57ebae94fd8949958fd1223c4cfc0cd473ad7539" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "winapi", + "windows-core", ] [[package]] name = "iana-time-zone-haiku" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" dependencies = [ - "cxx", - "cxx-build", + "cc", ] [[package]] @@ -1983,11 +2170,21 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "imbl" -version = "2.0.2" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b4555023847ca2cd6fd11f20b553886e6981c7e8aee9b3e7e960b4b17fb440" +checksum = "978d142c8028edf52095703af2fad11d6f611af1246685725d6b850634647085" dependencies = [ "bitmaps", "imbl-sized-chunks", @@ -1999,9 +2196,9 @@ dependencies = [ [[package]] name = "imbl-sized-chunks" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6957ea0b2541c5ca561d3ef4538044af79f8a05a1eb3a3b148936aaceaa1076" +checksum = "144006fb58ed787dcae3f54575ff4349755b00ccc99f4b4873860b654be1ed63" dependencies = [ "bitmaps", ] @@ -2009,7 +2206,7 @@ dependencies = [ [[package]] name = "imbl-value" version = "0.1.0" -source = "git+https://github.com/Start9Labs/imbl-value.git#929395141c3a882ac366c12ac9402d0ebaa2201b" +source = "git+https://github.com/Start9Labs/imbl-value.git#48dc39a762a3b4f9300d3b9f850cbd394e777ae0" dependencies = [ "imbl", "serde", @@ -2045,9 +2242,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indexmap" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", @@ -2056,12 +2253,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.1", + "hashbrown 0.14.3", "serde", ] @@ -2098,20 +2295,20 @@ dependencies = [ ] [[package]] -name = "io-lifetimes" -version = "1.0.4" +name = "integer-encoding" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7d6c6f8c91b4b9ed43484ad1a938e393caf35960fce7f82a040497207bd8e9e" +checksum = "924df4f0e24e2e7f9cdd90babb0b96f93b20f3ecfa949ea9e6613756b8c8e1bf" dependencies = [ - "libc", - "windows-sys 0.42.0", + "async-trait", + "tokio", ] [[package]] name = "ipnet" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" dependencies = [ "serde", ] @@ -2128,14 +2325,13 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.4" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b6b32576413a8e69b90e952e4a026476040d81017b80445deda5f2d3921857" +checksum = "0bad00257d07be169d870ab665980b06cdb366d792ad690bf2e76876dc503455" dependencies = [ - "hermit-abi 0.3.1", - "io-lifetimes", + "hermit-abi 0.3.4", "rustix", - "windows-sys 0.45.0", + "windows-sys 0.52.0", ] [[package]] @@ -2177,24 +2373,18 @@ dependencies = [ [[package]] name = "itertools" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" dependencies = [ "either", ] [[package]] name = "itoa" -version = "0.4.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" - -[[package]] -name = "itoa" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "jaq-core" @@ -2202,10 +2392,10 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb52eeac20f256459e909bd4a03bb8c4fab6a1fdbb8ed52d00f644152df48ece" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.7", "dyn-clone", "hifijson", - "indexmap 1.9.2", + "indexmap 1.9.3", "itertools 0.10.5", "jaq-parse", "log", @@ -2236,12 +2426,12 @@ dependencies = [ [[package]] name = "josekit" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5754487a088f527b1407df470db8e654e4064dccbbe1fe850e0773721e9962b7" +checksum = "cd20997283339a19226445db97d632c8dc7adb6b8172537fe0e9e540fb141df2" dependencies = [ "anyhow", - "base64 0.21.4", + "base64 0.21.7", "flate2", "once_cell", "openssl", @@ -2254,9 +2444,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.60" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49409df3e3bf0856b916e2ceaca09ee28e6871cf7d9ce97a692cacfdb2a25a47" +checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1" dependencies = [ "wasm-bindgen", ] @@ -2294,30 +2484,30 @@ dependencies = [ [[package]] name = 
"keccak" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" dependencies = [ "cpufeatures", ] [[package]] name = "lalrpop" -version = "0.19.8" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b30455341b0e18f276fa64540aff54deafb54c589de6aca68659c63dd2d5d823" +checksum = "da4081d44f4611b66c6dd725e6de3169f9f63905421e8626fcb86b6a898998b8" dependencies = [ "ascii-canvas", - "atty", "bit-set", "diff", "ena", + "is-terminal", "itertools 0.10.5", "lalrpop-util", "petgraph", "pico-args", "regex", - "regex-syntax 0.6.28", + "regex-syntax 0.7.5", "string_cache", "term", "tiny-keccak", @@ -2326,9 +2516,9 @@ dependencies = [ [[package]] name = "lalrpop-util" -version = "0.19.8" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf796c978e9b4d983414f4caedc9273aa33ee214c5b887bd55fde84c85d2dc4" +checksum = "3f35c735096c0293d313e8f2a641627472b83d01b937177fe76e5e2708d31e0d" dependencies = [ "regex", ] @@ -2343,6 +2533,12 @@ dependencies = [ "futures", ] +[[package]] +name = "lazy_format" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e479e99b287d578ed5f6cd4c92cdf48db219088adb9c5b14f7c155b71dfba792" + [[package]] name = "lazy_static" version = "1.4.0" @@ -2354,21 +2550,32 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.149" +version = "0.2.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" +checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" [[package]] name = "libm" -version = "0.2.6" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libredox" +version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" +checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +dependencies = [ + "bitflags 2.4.2", + "libc", + "redox_syscall 0.4.1", +] [[package]] name = "libsqlite3-sys" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" +checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" dependencies = [ "cc", "pkg-config", @@ -2385,15 +2592,6 @@ dependencies = [ "serde_test", ] -[[package]] -name = "link-cplusplus" -version = "1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" -dependencies = [ - "cc", -] - [[package]] name = "linked-hash-map" version = "0.5.6" @@ -2402,15 +2600,15 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.1.4" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.11" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -2433,9 +2631,15 @@ dependencies = [ [[package]] name = "matches" -version = "0.1.9" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "mbrman" @@ -2452,18 +2656,19 @@ dependencies = [ [[package]] name = "md-5" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ + "cfg-if", "digest 0.10.7", ] [[package]] name = "memchr" -version = "2.6.4" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" [[package]] name = "memoffset" @@ -2485,9 +2690,9 @@ dependencies = [ [[package]] name = "mime" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "minimal-lexical" @@ -2497,20 +2702,21 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.6.2" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" dependencies = [ "adler", ] [[package]] name = "mio" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", + "log", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] @@ -2519,19 +2725,20 @@ dependencies = [ name = "models" version = "0.1.0" dependencies = [ - "base64 0.21.4", + "base64 0.21.7", "color-eyre", - "ed25519-dalek 2.0.0", + "ed25519-dalek 2.1.0", "emver", "ipnet", "lazy_static", "mbrman", + "num_enum", "openssl", "patch-db", "rand 0.8.5", "regex", "reqwest", - "rpc-toolkit", + "rpc-toolkit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_json", "sqlx", @@ -2622,7 +2829,7 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "cfg-if", "libc", ] @@ -2663,9 +2870,9 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.3" +version = "0.4.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" dependencies = [ "autocfg", "num-integer", @@ -2674,9 +2881,9 @@ dependencies = [ [[package]] name = "num-bigint-dig" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2399c9463abc5f909349d8aa9ba080e0b88b3ce2885389b60b993f39b1a56905" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" dependencies = [ "byteorder", "lazy_static", @@ -2733,9 +2940,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", "libm", @@ -2743,33 +2950,33 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi 0.3.4", "libc", ] [[package]] name = "num_enum" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70bf6736f74634d299d00086f02986875b3c2d924781a6a2cb6c201e73da0ceb" +checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ea360eafe1022f7cc56cd7b869ed57330fb2453d0c7831d99b74c65d2f5597" +checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] @@ -2780,18 +2987,18 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.30.2" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b8c786513eb403643f2a88c244c2aaa270ef2153f55094587d0c48a3cf22a83" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "opaque-debug" @@ -2805,7 +3012,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c75a0ec2d1b302412fb503224289325fcc0e44600176864804c7211b055cfd58" dependencies = [ - "base64 0.21.4", + "base64 0.21.7", "byteorder", "md-5", "sha2 0.10.8", @@ -2814,11 +3021,11 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.57" +version = "0.10.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" +checksum = "15c9d69dd87a29568d4d017cfe8ec518706046a05184e5aea92d0af890b803c8" dependencies = [ - "bitflags 2.4.1", + "bitflags 
2.4.2", "cfg-if", "foreign-types", "libc", @@ -2829,13 +3036,13 @@ dependencies = [ [[package]] name = "openssl-macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", + "syn 2.0.48", ] [[package]] @@ -2846,18 +3053,18 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "300.1.5+3.1.3" +version = "300.2.1+3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "559068e4c12950d7dcaa1857a61725c0d38d4fc03ff8e070ab31a75d6e316491" +checksum = "3fe476c29791a5ca0d1273c697e96085bbabbbea2ef7afd5617e78a4b40332d3" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.93" +version = "0.9.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" +checksum = "22e1bf214306098e4832460f797824c05d25aacdf896f64a985fb0fd992454ae" dependencies = [ "cc", "libc", @@ -2868,9 +3075,9 @@ dependencies = [ [[package]] name = "os_str_bytes" -version = "6.4.1" +version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" +checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" [[package]] name = "overload" @@ -2908,6 +3115,20 @@ dependencies = [ "sha2 0.10.8", ] +[[package]] +name = "p521" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc9e2161f1f215afdfce23677034ae137bbd45016a880c2eb3ba8eb95f085b2" +dependencies = [ + "base16ct", + "ecdsa", + "elliptic-curve", + "primeorder", + "rand_core 0.6.4", + "sha2 0.10.8", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -2920,22 +3141,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.6" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", + "redox_syscall 0.4.1", "smallvec", - "windows-sys 0.42.0", + "windows-targets 0.48.5", ] [[package]] name = "paste" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "patch-db" @@ -2965,7 +3186,7 @@ version = "0.1.0" dependencies = [ "patch-db-macro-internals", "proc-macro2", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -2975,7 +3196,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -2999,25 +3220,26 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.5.3" +version = "2.7.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4257b4a04d91f7e9e6290be5d3da4804dd5784fafde3a497d73eb2b4a158c30a" +checksum = "1f200d8d83c44a45b21764d1916299752ca035d15ecd46faca3e9a2a2bf6ad06" dependencies = [ + "memchr", "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.5.3" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "241cda393b0cdd65e62e07e12454f1f25d57017dcc514b1514cd3c4645e3a0a6" +checksum = "bcd6ab1236bbdb3a49027e920e693192ebfe8913f6d60e294de57463a493cfde" dependencies = [ "pest", "pest_generator", @@ -3025,22 +3247,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.5.3" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46b53634d8c8196302953c74d5352f33d0c512a9499bd2ce468fc9f4128fa27c" +checksum = "2a31940305ffc96863a735bef7c7994a00b325a7138fdbc5bda0f1a0476d3275" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 1.0.107", + "syn 2.0.48", ] [[package]] name = "pest_meta" -version = "2.5.3" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ef4f1332a8d4678b41966bb4cc1d0676880e84183a1ecc3f4b69f03e99c7a51" +checksum = "a7ff62f5259e53b78d1af898941cdcdccfae7385cf7d793a6e55de5d05bb4b7d" dependencies = [ "once_cell", "pest", @@ -3049,12 +3271,12 @@ dependencies = [ [[package]] name = "petgraph" -version = "0.6.2" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5014253a1331579ce62aa67443b4a658c5e7dd03d4bc6d302b94474888143" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 1.9.2", + "indexmap 2.1.0", ] [[package]] @@ -3068,28 +3290,28 @@ dependencies = [ [[package]] name = "pico-args" -version = "0.4.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" [[package]] name = "pin-project" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "0302c4a0442c456bd56f841aee5c3bfd17967563f6fadc9ceb9f9c23cf3807e0" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] @@ -3127,21 +3349,27 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.26" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" +checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" [[package]] name = "platforms" -version = "3.1.2" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4503fa043bf02cee09a9582e9554b4c6403b2ef55e4612e96561d294419429f8" +checksum = "626dec3cac7cc0e1577a2ec3fc496277ec2baa084bebad95bb6fdbfae235f84c" [[package]] name = "portable-atomic" -version = "1.4.3" +version = "1.6.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + +[[package]] +name = "powerfmt" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31114a898e107c51bb1609ffaf55a0e011cf6a4d7f1170d0015a165082c0338b" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" @@ -3171,63 +3399,46 @@ dependencies = [ [[package]] name = "primeorder" -version = "0.13.2" +version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro-crate" -version = "1.2.1" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" dependencies = [ - "once_cell", - "thiserror", - "toml 0.5.10", + "toml_edit 0.21.0", ] [[package]] name = "proc-macro2" -version = "1.0.67" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] -[[package]] -name = "procfs" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943ca7f9f29bab5844ecd8fdb3992c5969b6622bb9609b9502fef9b4310e3f1f" -dependencies = [ - "bitflags 1.3.2", - "byteorder", - "chrono", - "flate2", - "hex", - "lazy_static", - "rustix", -] - [[package]] name = "proptest" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e" +checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.4.1", + "bitflags 2.4.2", "lazy_static", "num-traits", "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.7.5", + "regex-syntax 0.8.2", "rusty-fork", "tempfile", "unarray", @@ -3241,7 +3452,7 @@ checksum = "9cf16337405ca084e9c78985114633b6827711d22b9e6ef6c6c0d665eb3f0b6e" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] @@ -3268,9 +3479,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.33" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -3340,7 +3551,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.8", + "getrandom 0.2.12", ] [[package]] @@ -3385,26 +3596,35 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + 
"bitflags 1.3.2", +] + [[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ - "getrandom 0.2.8", - "redox_syscall 0.2.16", + "getrandom 0.2.12", + "libredox", "thiserror", ] [[package]] name = "regex" -version = "1.10.2" +version = "1.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", + "regex-automata 0.4.5", "regex-syntax 0.8.2", ] @@ -3414,14 +3634,14 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" dependencies = [ - "regex-syntax 0.6.28", + "regex-syntax 0.6.29", ] [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", "memchr", @@ -3430,9 +3650,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.28" +version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" @@ -3446,32 +3666,23 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] - [[package]] name = "reqwest" -version = "0.11.22" +version = "0.11.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41" dependencies = [ - "base64 0.21.4", + "base64 0.21.7", "bytes", "cookie 0.16.2", - "cookie_store 0.16.1", + "cookie_store 0.16.2", "encoding_rs", "futures-core", "futures-util", - "h2", - "http", - "http-body", - "hyper", + "h2 0.3.24", + "http 0.2.11", + "http-body 0.4.6", + "hyper 0.14.28", "hyper-tls", "ipnet", "js-sys", @@ -3522,50 +3733,78 @@ dependencies = [ [[package]] name = "ring" -version = "0.16.20" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" dependencies = [ "cc", + "getrandom 0.2.12", "libc", - "once_cell", - "spin 0.5.2", + "spin 0.9.8", "untrusted", - "web-sys", - "winapi", + "windows-sys 0.48.0", ] [[package]] name = "rpassword" -version = "7.2.0" +version = "7.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6678cf63ab3491898c0d021b493c94c9b221d91295294a2a5746eacbe5928322" +checksum = "80472be3c897911d0137b2d2b9055faf6eeac5b14e324073d83bc17b191d7e3f" dependencies = [ "libc", "rtoolbox", - "winapi", + "windows-sys 0.48.0", ] [[package]] name = "rpc-toolkit" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5353673ffd8265292281141560d2b851e4da49e83e2f5e255fd473736d45ee10" +checksum = "c48252a30abb9426a3239fa8dfd2c8dd2647bb24db0b6145db2df04ae53fe647" dependencies = [ "clap 3.2.25", "futures", - "hyper", + "hyper 0.14.28", "lazy_static", "openssl", "reqwest", - "rpc-toolkit-macro", + "rpc-toolkit-macro 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "serde", "serde_cbor 0.11.2", "serde_json", "thiserror", "tokio", "url", - "yajrc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "yajrc", +] + +[[package]] +name = "rpc-toolkit" +version = "0.2.3" +source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#9e989e23adb440bc72faa585b28e5aa2667a0a0d" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "clap 4.4.18", + "futures", + "http 1.0.0", + "http-body-util", + "imbl-value", + "itertools 0.12.0", + "lazy_format", + "lazy_static", + "openssl", + "pin-project", + "reqwest", + "rpc-toolkit-macro 0.2.2 (git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", + "serde", + "serde_json", + "thiserror", + "tokio", + "tokio-stream", + "url", + "yajrc", ] [[package]] @@ -3575,8 +3814,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8e4b9cb00baf2d61bcd35e98d67dcb760382a3b4540df7e63b38d053c8a7b8b" dependencies = [ "proc-macro2", - "rpc-toolkit-macro-internals", - "syn 1.0.107", + "rpc-toolkit-macro-internals 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.109", +] + +[[package]] +name = "rpc-toolkit-macro" +version = "0.2.2" +source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#9e989e23adb440bc72faa585b28e5aa2667a0a0d" +dependencies = [ + "proc-macro2", + "rpc-toolkit-macro-internals 0.2.2 (git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", + "syn 1.0.109", ] [[package]] @@ -3587,27 +3836,36 @@ checksum = "d3e2ce21b936feaecdab9c9a8e75b9dca64374ccc11951a58045ad6559b75f42" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", + "syn 1.0.109", +] + +[[package]] +name = "rpc-toolkit-macro-internals" +version = "0.2.2" +source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#9e989e23adb440bc72faa585b28e5aa2667a0a0d" +dependencies = [ + "itertools 0.12.0", + "proc-macro2", + "quote", + "syn 1.0.109", ] [[package]] name = "rsa" -version = "0.9.2" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" dependencies = [ - "byteorder", "const-oid", "digest 0.10.7", "num-bigint-dig", "num-integer", - "num-iter", "num-traits", "pkcs1", "pkcs8", "rand_core 0.6.4", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", "spki", "subtle", "zeroize", @@ -3615,42 +3873,42 @@ dependencies = [ [[package]] name = "rtoolbox" -version = "0.0.1" +version = "0.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "034e22c514f5c0cb8a10ff341b9b048b5ceb21591f31c8f44c43b960f9b3524a" +checksum = 
"c247d24e63230cdb56463ae328478bd5eac8b8faa8c69461a77e8e323afac90e" dependencies = [ "libc", - "winapi", + "windows-sys 0.48.0", ] [[package]] name = "rust-argon2" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b50162d19404029c1ceca6f6980fe40d45c8b369f6f44446fa14bb39573b5bb9" +checksum = "a5885493fdf0be6cdff808d1533ce878d21cfa49c7086fa00c66355cd9141bfc" dependencies = [ - "base64 0.13.1", + "base64 0.21.7", "blake2b_simd", - "constant_time_eq 0.1.5", + "constant_time_eq", "crossbeam-utils", ] [[package]] name = "rust-argon2" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e71971821b3ae0e769e4a4328dbcb517607b434db7697e9aba17203ec14e46a" +checksum = "9d9848531d60c9cbbcf9d166c885316c24bc0e2a9d3eba0956bb6cbbd79bc6e8" dependencies = [ - "base64 0.21.4", + "base64 0.21.7", "blake2b_simd", - "constant_time_eq 0.3.0", + "constant_time_eq", ] [[package]] name = "rustc-demangle" -version = "0.1.21" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" [[package]] name = "rustc_version" @@ -3663,99 +3921,138 @@ dependencies = [ [[package]] name = "rustix" -version = "0.36.6" +version = "0.38.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4feacf7db682c6c329c4ede12649cd36ecab0f3be5b7d74e6a20304725db4549" +checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.2", "errno", - "io-lifetimes", "libc", "linux-raw-sys", - "windows-sys 0.42.0", + "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.21.7" +version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd8d6c9f025a446bc4d18ad9632e69aec8f287aa84499ee335599fabd20c3fd8" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ - "log", "ring", - "rustls-webpki", + "rustls-webpki 0.101.7", "sct", ] +[[package]] +name = "rustls" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" +dependencies = [ + "log", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.1", + "subtle", + "zeroize", +] + [[package]] name = "rustls-pemfile" -version = "1.0.2" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pki-types" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e9d979b3ce68192e42760c7810125eb6cf2ea10efae545a156063e61f314e2a" + +[[package]] +name = "rustls-webpki" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "base64 0.21.4", + "ring", + "untrusted", ] [[package]] name = "rustls-webpki" -version = "0.101.6" +version = "0.102.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe" +checksum = 
"ef4ca26037c909dedb327b48c3327d0ba91d3dd3c4e05dad328f210ffb68e95b" dependencies = [ "ring", + "rustls-pki-types", "untrusted", ] [[package]] name = "rustversion" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "rusty-fork" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + +[[package]] +name = "rustyline-async" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca4447465ceb8c01c253cc81660b242547c58e4a59c85b13294a6e70de8b9e" dependencies = [ - "fnv", - "quick-error", - "tempfile", - "wait-timeout", + "crossterm", + "futures-channel", + "futures-util", + "pin-project", + "thingbuf", + "thiserror", + "unicode-segmentation", + "unicode-width", ] [[package]] name = "ryu" -version = "1.0.12" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] name = "schannel" -version = "0.1.21" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" dependencies = [ - "windows-sys 0.42.0", + "windows-sys 0.52.0", ] [[package]] name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "scratch" -version = "1.0.3" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ "ring", "untrusted", @@ -3777,9 +4074,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.7.0" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ "bitflags 1.3.2", "core-foundation", @@ -3790,9 +4087,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.6.1" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0160a13a177a45bfb43ce71c01580998474f556ad854dcbca936dd2841a5c556" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -3800,18 +4097,18 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.20" +version = 
"1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" +checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.152" +version = "1.0.195" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02" dependencies = [ "serde_derive", ] @@ -3829,7 +4126,7 @@ dependencies = [ name = "serde_cbor" version = "0.11.1" dependencies = [ - "half", + "half 1.8.2", "serde", ] @@ -3839,47 +4136,57 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" dependencies = [ - "half", + "half 1.8.2", "serde", ] [[package]] name = "serde_derive" -version = "1.0.152" +version = "1.0.195" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", + "syn 2.0.48", ] [[package]] name = "serde_json" -version = "1.0.91" +version = "1.0.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4" dependencies = [ - "indexmap 1.9.2", - "itoa 1.0.5", + "indexmap 2.1.0", + "itoa", "ryu", "serde", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd154a240de39fdebcf5775d2675c204d7c13cf39a4c697be6493c8e734337c" +dependencies = [ + "itoa", + "serde", +] + [[package]] name = "serde_spanned" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" +checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" dependencies = [ "serde", ] [[package]] name = "serde_test" -version = "1.0.152" +version = "1.0.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3611210d2d67e3513204742004d6ac6f589e521861dabb0f649b070eea8bed9e" +checksum = "5a2f49ace1498612d14f7e0b8245519584db8299541dfe31a06374a828d620ab" dependencies = [ "serde", ] @@ -3891,22 +4198,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.5", + "itoa", "ryu", "serde", ] [[package]] name = "serde_with" -version = "3.4.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +checksum = "f5c9fdb6b00a489875b22efd4b78fe2b363b72265cc5f6eb2e2b9ee270e6140c" dependencies = [ - "base64 0.21.4", + "base64 0.21.7", "chrono", "hex", - "indexmap 1.9.2", - "indexmap 2.0.2", + "indexmap 1.9.3", + "indexmap 2.1.0", "serde", "serde_json", "serde_with_macros", @@ -3915,14 +4222,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.4.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +checksum = "dbff351eb4b33600a2e138dfa0b10b65a238ea8ff8fb2387c422c5022a3e8298" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] @@ -3931,7 +4238,7 @@ version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" dependencies = [ - "indexmap 1.9.2", + "indexmap 1.9.3", "ryu", "serde", "yaml-rust", @@ -3939,33 +4246,22 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.25" +version = "0.9.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" +checksum = "b1bf28c79a99f70ee1f1d83d10c875d2e70618417fda01ad1785e027579d9d38" dependencies = [ - "indexmap 2.0.2", - "itoa 1.0.5", + "indexmap 2.1.0", + "itoa", "ryu", "serde", "unsafe-libyaml", ] -[[package]] -name = "sha-1" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -4010,18 +4306,45 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] +[[package]] +name = "shell-words" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" + +[[package]] +name = "signal-hook" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-mio" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" +dependencies = [ + "libc", + "mio", + "signal-hook", +] + [[package]] name = "signal-hook-registry" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" dependencies = [ "libc", ] @@ -4034,9 +4357,9 @@ checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" [[package]] name = "signature" -version = "2.0.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe458c98333f9c8152221191a77e2a44e8325d0193484af2e9421a53019e57d" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -4055,40 +4378,30 @@ dependencies = [ [[package]] name = "siphasher" -version = "0.3.10" +version = 
"0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "slab" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ "autocfg", ] [[package]] name = "smallvec" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" - -[[package]] -name = "socket2" -version = "0.4.7" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" -dependencies = [ - "libc", - "winapi", -] +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" [[package]] name = "socket2" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", "windows-sys 0.48.0", @@ -4111,9 +4424,9 @@ dependencies = [ [[package]] name = "spki" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", @@ -4121,20 +4434,20 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" +checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" dependencies = [ - "itertools 0.10.5", + "itertools 0.12.0", "nom", "unicode_categories", ] [[package]] name = "sqlx" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33" +checksum = "dba03c279da73694ef99763320dea58b51095dfe87d001b1d4b5fe78ba8763cf" dependencies = [ "sqlx-core", "sqlx-macros", @@ -4145,11 +4458,11 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d" +checksum = "d84b0a3c3739e220d94b3239fd69fb1f74bc36e16643423bd99de3b43c21bfbd" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.7", "atoi", "byteorder", "bytes", @@ -4166,13 +4479,13 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.0.2", + "indexmap 2.1.0", "log", "memchr", "once_cell", "paste", "percent-encoding", - "rustls", + "rustls 0.21.10", "rustls-pemfile", "serde", "serde_json", @@ -4189,23 +4502,24 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec" +checksum = "89961c00dc4d7dffb7aee214964b065072bff69e36ddb9e2c107541f75e4f2a5" dependencies = [ 
"proc-macro2", "quote", "sqlx-core", "sqlx-macros-core", - "syn 1.0.107", + "syn 1.0.109", ] [[package]] name = "sqlx-macros-core" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc" +checksum = "d0bd4519486723648186a08785143599760f7cc81c52334a55d6a83ea1e20841" dependencies = [ + "atomic-write-file", "dotenvy", "either", "heck", @@ -4220,7 +4534,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 1.0.107", + "syn 1.0.109", "tempfile", "tokio", "url", @@ -4228,13 +4542,13 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db" +checksum = "e37195395df71fd068f6e2082247891bc11e3289624bbc776a0cdfa1ca7f1ea4" dependencies = [ "atoi", - "base64 0.21.4", - "bitflags 2.4.1", + "base64 0.21.7", + "bitflags 2.4.2", "byteorder", "bytes", "chrono", @@ -4250,7 +4564,7 @@ dependencies = [ "hex", "hkdf", "hmac 0.12.1", - "itoa 1.0.5", + "itoa", "log", "md-5", "memchr", @@ -4271,13 +4585,13 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624" +checksum = "d6ac0ac3b7ccd10cc96c7ab29791a7dd236bd94021f31eec7ba3d46a74aa1c24" dependencies = [ "atoi", - "base64 0.21.4", - "bitflags 2.4.1", + "base64 0.21.7", + "bitflags 2.4.2", "byteorder", "chrono", "crc", @@ -4291,7 +4605,7 @@ dependencies = [ "hkdf", "hmac 0.12.1", "home", - "itoa 1.0.5", + "itoa", "log", "md-5", "memchr", @@ -4311,9 +4625,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f" +checksum = "210976b7d948c7ba9fced8ca835b11cbb2d677c59c79de41ac0d397e14547490" dependencies = [ "atoi", "chrono", @@ -4330,6 +4644,7 @@ dependencies = [ "sqlx-core", "tracing", "url", + "urlencoding", ] [[package]] @@ -4353,9 +4668,9 @@ dependencies = [ "convert_case 0.6.0", "proc-macro2", "quote", - "regex-syntax 0.6.28", + "regex-syntax 0.6.29", "strsim 0.10.0", - "syn 2.0.37", + "syn 2.0.48", "unicode-width", ] @@ -4382,18 +4697,19 @@ dependencies = [ [[package]] name = "ssh-key" -version = "0.6.2" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2180b3bc4955efd5661a97658d3cf4c8107e0d132f619195afe9486c13cca313" +checksum = "01f8f4ea73476c0aa5d5e6a75ce1e8634e2c3f82005ef3bbed21547ac57f2bf7" dependencies = [ - "ed25519-dalek 2.0.0", + "ed25519-dalek 2.1.0", "p256", "p384", + "p521", "rand_core 0.6.4", "rsa", "sec1", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", "ssh-cipher", "ssh-encoding", "subtle", @@ -4408,17 +4724,19 @@ dependencies = [ "async-compression", "async-stream", "async-trait", + "axum", + "axum-server", "base32", - "base64 0.21.4", + "base64 0.21.7", "base64ct", "basic-cookies", + "blake3", "bytes", "chrono", "ciborium", - "clap 3.2.25", + "clap 4.4.18", "color-eyre", "console", - "container-init", "cookie 0.18.0", "cookie_store 0.20.0", "current_platform", @@ -4426,7 +4744,7 @@ dependencies = [ "divrem", "ed25519 2.2.3", "ed25519-dalek 1.0.1", - "ed25519-dalek 2.0.0", + "ed25519-dalek 2.1.0", "emver", 
"fd-lock-rs", "futures", @@ -4434,22 +4752,23 @@ dependencies = [ "helpers", "hex", "hmac 0.12.1", - "http", - "hyper", - "hyper-ws-listener", + "http 1.0.0", "imbl", "imbl-value", "include_dir", - "indexmap 2.0.2", + "indexmap 2.1.0", "indicatif", + "integer-encoding", "ipnet", "iprange", "isocountry", - "itertools 0.11.0", + "itertools 0.12.0", "jaq-core", "jaq-std", "josekit", "jsonpath_lib", + "lazy_async_pool", + "lazy_format", "lazy_static", "libc", "log", @@ -4460,6 +4779,7 @@ dependencies = [ "nom", "num", "num_enum", + "once_cell", "openssh-keys", "openssl", "p256", @@ -4475,15 +4795,16 @@ dependencies = [ "reqwest", "reqwest_cookie_store", "rpassword", - "rpc-toolkit", - "rust-argon2 2.0.0", - "scopeguard", + "rpc-toolkit 0.2.3 (git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", + "rust-argon2 2.1.0", + "rustyline-async", "semver", "serde", "serde_json", "serde_with", - "serde_yaml 0.9.25", + "serde_yaml 0.9.30", "sha2 0.10.8", + "shell-words", "simple-logging", "sqlx", "sscanf", @@ -4498,7 +4819,7 @@ dependencies = [ "tokio-tar", "tokio-tungstenite", "tokio-util", - "toml 0.8.2", + "toml 0.8.8", "torut", "tracing", "tracing-error", @@ -4528,9 +4849,9 @@ dependencies = [ [[package]] name = "string_cache" -version = "0.8.4" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213494b7a2b503146286049378ce02b482200519accc31872ee8be91fa820a08" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" dependencies = [ "new_debug_unreachable", "once_cell", @@ -4541,10 +4862,11 @@ dependencies = [ [[package]] name = "stringprep" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" dependencies = [ + "finl_unicode", "unicode-bidi", "unicode-normalization", ] @@ -4563,15 +4885,15 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "subtle" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" -version = "1.0.107" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", @@ -4580,15 +4902,21 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.37" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + [[package]] name = "synstructure" version = "0.12.6" @@ -4597,7 +4925,7 @@ checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", + "syn 1.0.109", "unicode-xid", ] @@ 
-4636,21 +4964,20 @@ checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" dependencies = [ "filetime", "libc", - "xattr 1.0.1", + "xattr 1.3.1", ] [[package]] name = "tempfile" -version = "3.3.0" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" dependencies = [ "cfg-if", "fastrand", - "libc", - "redox_syscall 0.2.16", - "remove_dir_all", - "winapi", + "redox_syscall 0.4.1", + "rustix", + "windows-sys 0.52.0", ] [[package]] @@ -4688,24 +5015,34 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" +[[package]] +name = "thingbuf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4706f1bfb859af03f099ada2de3cea3e515843c2d3e93b7893f16d94a37f9415" +dependencies = [ + "parking_lot", + "pin-project", +] + [[package]] name = "thiserror" -version = "1.0.49" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4" +checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.49" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc" +checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] @@ -4721,20 +5058,23 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.4" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ + "cfg-if", "once_cell", ] [[package]] name = "time" -version = "0.3.17" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +checksum = "f657ba42c3f86e7680e53c8cd3af8abbe56b5491790b46e22e19c0d57463583e" dependencies = [ - "itoa 1.0.5", + "deranged", + "itoa", + "powerfmt", "serde", "time-core", "time-macros", @@ -4742,15 +5082,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.0" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.6" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +checksum = "26197e33420244aeb70c3e8c78376ca46571bc4e701e4791c2cd9f57dcb3a43f" dependencies = [ "time-core", ] @@ -4775,15 +5115,15 @@ dependencies = [ [[package]] name = "tinyvec_macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" +checksum = 
"1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.33.0" +version = "1.35.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" +checksum = "c89b4efa943be685f629b149f53829423f8f5531ea21249408e8e2f8671ec104" dependencies = [ "backtrace", "bytes", @@ -4793,20 +5133,20 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.4", + "socket2", "tokio-macros", "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] @@ -4821,11 +5161,12 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.24.1" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ - "rustls", + "rustls 0.22.2", + "rustls-pki-types", "tokio", ] @@ -4869,9 +5210,9 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.20.1" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +checksum = "c83b561d025642014097b66e6c1bb422783339e0909e4429cde4749d1990bc38" dependencies = [ "futures-util", "log", @@ -4883,9 +5224,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ "bytes", "futures-core", @@ -4895,15 +5236,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "toml" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f" -dependencies = [ - "serde", -] - [[package]] name = "toml" version = "0.7.8" @@ -4918,21 +5250,21 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.2" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d" +checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.20.2", + "toml_edit 0.21.0", ] [[package]] name = "toml_datetime" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" dependencies = [ "serde", ] @@ -4943,7 +5275,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.1.0", "serde", "serde_spanned", "toml_datetime", @@ -4952,11 +5284,11 @@ dependencies = [ [[package]] name = 
"toml_edit" -version = "0.20.2" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" +checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" dependencies = [ - "indexmap 2.0.2", + "indexmap 2.1.0", "serde", "serde_spanned", "toml_datetime", @@ -4983,6 +5315,28 @@ dependencies = [ "tokio", ] +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + [[package]] name = "tower-service" version = "0.3.2" @@ -4991,9 +5345,9 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.39" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee2ef2af84856a50c1d430afce2fdded0a4ec7eda868db86409b4543df0797f9" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ "log", "pin-project-lite", @@ -5009,7 +5363,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] @@ -5055,20 +5409,20 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.3" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "lazy_static", "log", + "once_cell", "tracing-core", ] [[package]] name = "tracing-subscriber" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", "nu-ansi-term", @@ -5093,9 +5447,9 @@ dependencies = [ [[package]] name = "trust-dns-proto" -version = "0.23.1" +version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "559ac980345f7f5020883dd3bcacf176355225e01916f8c2efecad7534f682c6" +checksum = "3119112651c157f4488931a01e586aa459736e9d6046d3bd9105ffb69352d374" dependencies = [ "async-trait", "cfg-if", @@ -5118,9 +5472,9 @@ dependencies = [ [[package]] name = "trust-dns-server" -version = "0.23.1" +version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4307166910ddf09378e651e9d4730c44900e9e0e1f157a6b955e48b539cd1d6" +checksum = "c540f73c2b2ec2f6c54eabd0900e7aafb747a820224b742f556e8faabb461bc7" dependencies = [ "async-trait", "bytes", @@ -5140,20 +5494,20 @@ dependencies = [ [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "tungstenite" -version = 
"0.20.1" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" dependencies = [ "byteorder", "bytes", "data-encoding", - "http", + "http 1.0.0", "httparse", "log", "native-tls", @@ -5166,35 +5520,35 @@ dependencies = [ [[package]] name = "typed-builder" -version = "0.17.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c6a006a6d3d6a6f143fda41cf4d1ad35110080687628c9f2117bd3cc7924f3" +checksum = "444d8748011b93cb168770e8092458cb0f8854f931ff82fdf6ddfbd72a9c933e" dependencies = [ "typed-builder-macro", ] [[package]] name = "typed-builder-macro" -version = "0.17.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fa054ee5e2346187d631d2f1d1fd3b33676772d6d03a2d84e1c5213b31674ee" +checksum = "563b3b88238ec95680aef36bdece66896eaa7ce3c0f1b4f39d38fb2435261352" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.48", ] [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" [[package]] name = "unarray" @@ -5204,24 +5558,24 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicase" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" dependencies = [ "version_check", ] [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" -version = "1.0.6" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" @@ -5234,15 +5588,15 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode-xid" @@ -5258,24 +5612,24 @@ checksum = 
"39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" [[package]] name = "unsafe-libyaml" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" +checksum = "ab4c90930b95a82d00dc9e9ac071b4991924390d46cbd0dfe566148667605e4b" [[package]] name = "untrusted" -version = "0.7.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", - "idna 0.4.0", + "idna 0.5.0", "percent-encoding", "serde", ] @@ -5292,13 +5646,19 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + [[package]] name = "uuid" -version = "1.4.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" dependencies = [ - "getrandom 0.2.8", + "getrandom 0.2.12", ] [[package]] @@ -5336,11 +5696,10 @@ dependencies = [ [[package]] name = "want" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "log", "try-lock", ] @@ -5358,9 +5717,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.83" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaf9f5aceeec8be17c128b2e93e031fb8a4d469bb9c4ae2d7dc1888b26887268" +checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -5368,24 +5727,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.83" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8ffb332579b0557b52d268b91feab8df3615f265d5270fec2a8c95b17c1142" +checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 1.0.107", + "syn 2.0.48", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.33" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23639446165ca5a5de86ae1d8896b737ae80319560fbaa4c2887b7da6e7ebd7d" +checksum = "bde2032aeb86bdfaecc8b261eef3cba735cc426c1f3a3416d1e0791be95fc461" dependencies = [ "cfg-if", "js-sys", @@ -5395,9 +5754,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.83" +version = "0.2.90" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810" +checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5405,22 +5764,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.83" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" +checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", + "syn 2.0.48", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.83" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c38c045535d93ec4f0b4defec448e4291638ee608530863b1e2ba115d4fff7f" +checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" [[package]] name = "wasm-streams" @@ -5437,9 +5796,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.60" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcda906d8be16e728fd5adc5b729afad4e444e106ab28cd1c7256e54fa61510f" +checksum = "58cd2333b6e0be7a39605f0e255892fd7418a682d8da8fe042fe25128794d2ed" dependencies = [ "js-sys", "wasm-bindgen", @@ -5447,18 +5806,15 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.24.0" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" -dependencies = [ - "rustls-webpki", -] +checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" [[package]] name = "whoami" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45dbc71f0cdca27dc261a9bd37ddec174e4a0af2b900b890f378460f745426e3" +checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" [[package]] name = "winapi" @@ -5478,9 +5834,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -5492,27 +5848,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "windows-sys" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" -dependencies = [ - "windows_aarch64_gnullvm 0.42.1", - "windows_aarch64_msvc 0.42.1", - "windows_i686_gnu 0.42.1", - "windows_i686_msvc 0.42.1", - "windows_x86_64_gnu 0.42.1", - "windows_x86_64_gnullvm 0.42.1", - "windows_x86_64_msvc 0.42.1", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" +name = "windows-core" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 
0.42.1", + "windows-targets 0.52.0", ] [[package]] @@ -5525,18 +5866,12 @@ dependencies = [ ] [[package]] -name = "windows-targets" -version = "0.42.1" +name = "windows-sys" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows_aarch64_gnullvm 0.42.1", - "windows_aarch64_msvc 0.42.1", - "windows_i686_gnu 0.42.1", - "windows_i686_msvc 0.42.1", - "windows_x86_64_gnu 0.42.1", - "windows_x86_64_gnullvm 0.42.1", - "windows_x86_64_msvc 0.42.1", + "windows-targets 0.52.0", ] [[package]] @@ -5555,10 +5890,19 @@ dependencies = [ ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.1" +name = "windows-targets" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] [[package]] name = "windows_aarch64_gnullvm" @@ -5567,10 +5911,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] -name = "windows_aarch64_msvc" -version = "0.42.1" +name = "windows_aarch64_gnullvm" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" [[package]] name = "windows_aarch64_msvc" @@ -5579,10 +5923,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] -name = "windows_i686_gnu" -version = "0.42.1" +name = "windows_aarch64_msvc" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" [[package]] name = "windows_i686_gnu" @@ -5591,10 +5935,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] -name = "windows_i686_msvc" -version = "0.42.1" +name = "windows_i686_gnu" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" [[package]] name = "windows_i686_msvc" @@ -5603,10 +5947,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] -name = "windows_x86_64_gnu" -version = "0.42.1" +name = "windows_i686_msvc" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" [[package]] name = "windows_x86_64_gnu" @@ -5615,10 +5959,10 
@@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.1" +name = "windows_x86_64_gnu" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" [[package]] name = "windows_x86_64_gnullvm" @@ -5627,10 +5971,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] -name = "windows_x86_64_msvc" -version = "0.42.1" +name = "windows_x86_64_gnullvm" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" [[package]] name = "windows_x86_64_msvc" @@ -5638,11 +5982,17 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + [[package]] name = "winnow" -version = "0.5.17" +version = "0.5.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3b801d0e0a6726477cc207f60162da452f3a95adb368399bef20a946e06f65c" +checksum = "b7cf47b659b318dccbd69cc4797a39ae128f533dce7902a1096044d1967b9c16" dependencies = [ "memchr", ] @@ -5677,29 +6027,20 @@ dependencies = [ [[package]] name = "xattr" -version = "1.0.1" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" +checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" dependencies = [ "libc", + "linux-raw-sys", + "rustix", ] [[package]] name = "yajrc" -version = "0.1.0" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b40687b4c165cb760e35730055c8840f36897e7c98099b2d3d66ba8cb624c79a" -dependencies = [ - "anyhow", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "yajrc" -version = "0.1.0" -source = "git+https://github.com/dr-bonez/yajrc.git?branch=develop#72a22f7ac2197d7a5cdce4be601cf20e5280eec5" +checksum = "ce7af47ad983c2f8357333ef87d859e66deb7eef4bf6f9e1ae7b5e99044a48bf" dependencies = [ "anyhow", "serde", @@ -5722,29 +6063,48 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f355ab62ebe30b758c1f4ab096a306722c4b7dbfb9d8c07d18c70d71a945588" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.7", "hashbrown 0.13.2", "lazy_static", "serde", ] +[[package]] +name = "zerocopy" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.48", +] + [[package]] name = 
"zeroize" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" dependencies = [ "zeroize_derive", ] [[package]] name = "zeroize_derive" -version = "1.3.3" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bf07cb3e50ea2003396695d58bf46bc9887a1f362260446fad6bc4e79bd36c" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 1.0.107", - "synstructure", + "syn 2.0.48", ] diff --git a/system-images/compat/Dockerfile b/system-images/compat/Dockerfile index d48ac34025..c01248b048 100644 --- a/system-images/compat/Dockerfile +++ b/system-images/compat/Dockerfile @@ -1,8 +1 @@ -FROM alpine:latest - -ARG ARCH - -RUN apk update && apk add duplicity curl -ADD ./target/$ARCH-unknown-linux-musl/release/compat /usr/local/bin/compat - -ENTRYPOINT ["compat"] +FROM start9/compat \ No newline at end of file diff --git a/system-images/compat/Makefile b/system-images/compat/Makefile index 86d8811933..b6cd1bfec8 100644 --- a/system-images/compat/Makefile +++ b/system-images/compat/Makefile @@ -11,10 +11,10 @@ clean: docker-images: mkdir docker-images -docker-images/aarch64.tar: Dockerfile target/aarch64-unknown-linux-musl/release/compat docker-images +docker-images/aarch64.tar: Dockerfile docker-images docker buildx build --build-arg ARCH=aarch64 --tag start9/x_system/compat --platform=linux/arm64 -o type=docker,dest=docker-images/aarch64.tar . -docker-images/x86_64.tar: Dockerfile target/x86_64-unknown-linux-musl/release/compat docker-images +docker-images/x86_64.tar: Dockerfile docker-images docker buildx build --build-arg ARCH=x86_64 --tag start9/x_system/compat --platform=linux/amd64 -o type=docker,dest=docker-images/x86_64.tar . 
target/aarch64-unknown-linux-musl/release/compat: $(COMPAT_SRC) ../../core/Cargo.lock diff --git a/web/package-lock.json b/web/package-lock.json index 160cb31bbc..90a6ad58d8 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -25,6 +25,7 @@ "@ng-web-apis/resize-observer": "^2.0.0", "@start9labs/argon2": "^0.2.2", "@start9labs/emver": "^0.1.5", + "@start9labs/start-sdk": "file:../sdk/dist", "@taiga-ui/addon-charts": "3.20.0", "@taiga-ui/cdk": "3.20.0", "@taiga-ui/core": "3.20.0", @@ -47,7 +48,7 @@ "mustache": "^4.2.0", "ng-qrcode": "^7.0.0", "node-jose": "^2.2.0", - "patch-db-client": "file: ../../../patch-db/client", + "patch-db-client": "file:../patch-db/client", "pbkdf2": "^3.1.2", "rxjs": "^7.8.1", "swiper": "^8.2.4", @@ -1970,6 +1971,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "../sdk/dist": { + "name": "@start9labs/start-sdk", + "version": "0.4.0-rev0.lib0.rc8.beta10", + "license": "MIT", + "dependencies": { + "isomorphic-fetch": "^3.0.0", + "ts-matches": "^5.4.1" + }, + "devDependencies": { + "@iarna/toml": "^2.2.5", + "@types/jest": "^29.4.0", + "jest": "^29.4.3", + "prettier": "^3.2.5", + "ts-jest": "^29.0.5", + "ts-node": "^10.9.1", + "tsx": "^4.7.1", + "typescript": "^5.0.4", + "yaml": "^2.2.2" + } + }, "node_modules/@adobe/css-tools": { "version": "4.0.1", "dev": true, @@ -5291,6 +5312,10 @@ "version": "0.1.5", "license": "MIT" }, + "node_modules/@start9labs/start-sdk": { + "resolved": "../sdk/dist", + "link": true + }, "node_modules/@stencil/core": { "version": "2.18.0", "license": "MIT", diff --git a/web/package.json b/web/package.json index 7784543fe7..e4ae2220f4 100644 --- a/web/package.json +++ b/web/package.json @@ -12,7 +12,7 @@ "check:install-wiz": "tsc --project projects/install-wizard/tsconfig.json --noEmit --skipLibCheck", "check:setup": "tsc --project projects/setup-wizard/tsconfig.json --noEmit --skipLibCheck", "check:ui": "tsc --project projects/ui/tsconfig.json --noEmit --skipLibCheck", - "build:deps": "rm -rf .angular/cache && cd ../patch-db/client && npm ci && npm run build", + "build:deps": "rm -rf .angular/cache && (cd ../patch-db/client && npm ci && npm run build) && (cd ../sdk && make bundle)", "build:dui": "ng run diagnostic-ui:build", "build:install-wiz": "ng run install-wizard:build", "build:setup": "ng run setup-wizard:build", @@ -50,6 +50,7 @@ "@ng-web-apis/resize-observer": "^2.0.0", "@start9labs/argon2": "^0.2.2", "@start9labs/emver": "^0.1.5", + "@start9labs/start-sdk": "file:../sdk/dist", "@taiga-ui/addon-charts": "3.20.0", "@taiga-ui/cdk": "3.20.0", "@taiga-ui/core": "3.20.0", @@ -72,7 +73,7 @@ "mustache": "^4.2.0", "ng-qrcode": "^7.0.0", "node-jose": "^2.2.0", - "patch-db-client": "file: ../../../patch-db/client", + "patch-db-client": "file:../patch-db/client", "pbkdf2": "^3.1.2", "rxjs": "^7.8.1", "swiper": "^8.2.4", diff --git a/web/projects/diagnostic-ui/src/app/pages/logs/logs.page.ts b/web/projects/diagnostic-ui/src/app/pages/logs/logs.page.ts index 317cd1ea34..44c314a96d 100644 --- a/web/projects/diagnostic-ui/src/app/pages/logs/logs.page.ts +++ b/web/projects/diagnostic-ui/src/app/pages/logs/logs.page.ts @@ -49,7 +49,7 @@ export class LogsPage { private async getLogs() { try { - const { 'start-cursor': startCursor, entries } = await this.api.getLogs({ + const { startCursor, entries } = await this.api.getLogs({ cursor: this.startCursor, before: !!this.startCursor, limit: this.limit, diff --git a/web/projects/diagnostic-ui/src/app/services/api/mock-api.service.ts 
b/web/projects/diagnostic-ui/src/app/services/api/mock-api.service.ts index 5d8c13a4f6..d991edd325 100644 --- a/web/projects/diagnostic-ui/src/app/services/api/mock-api.service.ts +++ b/web/projects/diagnostic-ui/src/app/services/api/mock-api.service.ts @@ -45,8 +45,8 @@ export class MockApiService implements ApiService { } return { entries, - 'start-cursor': 'startCursor', - 'end-cursor': 'endCursor', + startCursor: 'start-cursor', + endCursor: 'end-cursor', } } } diff --git a/web/projects/install-wizard/src/app/services/api/mock-api.service.ts b/web/projects/install-wizard/src/app/services/api/mock-api.service.ts index 9caf4f88e3..5be394c678 100644 --- a/web/projects/install-wizard/src/app/services/api/mock-api.service.ts +++ b/web/projects/install-wizard/src/app/services/api/mock-api.service.ts @@ -17,12 +17,12 @@ export class MockApiService implements ApiService { label: null, capacity: 73264762332, used: null, - 'embassy-os': { + startOs: { version: '0.2.17', full: true, - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': null, + wrappedKey: null, }, guid: null, }, @@ -40,12 +40,12 @@ export class MockApiService implements ApiService { label: null, capacity: 73264762332, used: null, - 'embassy-os': { + startOs: { version: '0.3.3', full: true, - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': null, + wrappedKey: null, }, guid: null, }, @@ -63,12 +63,12 @@ export class MockApiService implements ApiService { label: null, capacity: 73264762332, used: null, - 'embassy-os': { + startOs: { version: '0.3.2', full: true, - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': null, + wrappedKey: null, }, guid: 'guid-guid-guid-guid', }, diff --git a/web/projects/marketplace/src/pages/list/item/item.component.html b/web/projects/marketplace/src/pages/list/item/item.component.html index 0220a0a0c8..9a88d08693 100644 --- a/web/projects/marketplace/src/pages/list/item/item.component.html +++ b/web/projects/marketplace/src/pages/list/item/item.component.html @@ -1,6 +1,6 @@ - +

diff --git a/web/projects/marketplace/src/pages/list/item/item.module.ts b/web/projects/marketplace/src/pages/list/item/item.module.ts index dd157174e0..3f943ac2dd 100644 --- a/web/projects/marketplace/src/pages/list/item/item.module.ts +++ b/web/projects/marketplace/src/pages/list/item/item.module.ts @@ -4,17 +4,10 @@ import { IonicModule } from '@ionic/angular' import { RouterModule } from '@angular/router' import { SharedPipesModule } from '@start9labs/shared' import { ItemComponent } from './item.component' -import { MimeTypePipeModule } from '../../../pipes/mime-type.pipe' @NgModule({ declarations: [ItemComponent], exports: [ItemComponent], - imports: [ - CommonModule, - IonicModule, - RouterModule, - SharedPipesModule, - MimeTypePipeModule, - ], + imports: [CommonModule, IonicModule, RouterModule, SharedPipesModule], }) export class ItemModule {} diff --git a/web/projects/marketplace/src/pages/show/about/about.component.html b/web/projects/marketplace/src/pages/show/about/about.component.html index c1d76dd2c9..1126f8b54e 100644 --- a/web/projects/marketplace/src/pages/show/about/about.component.html +++ b/web/projects/marketplace/src/pages/show/about/about.component.html @@ -4,7 +4,7 @@ -
+
@@ -18,10 +18,7 @@
 {{ pkg.manifest.description.long }}
-
+
 View website
diff --git a/web/projects/marketplace/src/pages/show/additional/additional.component.html b/web/projects/marketplace/src/pages/show/additional/additional.component.html
index 8937e8c74d..d362991e36 100644
--- a/web/projects/marketplace/src/pages/show/additional/additional.component.html
+++ b/web/projects/marketplace/src/pages/show/additional/additional.component.html
@@ -15,7 +15,7 @@
 Instructions
 Source Repository
-{{ manifest['upstream-repo'] }}
+{{ manifest.upstreamRepo }}
 Wrapper Repository
-{{ manifest['wrapper-repo'] }}
+{{ manifest.wrapperRepo }}
 Support Site
-{{ manifest['support-site'] || 'Not provided' }}
+{{ manifest.supportSite || 'Not provided' }}

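The template hunk above now reads these links through the generated camelCase Manifest binding instead of the old kebab-case keys. As a rough, illustrative slice only (the real type is generated into core/startos/bindings/Manifest.ts; the nullable supportSite is an assumption based on the 'Not provided' fallback):

// Illustrative subset, not the generated binding itself: only the fields
// referenced by additional.component.html in this hunk.
interface ManifestLinks {
  upstreamRepo: string
  wrapperRepo: string
  supportSite: string | null // nullability assumed from the "|| 'Not provided'" fallback
}

// Hypothetical helper mirroring the template fallback above.
const supportSiteText = (m: ManifestLinks): string =>
  m.supportSite || 'Not provided'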
diff --git a/web/projects/marketplace/src/pages/show/dependencies/dependencies.component.html b/web/projects/marketplace/src/pages/show/dependencies/dependencies.component.html
index 87b9026037..4c1bfcc651 100644
--- a/web/projects/marketplace/src/pages/show/dependencies/dependencies.component.html
+++ b/web/projects/marketplace/src/pages/show/dependencies/dependencies.component.html
@@ -17,16 +17,12 @@
-{{ pkg['dependency-metadata'][dep.key].title }}
-(required)
-(required by default)
-(optional)
+{{ pkg.dependencyMetadata[dep.key].title }}
+(optional)
+(Required)
-
-{{ dep.value.version | displayEmver }}
-
 {{ dep.value.description }}

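For orientation, a minimal usage sketch of the renamed dependency metadata, matching the camelCase access in the hunk above; the shapes are trimmed to the fields used here (the full interfaces appear in the web/projects/marketplace/src/types.ts hunk later in this diff) and the helper itself is hypothetical:

// Shapes trimmed to what the template binds; see types.ts below for the full
// DependencyMetadata and MarketplacePkg definitions.
interface DependencyMetadata {
  title: string
  icon: string
  optional: boolean
  hidden: boolean
}
interface PkgWithDeps {
  dependencyMetadata: { [id: string]: DependencyMetadata }
}

// Hypothetical helper (not part of the diff) mirroring the template logic:
// title plus an "(optional)" or "(Required)" suffix; icon is used directly as a URL.
function dependencyLabel(pkg: PkgWithDeps, id: string): string {
  const meta = pkg.dependencyMetadata[id]
  return `${meta.title} ${meta.optional ? '(optional)' : '(Required)'}`
}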
diff --git a/web/projects/marketplace/src/pages/show/dependencies/dependencies.component.ts b/web/projects/marketplace/src/pages/show/dependencies/dependencies.component.ts index a6ecb103f6..a1fed6cca8 100644 --- a/web/projects/marketplace/src/pages/show/dependencies/dependencies.component.ts +++ b/web/projects/marketplace/src/pages/show/dependencies/dependencies.component.ts @@ -11,7 +11,6 @@ export class DependenciesComponent { pkg!: MarketplacePkg getImg(key: string): string { - // @TODO fix when registry api is updated to include mimetype in icon url - return 'data:image/png;base64,' + this.pkg['dependency-metadata'][key].icon + return this.pkg.dependencyMetadata[key].icon } } diff --git a/web/projects/marketplace/src/pages/show/package/package.component.html b/web/projects/marketplace/src/pages/show/package/package.component.html index ca853030e3..94e7006b1f 100644 --- a/web/projects/marketplace/src/pages/show/package/package.component.html +++ b/web/projects/marketplace/src/pages/show/package/package.component.html @@ -1,11 +1,9 @@
-
+
 {{ pkg.manifest.title }}
 {{ pkg.manifest.version | displayEmver }}
-
-Released: {{ pkg['published-at'] | date: 'medium' }}
-
+
 Released: {{ pkg.publishedAt | date: 'medium' }}

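The publishedAt rename also drives list ordering (see the filter-packages.pipe.ts hunk just below). A minimal standalone sketch of that sort, with the element shape reduced to the one field it needs; the helper name is illustrative:

// Newest publishedAt first; publishedAt is an ISO date string per the
// MarketplacePkg type later in this diff.
function sortByPublishedAt<T extends { publishedAt: string }>(pkgs: T[]): T[] {
  return [...pkgs].sort(
    (a, b) => new Date(b.publishedAt).valueOf() - new Date(a.publishedAt).valueOf(),
  )
}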
diff --git a/web/projects/marketplace/src/pages/show/package/package.module.ts b/web/projects/marketplace/src/pages/show/package/package.module.ts index 665e146b58..502ee82bbf 100644 --- a/web/projects/marketplace/src/pages/show/package/package.module.ts +++ b/web/projects/marketplace/src/pages/show/package/package.module.ts @@ -8,7 +8,6 @@ import { } from '@start9labs/shared' import { PackageComponent } from './package.component' -import { MimeTypePipeModule } from '../../../pipes/mime-type.pipe' @NgModule({ declarations: [PackageComponent], @@ -19,7 +18,6 @@ import { MimeTypePipeModule } from '../../../pipes/mime-type.pipe' SharedPipesModule, EmverPipesModule, TickerModule, - MimeTypePipeModule, ], }) export class PackageModule {} diff --git a/web/projects/marketplace/src/pipes/filter-packages.pipe.ts b/web/projects/marketplace/src/pipes/filter-packages.pipe.ts index 5b0bb52b19..48313376f0 100644 --- a/web/projects/marketplace/src/pipes/filter-packages.pipe.ts +++ b/web/projects/marketplace/src/pipes/filter-packages.pipe.ts @@ -71,8 +71,7 @@ export class FilterPackagesPipe implements PipeTransform { .filter(p => category === 'all' || p.categories.includes(category)) .sort((a, b) => { return ( - new Date(b['published-at']).valueOf() - - new Date(a['published-at']).valueOf() + new Date(b.publishedAt).valueOf() - new Date(a.publishedAt).valueOf() ) }) } diff --git a/web/projects/marketplace/src/pipes/mime-type.pipe.ts b/web/projects/marketplace/src/pipes/mime-type.pipe.ts deleted file mode 100644 index a0dc14a001..0000000000 --- a/web/projects/marketplace/src/pipes/mime-type.pipe.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { NgModule, Pipe, PipeTransform } from '@angular/core' -import { MarketplacePkg } from '../types' - -@Pipe({ - name: 'mimeType', -}) -export class MimeTypePipe implements PipeTransform { - transform(pkg: MarketplacePkg): string { - if (pkg.manifest.assets.icon) { - switch (pkg.manifest.assets.icon.split('.').pop()) { - case 'png': - return `data:image/png;base64,${pkg.icon}` - case 'jpeg': - case 'jpg': - return `data:image/jpeg;base64,${pkg.icon}` - case 'gif': - return `data:image/gif;base64,${pkg.icon}` - case 'svg': - return `data:image/svg+xml;base64,${pkg.icon}` - default: - return `data:image/png;base64,${pkg.icon}` - } - } - return `data:image/png;base64,${pkg.icon}` - } -} - -@NgModule({ - declarations: [MimeTypePipe], - exports: [MimeTypePipe], -}) -export class MimeTypePipeModule {} diff --git a/web/projects/marketplace/src/public-api.ts b/web/projects/marketplace/src/public-api.ts index fef451a3eb..b9e8c46866 100644 --- a/web/projects/marketplace/src/public-api.ts +++ b/web/projects/marketplace/src/public-api.ts @@ -22,7 +22,6 @@ export * from './pages/show/package/package.component' export * from './pages/show/package/package.module' export * from './pipes/filter-packages.pipe' -export * from './pipes/mime-type.pipe' export * from './services/marketplace.service' diff --git a/web/projects/marketplace/src/types.ts b/web/projects/marketplace/src/types.ts index 8187d3bc0f..bc7c70f804 100644 --- a/web/projects/marketplace/src/types.ts +++ b/web/projects/marketplace/src/types.ts @@ -1,4 +1,5 @@ import { Url } from '@start9labs/shared' +import { Manifest } from '../../../../core/startos/bindings/Manifest' export type StoreURL = string export type StoreName = string @@ -19,69 +20,29 @@ export interface StoreInfo { categories: string[] } +export type StoreIdentityWithData = StoreData & StoreIdentity + export interface MarketplacePkg { icon: Url license: Url 
instructions: Url - manifest: MarketplaceManifest + manifest: Manifest categories: string[] versions: string[] - 'dependency-metadata': { + dependencyMetadata: { [id: string]: DependencyMetadata } - 'published-at': string + publishedAt: string } export interface DependencyMetadata { title: string icon: Url + optional: boolean hidden: boolean } -export interface MarketplaceManifest { - id: string - title: string - version: string - 'git-hash'?: string - description: { - short: string - long: string - } - assets: { - icon: string // ie. icon.png - } - replaces?: string[] - 'release-notes': string - license: string // type of license - 'wrapper-repo': Url - 'upstream-repo': Url - 'support-site': Url - 'marketing-site': Url - 'donation-url': Url | null - alerts: { - install: string | null - uninstall: string | null - restore: string | null - start: string | null - stop: string | null - } - dependencies: Record> -} - -export interface Dependency { - version: string - requirement: - | { - type: 'opt-in' - how: string - } - | { - type: 'opt-out' - how: string - } - | { - type: 'required' - } +export interface Dependency { description: string | null - config: T + optional: boolean } diff --git a/web/projects/setup-wizard/src/app/modals/cifs-modal/cifs-modal.page.ts b/web/projects/setup-wizard/src/app/modals/cifs-modal/cifs-modal.page.ts index 7f293f5e0e..0664be38f5 100644 --- a/web/projects/setup-wizard/src/app/modals/cifs-modal/cifs-modal.page.ts +++ b/web/projects/setup-wizard/src/app/modals/cifs-modal/cifs-modal.page.ts @@ -61,7 +61,7 @@ export class CifsModal { const target: CifsBackupTarget = { ...this.cifs, mountable: true, - 'embassy-os': diskInfo, + startOs: diskInfo, } const modal = await this.modalController.create({ diff --git a/web/projects/setup-wizard/src/app/modals/password/password.page.ts b/web/projects/setup-wizard/src/app/modals/password/password.page.ts index 98de93e1a9..8ea00ce0cd 100644 --- a/web/projects/setup-wizard/src/app/modals/password/password.page.ts +++ b/web/projects/setup-wizard/src/app/modals/password/password.page.ts @@ -31,11 +31,11 @@ export class PasswordPage { } async verifyPw() { - if (!this.target || !this.target['embassy-os']) + if (!this.target || !this.target.startOs) this.pwError = 'No recovery target' // unreachable try { - const passwordHash = this.target!['embassy-os']?.['password-hash'] || '' + const passwordHash = this.target!.startOs?.passwordHash || '' argon2.verify(passwordHash, this.password) this.modalController.dismiss({ password: this.password }, 'success') diff --git a/web/projects/setup-wizard/src/app/pages/loading/loading.page.ts b/web/projects/setup-wizard/src/app/pages/loading/loading.page.ts index 10ec47d311..ce1a1b3c05 100644 --- a/web/projects/setup-wizard/src/app/pages/loading/loading.page.ts +++ b/web/projects/setup-wizard/src/app/pages/loading/loading.page.ts @@ -38,10 +38,7 @@ export class LoadingPage { if (!progress) return - const { - 'total-bytes': totalBytes, - 'bytes-transferred': bytesTransferred, - } = progress + const { totalBytes, bytesTransferred } = progress this.progress$.next({ totalBytes, diff --git a/web/projects/setup-wizard/src/app/pages/recover/recover.page.ts b/web/projects/setup-wizard/src/app/pages/recover/recover.page.ts index a8cd194bae..f9e5adbb10 100644 --- a/web/projects/setup-wizard/src/app/pages/recover/recover.page.ts +++ b/web/projects/setup-wizard/src/app/pages/recover/recover.page.ts @@ -35,7 +35,7 @@ export class RecoverPage { } driveClickable(mapped: MappedDisk) { - return 
mapped.drive['embassy-os']?.full + return mapped.drive.startOs?.full } async getDrives() { @@ -53,10 +53,10 @@ export class RecoverPage { label: p.label, capacity: p.capacity, used: p.used, - 'embassy-os': p['embassy-os'], + startOs: p.startOs, } this.mappedDrives.push({ - hasValidBackup: !!p['embassy-os']?.full, + hasValidBackup: !!p.startOs?.full, drive, }) }) diff --git a/web/projects/setup-wizard/src/app/pages/success/success.page.ts b/web/projects/setup-wizard/src/app/pages/success/success.page.ts index c1d175aaac..71566580a4 100644 --- a/web/projects/setup-wizard/src/app/pages/success/success.page.ts +++ b/web/projects/setup-wizard/src/app/pages/success/success.page.ts @@ -76,9 +76,9 @@ export class SuccessPage { try { const ret = await this.api.complete() if (!this.isKiosk) { - this.torAddress = ret['tor-address'].replace(/^https:/, 'http:') - this.lanAddress = ret['lan-address'].replace(/^https:/, 'http:') - this.cert = ret['root-ca'] + this.torAddress = ret.torAddress.replace(/^https:/, 'http:') + this.lanAddress = ret.lanAddress.replace(/^https:/, 'http:') + this.cert = ret.rootCa await this.api.exit() } diff --git a/web/projects/setup-wizard/src/app/services/api/api.service.ts b/web/projects/setup-wizard/src/app/services/api/api.service.ts index df96360935..375e64a78c 100644 --- a/web/projects/setup-wizard/src/app/services/api/api.service.ts +++ b/web/projects/setup-wizard/src/app/services/api/api.service.ts @@ -28,27 +28,27 @@ type Encrypted = { } export type StatusRes = { - 'bytes-transferred': number - 'total-bytes': number | null + bytesTransferred: number + totalBytes: number | null complete: boolean } | null export type AttachReq = { guid: string - 'embassy-password': Encrypted + startOsPassword: Encrypted } export type ExecuteReq = { - 'embassy-logicalname': string - 'embassy-password': Encrypted - 'recovery-source': RecoverySource | null - 'recovery-password': Encrypted | null + startOsLogicalname: string + startOsPassword: Encrypted + recoverySource: RecoverySource | null + recoveryPassword: Encrypted | null } export type CompleteRes = { - 'tor-address': string - 'lan-address': string - 'root-ca': string + torAddress: string + lanAddress: string + rootCa: string } export type DiskBackupTarget = { @@ -58,7 +58,7 @@ export type DiskBackupTarget = { label: string | null capacity: number used: number | null - 'embassy-os': StartOSDiskInfo | null + startOs: StartOSDiskInfo | null } export type CifsBackupTarget = { @@ -66,7 +66,7 @@ export type CifsBackupTarget = { path: string username: string mountable: boolean - 'embassy-os': StartOSDiskInfo | null + startOs: StartOSDiskInfo | null } export type DiskRecoverySource = { diff --git a/web/projects/setup-wizard/src/app/services/api/live-api.service.ts b/web/projects/setup-wizard/src/app/services/api/live-api.service.ts index ab67db5d0f..566cc84cfc 100644 --- a/web/projects/setup-wizard/src/app/services/api/live-api.service.ts +++ b/web/projects/setup-wizard/src/app/services/api/live-api.service.ts @@ -73,11 +73,10 @@ export class LiveApiService extends ApiService { } async execute(setupInfo: ExecuteReq) { - if (setupInfo['recovery-source']?.type === 'backup') { - if (isCifsSource(setupInfo['recovery-source'].target)) { - setupInfo['recovery-source'].target.path = setupInfo[ - 'recovery-source' - ].target.path.replace('/\\/g', '/') + if (setupInfo.recoverySource?.type === 'backup') { + if (isCifsSource(setupInfo.recoverySource.target)) { + setupInfo.recoverySource.target.path = + 
setupInfo.recoverySource.target.path.replace('/\\/g', '/') } } @@ -95,7 +94,7 @@ export class LiveApiService extends ApiService { return { ...res, - 'root-ca': encodeBase64(res['root-ca']), + rootCa: encodeBase64(res.rootCa), } } diff --git a/web/projects/setup-wizard/src/app/services/api/mock-api.service.ts b/web/projects/setup-wizard/src/app/services/api/mock-api.service.ts index 51acf4e7b5..df32bd09e2 100644 --- a/web/projects/setup-wizard/src/app/services/api/mock-api.service.ts +++ b/web/projects/setup-wizard/src/app/services/api/mock-api.service.ts @@ -30,8 +30,8 @@ export class MockApiService extends ApiService { const progress = tries > 4 ? (tries - 4) * 268435456 : 0 return { - 'bytes-transferred': restoreOrMigrate ? progress : 0, - 'total-bytes': restoreOrMigrate ? total : null, + bytesTransferred: restoreOrMigrate ? progress : 0, + totalBytes: restoreOrMigrate ? total : null, complete: progress === total, } } @@ -65,12 +65,12 @@ export class MockApiService extends ApiService { label: null, capacity: 1979120929996, used: null, - 'embassy-os': { + startOs: { version: '0.2.17', full: true, - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': null, + wrappedKey: null, }, guid: null, }, @@ -88,12 +88,12 @@ export class MockApiService extends ApiService { label: null, capacity: 73264762332, used: null, - 'embassy-os': { + startOs: { version: '0.3.3', full: true, - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': null, + wrappedKey: null, }, guid: null, }, @@ -111,12 +111,12 @@ export class MockApiService extends ApiService { label: null, capacity: 73264762332, used: null, - 'embassy-os': { + startOs: { version: '0.3.2', full: true, - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': null, + wrappedKey: null, }, guid: 'guid-guid-guid-guid', }, @@ -132,9 +132,9 @@ export class MockApiService extends ApiService { return { version: '0.3.0', full: true, - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': '', + wrappedKey: '', } } @@ -149,9 +149,9 @@ export class MockApiService extends ApiService { async complete(): Promise { await pauseFor(1000) return { - 'tor-address': 'https://asdafsadasdasasdasdfasdfasdf.onion', - 'lan-address': 'https://adjective-noun.local', - 'root-ca': encodeBase64(rootCA), + torAddress: 'https://asdafsadasdasasdasdfasdfasdf.onion', + lanAddress: 'https://adjective-noun.local', + rootCa: encodeBase64(rootCA), } } diff --git a/web/projects/setup-wizard/src/app/services/state.service.ts b/web/projects/setup-wizard/src/app/services/state.service.ts index e704785591..916b066ee2 100644 --- a/web/projects/setup-wizard/src/app/services/state.service.ts +++ b/web/projects/setup-wizard/src/app/services/state.service.ts @@ -15,7 +15,7 @@ export class StateService { async importDrive(guid: string, password: string): Promise { await this.api.attach({ guid, - 'embassy-password': await this.api.encrypt(password), + startOsPassword: await this.api.encrypt(password), }) } @@ -24,10 +24,10 @@ export class StateService { password: string, ): Promise { await this.api.execute({ - 'embassy-logicalname': storageLogicalname, - 'embassy-password': await this.api.encrypt(password), - 'recovery-source': this.recoverySource || null, - 
'recovery-password': this.recoveryPassword + startOsLogicalname: storageLogicalname, + startOsPassword: await this.api.encrypt(password), + recoverySource: this.recoverySource || null, + recoveryPassword: this.recoveryPassword ? await this.api.encrypt(this.recoveryPassword) : null, }) diff --git a/web/projects/shared/src/types/api.ts b/web/projects/shared/src/types/api.ts index 1473dc16a0..26cd002185 100644 --- a/web/projects/shared/src/types/api.ts +++ b/web/projects/shared/src/types/api.ts @@ -6,8 +6,8 @@ export type ServerLogsReq = { export type LogsRes = { entries: Log[] - 'start-cursor'?: string - 'end-cursor'?: string + startCursor?: string + endCursor?: string } export interface Log { @@ -31,13 +31,13 @@ export interface PartitionInfo { label: string | null capacity: number used: number | null - 'embassy-os': StartOSDiskInfo | null + startOs: StartOSDiskInfo | null guid: string | null } export type StartOSDiskInfo = { version: string full: boolean - 'password-hash': string | null - 'wrapped-key': string | null + passwordHash: string | null + wrappedKey: string | null } diff --git a/web/projects/shared/src/types/workspace-config.ts b/web/projects/shared/src/types/workspace-config.ts index 101af40fd7..57d5e2a4ce 100644 --- a/web/projects/shared/src/types/workspace-config.ts +++ b/web/projects/shared/src/types/workspace-config.ts @@ -13,7 +13,7 @@ export type WorkspaceConfig = { community: 'https://community-registry.start9.com/' } mocks: { - maskAs: 'tor' | 'local' | 'localhost' + maskAs: 'tor' | 'local' | 'ip' | 'localhost' // enables local development in secure mode maskAsHttps: boolean skipStartupAlerts: boolean diff --git a/web/projects/ui/src/app/app-routing.module.ts b/web/projects/ui/src/app/app-routing.module.ts index b1b79c05d7..ddb66d3c58 100644 --- a/web/projects/ui/src/app/app-routing.module.ts +++ b/web/projects/ui/src/app/app-routing.module.ts @@ -63,15 +63,6 @@ const routes: Routes = [ m => m.AppsRoutingModule, ), }, - { - path: 'developer', - canActivate: [AuthGuard], - canActivateChild: [AuthGuard], - loadChildren: () => - import('./pages/developer-routes/developer-routing.module').then( - m => m.DeveloperRoutingModule, - ), - }, ] @NgModule({ diff --git a/web/projects/ui/src/app/app.component.ts b/web/projects/ui/src/app/app.component.ts index 5675fbf5cf..1210eba7a5 100644 --- a/web/projects/ui/src/app/app.component.ts +++ b/web/projects/ui/src/app/app.component.ts @@ -29,13 +29,12 @@ export class AppComponent implements OnDestroy { this.authService.isVerified$, this.connection.connected$, this.patch - .watch$('server-info', 'status-info') - .pipe(startWith({ restarting: false, 'shutting-down': false })), + .watch$('serverInfo', 'statusInfo') + .pipe(startWith({ restarting: false, shuttingDown: false })), ]).pipe( map( ([verified, connected, status]) => - verified && - (!connected || status.restarting || status['shutting-down']), + verified && (!connected || status.restarting || status.shuttingDown), ), ) diff --git a/web/projects/ui/src/app/app/footer/footer.component.ts b/web/projects/ui/src/app/app/footer/footer.component.ts index 9a16f5a474..adbb27fe39 100644 --- a/web/projects/ui/src/app/app/footer/footer.component.ts +++ b/web/projects/ui/src/app/app/footer/footer.component.ts @@ -13,7 +13,7 @@ import { DataModel } from '../../services/patch-db/data-model' }) export class FooterComponent { readonly progress$ = this.patch - .watch$('server-info', 'status-info', 'update-progress') + .watch$('serverInfo', 'statusInfo', 'updateProgress') .pipe(map(a => a && { ...a 
})) readonly animation = { diff --git a/web/projects/ui/src/app/app/menu/menu.component.html b/web/projects/ui/src/app/app/menu/menu.component.html index a54033b678..6db19dea61 100644 --- a/web/projects/ui/src/app/app/menu/menu.component.html +++ b/web/projects/ui/src/app/app/menu/menu.component.html @@ -22,12 +22,6 @@ {{ page.title }} - this.patch.watch$('package-data').pipe(first())), + switchMap(() => this.patch.watch$('packageData').pipe(first())), switchMap(outer => - this.patch.watch$('package-data').pipe( + this.patch.watch$('packageData').pipe( pairwise(), filter(([prev, curr]) => Object.values(prev).some( p => - p['install-progress'] && - !curr[p.manifest.id]?.['install-progress'], + ['installing', 'updating', 'restoring'].includes( + p.stateInfo.state, + ) && + ['installed', 'removing'].includes( + curr[getManifest(p).id].stateInfo.state, + ), ), ), map(([_, curr]) => curr), @@ -97,9 +99,10 @@ export class MenuComponent { Object.entries(marketplace).reduce((list, [_, store]) => { store?.packages.forEach(({ manifest: { id, version } }) => { if ( + local[id] && this.emver.compare( version, - local[id]?.installed?.manifest.version || '', + getManifest(local[id]).version || '', ) === 1 ) list.add(id) @@ -114,11 +117,6 @@ export class MenuComponent { readonly theme$ = inject(THEME) - readonly warning$ = merge( - of(this.config.isTorHttp()), - this.patch.watch$('server-info', 'ntp-synced').pipe(map(synced => !synced)), - ) - constructor( private readonly patch: PatchDB, private readonly eosService: EOSService, @@ -127,6 +125,5 @@ export class MenuComponent { private readonly splitPane: SplitPaneTracker, private readonly emver: Emver, private readonly connectionService: ConnectionService, - private readonly config: ConfigService, ) {} } diff --git a/web/projects/ui/src/app/app/preloader/preloader.component.ts b/web/projects/ui/src/app/app/preloader/preloader.component.ts index 9823ac9811..0b387ac453 100644 --- a/web/projects/ui/src/app/app/preloader/preloader.component.ts +++ b/web/projects/ui/src/app/app/preloader/preloader.component.ts @@ -40,8 +40,6 @@ const ICONS = [ 'file-tray-stacked-outline', 'finger-print-outline', 'flash-outline', - 'flask-outline', - 'flash-off-outline', 'folder-open-outline', 'globe-outline', 'grid-outline', @@ -70,6 +68,7 @@ const ICONS = [ 'receipt-outline', 'refresh', 'reload', + 'reload-circle-outline', 'remove', 'remove-circle-outline', 'remove-outline', diff --git a/web/projects/ui/src/app/components/backup-drives/backup.service.ts b/web/projects/ui/src/app/components/backup-drives/backup.service.ts index 381110de46..6beec7e2d2 100644 --- a/web/projects/ui/src/app/components/backup-drives/backup.service.ts +++ b/web/projects/ui/src/app/components/backup-drives/backup.service.ts @@ -56,7 +56,7 @@ export class BackupService { } hasValidBackup(target: BackupTarget): boolean { - const backup = target['embassy-os'] + const backup = target.startOs return !!backup && this.emver.compare(backup.version, '0.3.0') !== -1 } } diff --git a/web/projects/ui/src/app/components/badge-menu-button/badge-menu.component.ts b/web/projects/ui/src/app/components/badge-menu-button/badge-menu.component.ts index 3f91da5c47..5fe9b621eb 100644 --- a/web/projects/ui/src/app/components/badge-menu-button/badge-menu.component.ts +++ b/web/projects/ui/src/app/components/badge-menu-button/badge-menu.component.ts @@ -21,8 +21,8 @@ const { enableWidgets } = }) export class BadgeMenuComponent { readonly unreadCount$ = this.patch.watch$( - 'server-info', - 'unread-notification-count', + 
'serverInfo', + 'unreadNotificationCount', ) readonly sidebarOpen$ = this.splitPane.sidebarOpen$ readonly widgetDrawer$ = this.clientStorageService.widgetDrawer$ diff --git a/web/projects/ui/src/app/components/connection-bar/connection-bar.component.ts b/web/projects/ui/src/app/components/connection-bar/connection-bar.component.ts index 9c4b07b7fd..da28f805f9 100644 --- a/web/projects/ui/src/app/components/connection-bar/connection-bar.component.ts +++ b/web/projects/ui/src/app/components/connection-bar/connection-bar.component.ts @@ -22,8 +22,8 @@ export class ConnectionBarComponent { this.connectionService.networkConnected$, this.websocket$.pipe(startWith(false)), this.patch - .watch$('server-info', 'status-info') - .pipe(startWith({ restarting: false, 'shutting-down': false })), + .watch$('serverInfo', 'statusInfo') + .pipe(startWith({ restarting: false, shuttingDown: false })), ]).pipe( map(([network, websocket, status]) => { if (!network) @@ -40,7 +40,7 @@ export class ConnectionBarComponent { icon: 'cloud-offline-outline', dots: true, } - if (status['shutting-down']) + if (status.shuttingDown) return { message: 'Shutting Down', color: 'dark', diff --git a/web/projects/ui/src/app/components/logs/logs.component.ts b/web/projects/ui/src/app/components/logs/logs.component.ts index 5f06887577..5d033fc830 100644 --- a/web/projects/ui/src/app/components/logs/logs.component.ts +++ b/web/projects/ui/src/app/components/logs/logs.component.ts @@ -78,7 +78,7 @@ export class LogsComponent { async ngOnInit() { from(this.followLogs({ limit: this.limit })) .pipe( - switchMap(({ 'start-cursor': startCursor, guid }) => { + switchMap(({ startCursor, guid }) => { this.startCursor = startCursor return this.connect$(guid) }), @@ -206,7 +206,7 @@ export class LogsComponent { } private processRes(res: LogsRes) { - const { entries, 'start-cursor': startCursor } = res + const { entries, startCursor } = res if (!entries.length) return diff --git a/web/projects/ui/src/app/components/status/status.component.html b/web/projects/ui/src/app/components/status/status.component.html index fd265fd969..db9e8f8f7d 100644 --- a/web/projects/ui/src/app/components/status/status.component.html +++ b/web/projects/ui/src/app/components/status/status.component.html @@ -8,21 +8,13 @@ > {{ (connected$ | async) ? rendering.display : 'Unknown' }} - - this may take a while + + . 
This may take a while - - - {{ progress }} + + + {{ installingInfo.progress.overall | installingProgressString }} diff --git a/web/projects/ui/src/app/components/status/status.component.module.ts b/web/projects/ui/src/app/components/status/status.component.module.ts index ea093b8e9a..04fa4690c1 100644 --- a/web/projects/ui/src/app/components/status/status.component.module.ts +++ b/web/projects/ui/src/app/components/status/status.component.module.ts @@ -3,7 +3,7 @@ import { CommonModule } from '@angular/common' import { IonicModule } from '@ionic/angular' import { UnitConversionPipesModule } from '@start9labs/shared' import { StatusComponent } from './status.component' -import { InstallProgressPipeModule } from 'src/app/pipes/install-progress/install-progress.module' +import { InstallingProgressPipeModule } from 'src/app/pipes/install-progress/install-progress.module' @NgModule({ declarations: [StatusComponent], @@ -11,7 +11,7 @@ import { InstallProgressPipeModule } from 'src/app/pipes/install-progress/instal CommonModule, IonicModule, UnitConversionPipesModule, - InstallProgressPipeModule, + InstallingProgressPipeModule, ], exports: [StatusComponent], }) diff --git a/web/projects/ui/src/app/components/status/status.component.ts b/web/projects/ui/src/app/components/status/status.component.ts index 14ece402c0..03894406e5 100644 --- a/web/projects/ui/src/app/components/status/status.component.ts +++ b/web/projects/ui/src/app/components/status/status.component.ts @@ -1,6 +1,6 @@ import { Component, Input } from '@angular/core' import { ConnectionService } from 'src/app/services/connection.service' -import { InstallProgress } from 'src/app/services/patch-db/data-model' +import { InstallingInfo } from 'src/app/services/patch-db/data-model' import { PrimaryRendering, PrimaryStatus, @@ -20,7 +20,7 @@ export class StatusComponent { @Input() size?: string @Input() style?: string = 'regular' @Input() weight?: string = 'normal' - @Input() installProgress?: InstallProgress + @Input() installingInfo?: InstallingInfo @Input() sigtermTimeout?: string | null = null readonly connected$ = this.connectionService.connected$ diff --git a/web/projects/ui/src/app/components/toast-container/notifications-toast/notifications-toast.service.ts b/web/projects/ui/src/app/components/toast-container/notifications-toast/notifications-toast.service.ts index 52d35ea3c6..a2368438d6 100644 --- a/web/projects/ui/src/app/components/toast-container/notifications-toast/notifications-toast.service.ts +++ b/web/projects/ui/src/app/components/toast-container/notifications-toast/notifications-toast.service.ts @@ -7,7 +7,7 @@ import { DataModel } from 'src/app/services/patch-db/data-model' @Injectable({ providedIn: 'root' }) export class NotificationsToastService extends Observable { private readonly stream$ = this.patch - .watch$('server-info', 'unread-notification-count') + .watch$('serverInfo', 'unreadNotificationCount') .pipe( pairwise(), map(([prev, cur]) => cur > prev), diff --git a/web/projects/ui/src/app/components/toast-container/refresh-alert/refresh-alert.service.ts b/web/projects/ui/src/app/components/toast-container/refresh-alert/refresh-alert.service.ts index 43c7f82c44..50cbc5a564 100644 --- a/web/projects/ui/src/app/components/toast-container/refresh-alert/refresh-alert.service.ts +++ b/web/projects/ui/src/app/components/toast-container/refresh-alert/refresh-alert.service.ts @@ -8,7 +8,7 @@ import { DataModel } from 'src/app/services/patch-db/data-model' @Injectable({ providedIn: 'root' }) export class 
RefreshAlertService extends Observable { - private readonly stream$ = this.patch.watch$('server-info', 'version').pipe( + private readonly stream$ = this.patch.watch$('serverInfo', 'version').pipe( map(version => !!this.emver.compare(this.config.version, version)), endWith(false), ) diff --git a/web/projects/ui/src/app/components/toast-container/update-toast/update-toast.service.ts b/web/projects/ui/src/app/components/toast-container/update-toast/update-toast.service.ts index 5eb6508698..819ed3dc6a 100644 --- a/web/projects/ui/src/app/components/toast-container/update-toast/update-toast.service.ts +++ b/web/projects/ui/src/app/components/toast-container/update-toast/update-toast.service.ts @@ -7,7 +7,7 @@ import { DataModel } from 'src/app/services/patch-db/data-model' @Injectable({ providedIn: 'root' }) export class UpdateToastService extends Observable { private readonly stream$ = this.patch - .watch$('server-info', 'status-info', 'updated') + .watch$('serverInfo', 'statusInfo', 'updated') .pipe(distinctUntilChanged(), filter(Boolean), endWith(false)) constructor(private readonly patch: PatchDB) { diff --git a/web/projects/ui/src/app/modals/app-config/app-config.page.html b/web/projects/ui/src/app/modals/app-config/app-config.page.html index 49eec5f5fb..42eb6cf393 100644 --- a/web/projects/ui/src/app/modals/app-config/app-config.page.html +++ b/web/projects/ui/src/app/modals/app-config/app-config.page.html @@ -9,7 +9,7 @@ - + - +

- {{ pkg.manifest.title }} has been automatically configured with + {{ manifest.title }} has been automatically configured with recommended defaults. Make whatever changes you want, then click "Save". @@ -59,19 +59,19 @@

- {{ pkg.manifest.title }} + {{ manifest.title }}

- The following modifications have been made to {{ - pkg.manifest.title }} to satisfy {{ dependentInfo.title }}: + The following modifications have been made to {{ manifest.title }} + to satisfy {{ dependentInfo.title }}:

@@ -82,11 +82,10 @@

- +

- No config options for {{ pkg.manifest.title }} {{ - pkg.manifest.version }}. + No config options for {{ manifest.title }} {{ manifest.version }}.

@@ -112,11 +111,7 @@

- + Reset Defaults @@ -124,7 +119,7 @@

Save loadingText = '' configSpec?: ConfigSpec @@ -53,8 +59,6 @@ export class AppConfigPage { saving = false loadingError: string | IonicSafeString = '' - hasOptions = false - constructor( private readonly embassyApi: ApiService, private readonly errToast: ErrorToastService, @@ -65,13 +69,18 @@ export class AppConfigPage { private readonly patch: PatchDB, ) {} + get hasConfig() { + return this.pkg.stateInfo.manifest.hasConfig + } + async ngOnInit() { try { const pkg = await getPackage(this.patch, this.pkgId) - if (!pkg) return + if (!pkg || !isInstalled(pkg)) return + this.pkg = pkg - if (!this.pkg.manifest.config) return + if (!this.hasConfig) return let newConfig: object | undefined let patch: Operation[] | undefined @@ -79,12 +88,12 @@ export class AppConfigPage { if (this.dependentInfo) { this.loadingText = `Setting properties to accommodate ${this.dependentInfo.title}` const { - 'old-config': oc, - 'new-config': nc, + oldConfig: oc, + newConfig: nc, spec: s, } = await this.embassyApi.dryConfigureDependency({ - 'dependency-id': this.pkgId, - 'dependent-id': this.dependentInfo.id, + dependencyId: this.pkgId, + dependentId: this.dependentInfo.id, }) this.original = oc newConfig = nc @@ -104,10 +113,6 @@ export class AppConfigPage { newConfig || this.original, ) - this.hasOptions = !!Object.values(this.configSpec).find( - valSpec => valSpec.type !== 'pointer', - ) - if (patch) { this.diff = this.getDiff(patch) this.markDirty(patch) @@ -145,7 +150,7 @@ export class AppConfigPage { this.saving = true - if (hasCurrentDeps(this.pkg)) { + if (hasCurrentDeps(this.pkgId, await getAllPackages(this.patch))) { this.dryConfigure() } else { this.configure() @@ -210,7 +215,7 @@ export class AppConfigPage { 'As a result of this change, the following services will no longer work properly and may crash:
    ' const localPkgs = await getAllPackages(this.patch) const bullets = Object.keys(breakages).map(id => { - const title = localPkgs[id].manifest.title + const title = getManifest(localPkgs[id]).title return `
  • ${title}
  • ` }) message = `${message}${bullets}
` diff --git a/web/projects/ui/src/app/modals/app-recover-select/app-recover-select.page.html b/web/projects/ui/src/app/modals/app-recover-select/app-recover-select.page.html index 09a0556507..9dfb72da9d 100644 --- a/web/projects/ui/src/app/modals/app-recover-select/app-recover-select.page.html +++ b/web/projects/ui/src/app/modals/app-recover-select/app-recover-select.page.html @@ -1,5 +1,5 @@ diff --git a/web/projects/ui/src/app/modals/app-recover-select/app-recover-select.page.ts b/web/projects/ui/src/app/modals/app-recover-select/app-recover-select.page.ts index 65f61e643e..15bec3d4fc 100644 --- a/web/projects/ui/src/app/modals/app-recover-select/app-recover-select.page.ts +++ b/web/projects/ui/src/app/modals/app-recover-select/app-recover-select.page.ts @@ -23,7 +23,7 @@ export class AppRecoverSelectPage { @Input() password!: string @Input() oldPassword?: string - readonly packageData$ = this.patch.watch$('package-data').pipe(take(1)) + readonly packageData$ = this.patch.watch$('packageData').pipe(take(1)) hasSelection = false error: string | IonicSafeString = '' @@ -53,8 +53,8 @@ export class AppRecoverSelectPage { try { await this.embassyApi.restorePackages({ ids, - 'target-id': this.id, - 'old-password': this.oldPassword || null, + targetId: this.id, + oldPassword: this.oldPassword || null, password: this.password, }) this.modalCtrl.dismiss(undefined, 'success') diff --git a/web/projects/ui/src/app/modals/app-recover-select/to-options.pipe.ts b/web/projects/ui/src/app/modals/app-recover-select/to-options.pipe.ts index 2ac688a6b5..6a81df4bc8 100644 --- a/web/projects/ui/src/app/modals/app-recover-select/to-options.pipe.ts +++ b/web/projects/ui/src/app/modals/app-recover-select/to-options.pipe.ts @@ -34,7 +34,7 @@ export class ToOptionsPipe implements PipeTransform { id, installed: !!packageData[id], checked: false, - 'newer-eos': this.compare(packageBackups[id]['os-version']), + 'newer-eos': this.compare(packageBackups[id].osVersion), })) .sort((a, b) => b.title.toLowerCase() > a.title.toLowerCase() ? 
-1 : 1, diff --git a/web/projects/ui/src/app/modals/backup-select/backup-select.page.ts b/web/projects/ui/src/app/modals/backup-select/backup-select.page.ts index 032c0c8400..2b6934c39c 100644 --- a/web/projects/ui/src/app/modals/backup-select/backup-select.page.ts +++ b/web/projects/ui/src/app/modals/backup-select/backup-select.page.ts @@ -1,9 +1,10 @@ import { Component } from '@angular/core' import { ModalController } from '@ionic/angular' import { map, take } from 'rxjs/operators' -import { DataModel, PackageState } from 'src/app/services/patch-db/data-model' +import { DataModel } from 'src/app/services/patch-db/data-model' import { PatchDB } from 'patch-db-client' import { firstValueFrom } from 'rxjs' +import { getManifest } from 'src/app/util/get-package-data' @Component({ selector: 'backup-select', @@ -28,17 +29,17 @@ export class BackupSelectPage { async ngOnInit() { this.pkgs = await firstValueFrom( - this.patch.watch$('package-data').pipe( + this.patch.watch$('packageData').pipe( map(pkgs => { return Object.values(pkgs) .map(pkg => { - const { id, title } = pkg.manifest + const { id, title } = getManifest(pkg) return { id, title, - icon: pkg['static-files'].icon, - disabled: pkg.state !== PackageState.Installed, - checked: pkg.state === PackageState.Installed, + icon: pkg.icon, + disabled: pkg.stateInfo.state !== 'installed', + checked: false, } }) .sort((a, b) => diff --git a/web/projects/ui/src/app/modals/marketplace-settings/marketplace-settings.page.ts b/web/projects/ui/src/app/modals/marketplace-settings/marketplace-settings.page.ts index 9743e531c3..f623318195 100644 --- a/web/projects/ui/src/app/modals/marketplace-settings/marketplace-settings.page.ts +++ b/web/projects/ui/src/app/modals/marketplace-settings/marketplace-settings.page.ts @@ -200,7 +200,7 @@ export class MarketplaceSettingsPage { ): Promise { // Error on duplicates const hosts = await firstValueFrom( - this.patch.watch$('ui', 'marketplace', 'known-hosts'), + this.patch.watch$('ui', 'marketplace', 'knownHosts'), ) const currentUrls = Object.keys(hosts).map(toUrl) if (currentUrls.includes(url)) throw new Error('marketplace already added') @@ -217,7 +217,7 @@ export class MarketplaceSettingsPage { loader.message = 'Saving...' await this.api.setDbValue<{ name: string }>( - ['marketplace', 'known-hosts', url], + ['marketplace', 'knownHosts', url], { name }, ) } @@ -229,7 +229,7 @@ export class MarketplaceSettingsPage { await loader.present() const hosts = await firstValueFrom( - this.patch.watch$('ui', 'marketplace', 'known-hosts'), + this.patch.watch$('ui', 'marketplace', 'knownHosts'), ) const filtered: { [url: string]: UIStore } = Object.keys(hosts) @@ -244,7 +244,7 @@ export class MarketplaceSettingsPage { try { await this.api.setDbValue<{ [url: string]: UIStore }>( - ['marketplace', 'known-hosts'], + ['marketplace', 'knownHosts'], filtered, ) } catch (e: any) { diff --git a/web/projects/ui/src/app/modals/os-update/os-update.page.ts b/web/projects/ui/src/app/modals/os-update/os-update.page.ts index eb9de93dbb..9c49b72bf3 100644 --- a/web/projects/ui/src/app/modals/os-update/os-update.page.ts +++ b/web/projects/ui/src/app/modals/os-update/os-update.page.ts @@ -22,7 +22,7 @@ export class OSUpdatePage { ) {} ngOnInit() { - const releaseNotes = this.eosService.eos?.['release-notes']! + const releaseNotes = this.eosService.eos?.releaseNotes! 
this.versions = Object.keys(releaseNotes) .sort() diff --git a/web/projects/ui/src/app/pages/apps-routes/app-actions/app-actions.page.html b/web/projects/ui/src/app/pages/apps-routes/app-actions/app-actions.page.html index adcfcb8295..ec425f8bb6 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-actions/app-actions.page.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-actions/app-actions.page.html @@ -8,30 +8,32 @@ - - - Standard Actions - + + + + Standard Actions + - - - Actions for {{ pkg.manifest.title }} - - - + + + Actions for {{ pkg.stateInfo.manifest.title }} + + + + diff --git a/web/projects/ui/src/app/pages/apps-routes/app-actions/app-actions.page.ts b/web/projects/ui/src/app/pages/apps-routes/app-actions/app-actions.page.ts index bd25a1a422..62038e78ba 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-actions/app-actions.page.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-actions/app-actions.page.ts @@ -9,15 +9,16 @@ import { } from '@ionic/angular' import { PatchDB } from 'patch-db-client' import { - Action, DataModel, PackageDataEntry, - PackageMainStatus, } from 'src/app/services/patch-db/data-model' import { GenericFormPage } from 'src/app/modals/generic-form/generic-form.page' import { isEmptyObject, ErrorToastService, getPkgId } from '@start9labs/shared' import { ActionSuccessPage } from 'src/app/modals/action-success/action-success.page' import { hasCurrentDeps } from 'src/app/util/has-deps' +import { getAllPackages, getManifest } from 'src/app/util/get-package-data' +import { ActionMetadata } from '@start9labs/start-sdk/cjs/sdk/lib/types' +import { Status } from '../../../../../../../../core/startos/bindings/Status' @Component({ selector: 'app-actions', @@ -27,7 +28,7 @@ import { hasCurrentDeps } from 'src/app/util/has-deps' }) export class AppActionsPage { readonly pkgId = getPkgId(this.route) - readonly pkg$ = this.patch.watch$('package-data', this.pkgId) + readonly pkg$ = this.patch.watch$('packageData', this.pkgId) constructor( private readonly route: ActivatedRoute, @@ -41,22 +42,21 @@ export class AppActionsPage { ) {} async handleAction( - pkg: PackageDataEntry, - action: { key: string; value: Action }, + status: Status, + action: { key: string; value: ActionMetadata }, ) { - const status = pkg.installed?.status if ( status && - (action.value['allowed-statuses'] as PackageMainStatus[]).includes( - status.main.status, + action.value.allowedStatuses.includes( + status.main.status, // @TODO ) ) { - if (!isEmptyObject(action.value['input-spec'] || {})) { + if (!isEmptyObject(action.value.input || {})) { const modal = await this.modalCtrl.create({ component: GenericFormPage, componentProps: { title: action.value.name, - spec: action.value['input-spec'], + spec: action.value.input, buttons: [ { text: 'Execute', @@ -92,7 +92,7 @@ export class AppActionsPage { await alert.present() } } else { - const statuses = [...action.value['allowed-statuses']] + const statuses = [...action.value.allowedStatuses] // @TODO const last = statuses.pop() let statusesStr = statuses.join(', ') let error = '' @@ -120,13 +120,13 @@ export class AppActionsPage { } async tryUninstall(pkg: PackageDataEntry): Promise { - const { title, alerts } = pkg.manifest + const { title, alerts } = getManifest(pkg) let message = alerts.uninstall || `Uninstalling ${title} will permanently delete its data` - if (hasCurrentDeps(pkg)) { + if (hasCurrentDeps(this.pkgId, await getAllPackages(this.patch))) { message = `${message}. 
Services that depend on ${title} will no longer work properly and may crash` } @@ -183,7 +183,7 @@ export class AppActionsPage { try { const res = await this.embassyApi.executePackageAction({ id: this.pkgId, - 'action-id': actionId, + actionId, input, }) diff --git a/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces-item.component.html b/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces-item.component.html index 932c1fd0a9..420cbcf1bf 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces-item.component.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces-item.component.html @@ -1,72 +1,44 @@ - + -

{{ interface.def.name }}

-

{{ interface.def.description }}

+

{{ iFace.name }}

+

{{ iFace.description }}

-
- - +
+ -

Tor Address

-

{{ tor }}

+

{{ address.name }}

+

{{ address.url }}

- + - + - +
- - - -

Tor Address

-

Service does not use a Tor Address

-
-
- - - - -

LAN Address

-

{{ lan }}

-
- - - - - - - - - - - -
- - - -

LAN Address

-

N/A

-
-
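For reference, a minimal sketch (not part of the patch) of the view model the reworked item template above iterates over. The names mirror the MappedInterface and MappedAddress types introduced later in this diff in app-interfaces.page.ts; the bindings import path is abbreviated here for readability.

    import { ServiceInterface } from 'core/startos/bindings/ServiceInterface' // abbreviated; the diff uses a long relative path

    // Each mapped interface carries display-ready addresses; the template renders
    // one row per entry as {{ address.name }} / {{ address.url }}.
    type MappedAddress = { name: string; url: string }
    type MappedInterface = ServiceInterface & { addresses: MappedAddress[] }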
diff --git a/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces.page.html b/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces.page.html index 16c6a5bd68..79b1a1fde3 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces.page.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces.page.html @@ -8,19 +8,29 @@ - - - - User Interface - + + + User Interfaces + - - - Machine Interfaces -
- -
+ + Application Program Interfaces + + + + + Peer-To-Peer Interfaces +
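A note on the hunk that follows: the sort() callback in the new serviceInterfaces$ stream takes a single argument and compares iface.name against itself, so it never reorders anything. If alphabetical order by name is the intent (an assumption), a two-argument comparator along these lines would do it. Sketch only, reusing the same interfaces and getAddresses identifiers that appear in the hunk:

    const sorted = Object.values(interfaces)
      // compare two distinct elements rather than an element with itself
      .sort((a, b) => a.name.toLowerCase().localeCompare(b.name.toLowerCase()))
      .map(iface => ({ ...iface, addresses: getAddresses(iface) }))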
diff --git a/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces.page.ts b/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces.page.ts index 825d6536df..5f72274644 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces.page.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-interfaces/app-interfaces.page.ts @@ -3,19 +3,20 @@ import { WINDOW } from '@ng-web-apis/common' import { ActivatedRoute } from '@angular/router' import { ModalController, ToastController } from '@ionic/angular' import { copyToClipboard, getPkgId } from '@start9labs/shared' -import { getUiInterfaceKey } from 'src/app/services/config.service' -import { - DataModel, - InstalledPackageDataEntry, - InterfaceDef, -} from 'src/app/services/patch-db/data-model' +import { DataModel } from 'src/app/services/patch-db/data-model' import { PatchDB } from 'patch-db-client' import { QRComponent } from 'src/app/components/qr/qr.component' -import { getPackage } from '../../../util/get-package-data' +import { map } from 'rxjs' +import { types as T } from '@start9labs/start-sdk' +import { ServiceInterface } from '../../../../../../../../core/startos/bindings/ServiceInterface' +import { ServiceInterfaceWithHostInfo } from '../../../../../../../../core/startos/bindings/ServiceInterfaceWithHostInfo' -interface LocalInterface { - def: InterfaceDef - addresses: InstalledPackageDataEntry['interface-addresses'][string] +type MappedInterface = ServiceInterface & { + addresses: MappedAddress[] +} +type MappedAddress = { + name: string + url: string } @Component({ @@ -24,60 +25,33 @@ interface LocalInterface { styleUrls: ['./app-interfaces.page.scss'], }) export class AppInterfacesPage { - ui?: LocalInterface - other: LocalInterface[] = [] readonly pkgId = getPkgId(this.route) + readonly serviceInterfaces$ = this.patch + .watch$('packageData', this.pkgId, 'serviceInterfaces') + .pipe( + map(interfaces => { + const sorted = Object.values(interfaces) + .sort(iface => + iface.name.toLowerCase() > iface.name.toLowerCase() ? -1 : 1, + ) + .map(iface => ({ + ...iface, + addresses: getAddresses(iface), + })) + + return { + ui: sorted.filter(val => val.type === 'ui'), + api: sorted.filter(val => val.type === 'api'), + p2p: sorted.filter(val => val.type === 'p2p'), + } + }), + ) + constructor( private readonly route: ActivatedRoute, private readonly patch: PatchDB, ) {} - - async ngOnInit() { - const pkg = await getPackage(this.patch, this.pkgId) - if (!pkg) return - - const interfaces = pkg.manifest.interfaces - const uiKey = getUiInterfaceKey(interfaces) - - if (!pkg.installed) return - - const addressesMap = pkg.installed['interface-addresses'] - - if (uiKey) { - const uiAddresses = addressesMap[uiKey] - this.ui = { - def: interfaces[uiKey], - addresses: { - 'lan-address': uiAddresses['lan-address'] - ? 'https://' + uiAddresses['lan-address'] - : '', - // leave http for services - 'tor-address': uiAddresses['tor-address'] - ? 'http://' + uiAddresses['tor-address'] - : '', - }, - } - } - - this.other = Object.keys(interfaces) - .filter(key => key !== uiKey) - .map(key => { - const addresses = addressesMap[key] - return { - def: interfaces[key], - addresses: { - 'lan-address': addresses['lan-address'] - ? 'https://' + addresses['lan-address'] - : '', - 'tor-address': addresses['tor-address'] - ? 
// leave http for services - 'http://' + addresses['tor-address'] - : '', - }, - } - }) - } } @Component({ @@ -86,8 +60,7 @@ export class AppInterfacesPage { styleUrls: ['./app-interfaces.page.scss'], }) export class AppInterfacesItemComponent { - @Input() - interface!: LocalInterface + @Input() iFace!: MappedInterface constructor( private readonly toastCtrl: ToastController, @@ -126,3 +99,68 @@ export class AppInterfacesItemComponent { await toast.present() } } + +function getAddresses( + serviceInterface: ServiceInterfaceWithHostInfo, +): MappedAddress[] { + const host = serviceInterface.hostInfo + const addressInfo = serviceInterface.addressInfo + const username = addressInfo.username ? addressInfo.username + '@' : '' + const suffix = addressInfo.suffix || '' + + const hostnames = host.kind === 'multi' ? host.hostnames : [] // TODO: non-multi + /* host.hostname + ? [host.hostname] + : [] */ + + const addresses: MappedAddress[] = [] + + hostnames.forEach(h => { + let name = '' + let hostname = '' + + if (h.kind === 'onion') { + name = 'Tor' + hostname = h.hostname.value + } else { + const hostnameKind = h.hostname.kind + + if (hostnameKind === 'domain') { + name = 'Domain' + hostname = `${h.hostname.subdomain}.${h.hostname.domain}` + } else { + name = + hostnameKind === 'local' + ? 'Local' + : `${h.networkInterfaceId} (${hostnameKind})` + hostname = h.hostname.value + } + } + + if (h.hostname.sslPort) { + const port = h.hostname.sslPort === 443 ? '' : `:${h.hostname.sslPort}` + const scheme = addressInfo.bindOptions.addSsl?.scheme + ? `${addressInfo.bindOptions.addSsl.scheme}://` + : '' + + addresses.push({ + name: name === 'Tor' ? 'Tor (HTTPS)' : name, + url: `${scheme}${username}${hostname}${port}${suffix}`, + }) + } + + if (h.hostname.port) { + const port = h.hostname.port === 80 ? '' : `:${h.hostname.port}` + const scheme = addressInfo.bindOptions.scheme + ? `${addressInfo.bindOptions.scheme}://` + : '' + + addresses.push({ + name: name === 'Tor' ? 'Tor (HTTP)' : name, + url: `${scheme}${username}${hostname}${port}${suffix}`, + }) + } + }) + + return addresses +} diff --git a/web/projects/ui/src/app/pages/apps-routes/app-list/app-list-pkg/app-list-pkg.component.html b/web/projects/ui/src/app/pages/apps-routes/app-list/app-list-pkg/app-list-pkg.component.html index 037f7cc07c..9173de39f2 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-list/app-list-pkg/app-list-pkg.component.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-list/app-list-pkg/app-list-pkg.component.html @@ -1,32 +1,34 @@ - +

{{ manifest.title }}

{{ manifest.version | displayEmver }}

diff --git a/web/projects/ui/src/app/pages/apps-routes/app-list/app-list-pkg/app-list-pkg.component.ts b/web/projects/ui/src/app/pages/apps-routes/app-list/app-list-pkg/app-list-pkg.component.ts index 3302e182e1..b1c62618d8 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-list/app-list-pkg/app-list-pkg.component.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-list/app-list-pkg/app-list-pkg.component.ts @@ -1,7 +1,8 @@ import { ChangeDetectionStrategy, Component, Input } from '@angular/core' -import { PackageMainStatus } from 'src/app/services/patch-db/data-model' +import { PackageDataEntry } from 'src/app/services/patch-db/data-model' import { PkgInfo } from 'src/app/util/get-package-info' import { UiLauncherService } from 'src/app/services/ui-launcher.service' +import { MainStatus } from '../../../../../../../../../core/startos/bindings/MainStatus' @Component({ selector: 'app-list-pkg', @@ -14,15 +15,23 @@ export class AppListPkgComponent { constructor(private readonly launcherService: UiLauncherService) {} - get status(): PackageMainStatus { + get pkgMainStatus(): MainStatus { return ( - this.pkg.entry.installed?.status.main.status || PackageMainStatus.Stopped + this.pkg.entry.status.main || { + status: 'stopped', + } ) } - launchUi(e: Event): void { + get sigtermTimeout(): string | null { + return this.pkgMainStatus.status === 'stopping' + ? this.pkgMainStatus.timeout + : null + } + + launchUi(e: Event, interfaces: PackageDataEntry['serviceInterfaces']): void { e.stopPropagation() e.preventDefault() - this.launcherService.launch(this.pkg.entry) + this.launcherService.launch(interfaces) } } diff --git a/web/projects/ui/src/app/pages/apps-routes/app-list/app-list.page.html b/web/projects/ui/src/app/pages/apps-routes/app-list/app-list.page.html index 3bc38f762d..cc19bdff59 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-list/app-list.page.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-list/app-list.page.html @@ -27,7 +27,7 @@

Welcome to StartOS

sizeMd="6" > diff --git a/web/projects/ui/src/app/pages/apps-routes/app-list/app-list.page.ts b/web/projects/ui/src/app/pages/apps-routes/app-list/app-list.page.ts index f7d7685ff5..8c36d22dce 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-list/app-list.page.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-list/app-list.page.ts @@ -2,6 +2,7 @@ import { ChangeDetectionStrategy, Component } from '@angular/core' import { PatchDB } from 'patch-db-client' import { DataModel } from 'src/app/services/patch-db/data-model' import { filter, map, pairwise, startWith } from 'rxjs/operators' +import { getManifest } from 'src/app/util/get-package-data' @Component({ selector: 'app-list', @@ -10,7 +11,7 @@ import { filter, map, pairwise, startWith } from 'rxjs/operators' changeDetection: ChangeDetectionStrategy.OnPush, }) export class AppListPage { - readonly pkgs$ = this.patch.watch$('package-data').pipe( + readonly pkgs$ = this.patch.watch$('packageData').pipe( map(pkgs => Object.values(pkgs)), startWith([]), pairwise(), @@ -20,7 +21,7 @@ export class AppListPage { }), map(([_, pkgs]) => pkgs.sort((a, b) => - b.manifest.title.toLowerCase() > a.manifest.title.toLowerCase() + getManifest(b).title.toLowerCase() > getManifest(a).title.toLowerCase() ? -1 : 1, ), diff --git a/web/projects/ui/src/app/pages/apps-routes/app-list/package-info.pipe.ts b/web/projects/ui/src/app/pages/apps-routes/app-list/package-info.pipe.ts index c0d00c2c9d..3695c45360 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-list/package-info.pipe.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-list/package-info.pipe.ts @@ -1,5 +1,5 @@ import { Pipe, PipeTransform } from '@angular/core' -import { Observable, combineLatest, firstValueFrom } from 'rxjs' +import { Observable, combineLatest } from 'rxjs' import { filter, map } from 'rxjs/operators' import { DataModel } from 'src/app/services/patch-db/data-model' import { getPackageInfo, PkgInfo } from '../../../util/get-package-info' @@ -17,7 +17,7 @@ export class PackageInfoPipe implements PipeTransform { transform(pkgId: string): Observable { return combineLatest([ - this.patch.watch$('package-data', pkgId).pipe(filter(Boolean)), + this.patch.watch$('packageData', pkgId).pipe(filter(Boolean)), this.depErrorService.getPkgDepErrors$(pkgId), ]).pipe(map(([pkg, depErrors]) => getPackageInfo(pkg, depErrors))) } diff --git a/web/projects/ui/src/app/pages/apps-routes/app-properties/app-properties.page.ts b/web/projects/ui/src/app/pages/apps-routes/app-properties/app-properties.page.ts index 8a49bad528..d9b6bbc3e6 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-properties/app-properties.page.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-properties/app-properties.page.ts @@ -11,10 +11,7 @@ import { import { PackageProperties } from 'src/app/util/properties.util' import { QRComponent } from 'src/app/components/qr/qr.component' import { PatchDB } from 'patch-db-client' -import { - DataModel, - PackageMainStatus, -} from 'src/app/services/patch-db/data-model' +import { DataModel } from 'src/app/services/patch-db/data-model' import { ErrorToastService, getPkgId, @@ -41,8 +38,8 @@ export class AppPropertiesPage { unmasked: { [key: string]: boolean } = {} stopped$ = this.patch - .watch$('package-data', this.pkgId, 'installed', 'status', 'main', 'status') - .pipe(map(status => status === PackageMainStatus.Stopped)) + .watch$('packageData', this.pkgId, 'status', 'main', 'status') + .pipe(map(status => status === 'stopped')) 
@ViewChild(IonBackButtonDelegate, { static: false }) backButton?: IonBackButtonDelegate diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.module.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.module.ts index c69910c784..0b2d82763f 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.module.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.module.ts @@ -3,7 +3,12 @@ import { CommonModule } from '@angular/common' import { Routes, RouterModule } from '@angular/router' import { IonicModule } from '@ionic/angular' import { AppShowPage } from './app-show.page' -import { EmverPipesModule, ResponsiveColModule } from '@start9labs/shared' +import { + EmptyPipe, + EmverPipesModule, + ResponsiveColModule, + SharedPipesModule, +} from '@start9labs/shared' import { StatusComponentModule } from 'src/app/components/status/status.component.module' import { AppConfigPageModule } from 'src/app/modals/app-config/app-config.module' import { LaunchablePipeModule } from 'src/app/pipes/launchable/launchable.module' @@ -18,7 +23,7 @@ import { AppShowAdditionalComponent } from './components/app-show-additional/app import { HealthColorPipe } from './pipes/health-color.pipe' import { ToHealthChecksPipe } from './pipes/to-health-checks.pipe' import { ToButtonsPipe } from './pipes/to-buttons.pipe' -import { ProgressDataPipe } from './pipes/progress-data.pipe' +import { InstallingProgressPipeModule } from 'src/app/pipes/install-progress/install-progress.module' const routes: Routes = [ { @@ -31,7 +36,6 @@ const routes: Routes = [ declarations: [ AppShowPage, HealthColorPipe, - ProgressDataPipe, ToHealthChecksPipe, ToButtonsPipe, AppShowHeaderComponent, @@ -44,7 +48,7 @@ const routes: Routes = [ ], imports: [ CommonModule, - StatusComponentModule, + InstallingProgressPipeModule, IonicModule, RouterModule.forChild(routes), AppConfigPageModule, @@ -52,6 +56,8 @@ const routes: Routes = [ LaunchablePipeModule, UiPipeModule, ResponsiveColModule, + StatusComponentModule, + SharedPipesModule, ], }) export class AppShowPageModule {} diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.page.html b/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.page.html index d2b7eaead7..238d1d2089 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.page.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.page.html @@ -7,9 +7,8 @@ @@ -19,11 +18,13 @@ - + - + diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.page.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.page.ts index ab250e1a71..52404aea39 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.page.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/app-show.page.ts @@ -3,16 +3,11 @@ import { NavController } from '@ionic/angular' import { PatchDB } from 'patch-db-client' import { DataModel, - InstalledPackageDataEntry, - Manifest, + InstallingState, PackageDataEntry, - PackageState, + UpdatingState, } from 'src/app/services/patch-db/data-model' -import { - PackageStatus, - PrimaryStatus, - renderPkgStatus, -} from 'src/app/services/pkg-status-rendering.service' +import { renderPkgStatus } from 'src/app/services/pkg-status-rendering.service' import { map, tap } from 'rxjs/operators' import { ActivatedRoute, NavigationExtras } from '@angular/router' import { getPkgId } from '@start9labs/shared' @@ -24,6 +19,14 @@ import { PkgDependencyErrors, } from 
'src/app/services/dep-error.service' import { combineLatest } from 'rxjs' +import { + getManifest, + isInstalled, + isInstalling, + isRestoring, + isUpdating, +} from 'src/app/util/get-package-data' +import { Manifest } from '../../../../../../../../core/startos/bindings/Manifest' export interface DependencyInfo { id: string @@ -35,12 +38,6 @@ export interface DependencyInfo { action: () => any } -const STATES = [ - PackageState.Installing, - PackageState.Updating, - PackageState.Restoring, -] - @Component({ selector: 'app-show', templateUrl: './app-show.page.html', @@ -50,7 +47,7 @@ export class AppShowPage { private readonly pkgId = getPkgId(this.route) readonly pkgPlus$ = combineLatest([ - this.patch.watch$('package-data', this.pkgId), + this.patch.watch$('packageData', this.pkgId), this.depErrorService.getPkgDepErrors$(this.pkgId), ]).pipe( tap(([pkg, _]) => { @@ -66,6 +63,8 @@ export class AppShowPage { }), ) + isInstalled = isInstalled + constructor( private readonly route: ActivatedRoute, private readonly navCtrl: NavController, @@ -74,55 +73,45 @@ export class AppShowPage { private readonly depErrorService: DepErrorService, ) {} - isInstalled({ state }: PackageDataEntry): boolean { - return state === PackageState.Installed - } - - isRunning({ primary }: PackageStatus): boolean { - return primary === PrimaryStatus.Running - } - - isBackingUp({ primary }: PackageStatus): boolean { - return primary === PrimaryStatus.BackingUp - } - - showProgress({ state }: PackageDataEntry): boolean { - return STATES.includes(state) + showProgress( + pkg: PackageDataEntry, + ): pkg is PackageDataEntry { + return isInstalling(pkg) || isUpdating(pkg) || isRestoring(pkg) } private getDepInfo( pkg: PackageDataEntry, depErrors: PkgDependencyErrors, ): DependencyInfo[] { - const pkgInstalled = pkg.installed + const manifest = getManifest(pkg) - if (!pkgInstalled) return [] - - return Object.keys(pkgInstalled['current-dependencies']) - .filter(id => !!pkgInstalled.manifest.dependencies[id]) - .map(id => this.getDepValues(pkgInstalled, id, depErrors)) + return Object.keys(pkg.currentDependencies) + .filter(id => !!manifest.dependencies[id]) + .map(id => this.getDepValues(pkg, manifest, id, depErrors)) } private getDepValues( - pkgInstalled: InstalledPackageDataEntry, + pkg: PackageDataEntry, + manifest: Manifest, depId: string, depErrors: PkgDependencyErrors, ): DependencyInfo { const { errorText, fixText, fixAction } = this.getDepErrors( - pkgInstalled, + pkg, + manifest, depId, depErrors, ) - const depInfo = pkgInstalled['dependency-info'][depId] + const { title, icon, versionSpec } = pkg.currentDependencies[depId] return { id: depId, - version: pkgInstalled.manifest.dependencies[depId].version, // do we want this version range? - title: depInfo?.title || depId, - icon: depInfo?.icon || '', + version: versionSpec, + title, + icon, errorText: errorText - ? `${errorText}. ${pkgInstalled.manifest.title} will not work as expected.` + ? `${errorText}. 
${manifest.title} will not work as expected.` : '', actionText: fixText || 'View', action: @@ -131,11 +120,11 @@ export class AppShowPage { } private getDepErrors( - pkgInstalled: InstalledPackageDataEntry, + pkg: PackageDataEntry, + manifest: Manifest, depId: string, depErrors: PkgDependencyErrors, ) { - const pkgManifest = pkgInstalled.manifest const depError = depErrors[depId] let errorText: string | null = null @@ -146,15 +135,15 @@ export class AppShowPage { if (depError.type === DependencyErrorType.NotInstalled) { errorText = 'Not installed' fixText = 'Install' - fixAction = () => this.fixDep(pkgManifest, 'install', depId) + fixAction = () => this.fixDep(pkg, manifest, 'install', depId) } else if (depError.type === DependencyErrorType.IncorrectVersion) { errorText = 'Incorrect version' fixText = 'Update' - fixAction = () => this.fixDep(pkgManifest, 'update', depId) + fixAction = () => this.fixDep(pkg, manifest, 'update', depId) } else if (depError.type === DependencyErrorType.ConfigUnsatisfied) { errorText = 'Config not satisfied' fixText = 'Auto config' - fixAction = () => this.fixDep(pkgManifest, 'configure', depId) + fixAction = () => this.fixDep(pkg, manifest, 'configure', depId) } else if (depError.type === DependencyErrorType.NotRunning) { errorText = 'Not running' fixText = 'Start' @@ -173,6 +162,7 @@ export class AppShowPage { } private async fixDep( + pkg: PackageDataEntry, pkgManifest: Manifest, action: 'install' | 'update' | 'configure', id: string, @@ -180,22 +170,21 @@ export class AppShowPage { switch (action) { case 'install': case 'update': - return this.installDep(pkgManifest, id) + return this.installDep(pkg, pkgManifest, id) case 'configure': return this.configureDep(pkgManifest, id) } } private async installDep( + pkg: PackageDataEntry, pkgManifest: Manifest, depId: string, ): Promise { - const version = pkgManifest.dependencies[depId].version - const dependentInfo: DependentInfo = { id: pkgManifest.id, title: pkgManifest.title, - version, + version: pkg.currentDependencies[depId].versionSpec, } const navigationExtras: NavigationExtras = { state: { dependentInfo }, diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-additional/app-show-additional.component.html b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-additional/app-show-additional.component.html index 81fe8fb845..73fc2158b4 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-additional/app-show-additional.component.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-additional/app-show-additional.component.html @@ -1,5 +1,5 @@ Additional Info - + @@ -10,7 +10,7 @@

Version

License

Marketing Site

-

{{ manifest['marketing-site'] || 'Not provided' }}

+

{{ manifest.marketingSite || 'Not provided' }}

@@ -54,52 +54,52 @@

Marketing Site

Source Repository

-

{{ manifest['upstream-repo'] }}

+

{{ manifest.upstreamRepo }}

Wrapper Repository

-

{{ manifest['wrapper-repo'] }}

+

{{ manifest.wrapperRepo }}

Support Site

-

{{ manifest['support-site'] || 'Not provided' }}

+

{{ manifest.supportSite || 'Not provided' }}

Donation Link

-

{{ manifest['donation-url'] || 'Not provided' }}

+

{{ manifest.donationUrl || 'Not provided' }}

diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-additional/app-show-additional.component.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-additional/app-show-additional.component.ts index 5018984283..b639b216f8 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-additional/app-show-additional.component.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-additional/app-show-additional.component.ts @@ -3,7 +3,7 @@ import { ModalController, ToastController } from '@ionic/angular' import { copyToClipboard, MarkdownComponent } from '@start9labs/shared' import { from } from 'rxjs' import { ApiService } from 'src/app/services/api/embassy-api.service' -import { PackageDataEntry } from 'src/app/services/patch-db/data-model' +import { Manifest } from '../../../../../../../../../../core/startos/bindings/Manifest' @Component({ selector: 'app-show-additional', @@ -12,7 +12,7 @@ import { PackageDataEntry } from 'src/app/services/patch-db/data-model' }) export class AppShowAdditionalComponent { @Input() - pkg!: PackageDataEntry + manifest!: Manifest constructor( private readonly modalCtrl: ModalController, @@ -35,10 +35,16 @@ export class AppShowAdditionalComponent { } async presentModalLicense() { + const { id, version } = this.manifest + const modal = await this.modalCtrl.create({ componentProps: { title: 'License', - content: from(this.api.getStatic(this.pkg['static-files']['license'])), + content: from( + this.api.getStatic( + `/public/package-data/${id}/${version}/LICENSE.md`, + ), + ), }, component: MarkdownComponent, }) diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-header/app-show-header.component.html b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-header/app-show-header.component.html index 17dcd1be11..06f5172226 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-header/app-show-header.component.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-header/app-show-header.component.html @@ -4,15 +4,12 @@
- - -

- {{ pkg.manifest.title }} + + +

+ {{ manifest.title }}

-

{{ pkg.manifest.version | displayEmver }}

+

{{ manifest.version | displayEmver }}

diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-health-checks/app-show-health-checks.component.html b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-health-checks/app-show-health-checks.component.html index fe854f8fc4..3668adc7bb 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-health-checks/app-show-health-checks.component.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-health-checks/app-show-health-checks.component.html @@ -1,93 +1,81 @@ - - - Health Checks - - - - - - - - - - -

- {{ pkg.manifest['health-checks'][health.key].name }} -

- -

- {{ result | titlecase }} - ... - - {{ $any(health.value).error }} - - - {{ $any(health.value).message }} - - - : - {{ - pkg.manifest['health-checks'][health.key]['success-message'] - }} - -

-
-
-
- - - - -

- {{ pkg.manifest['health-checks'][health.key].name }} -

-

Awaiting result...

-
-
-
-
- - - - - - + + Health Checks + + + + + + + + + - - +

+ {{ check.value.name }} +

+ +

+ {{ result | titlecase }} + ... + + {{ check.value.message }} + +

+
-
-
+
+ + + + +

+ {{ check.value.name }} +

+

Awaiting result...

+
+
+
+ + + + + + + + + + + + diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-health-checks/app-show-health-checks.component.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-health-checks/app-show-health-checks.component.ts index 5db1ab1cac..ccf46a6e8e 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-health-checks/app-show-health-checks.component.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-health-checks/app-show-health-checks.component.ts @@ -1,9 +1,6 @@ import { ChangeDetectionStrategy, Component, Input } from '@angular/core' import { ConnectionService } from 'src/app/services/connection.service' -import { - HealthResult, - PackageDataEntry, -} from 'src/app/services/patch-db/data-model' +import { HealthCheckResult } from '../../../../../../../../../../core/startos/bindings/HealthCheckResult' @Component({ selector: 'app-show-health-checks', @@ -13,20 +10,18 @@ import { }) export class AppShowHealthChecksComponent { @Input() - pkg!: PackageDataEntry - - HealthResult = HealthResult + healthChecks!: Record readonly connected$ = this.connectionService.connected$ constructor(private readonly connectionService: ConnectionService) {} - isLoading(result: HealthResult): boolean { - return result === HealthResult.Starting || result === HealthResult.Loading + isLoading(result: HealthCheckResult['result']): boolean { + return result === 'starting' || result === 'loading' } - isReady(result: HealthResult): boolean { - return result !== HealthResult.Failure && result !== HealthResult.Loading + isReady(result: HealthCheckResult['result']): boolean { + return result !== 'failure' && result !== 'loading' } asIsOrder() { diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-progress/app-show-progress.component.html b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-progress/app-show-progress.component.html index 3134cee5ef..e259f2792a 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-progress/app-show-progress.component.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-progress/app-show-progress.component.html @@ -1,20 +1,18 @@ -

Downloading: {{ progressData.downloadProgress }}%

- - -

Validating: {{ progressData.validateProgress }}%

- - -

Unpacking: {{ progressData.unpackProgress }}%

- + +

+ {{ phase.name }} + + : {{ progress * 100 }}% + +

+ +
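One caveat on the template above: {{ progress * 100 }}% prints the raw product, so fractional values can render as long decimals such as 33.333333333333336%. A rounded helper, as a sketch only and assuming progress is a 0 to 1 ratio by the time the template reads it:

    // Assumption: `progress` is a fraction between 0 and 1.
    function asPercent(progress: number): string {
      return `${Math.round(progress * 100)}%`
    }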
diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-progress/app-show-progress.component.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-progress/app-show-progress.component.ts index 8ee7b750a1..474b62f62c 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-progress/app-show-progress.component.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-progress/app-show-progress.component.ts @@ -1,9 +1,5 @@ import { ChangeDetectionStrategy, Component, Input } from '@angular/core' -import { - InstallProgress, - PackageDataEntry, -} from 'src/app/services/patch-db/data-model' -import { ProgressData } from 'src/app/types/progress-data' +import { FullProgress } from '../../../../../../../../../../core/startos/bindings/FullProgress' @Component({ selector: 'app-show-progress', @@ -13,26 +9,5 @@ import { ProgressData } from 'src/app/types/progress-data' }) export class AppShowProgressComponent { @Input() - pkg!: PackageDataEntry - - @Input() - progressData!: ProgressData - - get unpackingBuffer(): number { - return this.progressData.validateProgress === 100 && - !this.progressData.unpackProgress - ? 0 - : 1 - } - - get validationBuffer(): number { - return this.progressData.downloadProgress === 100 && - !this.progressData.validateProgress - ? 0 - : 1 - } - - getColor(action: keyof InstallProgress): string { - return this.pkg['install-progress']?.[action] ? 'success' : 'secondary' - } + phases!: FullProgress['phases'] } diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-status/app-show-status.component.html b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-status/app-show-status.component.html index 6a8676ebc7..f9db34c732 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-status/app-show-status.component.html +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-status/app-show-status.component.html @@ -4,14 +4,14 @@ - + @@ -56,13 +56,13 @@ Launch UI diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-status/app-show-status.component.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-status/app-show-status.component.ts index f67a2b2fa4..d67ce29419 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-status/app-show-status.component.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/components/app-show-status/app-show-status.component.ts @@ -6,10 +6,8 @@ import { PrimaryStatus, } from 'src/app/services/pkg-status-rendering.service' import { - InterfaceDef, + DataModel, PackageDataEntry, - PackageState, - Status, } from 'src/app/services/patch-db/data-model' import { ErrorToastService } from '@start9labs/shared' import { AlertController, LoadingController } from '@ionic/angular' @@ -17,6 +15,14 @@ import { ApiService } from 'src/app/services/api/embassy-api.service' import { ModalService } from 'src/app/services/modal.service' import { hasCurrentDeps } from 'src/app/util/has-deps' import { ConnectionService } from 'src/app/services/connection.service' +import { + isInstalled, + getManifest, + getAllPackages, +} from 'src/app/util/get-package-data' +import { PatchDB } from 'patch-db-client' +import { Status } from '../../../../../../../../../../core/startos/bindings/Status' +import { Manifest } from '../../../../../../../../../../core/startos/bindings/Manifest' @Component({ selector: 
'app-show-status', @@ -33,6 +39,8 @@ export class AppShowStatusComponent { PR = PrimaryRendering + isInstalled = isInstalled + readonly connected$ = this.connectionService.connected$ constructor( @@ -43,18 +51,19 @@ export class AppShowStatusComponent { private readonly launcherService: UiLauncherService, private readonly modalService: ModalService, private readonly connectionService: ConnectionService, + private readonly patch: PatchDB, ) {} - get interfaces(): Record { - return this.pkg.manifest.interfaces || {} + get interfaces(): PackageDataEntry['serviceInterfaces'] { + return this.pkg.serviceInterfaces } - get pkgStatus(): Status | null { - return this.pkg.installed?.status || null + get pkgStatus(): Status { + return this.pkg.status } - get isInstalled(): boolean { - return this.pkg.state === PackageState.Installed + get manifest(): Manifest { + return getManifest(this.pkg) } get isRunning(): boolean { @@ -73,25 +82,31 @@ export class AppShowStatusComponent { return this.status.primary === PrimaryStatus.Stopped } - launchUi(): void { - this.launcherService.launch(this.pkg) + get sigtermTimeout(): string | null { + return this.pkgStatus?.main.status === 'stopping' + ? this.pkgStatus.main.timeout + : null + } + + launchUi(interfaces: PackageDataEntry['serviceInterfaces']): void { + this.launcherService.launch(interfaces) } async presentModalConfig(): Promise { return this.modalService.presentModalConfig({ - pkgId: this.id, + pkgId: this.manifest.id, }) } async tryStart(): Promise { if (this.status.dependency === 'warning') { - const depErrMsg = `${this.pkg.manifest.title} has unmet dependencies. It will not work as expected.` + const depErrMsg = `${this.manifest.title} has unmet dependencies. It will not work as expected.` const proceed = await this.presentAlertStart(depErrMsg) if (!proceed) return } - const alertMsg = this.pkg.manifest.alerts.start + const alertMsg = this.manifest.alerts.start if (alertMsg) { const proceed = await this.presentAlertStart(alertMsg) @@ -103,10 +118,10 @@ export class AppShowStatusComponent { } async tryStop(): Promise { - const { title, alerts } = this.pkg.manifest + const { title, alerts } = this.manifest let message = alerts.stop || '' - if (hasCurrentDeps(this.pkg)) { + if (hasCurrentDeps(this.manifest.id, await getAllPackages(this.patch))) { const depMessage = `Services that depend on ${title} will no longer work properly and may crash` message = message ? 
`${message}.\n\n${depMessage}` : depMessage } @@ -138,10 +153,10 @@ export class AppShowStatusComponent { } async tryRestart(): Promise { - if (hasCurrentDeps(this.pkg)) { + if (hasCurrentDeps(this.manifest.id, await getAllPackages(this.patch))) { const alert = await this.alertCtrl.create({ header: 'Warning', - message: `Services that depend on ${this.pkg.manifest.title} may temporarily experiences issues`, + message: `Services that depend on ${this.manifest.title} may temporarily experiences issues`, buttons: [ { text: 'Cancel', @@ -164,10 +179,6 @@ export class AppShowStatusComponent { } } - private get id(): string { - return this.pkg.manifest.id - } - private async start(): Promise { const loader = await this.loadingCtrl.create({ message: `Starting...`, @@ -175,7 +186,7 @@ export class AppShowStatusComponent { await loader.present() try { - await this.embassyApi.startPackage({ id: this.id }) + await this.embassyApi.startPackage({ id: this.manifest.id }) } catch (e: any) { this.errToast.present(e) } finally { @@ -190,7 +201,7 @@ export class AppShowStatusComponent { await loader.present() try { - await this.embassyApi.stopPackage({ id: this.id }) + await this.embassyApi.stopPackage({ id: this.manifest.id }) } catch (e: any) { this.errToast.present(e) } finally { @@ -205,7 +216,7 @@ export class AppShowStatusComponent { await loader.present() try { - await this.embassyApi.restartPackage({ id: this.id }) + await this.embassyApi.restartPackage({ id: this.manifest.id }) } catch (e: any) { this.errToast.present(e) } finally { diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/health-color.pipe.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/health-color.pipe.ts index a274aa8c04..a676c28f76 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/health-color.pipe.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/health-color.pipe.ts @@ -1,20 +1,20 @@ import { Pipe, PipeTransform } from '@angular/core' -import { HealthResult } from 'src/app/services/patch-db/data-model' +import { HealthCheckResult } from '../../../../../../../../../core/startos/bindings/HealthCheckResult' @Pipe({ name: 'healthColor', }) export class HealthColorPipe implements PipeTransform { - transform(val: HealthResult): string { + transform(val: HealthCheckResult['result']): string { switch (val) { - case HealthResult.Success: + case 'success': return 'success' - case HealthResult.Failure: + case 'failure': return 'warning' - case HealthResult.Disabled: + case 'disabled': return 'dark' - case HealthResult.Starting: - case HealthResult.Loading: + case 'starting': + case 'loading': return 'primary' } } diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/progress-data.pipe.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/progress-data.pipe.ts deleted file mode 100644 index 1e5397648f..0000000000 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/progress-data.pipe.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Pipe, PipeTransform } from '@angular/core' -import { PackageDataEntry } from 'src/app/services/patch-db/data-model' -import { ProgressData } from 'src/app/types/progress-data' -import { packageLoadingProgress } from 'src/app/util/package-loading-progress' - -@Pipe({ - name: 'progressData', -}) -export class ProgressDataPipe implements PipeTransform { - transform(pkg: PackageDataEntry): ProgressData | null { - return packageLoadingProgress(pkg['install-progress']) - } -} diff --git 
a/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/to-buttons.pipe.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/to-buttons.pipe.ts index eeb00b4356..c51b4cac79 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/to-buttons.pipe.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/to-buttons.pipe.ts @@ -4,12 +4,14 @@ import { ModalController, NavController } from '@ionic/angular' import { MarkdownComponent } from '@start9labs/shared' import { DataModel, + InstalledState, PackageDataEntry, } from 'src/app/services/patch-db/data-model' import { ModalService } from 'src/app/services/modal.service' import { ApiService } from 'src/app/services/api/embassy-api.service' import { from, map, Observable } from 'rxjs' import { PatchDB } from 'patch-db-client' +import { Manifest } from '../../../../../../../../../core/startos/bindings/Manifest' export interface Button { title: string @@ -33,26 +35,26 @@ export class ToButtonsPipe implements PipeTransform { private readonly patch: PatchDB, ) {} - transform(pkg: PackageDataEntry): Button[] { - const pkgTitle = pkg.manifest.title + transform(pkg: PackageDataEntry): Button[] { + const manifest = pkg.stateInfo.manifest return [ // instructions { - action: () => this.presentModalInstructions(pkg), + action: () => this.presentModalInstructions(manifest), title: 'Instructions', - description: `Understand how to use ${pkgTitle}`, + description: `Understand how to use ${manifest.title}`, icon: 'list-outline', highlighted$: this.patch - .watch$('ui', 'ack-instructions', pkg.manifest.id) + .watch$('ui', 'ackInstructions', manifest.id) .pipe(map(seen => !seen)), }, // config { action: async () => - this.modalService.presentModalConfig({ pkgId: pkg.manifest.id }), + this.modalService.presentModalConfig({ pkgId: manifest.id }), title: 'Config', - description: `Customize ${pkgTitle}`, + description: `Customize ${manifest.title}`, icon: 'options-outline', }, // properties @@ -71,7 +73,7 @@ export class ToButtonsPipe implements PipeTransform { action: () => this.navCtrl.navigateForward(['actions'], { relativeTo: this.route }), title: 'Actions', - description: `Uninstall and other commands specific to ${pkgTitle}`, + description: `Uninstall and other commands specific to ${manifest.title}`, icon: 'flash-outline', }, // interfaces @@ -97,16 +99,18 @@ export class ToButtonsPipe implements PipeTransform { ] } - private async presentModalInstructions(pkg: PackageDataEntry) { + private async presentModalInstructions(manifest: Manifest) { this.apiService - .setDbValue(['ack-instructions', pkg.manifest.id], true) + .setDbValue(['ack-instructions', manifest.id], true) .catch(e => console.error('Failed to mark instructions as seen', e)) const modal = await this.modalCtrl.create({ componentProps: { title: 'Instructions', content: from( - this.apiService.getStatic(pkg['static-files']['instructions']), + this.apiService.getStatic( + `/public/package-data/${manifest.id}/${manifest.version}/INSTRUCTIONS.md`, + ), ), }, component: MarkdownComponent, @@ -115,17 +119,22 @@ export class ToButtonsPipe implements PipeTransform { await modal.present() } - private viewInMarketplaceButton(pkg: PackageDataEntry): Button { - const url = pkg.installed?.['marketplace-url'] + private viewInMarketplaceButton( + pkg: PackageDataEntry, + ): Button { + const url = pkg.marketplaceUrl const queryParams = url ? 
{ url } : {} let button: Button = { title: 'Marketplace Listing', icon: 'storefront-outline', action: () => - this.navCtrl.navigateForward([`marketplace/${pkg.manifest.id}`], { - queryParams, - }), + this.navCtrl.navigateForward( + [`marketplace/${pkg.stateInfo.manifest.id}`], + { + queryParams, + }, + ), disabled: false, description: 'View service in the marketplace', } diff --git a/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/to-health-checks.pipe.ts b/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/to-health-checks.pipe.ts index 8ba9bd4f39..63c31263ae 100644 --- a/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/to-health-checks.pipe.ts +++ b/web/projects/ui/src/app/pages/apps-routes/app-show/pipes/to-health-checks.pipe.ts @@ -1,14 +1,11 @@ import { Pipe, PipeTransform } from '@angular/core' -import { - DataModel, - HealthCheckResult, - PackageDataEntry, - PackageMainStatus, -} from 'src/app/services/patch-db/data-model' +import { DataModel } from 'src/app/services/patch-db/data-model' import { isEmptyObject } from '@start9labs/shared' import { map, startWith } from 'rxjs/operators' import { PatchDB } from 'patch-db-client' import { Observable } from 'rxjs' +import { HealthCheckResult } from '../../../../../../../../../core/startos/bindings/HealthCheckResult' +import { Manifest } from '../../../../../../../../../core/startos/bindings/Manifest' @Pipe({ name: 'toHealthChecks', @@ -17,27 +14,15 @@ export class ToHealthChecksPipe implements PipeTransform { constructor(private readonly patch: PatchDB) {} transform( - pkg: PackageDataEntry, - ): Observable> | null { - const healthChecks = Object.keys(pkg.manifest['health-checks']).reduce( - (obj, key) => ({ ...obj, [key]: null }), - {}, + manifest: Manifest, + ): Observable | null> { + return this.patch.watch$('packageData', manifest.id, 'status', 'main').pipe( + map(main => { + return main.status === 'running' && !isEmptyObject(main.health) + ? main.health + : null + }), + startWith(null), ) - - const healthChecks$ = this.patch - .watch$('package-data', pkg.manifest.id, 'installed', 'status', 'main') - .pipe( - map(main => { - // Question: is this ok or do we have to use Object.keys - // to maintain order and the keys initially present in pkg? - return main.status === PackageMainStatus.Running && - !isEmptyObject(main.health) - ? main.health - : healthChecks - }), - startWith(healthChecks), - ) - - return isEmptyObject(healthChecks) ? 
null : healthChecks$ } } diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.module.ts b/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.module.ts deleted file mode 100644 index 649ab7dfce..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.module.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { NgModule } from '@angular/core' -import { CommonModule } from '@angular/common' -import { IonicModule } from '@ionic/angular' -import { RouterModule, Routes } from '@angular/router' -import { DevConfigPage } from './dev-config.page' -import { BadgeMenuComponentModule } from 'src/app/components/badge-menu-button/badge-menu.component.module' -import { BackupReportPageModule } from 'src/app/modals/backup-report/backup-report.module' -import { FormsModule } from '@angular/forms' -import { MonacoEditorModule } from '@materia-ui/ngx-monaco-editor' - -const routes: Routes = [ - { - path: '', - component: DevConfigPage, - }, -] - -@NgModule({ - imports: [ - CommonModule, - IonicModule, - RouterModule.forChild(routes), - BadgeMenuComponentModule, - BackupReportPageModule, - FormsModule, - MonacoEditorModule, - ], - declarations: [DevConfigPage], -}) -export class DevConfigPageModule {} diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.page.html b/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.page.html deleted file mode 100644 index 334236d1ef..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.page.html +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - Config - - - - Preview - - - - - - - diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.page.scss b/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.page.scss deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.page.ts b/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.page.ts deleted file mode 100644 index 6dc4c7e138..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-config/dev-config.page.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { Component } from '@angular/core' -import { ActivatedRoute } from '@angular/router' -import { ModalController } from '@ionic/angular' -import { debounce, ErrorToastService } from '@start9labs/shared' -import * as yaml from 'js-yaml' -import { filter, take } from 'rxjs/operators' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { PatchDB } from 'patch-db-client' -import { getProjectId } from 'src/app/util/get-project-id' -import { GenericFormPage } from '../../../modals/generic-form/generic-form.page' -import { DataModel } from 'src/app/services/patch-db/data-model' - -@Component({ - selector: 'dev-config', - templateUrl: 'dev-config.page.html', - styleUrls: ['dev-config.page.scss'], -}) -export class DevConfigPage { - readonly projectId = getProjectId(this.route) - editorOptions = { theme: 'vs-dark', language: 'yaml' } - code: string = '' - saving: boolean = false - - constructor( - private readonly route: ActivatedRoute, - private readonly errToast: ErrorToastService, - private readonly modalCtrl: ModalController, - private readonly patch: PatchDB, - private readonly api: ApiService, - ) {} - - ngOnInit() { - this.patch - .watch$('ui', 'dev', this.projectId, 'config') - .pipe(filter(Boolean), take(1)) - .subscribe(config => { - this.code = config - }) - } - - async 
preview() { - let doc: any - try { - doc = yaml.load(this.code) - } catch (e: any) { - this.errToast.present(e) - } - - const modal = await this.modalCtrl.create({ - component: GenericFormPage, - componentProps: { - title: 'Config Sample', - spec: JSON.parse(JSON.stringify(doc, null, 2)), - buttons: [ - { - text: 'OK', - handler: () => { - return - }, - isSubmit: true, - }, - ], - }, - }) - await modal.present() - } - - @debounce(1000) - async save() { - this.saving = true - try { - await this.api.setDbValue( - ['dev', this.projectId, 'config'], - this.code, - ) - } catch (e: any) { - this.errToast.present(e) - } finally { - this.saving = false - } - } -} diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.module.ts b/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.module.ts deleted file mode 100644 index ce15130e5a..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.module.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { NgModule } from '@angular/core' -import { CommonModule } from '@angular/common' -import { IonicModule } from '@ionic/angular' -import { RouterModule, Routes } from '@angular/router' -import { DevInstructionsPage } from './dev-instructions.page' -import { BadgeMenuComponentModule } from 'src/app/components/badge-menu-button/badge-menu.component.module' -import { BackupReportPageModule } from 'src/app/modals/backup-report/backup-report.module' -import { FormsModule } from '@angular/forms' -import { MonacoEditorModule } from '@materia-ui/ngx-monaco-editor' - -const routes: Routes = [ - { - path: '', - component: DevInstructionsPage, - }, -] - -@NgModule({ - imports: [ - CommonModule, - IonicModule, - RouterModule.forChild(routes), - BadgeMenuComponentModule, - BackupReportPageModule, - FormsModule, - MonacoEditorModule, - ], - declarations: [DevInstructionsPage], -}) -export class DevInstructionsPageModule {} diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.page.html b/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.page.html deleted file mode 100644 index 8775ec1754..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.page.html +++ /dev/null @@ -1,28 +0,0 @@ - - - - - - - Instructions - - - - Preview - - - - - - - diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.page.scss b/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.page.scss deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.page.ts b/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.page.ts deleted file mode 100644 index bf3fa0b566..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-instructions/dev-instructions.page.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { Component } from '@angular/core' -import { ActivatedRoute } from '@angular/router' -import { ModalController } from '@ionic/angular' -import { filter, take } from 'rxjs/operators' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { - debounce, - ErrorToastService, - MarkdownComponent, -} from '@start9labs/shared' -import { PatchDB } from 'patch-db-client' -import { getProjectId } from 'src/app/util/get-project-id' -import { DataModel } from 
'src/app/services/patch-db/data-model' - -@Component({ - selector: 'dev-instructions', - templateUrl: 'dev-instructions.page.html', - styleUrls: ['dev-instructions.page.scss'], -}) -export class DevInstructionsPage { - readonly projectId = getProjectId(this.route) - editorOptions = { theme: 'vs-dark', language: 'markdown' } - code = '' - saving = false - - constructor( - private readonly route: ActivatedRoute, - private readonly errToast: ErrorToastService, - private readonly modalCtrl: ModalController, - private readonly patch: PatchDB, - private readonly api: ApiService, - ) {} - - ngOnInit() { - this.patch - .watch$('ui', 'dev', this.projectId, 'instructions') - .pipe(filter(Boolean), take(1)) - .subscribe(config => { - this.code = config - }) - } - - async preview() { - const modal = await this.modalCtrl.create({ - componentProps: { - title: 'Instructions Sample', - content: this.code, - }, - component: MarkdownComponent, - }) - - await modal.present() - } - - @debounce(1000) - async save() { - this.saving = true - try { - await this.api.setDbValue( - ['dev', this.projectId, 'instructions'], - this.code, - ) - } catch (e: any) { - this.errToast.present(e) - } finally { - this.saving = false - } - } -} diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.module.ts b/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.module.ts deleted file mode 100644 index 49d738fe80..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.module.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { NgModule } from '@angular/core' -import { CommonModule } from '@angular/common' -import { IonicModule } from '@ionic/angular' -import { RouterModule, Routes } from '@angular/router' -import { DevManifestPage } from './dev-manifest.page' -import { BadgeMenuComponentModule } from 'src/app/components/badge-menu-button/badge-menu.component.module' -import { BackupReportPageModule } from 'src/app/modals/backup-report/backup-report.module' -import { FormsModule } from '@angular/forms' -import { MonacoEditorModule } from '@materia-ui/ngx-monaco-editor' - -const routes: Routes = [ - { - path: '', - component: DevManifestPage, - }, -] - -@NgModule({ - imports: [ - CommonModule, - IonicModule, - RouterModule.forChild(routes), - BadgeMenuComponentModule, - BackupReportPageModule, - FormsModule, - MonacoEditorModule, - ], - declarations: [DevManifestPage], -}) -export class DevManifestPageModule {} diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.page.html b/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.page.html deleted file mode 100644 index 595adb6c32..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.page.html +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - Manifest - - - - - - diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.page.scss b/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.page.scss deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.page.ts b/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.page.ts deleted file mode 100644 index f039abbd81..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/dev-manifest/dev-manifest.page.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { Component } from '@angular/core' -import { ActivatedRoute } from '@angular/router' 
-import * as yaml from 'js-yaml' -import { take } from 'rxjs/operators' -import { PatchDB } from 'patch-db-client' -import { getProjectId } from 'src/app/util/get-project-id' -import { DataModel } from 'src/app/services/patch-db/data-model' - -@Component({ - selector: 'dev-manifest', - templateUrl: 'dev-manifest.page.html', - styleUrls: ['dev-manifest.page.scss'], -}) -export class DevManifestPage { - readonly projectId = getProjectId(this.route) - editorOptions = { theme: 'vs-dark', language: 'yaml', readOnly: true } - manifest: string = '' - - constructor( - private readonly route: ActivatedRoute, - private readonly patch: PatchDB, - ) {} - - ngOnInit() { - this.patch - .watch$('ui', 'dev', this.projectId) - .pipe(take(1)) - .subscribe(devData => { - this.manifest = yaml.dump(devData['basic-info']) - }) - } -} diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.module.ts b/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.module.ts deleted file mode 100644 index ea0c784238..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.module.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { NgModule } from '@angular/core' -import { CommonModule } from '@angular/common' -import { IonicModule } from '@ionic/angular' -import { RouterModule, Routes } from '@angular/router' -import { DeveloperListPage } from './developer-list.page' -import { BadgeMenuComponentModule } from 'src/app/components/badge-menu-button/badge-menu.component.module' -import { BackupReportPageModule } from 'src/app/modals/backup-report/backup-report.module' - -const routes: Routes = [ - { - path: '', - component: DeveloperListPage, - }, -] - -@NgModule({ - imports: [ - CommonModule, - IonicModule, - RouterModule.forChild(routes), - BadgeMenuComponentModule, - BackupReportPageModule, - ], - declarations: [DeveloperListPage], -}) -export class DeveloperListPageModule {} diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.page.html b/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.page.html deleted file mode 100644 index e00330448f..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.page.html +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - Developer Tools - - - - - - - - Projects - - - - - Create project - - - - -

{{ entry.value.name }}

- - - -
-
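The developer-routes pages removed across the surrounding hunks (dev-config, dev-instructions, dev-manifest, developer-list, developer-menu) all shared one pattern: read a draft value from the `ui -> dev -> <projectId>` branch of patch-db, edit it in a Monaco editor, and persist it with `ApiService.setDbValue`. A minimal sketch of that now-removed pattern, with the Angular component boilerplate trimmed; the `watch$` and `setDbValue` call shapes mirror the deleted dev-config page, and everything else is illustrative only:

```ts
// Sketch of the pattern used by the removed developer-routes pages.
// The watch$/setDbValue call shapes mirror the deleted dev-config page;
// this class itself is illustrative and does not exist in the codebase.
import { filter, take } from 'rxjs/operators'
import { PatchDB } from 'patch-db-client'
import { ApiService } from 'src/app/services/api/embassy-api.service'
import { DataModel } from 'src/app/services/patch-db/data-model'

export class DevDraftEditor {
  code = ''

  constructor(
    private readonly patch: PatchDB<DataModel>,
    private readonly api: ApiService,
    private readonly projectId: string,
  ) {}

  load(): void {
    // Read the draft once from ui -> dev -> <projectId> -> config.
    this.patch
      .watch$('ui', 'dev', this.projectId, 'config')
      .pipe(filter(Boolean), take(1))
      .subscribe(config => (this.code = config))
  }

  async save(): Promise<void> {
    // Persist the edited draft back to the same patch-db path.
    await this.api.setDbValue(['dev', this.projectId, 'config'], this.code)
  }
}
```

The developer-routing module that wired these pages together is deleted in the same sweep, so no route references this pattern after the change.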
diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.page.scss b/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.page.scss deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.page.ts b/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.page.ts deleted file mode 100644 index 96aa6126a2..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/developer-list/developer-list.page.ts +++ /dev/null @@ -1,266 +0,0 @@ -import { Component } from '@angular/core' -import { - ActionSheetButton, - ActionSheetController, - AlertController, - LoadingController, - ModalController, -} from '@ionic/angular' -import { - GenericInputComponent, - GenericInputOptions, -} from 'src/app/modals/generic-input/generic-input.component' -import { PatchDB } from 'patch-db-client' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { ConfigSpec } from 'src/app/pkg-config/config-types' -import * as yaml from 'js-yaml' -import { v4 } from 'uuid' -import { DataModel, DevData } from 'src/app/services/patch-db/data-model' -import { ErrorToastService } from '@start9labs/shared' -import { TuiDestroyService } from '@taiga-ui/cdk' -import { takeUntil } from 'rxjs/operators' - -@Component({ - selector: 'developer-list', - templateUrl: 'developer-list.page.html', - styleUrls: ['developer-list.page.scss'], - providers: [TuiDestroyService], -}) -export class DeveloperListPage { - devData: DevData = {} - - constructor( - private readonly modalCtrl: ModalController, - private readonly api: ApiService, - private readonly loadingCtrl: LoadingController, - private readonly errToast: ErrorToastService, - private readonly alertCtrl: AlertController, - private readonly destroy$: TuiDestroyService, - private readonly patch: PatchDB, - private readonly actionCtrl: ActionSheetController, - ) {} - - ngOnInit() { - this.patch - .watch$('ui', 'dev') - .pipe(takeUntil(this.destroy$)) - .subscribe(dd => { - this.devData = dd - }) - } - - async openCreateProjectModal() { - const projNumber = Object.keys(this.devData).length + 1 - const options: GenericInputOptions = { - title: 'Add new project', - message: 'Create a new dev project.', - label: 'New project', - useMask: false, - placeholder: `Project ${projNumber}`, - nullable: true, - initialValue: `Project ${projNumber}`, - buttonText: 'Save', - submitFn: (value: string) => this.createProject(value), - } - - const modal = await this.modalCtrl.create({ - componentProps: { options }, - cssClass: 'alertlike-modal', - presentingElement: await this.modalCtrl.getTop(), - component: GenericInputComponent, - }) - - await modal.present() - } - - async presentAction(id: string, event: Event) { - event.stopPropagation() - event.preventDefault() - const buttons: ActionSheetButton[] = [ - { - text: 'Edit Name', - icon: 'pencil', - handler: () => { - this.openEditNameModal(id) - }, - }, - { - text: 'Delete', - icon: 'trash', - role: 'destructive', - handler: () => { - this.presentAlertDelete(id) - }, - }, - ] - - const action = await this.actionCtrl.create({ - header: this.devData[id].name, - subHeader: 'Manage project', - mode: 'ios', - buttons, - }) - - await action.present() - } - - async openEditNameModal(id: string) { - const curName = this.devData[id].name - const options: GenericInputOptions = { - title: 'Edit Name', - message: 'Edit the name of your project.', - label: 'Name', - useMask: 
false, - placeholder: curName, - nullable: true, - initialValue: curName, - buttonText: 'Save', - submitFn: (value: string) => this.editName(id, value), - } - - const modal = await this.modalCtrl.create({ - componentProps: { options }, - cssClass: 'alertlike-modal', - presentingElement: await this.modalCtrl.getTop(), - component: GenericInputComponent, - }) - - await modal.present() - } - - async createProject(name: string) { - // fail silently if duplicate project name - if ( - Object.values(this.devData) - .map(v => v.name) - .includes(name) - ) - return - - const loader = await this.loadingCtrl.create({ - message: 'Creating Project...', - }) - await loader.present() - - try { - const id = v4() - const config = yaml - .dump(SAMPLE_CONFIG) - .replace(/warning:/g, '# Optional\n warning:') - - const def = { name, config, instructions: SAMPLE_INSTUCTIONS } - await this.api.setDbValue<{ - name: string - config: string - instructions: string - }>(['dev', id], def) - } catch (e: any) { - this.errToast.present(e) - } finally { - loader.dismiss() - } - } - - async presentAlertDelete(id: string) { - const alert = await this.alertCtrl.create({ - header: 'Caution', - message: `Are you sure you want to delete this project?`, - buttons: [ - { - text: 'Cancel', - role: 'cancel', - }, - { - text: 'Delete', - handler: () => { - this.delete(id) - }, - cssClass: 'enter-click', - }, - ], - }) - await alert.present() - } - - async editName(id: string, newName: string) { - const loader = await this.loadingCtrl.create({ - message: 'Saving...', - }) - await loader.present() - - try { - await this.api.setDbValue(['dev', id, 'name'], newName) - } catch (e: any) { - this.errToast.present(e) - } finally { - loader.dismiss() - } - } - - async delete(id: string) { - const loader = await this.loadingCtrl.create({ - message: 'Removing Project...', - }) - await loader.present() - - try { - const devDataToSave: DevData = JSON.parse(JSON.stringify(this.devData)) - delete devDataToSave[id] - await this.api.setDbValue(['dev'], devDataToSave) - } catch (e: any) { - this.errToast.present(e) - } finally { - loader.dismiss() - } - } -} - -const SAMPLE_INSTUCTIONS = `# Create Instructions using Markdown! :)` - -const SAMPLE_CONFIG: ConfigSpec = { - 'sample-string': { - type: 'string', - name: 'Example String Input', - nullable: false, - masked: false, - copyable: false, - // optional - description: 'Example description for required string input.', - placeholder: 'Enter string value', - pattern: '^[a-zA-Z0-9! 
_]+$', - 'pattern-description': 'Must be alphanumeric (may contain underscore).', - }, - 'sample-number': { - type: 'number', - name: 'Example Number Input', - nullable: false, - range: '[5,1000000]', - integral: true, - // optional - warning: 'Example warning to display when changing this number value.', - units: 'ms', - description: 'Example description for optional number input.', - placeholder: 'Enter number value', - }, - 'sample-boolean': { - type: 'boolean', - name: 'Example Boolean Toggle', - // optional - description: 'Example description for boolean toggle', - default: true, - }, - 'sample-enum': { - type: 'enum', - name: 'Example Enum Select', - values: ['red', 'blue', 'green'], - 'value-names': { - red: 'Red', - blue: 'Blue', - green: 'Green', - }, - // optional - warning: 'Example warning to display when changing this enum value.', - description: 'Example description for enum select', - default: 'red', - }, -} diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.module.ts b/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.module.ts deleted file mode 100644 index d33ecf47f7..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.module.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { NgModule } from '@angular/core' -import { CommonModule } from '@angular/common' -import { IonicModule } from '@ionic/angular' -import { RouterModule, Routes } from '@angular/router' -import { DeveloperMenuPage } from './developer-menu.page' -import { BadgeMenuComponentModule } from 'src/app/components/badge-menu-button/badge-menu.component.module' -import { BackupReportPageModule } from 'src/app/modals/backup-report/backup-report.module' -import { GenericFormPageModule } from 'src/app/modals/generic-form/generic-form.module' -import { MonacoEditorModule } from '@materia-ui/ngx-monaco-editor' -import { FormsModule } from '@angular/forms' -import { SharedPipesModule } from '@start9labs/shared' - -const routes: Routes = [ - { - path: '', - component: DeveloperMenuPage, - }, -] - -@NgModule({ - imports: [ - CommonModule, - IonicModule, - RouterModule.forChild(routes), - BadgeMenuComponentModule, - BackupReportPageModule, - GenericFormPageModule, - FormsModule, - MonacoEditorModule, - SharedPipesModule, - ], - declarations: [DeveloperMenuPage], -}) -export class DeveloperMenuPageModule {} diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.page.html b/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.page.html deleted file mode 100644 index ee10e7fc66..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.page.html +++ /dev/null @@ -1,51 +0,0 @@ - - - - - - {{ (projectData$ | async)?.name || '' }} - - View Manifest - - - - - - - - -

Basic Info

-

Complete basic info for your package

-
- - -
- - - -

Instructions Generator

-

Create instructions and see how they will appear to the end user

-
-
- - - -

Config Generator

-

Edit the config with YAML and see it in real time

-
-
-
diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.page.scss b/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.page.scss deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.page.ts b/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.page.ts deleted file mode 100644 index 3ae2b394cc..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/developer-menu/developer-menu.page.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { ChangeDetectionStrategy, Component } from '@angular/core' -import { ActivatedRoute } from '@angular/router' -import { LoadingController, ModalController } from '@ionic/angular' -import { GenericFormPage } from 'src/app/modals/generic-form/generic-form.page' -import { BasicInfo, getBasicInfoSpec } from './form-info' -import { PatchDB } from 'patch-db-client' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { ErrorToastService } from '@start9labs/shared' -import { getProjectId } from 'src/app/util/get-project-id' -import { DataModel, DevProjectData } from 'src/app/services/patch-db/data-model' - -@Component({ - selector: 'developer-menu', - templateUrl: 'developer-menu.page.html', - styleUrls: ['developer-menu.page.scss'], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class DeveloperMenuPage { - readonly projectId = getProjectId(this.route) - readonly projectData$ = this.patch.watch$('ui', 'dev', this.projectId) - - constructor( - private readonly route: ActivatedRoute, - private readonly modalCtrl: ModalController, - private readonly loadingCtrl: LoadingController, - private readonly api: ApiService, - private readonly errToast: ErrorToastService, - private readonly patch: PatchDB, - ) {} - - async openBasicInfoModal(data: DevProjectData) { - const modal = await this.modalCtrl.create({ - component: GenericFormPage, - componentProps: { - title: 'Basic Info', - spec: getBasicInfoSpec(data), - buttons: [ - { - text: 'Save', - handler: (basicInfo: BasicInfo) => { - this.saveBasicInfo(basicInfo) - }, - isSubmit: true, - }, - ], - }, - }) - await modal.present() - } - - async saveBasicInfo(basicInfo: BasicInfo) { - const loader = await this.loadingCtrl.create({ - message: 'Saving...', - }) - await loader.present() - - try { - await this.api.setDbValue( - ['dev', this.projectId, 'basic-info'], - basicInfo, - ) - } catch (e: any) { - this.errToast.present(e) - } finally { - loader.dismiss() - } - } -} diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-menu/form-info.ts b/web/projects/ui/src/app/pages/developer-routes/developer-menu/form-info.ts deleted file mode 100644 index b8a848f126..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/developer-menu/form-info.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { ConfigSpec } from 'src/app/pkg-config/config-types' -import { DevProjectData } from 'src/app/services/patch-db/data-model' - -export type BasicInfo = { - id: string - title: string - 'service-version-number': string - 'release-notes': string - license: string - 'wrapper-repo': string - 'upstream-repo'?: string - 'support-site'?: string - 'marketing-site'?: string - description: { - short: string - long: string - } -} - -export function getBasicInfoSpec(devData: DevProjectData): ConfigSpec { - const basicInfo = devData['basic-info'] - return { - id: { - type: 'string', - name: 'ID', - description: 'The package 
identifier used by the OS', - placeholder: 'e.g. bitcoind', - nullable: false, - masked: false, - copyable: false, - pattern: '^([a-z][a-z0-9]*)(-[a-z0-9]+)*$', - 'pattern-description': 'Must be kebab case', - default: basicInfo?.id, - }, - title: { - type: 'string', - name: 'Service Name', - description: 'A human readable service title', - placeholder: 'e.g. Bitcoin Core', - nullable: false, - masked: false, - copyable: false, - default: basicInfo ? basicInfo.title : devData.name, - }, - 'service-version-number': { - type: 'string', - name: 'Service Version', - description: - 'Service version - accepts up to four digits, where the last confirms to revisions necessary for StartOS - see documentation: https://github.com/Start9Labs/emver-rs. This value will change with each release of the service', - placeholder: 'e.g. 0.1.2.3', - nullable: false, - masked: false, - copyable: false, - pattern: '^([0-9]+).([0-9]+).([0-9]+).([0-9]+)$', - 'pattern-description': 'Must be valid Emver version', - default: basicInfo?.['service-version-number'], - }, - description: { - type: 'object', - name: 'Marketplace Descriptions', - spec: { - short: { - type: 'string', - name: 'Short Description', - description: - 'This is the first description visible to the user in the marketplace', - nullable: false, - masked: false, - copyable: false, - textarea: true, - default: basicInfo?.description?.short, - pattern: '^.{1,320}$', - 'pattern-description': 'Must be shorter than 320 characters', - }, - long: { - type: 'string', - name: 'Long Description', - description: `This description will display with additional details in the service's individual marketplace page`, - nullable: false, - masked: false, - copyable: false, - textarea: true, - default: basicInfo?.description?.long, - pattern: '^.{1,5000}$', - 'pattern-description': 'Must be shorter than 5000 characters', - }, - }, - }, - 'release-notes': { - type: 'string', - name: 'Release Notes', - description: - 'Markdown supported release notes for this version of this service.', - placeholder: 'e.g. Markdown _release notes_ for **Bitcoin Core**', - nullable: false, - masked: false, - copyable: false, - textarea: true, - default: basicInfo?.['release-notes'], - }, - license: { - type: 'enum', - name: 'License', - values: [ - 'gnu-agpl-v3', - 'gnu-gpl-v3', - 'gnu-lgpl-v3', - 'mozilla-public-license-2.0', - 'apache-license-2.0', - 'mit', - 'boost-software-license-1.0', - 'the-unlicense', - 'custom', - ], - 'value-names': { - 'gnu-agpl-v3': 'GNU AGPLv3', - 'gnu-gpl-v3': 'GNU GPLv3', - 'gnu-lgpl-v3': 'GNU LGPLv3', - 'mozilla-public-license-2.0': 'Mozilla Public License 2.0', - 'apache-license-2.0': 'Apache License 2.0', - mit: 'mit', - 'boost-software-license-1.0': 'Boost Software License 1.0', - 'the-unlicense': 'The Unlicense', - custom: 'Custom', - }, - description: 'Example description for enum select', - default: 'mit', - }, - 'wrapper-repo': { - type: 'string', - name: 'Wrapper Repo', - description: - 'The Start9 wrapper repository URL for the package. This repo contains the manifest file (this), any scripts necessary for configuration, backups, actions, or health checks', - placeholder: 'e.g. www.github.com/example', - nullable: false, - masked: false, - copyable: false, - default: basicInfo?.['wrapper-repo'], - }, - 'upstream-repo': { - type: 'string', - name: 'Upstream Repo', - description: 'The original project repository URL', - placeholder: 'e.g. 
www.github.com/example', - nullable: true, - masked: false, - copyable: false, - default: basicInfo?.['upstream-repo'], - }, - 'support-site': { - type: 'string', - name: 'Support Site', - description: 'URL to the support site / channel for the project', - placeholder: 'e.g. start9.com/support', - nullable: true, - masked: false, - copyable: false, - default: basicInfo?.['support-site'], - }, - 'marketing-site': { - type: 'string', - name: 'Marketing Site', - description: 'URL to the marketing site / channel for the project', - placeholder: 'e.g. start9.com', - nullable: true, - masked: false, - copyable: false, - default: basicInfo?.['marketing-site'], - }, - } -} diff --git a/web/projects/ui/src/app/pages/developer-routes/developer-routing.module.ts b/web/projects/ui/src/app/pages/developer-routes/developer-routing.module.ts deleted file mode 100644 index 03720d87d4..0000000000 --- a/web/projects/ui/src/app/pages/developer-routes/developer-routing.module.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { NgModule } from '@angular/core' -import { Routes, RouterModule } from '@angular/router' - -const routes: Routes = [ - { - path: '', - redirectTo: 'projects', - pathMatch: 'full', - }, - { - path: 'projects', - loadChildren: () => - import('./developer-list/developer-list.module').then( - m => m.DeveloperListPageModule, - ), - }, - { - path: 'projects/:projectId', - loadChildren: () => - import('./developer-menu/developer-menu.module').then( - m => m.DeveloperMenuPageModule, - ), - }, - { - path: 'projects/:projectId/config', - loadChildren: () => - import('./dev-config/dev-config.module').then(m => m.DevConfigPageModule), - }, - { - path: 'projects/:projectId/instructions', - loadChildren: () => - import('./dev-instructions/dev-instructions.module').then( - m => m.DevInstructionsPageModule, - ), - }, - { - path: 'projects/:projectId/manifest', - loadChildren: () => - import('./dev-manifest/dev-manifest.module').then( - m => m.DevManifestPageModule, - ), - }, -] - -@NgModule({ - imports: [RouterModule.forChild(routes)], - exports: [RouterModule], -}) -export class DeveloperRoutingModule {} diff --git a/web/projects/ui/src/app/pages/login/login.page.html b/web/projects/ui/src/app/pages/login/login.page.html index 99f6abbe8b..7e27a16c31 100644 --- a/web/projects/ui/src/app/pages/login/login.page.html +++ b/web/projects/ui/src/app/pages/login/login.page.html @@ -6,30 +6,6 @@ -
- diff --git a/web/projects/ui/src/app/pages/login/login.page.ts b/web/projects/ui/src/app/pages/login/login.page.ts index 15f7d588e0..db0d1c2c47 100644 --- a/web/projects/ui/src/app/pages/login/login.page.ts +++ b/web/projects/ui/src/app/pages/login/login.page.ts @@ -5,7 +5,6 @@ import { AuthService } from 'src/app/services/auth.service' import { Router } from '@angular/router' import { ConfigService } from 'src/app/services/config.service' import { DOCUMENT } from '@angular/common' -import { WINDOW } from '@ng-web-apis/common' @Component({ selector: 'login', @@ -24,14 +23,8 @@ export class LoginPage { private readonly api: ApiService, public readonly config: ConfigService, @Inject(DOCUMENT) public readonly document: Document, - @Inject(WINDOW) private readonly windowRef: Window, ) {} - launchHttps() { - const host = this.config.getHost() - this.windowRef.open(`https://${host}`, '_self') - } - async submit() { this.error = '' diff --git a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-list/marketplace-list.page.ts b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-list/marketplace-list.page.ts index 089b66139a..6792b917a6 100644 --- a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-list/marketplace-list.page.ts +++ b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-list/marketplace-list.page.ts @@ -29,7 +29,7 @@ export class MarketplaceListPage { }), ) - readonly localPkgs$ = this.patch.watch$('package-data') + readonly localPkgs$ = this.patch.watch$('packageData') readonly details$ = this.marketplaceService.getSelectedHost$().pipe( map(({ url, name }) => { diff --git a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show-controls/marketplace-show-controls.component.html b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show-controls/marketplace-show-controls.component.html index b150d2d4fa..6995dde5e1 100644 --- a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show-controls/marketplace-show-controls.component.html +++ b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show-controls/marketplace-show-controls.component.html @@ -8,9 +8,14 @@ View Installed - + , ) {} - get localVersion(): string { - return this.localPkg?.manifest.version || '' - } - async tryInstall() { const currentMarketplace = await firstValueFrom( this.marketplaceService.getSelectedHost$(), @@ -72,7 +65,7 @@ export class MarketplaceShowControlsComponent { if (!this.localPkg) { this.alertInstall(url) } else { - const originalUrl = this.localPkg.installed?.['marketplace-url'] + const originalUrl = this.localPkg.marketplaceUrl if (!sameUrl(url, originalUrl)) { const proceed = await this.presentAlertDifferentMarketplace( @@ -82,10 +75,12 @@ export class MarketplaceShowControlsComponent { if (!proceed) return } + const localManifest = getManifest(this.localPkg) + if ( - this.emver.compare(this.localVersion, this.pkg.manifest.version) !== + this.emver.compare(localManifest.version, this.pkg.manifest.version) !== 0 && - hasCurrentDeps(this.localPkg) + hasCurrentDeps(localManifest.id, await getAllPackages(this.patch)) ) { this.dryInstall(url) } else { @@ -102,12 +97,11 @@ export class MarketplaceShowControlsComponent { this.patch.watch$('ui', 'marketplace'), ) - const name: string = marketplaces['known-hosts'][url]?.name || url + const name: string = marketplaces.knownHosts[url]?.name || url let originalName: string | undefined if (originalUrl) { - 
originalName = - marketplaces['known-hosts'][originalUrl]?.name || originalUrl + originalName = marketplaces.knownHosts[originalUrl]?.name || originalUrl } return new Promise(async resolve => { diff --git a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show.module.ts b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show.module.ts index 3cef27e61e..058cbb1582 100644 --- a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show.module.ts +++ b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show.module.ts @@ -19,6 +19,7 @@ import { MarketplaceShowPage } from './marketplace-show.page' import { MarketplaceShowHeaderComponent } from './marketplace-show-header/marketplace-show-header.component' import { MarketplaceShowDependentComponent } from './marketplace-show-dependent/marketplace-show-dependent.component' import { MarketplaceShowControlsComponent } from './marketplace-show-controls/marketplace-show-controls.component' +import { UiPipeModule } from 'src/app/pipes/ui/ui.module' const routes: Routes = [ { @@ -41,6 +42,7 @@ const routes: Routes = [ AboutModule, DependenciesModule, AdditionalModule, + UiPipeModule, ], declarations: [ MarketplaceShowPage, diff --git a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show.page.ts b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show.page.ts index dcb6e48d13..876e71924c 100644 --- a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show.page.ts +++ b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-show/marketplace-show.page.ts @@ -20,7 +20,7 @@ export class MarketplaceShowPage { readonly loadVersion$ = new BehaviorSubject('*') readonly localPkg$ = this.patch - .watch$('package-data', this.pkgId) + .watch$('packageData', this.pkgId) .pipe(filter(Boolean), shareReplay({ bufferSize: 1, refCount: true })) readonly pkg$ = this.loadVersion$.pipe( diff --git a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.component.html b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.component.html index b39e47de6f..8958cdda2e 100644 --- a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.component.html +++ b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.component.html @@ -1,5 +1,5 @@ - -
+ +
-
+ +
Removing
-
+ +
Installing diff --git a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.component.ts b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.component.ts index 05e36471b4..9db50dc295 100644 --- a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.component.ts +++ b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.component.ts @@ -1,8 +1,13 @@ import { Component, Input } from '@angular/core' +import { PackageDataEntry } from 'src/app/services/patch-db/data-model' import { - PackageDataEntry, - PackageState, -} from 'src/app/services/patch-db/data-model' + isInstalled, + isInstalling, + isUpdating, + isRemoving, + isRestoring, + getManifest, +} from 'src/app/util/get-package-data' @Component({ selector: 'marketplace-status', @@ -14,9 +19,13 @@ export class MarketplaceStatusComponent { @Input() localPkg?: PackageDataEntry - PackageState = PackageState + isInstalled = isInstalled + isInstalling = isInstalling + isUpdating = isUpdating + isRemoving = isRemoving + isRestoring = isRestoring get localVersion(): string { - return this.localPkg?.manifest.version || '' + return this.localPkg ? getManifest(this.localPkg).version : '' } } diff --git a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.module.ts b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.module.ts index 8a7151f40e..c31f4fd16d 100644 --- a/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.module.ts +++ b/web/projects/ui/src/app/pages/marketplace-routes/marketplace-status/marketplace-status.module.ts @@ -2,8 +2,7 @@ import { CommonModule } from '@angular/common' import { NgModule } from '@angular/core' import { IonicModule } from '@ionic/angular' import { EmverPipesModule } from '@start9labs/shared' - -import { InstallProgressPipeModule } from '../../../pipes/install-progress/install-progress.module' +import { InstallingProgressPipeModule } from '../../../pipes/install-progress/install-progress.module' import { MarketplaceStatusComponent } from './marketplace-status.component' @NgModule({ @@ -11,7 +10,7 @@ import { MarketplaceStatusComponent } from './marketplace-status.component' CommonModule, IonicModule, EmverPipesModule, - InstallProgressPipeModule, + InstallingProgressPipeModule, ], declarations: [MarketplaceStatusComponent], exports: [MarketplaceStatusComponent], diff --git a/web/projects/ui/src/app/pages/notifications/notifications.module.ts b/web/projects/ui/src/app/pages/notifications/notifications.module.ts index 5ff2b4572f..4ec29702cf 100644 --- a/web/projects/ui/src/app/pages/notifications/notifications.module.ts +++ b/web/projects/ui/src/app/pages/notifications/notifications.module.ts @@ -6,6 +6,7 @@ import { NotificationsPage } from './notifications.page' import { BadgeMenuComponentModule } from 'src/app/components/badge-menu-button/badge-menu.component.module' import { SharedPipesModule } from '@start9labs/shared' import { BackupReportPageModule } from 'src/app/modals/backup-report/backup-report.module' +import { UiPipeModule } from 'src/app/pipes/ui/ui.module' const routes: Routes = [ { @@ -22,6 +23,7 @@ const routes: Routes = [ BadgeMenuComponentModule, SharedPipesModule, BackupReportPageModule, + UiPipeModule, ], declarations: [NotificationsPage], }) diff --git a/web/projects/ui/src/app/pages/notifications/notifications.page.html 
b/web/projects/ui/src/app/pages/notifications/notifications.page.html index f9e697107c..fb3ba08838 100644 --- a/web/projects/ui/src/app/pages/notifications/notifications.page.html +++ b/web/projects/ui/src/app/pages/notifications/notifications.page.html @@ -86,9 +86,9 @@

- - - {{ $any(packageData[pkgId])?.manifest.title || pkgId }} - + + {{ packageData[pkgId] ? (packageData[pkgId] | + toManifest).title : pkgId }} - {{ not.title }} @@ -104,7 +104,7 @@

{{ truncate(not.message) }}

View Full Message

-

{{ not['created-at'] | date: 'medium' }}

+

{{ not.createdAt | date: 'medium' }}

{{ truncate(not.message) }}

View Report View Service diff --git a/web/projects/ui/src/app/pages/notifications/notifications.page.ts b/web/projects/ui/src/app/pages/notifications/notifications.page.ts index 71bf503707..933c50e673 100644 --- a/web/projects/ui/src/app/pages/notifications/notifications.page.ts +++ b/web/projects/ui/src/app/pages/notifications/notifications.page.ts @@ -29,7 +29,7 @@ export class NotificationsPage { needInfinite = false fromToast = !!this.route.snapshot.queryParamMap.get('toast') readonly perPage = 40 - readonly packageData$ = this.patch.watch$('package-data').pipe(first()) + readonly packageData$ = this.patch.watch$('packageData').pipe(first()) constructor( private readonly embassyApi: ApiService, @@ -116,7 +116,7 @@ export class NotificationsPage { component: BackupReportPage, componentProps: { report: notification.data, - timestamp: notification['created-at'], + timestamp: notification.createdAt, }, }) await modal.present() diff --git a/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.module.ts b/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.module.ts deleted file mode 100644 index 86e374b170..0000000000 --- a/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.module.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { NgModule } from '@angular/core' -import { CommonModule } from '@angular/common' -import { Routes, RouterModule } from '@angular/router' -import { IonicModule } from '@ionic/angular' -import { ExperimentalFeaturesPage } from './experimental-features.page' -import { EmverPipesModule } from '@start9labs/shared' - -const routes: Routes = [ - { - path: '', - component: ExperimentalFeaturesPage, - }, -] - -@NgModule({ - imports: [ - CommonModule, - IonicModule, - RouterModule.forChild(routes), - EmverPipesModule, - ], - declarations: [ExperimentalFeaturesPage], -}) -export class ExperimentalFeaturesPageModule {} diff --git a/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.page.html b/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.page.html deleted file mode 100644 index 0ca8c7d8e9..0000000000 --- a/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.page.html +++ /dev/null @@ -1,36 +0,0 @@ - - - - - - Experimental Features - - - - - - - - -

Reset Tor

-

- Resetting the Tor daemon on your server may resolve Tor connectivity - issues. -

-
-
- - - -

{{ server.zram ? 'Disable' : 'Enable' }} zram

-

- Zram creates compressed swap in memory, resulting in faster I/O for - low RAM devices -

-
-
-
-
diff --git a/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.page.scss b/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.page.scss deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.page.ts b/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.page.ts deleted file mode 100644 index bf445250a6..0000000000 --- a/web/projects/ui/src/app/pages/server-routes/experimental-features/experimental-features.page.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { ChangeDetectionStrategy, Component } from '@angular/core' -import { - AlertController, - LoadingController, - ToastController, -} from '@ionic/angular' -import { PatchDB } from 'patch-db-client' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { ConfigService } from 'src/app/services/config.service' -import { DataModel } from 'src/app/services/patch-db/data-model' -import { ErrorToastService } from '@start9labs/shared' - -@Component({ - selector: 'experimental-features', - templateUrl: './experimental-features.page.html', - styleUrls: ['./experimental-features.page.scss'], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class ExperimentalFeaturesPage { - readonly server$ = this.patch.watch$('server-info') - - constructor( - private readonly toastCtrl: ToastController, - private readonly patch: PatchDB, - private readonly config: ConfigService, - private readonly alertCtrl: AlertController, - private readonly loadingCtrl: LoadingController, - private readonly api: ApiService, - private readonly errToast: ErrorToastService, - ) {} - - async presentAlertResetTor() { - const isTor = this.config.isTor() - const shared = - 'Optionally wipe state to forcibly acquire new guard nodes. It is recommended to try without wiping state first.' - const alert = await this.alertCtrl.create({ - header: isTor ? 'Warning' : 'Confirm', - message: isTor - ? `You are currently connected over Tor. If you reset the Tor daemon, you will loose connectivity until it comes back online.

${shared}` - : `Reset Tor?

${shared}`, - inputs: [ - { - label: 'Wipe state', - type: 'checkbox', - value: 'wipe', - }, - ], - buttons: [ - { - text: 'Cancel', - role: 'cancel', - }, - { - text: 'Reset', - handler: (value: string[]) => { - this.resetTor(value.some(v => v === 'wipe')) - }, - cssClass: 'enter-click', - }, - ], - cssClass: isTor ? 'alert-warning-message' : '', - }) - await alert.present() - } - - async presentAlertZram(enabled: boolean) { - const alert = await this.alertCtrl.create({ - header: 'Confirm', - message: enabled - ? 'Are you sure you want to disable zram? It provides significant performance benefits on low RAM devices.' - : 'Enable zram? It will only make a difference on lower RAM devices.', - buttons: [ - { - text: 'Cancel', - role: 'cancel', - }, - { - text: enabled ? 'Disable' : 'Enable', - handler: () => { - this.toggleZram(enabled) - }, - cssClass: 'enter-click', - }, - ], - }) - await alert.present() - } - - private async resetTor(wipeState: boolean) { - const loader = await this.loadingCtrl.create({ - message: 'Resetting Tor...', - }) - await loader.present() - - try { - await this.api.resetTor({ - 'wipe-state': wipeState, - reason: 'User triggered', - }) - const toast = await this.toastCtrl.create({ - header: 'Tor reset in progress', - position: 'bottom', - duration: 4000, - buttons: [ - { - side: 'start', - icon: 'close', - handler: () => { - return true - }, - }, - ], - }) - await toast.present() - } catch (e: any) { - this.errToast.present(e) - } finally { - loader.dismiss() - } - } - - private async toggleZram(enabled: boolean) { - const loader = await this.loadingCtrl.create({ - message: enabled ? 'Disabling zram...' : 'Enabling zram...', - }) - await loader.present() - - try { - await this.api.toggleZram({ enable: !enabled }) - const toast = await this.toastCtrl.create({ - header: `Zram ${enabled ? 
'disabled' : 'enabled'}`, - position: 'bottom', - duration: 4000, - buttons: [ - { - side: 'start', - icon: 'close', - handler: () => { - return true - }, - }, - ], - }) - await toast.present() - } catch (e: any) { - this.errToast.present(e) - } finally { - loader.dismiss() - } - } -} diff --git a/web/projects/ui/src/app/pages/server-routes/restore/restore.component.ts b/web/projects/ui/src/app/pages/server-routes/restore/restore.component.ts index 24adff639f..88e9cb4a88 100644 --- a/web/projects/ui/src/app/pages/server-routes/restore/restore.component.ts +++ b/web/projects/ui/src/app/pages/server-routes/restore/restore.component.ts @@ -43,7 +43,7 @@ export class RestorePage { useMask: true, buttonText: 'Next', submitFn: async (password: string) => { - const passwordHash = target.entry['embassy-os']?.['password-hash'] || '' + const passwordHash = target.entry.startOs?.passwordHash || '' argon2.verify(passwordHash, password) await this.restoreFromBackup(target, password) }, @@ -71,7 +71,7 @@ export class RestorePage { try { const backupInfo = await this.embassyApi.getBackupInfo({ - 'target-id': target.id, + targetId: target.id, password, }) this.presentModalSelect(target.id, backupInfo, password, oldPassword) diff --git a/web/projects/ui/src/app/pages/server-routes/server-backup/backing-up/backing-up.component.html b/web/projects/ui/src/app/pages/server-routes/server-backup/backing-up/backing-up.component.html index 1bbd5aa865..d70291de96 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-backup/backing-up/backing-up.component.html +++ b/web/projects/ui/src/app/pages/server-routes/server-backup/backing-up/backing-up.component.html @@ -15,9 +15,9 @@ - + - {{ pkg.value.manifest.title }} + {{ (pkg.value | toManifest).title }} diff --git a/web/projects/ui/src/app/pages/server-routes/server-backup/backing-up/backing-up.component.ts b/web/projects/ui/src/app/pages/server-routes/server-backup/backing-up/backing-up.component.ts index 85ffbb2e1e..4a5d8951d6 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-backup/backing-up/backing-up.component.ts +++ b/web/projects/ui/src/app/pages/server-routes/server-backup/backing-up/backing-up.component.ts @@ -6,11 +6,9 @@ import { } from '@angular/core' import { PatchDB } from 'patch-db-client' import { take } from 'rxjs/operators' -import { - DataModel, - PackageMainStatus, -} from 'src/app/services/patch-db/data-model' +import { DataModel } from 'src/app/services/patch-db/data-model' import { Observable } from 'rxjs' +import { MainStatus } from '../../../../../../../../../core/startos/bindings/MainStatus' @Component({ selector: 'backing-up', @@ -18,15 +16,13 @@ import { Observable } from 'rxjs' changeDetection: ChangeDetectionStrategy.OnPush, }) export class BackingUpComponent { - readonly pkgs$ = this.patch.watch$('package-data').pipe(take(1)) + readonly pkgs$ = this.patch.watch$('packageData').pipe(take(1)) readonly backupProgress$ = this.patch.watch$( - 'server-info', - 'status-info', - 'backup-progress', + 'serverInfo', + 'statusInfo', + 'backupProgress', ) - PackageMainStatus = PackageMainStatus - constructor(private readonly patch: PatchDB) {} } @@ -34,15 +30,8 @@ export class BackingUpComponent { name: 'pkgMainStatus', }) export class PkgMainStatusPipe implements PipeTransform { - transform(pkgId: string): Observable { - return this.patch.watch$( - 'package-data', - pkgId, - 'installed', - 'status', - 'main', - 'status', - ) + transform(pkgId: string): Observable { + return this.patch.watch$('packageData', pkgId, 'status', 
'main', 'status') } constructor(private readonly patch: PatchDB) {} diff --git a/web/projects/ui/src/app/pages/server-routes/server-backup/server-backup.module.ts b/web/projects/ui/src/app/pages/server-routes/server-backup/server-backup.module.ts index 6a17829853..d5b32cd6e5 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-backup/server-backup.module.ts +++ b/web/projects/ui/src/app/pages/server-routes/server-backup/server-backup.module.ts @@ -8,6 +8,7 @@ import { BackupDrivesComponentModule } from 'src/app/components/backup-drives/ba import { SharedPipesModule } from '@start9labs/shared' import { BackupSelectPageModule } from 'src/app/modals/backup-select/backup-select.module' import { PkgMainStatusPipe } from './backing-up/backing-up.component' +import { UiPipeModule } from 'src/app/pipes/ui/ui.module' const routes: Routes = [ { @@ -24,6 +25,7 @@ const routes: Routes = [ SharedPipesModule, BackupDrivesComponentModule, BackupSelectPageModule, + UiPipeModule, ], declarations: [ServerBackupPage, BackingUpComponent, PkgMainStatusPipe], }) diff --git a/web/projects/ui/src/app/pages/server-routes/server-backup/server-backup.page.ts b/web/projects/ui/src/app/pages/server-routes/server-backup/server-backup.page.ts index 2d4d3901ef..0467df1027 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-backup/server-backup.page.ts +++ b/web/projects/ui/src/app/pages/server-routes/server-backup/server-backup.page.ts @@ -84,9 +84,7 @@ export class ServerBackupPage { buttonText: 'Create Backup', submitFn: async (password: string) => { // confirm password matches current master password - const { 'password-hash': passwordHash } = await getServerInfo( - this.patch, - ) + const { passwordHash } = await getServerInfo(this.patch) argon2.verify(passwordHash, password) // first time backup @@ -95,8 +93,7 @@ export class ServerBackupPage { // existing backup } else { try { - const passwordHash = - target.entry['embassy-os']?.['password-hash'] || '' + const passwordHash = target.entry.startOs?.passwordHash || '' argon2.verify(passwordHash, password) } catch { @@ -133,7 +130,7 @@ export class ServerBackupPage { useMask: true, buttonText: 'Create Backup', submitFn: async (oldPassword: string) => { - const passwordHash = target.entry['embassy-os']?.['password-hash'] || '' + const passwordHash = target.entry.startOs?.passwordHash || '' argon2.verify(passwordHash, oldPassword) await this.createBackup(target, password, oldPassword) @@ -161,9 +158,9 @@ export class ServerBackupPage { try { await this.embassyApi.createBackup({ - 'target-id': target.id, - 'package-ids': this.serviceIds, - 'old-password': oldPassword || null, + targetId: target.id, + packageIds: this.serviceIds, + oldPassword: oldPassword || null, password, }) } finally { diff --git a/web/projects/ui/src/app/pages/server-routes/server-metrics/server-metrics.page.html b/web/projects/ui/src/app/pages/server-routes/server-metrics/server-metrics.page.html index 0926334aa3..70b977ecc7 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-metrics/server-metrics.page.html +++ b/web/projects/ui/src/app/pages/server-routes/server-metrics/server-metrics.page.html @@ -74,7 +74,7 @@

Memory Percentage Used - {{ memory['percentage-used'].value }} % + {{ memory.percentageUsed.value }} % Total

zram Used - {{ memory['zram-used'].value }} MiB + {{ memory.zramUsed.value }} MiB zram Total - {{ memory['zram-total'].value }} MiB + {{ memory.zramTotal.value }} MiB zram Available - {{ memory['zram-available'].value }} MiB + {{ memory.zramAvailable.value }} MiB

CPU Percentage Used - {{ cpu['percentage-used'].value }} % + {{ cpu.percentageUsed.value }} % User Space - {{ cpu['user-space'].value }} % + {{ cpu.userSpace.value }} % Kernel Space - {{ cpu['kernel-space'].value }} % + {{ cpu.kernelSpace.value }} % @@ -138,7 +138,7 @@

Disk Percentage Used - {{ disk['percentage-used'].value }} % + {{ disk.percentageUsed.value }} % Capacity diff --git a/web/projects/ui/src/app/pages/server-routes/server-routing.module.ts b/web/projects/ui/src/app/pages/server-routes/server-routing.module.ts index 5c728e6684..e7eb43aad6 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-routing.module.ts +++ b/web/projects/ui/src/app/pages/server-routes/server-routing.module.ts @@ -80,13 +80,6 @@ const routes: Routes = [ loadChildren: () => import('./wifi/wifi.module').then(m => m.WifiPageModule), }, - { - path: 'experimental-features', - loadChildren: () => - import('./experimental-features/experimental-features.module').then( - m => m.ExperimentalFeaturesPageModule, - ), - }, ] @NgModule({ diff --git a/web/projects/ui/src/app/pages/server-routes/server-show/server-show.page.html b/web/projects/ui/src/app/pages/server-routes/server-show/server-show.page.html index 760a013e61..02cba9052e 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-show/server-show.page.html +++ b/web/projects/ui/src/app/pages/server-routes/server-show/server-show.page.html @@ -16,7 +16,7 @@ @@ -40,27 +40,6 @@

Clock sync failure

- - - -

Http detected

-

- Tor is faster over https. - - Download and trust your server's Root CA - - , then switch to https. -

-
- - Open Https - - -
-
@@ -83,15 +62,15 @@

{{ button.title }}

- + - Last Backup: {{ server['last-backup'] ? (server['last-backup'] | - date: 'medium') : 'never' }} + Last Backup: {{ server.lastBackup ? (server.lastBackup | date: + 'medium') : 'never' }} - + {{ button.title }}

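[Reviewer note, not part of the patch] The template changes above bind camelCase fields coming straight from the generated core/startos/bindings types instead of the old kebab-case keys. A minimal sketch of how a component now feeds such a template; the selector, component name, and template below are hypothetical, only the 'serverInfo' path and lastBackup field appear in this diff:

import { Component } from '@angular/core'
import { PatchDB } from 'patch-db-client'
import { DataModel } from 'src/app/services/patch-db/data-model'

@Component({
  selector: 'last-backup-example', // hypothetical example component
  template: `
    <span *ngIf="server$ | async as server">
      Last Backup: {{ server.lastBackup ? (server.lastBackup | date: 'medium') : 'never' }}
    </span>
  `,
})
export class LastBackupExampleComponent {
  // camelCase PatchDB path, matching the renamed bindings (was 'server-info')
  readonly server$ = this.patch.watch$('serverInfo')

  constructor(private readonly patch: PatchDB<DataModel>) {}
}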
diff --git a/web/projects/ui/src/app/pages/server-routes/server-show/server-show.page.ts b/web/projects/ui/src/app/pages/server-routes/server-show/server-show.page.ts index 2b64fcf1c6..b479f17502 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-show/server-show.page.ts +++ b/web/projects/ui/src/app/pages/server-routes/server-show/server-show.page.ts @@ -37,12 +37,10 @@ export class ServerShowPage { manageClicks = 0 powerClicks = 0 - readonly server$ = this.patch.watch$('server-info') + readonly server$ = this.patch.watch$('serverInfo') readonly showUpdate$ = this.eosService.showUpdate$ readonly showDiskRepair$ = this.ClientStorageService.showDiskRepair$ - readonly isTorHttp = this.config.isTorHttp() - constructor( private readonly alertCtrl: AlertController, private readonly modalCtrl: ModalController, @@ -143,7 +141,7 @@ export class ServerShowPage { } // confirm current password is correct - const { 'password-hash': passwordHash } = await getServerInfo(this.patch) + const { passwordHash } = await getServerInfo(this.patch) try { argon2.verify(passwordHash, value.currPass) } catch (e) { @@ -162,8 +160,8 @@ export class ServerShowPage { try { await this.embassyApi.resetPassword({ - 'old-password': value.currPass, - 'new-password': value.newPass, + oldPassword: value.currPass, + newPassword: value.newPass, }) const toast = await this.toastCtrl.create({ header: 'Password changed!', @@ -181,6 +179,73 @@ export class ServerShowPage { } } + async presentAlertResetTor() { + const isTor = this.config.isTor() + const shared = + 'Optionally wipe state to forcibly acquire new guard nodes. It is recommended to try without wiping state first.' + const alert = await this.alertCtrl.create({ + header: isTor ? 'Warning' : 'Confirm', + message: isTor + ? `You are currently connected over Tor. If you reset the Tor daemon, you will lose connectivity until it comes back online.

${shared}` + : `Reset Tor?

${shared}`, + inputs: [ + { + label: 'Wipe state', + type: 'checkbox', + value: 'wipe', + }, + ], + buttons: [ + { + text: 'Cancel', + role: 'cancel', + }, + { + text: 'Reset', + handler: (value: string[]) => { + this.resetTor(value.some(v => v === 'wipe')) + }, + cssClass: 'enter-click', + }, + ], + cssClass: isTor ? 'alert-warning-message' : '', + }) + await alert.present() + } + + private async resetTor(wipeState: boolean) { + const loader = await this.loadingCtrl.create({ + message: 'Resetting Tor...', + }) + await loader.present() + + try { + await this.embassyApi.resetTor({ + wipeState: wipeState, + reason: 'User triggered', + }) + const toast = await this.toastCtrl.create({ + header: 'Tor reset in progress', + position: 'bottom', + duration: 4000, + buttons: [ + { + side: 'start', + icon: 'close', + handler: () => { + return true + }, + }, + ], + }) + await toast.present() + } catch (e: any) { + this.errToast.present(e) + } finally { + loader.dismiss() + } + } + async updateEos(): Promise { const modal = await this.modalCtrl.create({ component: OSUpdatePage, @@ -305,11 +370,6 @@ export class ServerShowPage { await alert.present() } - async launchHttps() { - const { 'tor-address': torAddress } = await getServerInfo(this.patch) - this.windowRef.open(torAddress, '_self') - } - addClick(title: string) { switch (title) { case 'Manage': @@ -520,14 +580,11 @@ export class ServerShowPage { disabled$: of(false), }, { - title: 'Experimental Features', - description: 'Try out new and potentially unstable new features', - icon: 'flask-outline', - action: () => - this.navCtrl.navigateForward(['experimental-features'], { - relativeTo: this.route, - }), - detail: true, + title: 'Reset Tor', + description: 'May help resolve Tor connectivity issues.', + icon: 'reload-circle-outline', + action: () => this.presentAlertResetTor(), + detail: false, disabled$: of(false), }, ], diff --git a/web/projects/ui/src/app/pages/server-routes/server-specs/server-specs.page.html b/web/projects/ui/src/app/pages/server-routes/server-specs/server-specs.page.html index 87dd8ef242..f9df6eb37d 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-specs/server-specs.page.html +++ b/web/projects/ui/src/app/pages/server-routes/server-specs/server-specs.page.html @@ -30,17 +30,17 @@

Git Hash

Tor

-

{{ server['tor-address'] }}

+

{{ server.torAddress }}

- + - + Tor

LAN

-

{{ server['lan-address'] }}

+

{{ server.lanAddress }}

- +
- +

{{ iface.key }} (IPv4)

@@ -92,13 +92,9 @@

Pubkey

CA fingerprint

-

{{ server['ca-fingerprint'] }}

+

{{ server.caFingerprint }}

- +
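[Reviewer note, not part of the patch] The specs template above now reads camelCase fields directly off the serverInfo object. A rough slice of the shape it relies on, inferred only from the bindings referenced in this diff; exact types are assumptions:

// Hypothetical excerpt for illustration; the real type lives in core/startos/bindings.
interface ServerInfoSlice {
  torAddress: string // rendered in the Tor row
  lanAddress: string // rendered in the LAN row
  caFingerprint: string // rendered in the CA fingerprint row
  lastBackup: string | null // ISO timestamp, shown as 'never' when null
}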
diff --git a/web/projects/ui/src/app/pages/server-routes/server-specs/server-specs.page.ts b/web/projects/ui/src/app/pages/server-routes/server-specs/server-specs.page.ts index c80003bb1a..0055ff6c1b 100644 --- a/web/projects/ui/src/app/pages/server-routes/server-specs/server-specs.page.ts +++ b/web/projects/ui/src/app/pages/server-routes/server-specs/server-specs.page.ts @@ -13,7 +13,7 @@ import { DataModel } from 'src/app/services/patch-db/data-model' changeDetection: ChangeDetectionStrategy.OnPush, }) export class ServerSpecsPage { - readonly server$ = this.patch.watch$('server-info') + readonly server$ = this.patch.watch$('serverInfo') constructor( private readonly toastCtrl: ToastController, diff --git a/web/projects/ui/src/app/pages/server-routes/sessions/sessions.page.html b/web/projects/ui/src/app/pages/server-routes/sessions/sessions.page.html index 4e291b5fc5..3e349b49b3 100644 --- a/web/projects/ui/src/app/pages/server-routes/sessions/sessions.page.html +++ b/web/projects/ui/src/app/pages/server-routes/sessions/sessions.page.html @@ -52,10 +52,8 @@ >

{{ getPlatformName(currentSession.metadata.platforms) }}

-

- Last Active: {{ currentSession['last-active'] | date : 'medium' }} -

-

{{ currentSession['user-agent'] }}

+

Last Active: {{ currentSession.lastActive | date : 'medium' }}

+

{{ currentSession.userAgent }}

@@ -80,8 +78,8 @@

>

{{ getPlatformName(session.metadata.platforms) }}

-

Last Active: {{ session['last-active'] | date : 'medium' }}

-

{{ session['user-agent'] }}

+

Last Active: {{ session.lastActive | date : 'medium' }}

+

{{ session.userAgent }}

{ return ( - new Date(b['last-active']).valueOf() - - new Date(a['last-active']).valueOf() + new Date(b.lastActive).valueOf() - new Date(a.lastActive).valueOf() ) }) } catch (e: any) { diff --git a/web/projects/ui/src/app/pages/server-routes/sideload/sideload.page.ts b/web/projects/ui/src/app/pages/server-routes/sideload/sideload.page.ts index baaf5a9ecc..242b941720 100644 --- a/web/projects/ui/src/app/pages/server-routes/sideload/sideload.page.ts +++ b/web/projects/ui/src/app/pages/server-routes/sideload/sideload.page.ts @@ -1,10 +1,10 @@ import { Component } from '@angular/core' import { isPlatform, LoadingController, NavController } from '@ionic/angular' import { ApiService } from 'src/app/services/api/embassy-api.service' -import { Manifest } from 'src/app/services/patch-db/data-model' import { ConfigService } from 'src/app/services/config.service' import cbor from 'cbor' import { ErrorToastService } from '@start9labs/shared' +import { Manifest } from '../../../../../../../../core/startos/bindings/Manifest' interface Positions { [key: string]: [bigint, bigint] // [position, length] @@ -133,9 +133,7 @@ export class SideloadPage { } async getIcon(positions: Positions, file: Blob) { - const contentType = `image/${this.toUpload.manifest?.assets.icon - .split('.') - .pop()}` + const contentType = '' // @TODO const data = file.slice( Number(positions['icon'][0]), Number(positions['icon'][0]) + Number(positions['icon'][1]), diff --git a/web/projects/ui/src/app/pages/server-routes/ssh-keys/ssh-keys.page.html b/web/projects/ui/src/app/pages/server-routes/ssh-keys/ssh-keys.page.html index 4ac7d5439c..61737639b5 100644 --- a/web/projects/ui/src/app/pages/server-routes/ssh-keys/ssh-keys.page.html +++ b/web/projects/ui/src/app/pages/server-routes/ssh-keys/ssh-keys.page.html @@ -66,7 +66,7 @@

{{ ssh.hostname }}

-

{{ ssh['created-at'] | date: 'medium' }}

+

{{ ssh.createdAt | date: 'medium' }}

{{ ssh.alg }} {{ ssh.fingerprint }}

Available Networks - + - +

{{ pkg.manifest.title }}

- {{ local.installed?.manifest?.version || '' | - displayEmver }} + {{ local.stateInfo.manifest.version | displayEmver }}   @@ -56,15 +55,18 @@

- + + + > @@ -86,9 +88,7 @@

What's new
-

+

+ localPkgs: Record> errors: string[] } @@ -36,7 +42,14 @@ export class UpdatesPage { readonly data$: Observable = combineLatest({ hosts: this.marketplaceService.getKnownHosts$(true), marketplace: this.marketplaceService.getMarketplace$(), - localPkgs: this.patch.watch$('package-data'), + localPkgs: this.patch.watch$('packageData').pipe( + map(pkgs => + Object.entries(pkgs).reduce((acc, [id, val]) => { + if (isInstalled(val) || isUpdating(val)) return { ...acc, [id]: val } + return acc + }, {} as Record>), + ), + ), errors: this.marketplaceService.getRequestErrors$(), }) @@ -57,12 +70,7 @@ export class UpdatesPage { }) } - async tryUpdate( - manifest: MarketplaceManifest, - url: string, - local: PackageDataEntry, - e: Event, - ): Promise { + async tryUpdate(manifest: Manifest, url: string, e: Event): Promise { e.stopPropagation() const { id, version } = manifest @@ -70,14 +78,15 @@ export class UpdatesPage { delete this.marketplaceService.updateErrors[id] this.marketplaceService.updateQueue[id] = true - if (hasCurrentDeps(local)) { + // manifest.id OK because same as local id for update + if (hasCurrentDeps(manifest.id, await getAllPackages(this.patch))) { this.dryInstall(manifest, url) } else { this.install(id, version, url) } } - private async dryInstall(manifest: MarketplaceManifest, url: string) { + private async dryInstall(manifest: Manifest, url: string) { const { id, version, title } = manifest const breakages = dryUpdate( @@ -154,14 +163,17 @@ export class FilterUpdatesPipe implements PipeTransform { transform( pkgs: MarketplacePkg[], - local: Record, + local: Record>, ): MarketplacePkg[] { - return pkgs.filter( - ({ manifest }) => + return pkgs.filter(({ manifest }) => { + const localPkg = local[manifest.id] + return ( + localPkg && this.emver.compare( manifest.version, - local[manifest.id]?.installed?.manifest.version || '', - ) === 1, - ) + localPkg.stateInfo.manifest.version, + ) === 1 + ) + }) } } diff --git a/web/projects/ui/src/app/pages/widgets/built-in/health/health.component.ts b/web/projects/ui/src/app/pages/widgets/built-in/health/health.component.ts index e4f456c014..bc496b2196 100644 --- a/web/projects/ui/src/app/pages/widgets/built-in/health/health.component.ts +++ b/web/projects/ui/src/app/pages/widgets/built-in/health/health.component.ts @@ -9,6 +9,7 @@ import { PrimaryStatus } from 'src/app/services/pkg-status-rendering.service' import { getPackageInfo, PkgInfo } from '../../../../util/get-package-info' import { combineLatest } from 'rxjs' import { DepErrorService } from 'src/app/services/dep-error.service' +import { getManifest } from 'src/app/util/get-package-data' @Component({ selector: 'widget-health', @@ -26,12 +27,12 @@ export class HealthComponent { ] as const readonly data$ = combineLatest([ - inject(PatchDB).watch$('package-data'), + inject(PatchDB).watch$('packageData'), inject(DepErrorService).depErrors$, ]).pipe( map(([data, depErrors]) => { const pkgs = Object.values(data).map(pkg => - getPackageInfo(pkg, depErrors[pkg.manifest.id]), + getPackageInfo(pkg, depErrors[getManifest(pkg).id]), ) const result = this.labels.reduce>( (acc, label) => ({ diff --git a/web/projects/ui/src/app/pipes/install-progress/install-progress.module.ts b/web/projects/ui/src/app/pipes/install-progress/install-progress.module.ts index 37bbd0744a..a0997f29ce 100644 --- a/web/projects/ui/src/app/pipes/install-progress/install-progress.module.ts +++ b/web/projects/ui/src/app/pipes/install-progress/install-progress.module.ts @@ -1,11 +1,11 @@ import { NgModule } from 
'@angular/core' import { - InstallProgressDisplayPipe, - InstallProgressPipe, + InstallingProgressDisplayPipe, + InstallingProgressPipe, } from './install-progress.pipe' @NgModule({ - declarations: [InstallProgressPipe, InstallProgressDisplayPipe], - exports: [InstallProgressPipe, InstallProgressDisplayPipe], + declarations: [InstallingProgressPipe, InstallingProgressDisplayPipe], + exports: [InstallingProgressPipe, InstallingProgressDisplayPipe], }) -export class InstallProgressPipeModule {} +export class InstallingProgressPipeModule {} diff --git a/web/projects/ui/src/app/pipes/install-progress/install-progress.pipe.ts b/web/projects/ui/src/app/pipes/install-progress/install-progress.pipe.ts index 459dc722b2..7d6a646d03 100644 --- a/web/projects/ui/src/app/pipes/install-progress/install-progress.pipe.ts +++ b/web/projects/ui/src/app/pipes/install-progress/install-progress.pipe.ts @@ -1,24 +1,26 @@ import { Pipe, PipeTransform } from '@angular/core' -import { InstallProgress } from 'src/app/services/patch-db/data-model' -import { packageLoadingProgress } from 'src/app/util/package-loading-progress' +import { Progress } from '../../../../../../../core/startos/bindings/Progress' @Pipe({ - name: 'installProgress', + name: 'installingProgressString', }) -export class InstallProgressPipe implements PipeTransform { - transform(installProgress?: InstallProgress): number { - return packageLoadingProgress(installProgress)?.totalProgress || 0 +export class InstallingProgressDisplayPipe implements PipeTransform { + transform(progress: Progress): string { + if (progress === true) return 'finalizing' + if (progress === false || !progress.total) return 'unknown %' + const percentage = Math.round((100 * progress.done) / progress.total) + + return percentage < 99 ? String(percentage) + '%' : 'finalizing' } } @Pipe({ - name: 'installProgressDisplay', + name: 'installingProgress', }) -export class InstallProgressDisplayPipe implements PipeTransform { - transform(installProgress?: InstallProgress): string { - const totalProgress = - packageLoadingProgress(installProgress)?.totalProgress || 0 - - return totalProgress < 99 ? 
totalProgress + '%' : 'finalizing' +export class InstallingProgressPipe implements PipeTransform { + transform(progress: Progress): number | null { + if (progress === true) return 1 + if (progress === false || !progress.total) return null + return Number((progress.done / progress.total).toFixed(2)) } } diff --git a/web/projects/ui/src/app/pipes/launchable/launchable.pipe.ts b/web/projects/ui/src/app/pipes/launchable/launchable.pipe.ts index be1d5218dc..d99e69c87a 100644 --- a/web/projects/ui/src/app/pipes/launchable/launchable.pipe.ts +++ b/web/projects/ui/src/app/pipes/launchable/launchable.pipe.ts @@ -1,10 +1,7 @@ import { Pipe, PipeTransform } from '@angular/core' -import { - InterfaceDef, - PackageMainStatus, - PackageState, -} from 'src/app/services/patch-db/data-model' import { ConfigService } from '../../services/config.service' +import { PackageState } from '../../../../../../../core/startos/bindings/PackageState' +import { MainStatus } from '../../../../../../../core/startos/bindings/MainStatus' @Pipe({ name: 'isLaunchable', @@ -13,10 +10,9 @@ export class LaunchablePipe implements PipeTransform { constructor(private configService: ConfigService) {} transform( - state: PackageState, - status: PackageMainStatus, - interfaces: Record, + state: PackageState['state'], + status: MainStatus['status'], ): boolean { - return this.configService.isLaunchable(state, status, interfaces) + return this.configService.isLaunchable(state, status) } } diff --git a/web/projects/ui/src/app/pipes/ui/ui.module.ts b/web/projects/ui/src/app/pipes/ui/ui.module.ts index 9637306deb..a031c20591 100644 --- a/web/projects/ui/src/app/pipes/ui/ui.module.ts +++ b/web/projects/ui/src/app/pipes/ui/ui.module.ts @@ -1,8 +1,8 @@ import { NgModule } from '@angular/core' -import { UiPipe } from './ui.pipe' +import { ToManifestPipe, UiPipe } from './ui.pipe' @NgModule({ - declarations: [UiPipe], - exports: [UiPipe], + declarations: [UiPipe, ToManifestPipe], + exports: [UiPipe, ToManifestPipe], }) export class UiPipeModule {} diff --git a/web/projects/ui/src/app/pipes/ui/ui.pipe.ts b/web/projects/ui/src/app/pipes/ui/ui.pipe.ts index 9d46bfd86b..49ad380ad8 100644 --- a/web/projects/ui/src/app/pipes/ui/ui.pipe.ts +++ b/web/projects/ui/src/app/pipes/ui/ui.pipe.ts @@ -1,12 +1,23 @@ import { Pipe, PipeTransform } from '@angular/core' -import { InterfaceDef } from '../../services/patch-db/data-model' +import { PackageDataEntry } from '../../services/patch-db/data-model' import { hasUi } from '../../services/config.service' +import { getManifest } from 'src/app/util/get-package-data' +import { Manifest } from '../../../../../../../core/startos/bindings/Manifest' @Pipe({ name: 'hasUi', }) export class UiPipe implements PipeTransform { - transform(interfaces: Record): boolean { - return hasUi(interfaces) + transform(interfaces: PackageDataEntry['serviceInterfaces']): boolean { + return interfaces ? hasUi(interfaces) : false + } +} + +@Pipe({ + name: 'toManifest', +}) +export class ToManifestPipe implements PipeTransform { + transform(pkg: PackageDataEntry): Manifest { + return getManifest(pkg) } } diff --git a/web/projects/ui/src/app/pkg-config/config-types.ts b/web/projects/ui/src/app/pkg-config/config-types.ts index 08f0b9d261..f73dbc0a45 100644 --- a/web/projects/ui/src/app/pkg-config/config-types.ts +++ b/web/projects/ui/src/app/pkg-config/config-types.ts @@ -24,8 +24,6 @@ export type ValueSpecOf = T extends 'string' ? ValueSpecList : T extends 'object' ? ValueSpecObject - : T extends 'pointer' - ? 
ValueSpecPointer : T extends 'union' ? ValueSpecUnion : never @@ -60,16 +58,6 @@ export interface ValueSpecUnion { default: string } -export interface ValueSpecPointer extends WithStandalone { - type: 'pointer' - subtype: 'package' | 'system' - 'package-id': string - target: 'lan-address' | 'tor-address' | 'config' | 'tor-key' - interface: string // will only exist if target = tor-key || tor-address || lan-address - selector?: string // will only exist if target = config - multi?: boolean // will only exist if target = config -} - export interface ValueSpecObject extends WithStandalone { type: 'object' spec: ConfigSpec diff --git a/web/projects/ui/src/app/services/api/api.fixures.ts b/web/projects/ui/src/app/services/api/api.fixures.ts index 17460609a6..4ceaff1f94 100644 --- a/web/projects/ui/src/app/services/api/api.fixures.ts +++ b/web/projects/ui/src/app/services/api/api.fixures.ts @@ -1,29 +1,26 @@ import { - DockerIoFormat, - Manifest, + InstalledState, PackageDataEntry, - PackageMainStatus, - PackageState, - ServerStatusInfo, } from 'src/app/services/patch-db/data-model' import { Metric, NotificationLevel, RR, ServerNotifications } from './api.types' - import { BTC_ICON, LND_ICON, PROXY_ICON } from './api-icons' import { DependencyMetadata, MarketplacePkg } from '@start9labs/marketplace' import { Log } from '@start9labs/shared' +import { ServerStatus } from '../../../../../../../core/startos/bindings/ServerStatus' +import { Manifest } from '../../../../../../../core/startos/bindings/Manifest' export module Mock { - export const ServerUpdated: ServerStatusInfo = { - 'backup-progress': null, - 'update-progress': null, + export const ServerUpdated: ServerStatus = { + backupProgress: null, + updateProgress: null, updated: true, restarting: false, - 'shutting-down': false, + shuttingDown: false, } export const MarketplaceEos: RR.GetMarketplaceEosRes = { version: '0.3.5.1', headline: 'Our biggest release ever.', - 'release-notes': { + releaseNotes: { '0.3.5.1': 'Some **Markdown** release _notes_ for 0.3.5.1', '0.3.4.4': 'Some **Markdown** release _notes_ for 0.3.4.4', '0.3.4.3': 'Some **Markdown** release _notes_ for 0.3.4.3', @@ -49,27 +46,19 @@ export module Mock { id: 'bitcoind', title: 'Bitcoin Core', version: '0.21.0', - 'git-hash': 'abcdefgh', + gitHash: 'abcdefgh', description: { short: 'A Bitcoin full node by Bitcoin Core.', long: 'Bitcoin is a decentralized consensus protocol and settlement network.', }, replaces: ['banks', 'governments'], - 'release-notes': 'Taproot, Schnorr, and more.', - assets: { - icon: 'icon.png', - license: 'LICENSE.md', - instructions: 'INSTRUCTIONS.md', - docker_images: 'image.tar', - assets: './assets', - scripts: './scripts', - }, + releaseNotes: 'Taproot, Schnorr, and more.', license: 'MIT', - 'wrapper-repo': 'https://github.com/start9labs/bitcoind-wrapper', - 'upstream-repo': 'https://github.com/bitcoin/bitcoin', - 'support-site': 'https://bitcoin.org', - 'marketing-site': 'https://bitcoin.org', - 'donation-url': 'https://start9.com', + wrapperRepo: 'https://github.com/start9labs/bitcoind-wrapper', + upstreamRepo: 'https://github.com/bitcoin/bitcoin', + supportSite: 'https://bitcoin.org', + marketingSite: 'https://bitcoin.org', + donationUrl: 'https://start9.com', alerts: { install: 'Bitcoin can take over a week to sync.', uninstall: @@ -78,302 +67,36 @@ export module Mock { start: 'Starting Bitcoin is good for your health.', stop: null, }, - main: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 
'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': '1ms', - }, - 'health-checks': {}, - config: { - get: null, - set: null, - }, - volumes: {}, - 'min-os-version': '0.2.12', - interfaces: { - ui: { - name: 'Node Visualizer', - description: - 'Web application for viewing information about your node and the Bitcoin network.', - ui: true, - 'tor-config': { - 'port-mapping': {}, - }, - 'lan-config': {}, - protocols: [], - }, - rpc: { - name: 'RPC', - description: 'Used by wallets to interact with your Bitcoin Core node.', - ui: false, - 'tor-config': { - 'port-mapping': {}, - }, - 'lan-config': {}, - protocols: [], - }, - p2p: { - name: 'P2P', - description: - 'Used by other Bitcoin nodes to communicate and interact with your node.', - ui: false, - 'tor-config': { - 'port-mapping': {}, - }, - 'lan-config': {}, - protocols: [], - }, - }, - backup: { - create: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - restore: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - }, - migrations: null, - actions: { - resync: { - name: 'Resync Blockchain', - description: 'Use this to resync the Bitcoin blockchain from genesis', - warning: 'This will take a couple of days.', - 'allowed-statuses': [ - PackageMainStatus.Running, - PackageMainStatus.Stopped, - ], - implementation: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - 'input-spec': { - reason: { - type: 'string', - name: 'Re-sync Reason', - description: 'Your reason for re-syncing. Why are you doing this?', - nullable: false, - masked: false, - copyable: false, - pattern: '^[a-zA-Z]+$', - 'pattern-description': 'Must contain only letters.', - }, - name: { - type: 'string', - name: 'Your Name', - description: 'Tell the class your name.', - nullable: true, - masked: false, - copyable: false, - warning: 'You may loose all your money by providing your name.', - }, - notifications: { - name: 'Notification Preferences', - type: 'list', - subtype: 'enum', - description: 'how you want to be notified', - range: '[1,3]', - default: ['email'], - spec: { - 'value-names': { - email: 'Email', - text: 'Text', - call: 'Call', - push: 'Push', - webhook: 'Webhook', - }, - values: ['email', 'text', 'call', 'push', 'webhook'], - }, - }, - 'days-ago': { - type: 'number', - name: 'Days Ago', - description: 'Number of days to re-sync.', - nullable: false, - default: 100, - range: '[0, 9999]', - integral: true, - }, - 'top-speed': { - type: 'number', - name: 'Top Speed', - description: 'The fastest you can possibly run.', - nullable: false, - range: '[-1000, 1000]', - integral: false, - units: 'm/s', - }, - testnet: { - name: 'Testnet', - type: 'boolean', - description: - '
  • determines whether your node is running on testnet or mainnet
', - warning: 'Chain will have to resync!', - default: false, - }, - randomEnum: { - name: 'Random Enum', - type: 'enum', - 'value-names': { - null: 'Null', - good: 'Good', - bad: 'Bad', - ugly: 'Ugly', - }, - default: 'null', - description: 'This is not even real.', - warning: 'Be careful changing this!', - values: ['null', 'good', 'bad', 'ugly'], - }, - 'emergency-contact': { - name: 'Emergency Contact', - type: 'object', - description: 'The person to contact in case of emergency.', - spec: { - name: { - type: 'string', - name: 'Name', - nullable: false, - masked: false, - copyable: false, - pattern: '^[a-zA-Z]+$', - 'pattern-description': 'Must contain only letters.', - }, - email: { - type: 'string', - name: 'Email', - nullable: false, - masked: false, - copyable: true, - }, - }, - }, - ips: { - name: 'Whitelist IPs', - type: 'list', - subtype: 'string', - description: - 'external ip addresses that are authorized to access your Bitcoin node', - warning: - 'Any IP you allow here will have RPC access to your Bitcoin node.', - range: '[1,10]', - default: ['192.168.1.1'], - spec: { - pattern: '^[0-9]{1,3}([,.][0-9]{1,3})?$', - 'pattern-description': 'Must be a valid IP address', - masked: false, - copyable: false, - }, - }, - bitcoinNode: { - type: 'union', - default: 'internal', - tag: { - id: 'type', - 'variant-names': { - internal: 'Internal', - external: 'External', - }, - name: 'Bitcoin Node Settings', - description: 'The node settings', - warning: 'Careful changing this', - }, - variants: { - internal: { - 'lan-address': { - name: 'LAN Address', - type: 'pointer', - subtype: 'package', - target: 'lan-address', - description: 'the lan address', - interface: 'tor-address', - 'package-id': '12341234', - }, - 'friendly-name': { - name: 'Friendly Name', - type: 'string', - description: 'the lan address', - nullable: true, - masked: false, - copyable: false, - }, - }, - external: { - 'public-domain': { - name: 'Public Domain', - type: 'string', - description: 'the public address of the node', - nullable: false, - default: 'bitcoinnode.com', - pattern: '.*', - 'pattern-description': 'anything', - masked: false, - copyable: true, - }, - }, - }, - }, - }, - }, - }, + osVersion: '0.2.12', dependencies: {}, + hasConfig: true, + images: ['main'], + assets: [], + volumes: ['main'], + hardwareRequirements: { + device: {}, + arch: null, + ram: null, + }, } export const MockManifestLnd: Manifest = { id: 'lnd', title: 'Lightning Network Daemon', version: '0.11.1', + gitHash: 'abcdefgh', description: { short: 'A bolt spec compliant client.', long: 'More info about LND. More info about LND. 
More info about LND.', }, - 'release-notes': 'Dual funded channels!', - assets: { - icon: 'icon.png', - license: 'LICENSE.md', - instructions: 'INSTRUCTIONS.md', - docker_images: 'image.tar', - assets: './assets', - scripts: './scripts', - }, + replaces: ['banks', 'governments'], + releaseNotes: 'Dual funded channels!', license: 'MIT', - 'wrapper-repo': 'https://github.com/start9labs/lnd-wrapper', - 'upstream-repo': 'https://github.com/lightningnetwork/lnd', - 'support-site': 'https://lightning.engineering/', - 'marketing-site': 'https://lightning.engineering/', - 'donation-url': null, + wrapperRepo: 'https://github.com/start9labs/lnd-wrapper', + upstreamRepo: 'https://github.com/lightningnetwork/lnd', + supportSite: 'https://lightning.engineering/', + marketingSite: 'https://lightning.engineering/', + donationUrl: null, alerts: { install: null, uninstall: null, @@ -382,152 +105,45 @@ export module Mock { start: 'Starting LND is good for your health.', stop: null, }, - main: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': '10000µs', - }, - 'health-checks': {}, - config: { - get: null, - set: null, - }, - volumes: {}, - 'min-os-version': '0.2.12', - interfaces: { - rpc: { - name: 'RPC interface', - description: 'Good for connecting to your node at a distance.', - ui: true, - 'tor-config': { - 'port-mapping': {}, - }, - 'lan-config': { - '44': { - ssl: true, - mapping: 33, - }, - }, - protocols: [], - }, - grpc: { - name: 'GRPC', - description: 'Certain wallet use grpc.', - ui: false, - 'tor-config': { - 'port-mapping': {}, - }, - 'lan-config': { - '66': { - ssl: true, - mapping: 55, - }, - }, - protocols: [], - }, - }, - backup: { - create: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - restore: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - }, - migrations: null, - actions: { - resync: { - name: 'Resync Network Graph', - description: 'Your node will resync its network graph.', - warning: 'This will take a couple hours.', - 'allowed-statuses': [PackageMainStatus.Running], - implementation: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - 'input-spec': null, - }, - }, + osVersion: '0.2.12', dependencies: { bitcoind: { - version: '=0.21.0', description: 'LND needs bitcoin to live.', - requirement: { - type: 'opt-out', - how: 'You can use an external node from your server if you prefer.', - }, - config: null, + optional: true, }, 'btc-rpc-proxy': { - version: '>=0.2.2', description: 'As long as Bitcoin is pruned, LND needs Bitcoin Proxy to fetch block over the P2P network.', - requirement: { - type: 'opt-in', - how: `To use Proxy's user management system, go to LND config and select Bitcoin Proxy under Bitcoin config.`, - }, - config: null, + optional: true, }, }, + hasConfig: true, + images: ['main'], + assets: [], + volumes: ['main'], + hardwareRequirements: { + device: {}, + arch: null, + ram: null, + }, } export const MockManifestBitcoinProxy: Manifest = { id: 'btc-rpc-proxy', title: 'Bitcoin Proxy', version: 
'0.2.2', - 'git-hash': 'lmnopqrx', + gitHash: 'lmnopqrx', description: { short: 'A super charger for your Bitcoin node.', long: 'More info about Bitcoin Proxy. More info about Bitcoin Proxy. More info about Bitcoin Proxy.', }, - 'release-notes': 'Even better support for Bitcoin and wallets!', - assets: { - icon: 'icon.png', - license: 'LICENSE.md', - instructions: 'INSTRUCTIONS.md', - docker_images: 'image.tar', - assets: './assets', - scripts: './scripts', - }, + releaseNotes: 'Even better support for Bitcoin and wallets!', license: 'MIT', - 'wrapper-repo': 'https://github.com/start9labs/btc-rpc-proxy-wrapper', - 'upstream-repo': 'https://github.com/Kixunil/btc-rpc-proxy', - 'support-site': '', - 'marketing-site': '', - 'donation-url': 'https://start9.com', + wrapperRepo: 'https://github.com/start9labs/btc-rpc-proxy-wrapper', + upstreamRepo: 'https://github.com/Kixunil/btc-rpc-proxy', + supportSite: '', + marketingSite: '', + donationUrl: 'https://start9.com', alerts: { install: 'Testing install alert', uninstall: null, @@ -535,113 +151,36 @@ export module Mock { start: null, stop: null, }, - main: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [''], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': '1m', - }, - 'health-checks': {}, - config: { get: {} as any, set: {} as any }, - volumes: {}, - 'min-os-version': '0.2.12', - interfaces: { - rpc: { - name: 'RPC interface', - description: 'Good for connecting to your node at a distance.', - ui: false, - 'tor-config': { - 'port-mapping': {}, - }, - 'lan-config': { - 44: { - ssl: true, - mapping: 33, - }, - }, - protocols: [], - }, - }, - backup: { - create: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [''], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - restore: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [''], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - }, - migrations: null, - actions: {}, + osVersion: '0.2.12', dependencies: { bitcoind: { - version: '>=0.20.0', description: 'Bitcoin Proxy requires a Bitcoin node.', - requirement: { - type: 'required', - }, - config: { - check: { - type: 'docker', - image: 'alpine', - system: true, - entrypoint: 'true', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Cbor, - inject: false, - 'shm-size': '10m', - 'sigterm-timeout': null, - }, - 'auto-configure': { - type: 'docker', - image: 'alpine', - system: true, - entrypoint: 'cat', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Cbor, - inject: false, - 'shm-size': '10m', - 'sigterm-timeout': null, - }, - }, + optional: false, }, }, + replaces: [], + hasConfig: false, + images: ['main'], + assets: [], + volumes: ['main'], + hardwareRequirements: { + device: {}, + arch: null, + ram: null, + }, } export const BitcoinDep: DependencyMetadata = { title: 'Bitcoin Core', icon: BTC_ICON, + optional: false, hidden: true, } export const ProxyDep: DependencyMetadata = { title: 'Bitcoin Proxy', icon: PROXY_ICON, + optional: true, hidden: false, } @@ -661,8 +200,8 @@ export module Mock { }, categories: ['bitcoin', 'cryptocurrency'], versions: ['0.19.0', '0.20.0', '0.21.0'], - 'dependency-metadata': {}, - 'published-at': new Date().toISOString(), + dependencyMetadata: {}, + publishedAt: new Date().toISOString(), }, '0.20.0': { icon: BTC_ICON, @@ -674,8 +213,8 @@ 
export module Mock { }, categories: ['bitcoin', 'cryptocurrency'], versions: ['0.19.0', '0.20.0', '0.21.0'], - 'dependency-metadata': {}, - 'published-at': new Date().toISOString(), + dependencyMetadata: {}, + publishedAt: new Date().toISOString(), }, '0.21.0': { icon: BTC_ICON, @@ -684,13 +223,13 @@ export module Mock { manifest: { ...Mock.MockManifestBitcoind, version: '0.21.0', - 'release-notes': + releaseNotes: 'For a complete list of changes, please visit https://bitcoincore.org/en/releases/0.21.0/
  • Taproot!
  • New RPCs
  • Experimental Descriptor Wallets
', }, categories: ['bitcoin', 'cryptocurrency'], versions: ['0.19.0', '0.20.0', '0.21.0'], - 'dependency-metadata': {}, - 'published-at': new Date().toISOString(), + dependencyMetadata: {}, + publishedAt: new Date().toISOString(), }, latest: { icon: BTC_ICON, @@ -698,13 +237,13 @@ export module Mock { instructions: 'instructionsUrl', manifest: { ...Mock.MockManifestBitcoind, - 'release-notes': + releaseNotes: 'For a complete list of changes, please visit https://bitcoincore.org/en/releases/0.21.0/
Or in [markdown](https://bitcoincore.org/en/releases/0.21.0/)
  • Taproot!
  • New RPCs
  • Experimental Descriptor Wallets
', }, categories: ['bitcoin', 'cryptocurrency'], versions: ['0.19.0', '0.20.0', '0.21.0'], - 'dependency-metadata': {}, - 'published-at': new Date().toISOString(), + dependencyMetadata: {}, + publishedAt: new Date().toISOString(), }, }, lnd: { @@ -715,15 +254,15 @@ export module Mock { manifest: { ...Mock.MockManifestLnd, version: '0.11.0', - 'release-notes': 'release notes for LND 0.11.0', + releaseNotes: 'release notes for LND 0.11.0', }, categories: ['bitcoin', 'lightning', 'cryptocurrency'], versions: ['0.11.0', '0.11.1'], - 'dependency-metadata': { + dependencyMetadata: { bitcoind: BitcoinDep, 'btc-rpc-proxy': ProxyDep, }, - 'published-at': new Date().toISOString(), + publishedAt: new Date().toISOString(), }, '0.11.1': { icon: LND_ICON, @@ -732,15 +271,15 @@ export module Mock { manifest: { ...Mock.MockManifestLnd, version: '0.11.1', - 'release-notes': 'release notes for LND 0.11.1', + releaseNotes: 'release notes for LND 0.11.1', }, categories: ['bitcoin', 'lightning', 'cryptocurrency'], versions: ['0.11.0', '0.11.1'], - 'dependency-metadata': { + dependencyMetadata: { bitcoind: BitcoinDep, 'btc-rpc-proxy': ProxyDep, }, - 'published-at': new Date().toISOString(), + publishedAt: new Date().toISOString(), }, latest: { icon: LND_ICON, @@ -749,11 +288,11 @@ export module Mock { manifest: Mock.MockManifestLnd, categories: ['bitcoin', 'lightning', 'cryptocurrency'], versions: ['0.11.0', '0.11.1'], - 'dependency-metadata': { + dependencyMetadata: { bitcoind: BitcoinDep, 'btc-rpc-proxy': ProxyDep, }, - 'published-at': new Date(new Date().valueOf() + 10).toISOString(), + publishedAt: new Date(new Date().valueOf() + 10).toISOString(), }, }, 'btc-rpc-proxy': { @@ -764,10 +303,10 @@ export module Mock { manifest: Mock.MockManifestBitcoinProxy, categories: ['bitcoin'], versions: ['0.2.2'], - 'dependency-metadata': { + dependencyMetadata: { bitcoind: BitcoinDep, }, - 'published-at': new Date().toISOString(), + publishedAt: new Date().toISOString(), }, }, } @@ -778,8 +317,8 @@ export module Mock { export const Notifications: ServerNotifications = [ { id: 1, - 'package-id': null, - 'created-at': '2019-12-26T14:20:30.872Z', + packageId: null, + createdAt: '2019-12-26T14:20:30.872Z', code: 1, level: NotificationLevel.Success, title: 'Backup Complete', @@ -798,8 +337,8 @@ export module Mock { }, { id: 2, - 'package-id': null, - 'created-at': '2019-12-26T14:20:30.872Z', + packageId: null, + createdAt: '2019-12-26T14:20:30.872Z', code: 2, level: NotificationLevel.Warning, title: 'SSH Key Added', @@ -808,8 +347,8 @@ export module Mock { }, { id: 3, - 'package-id': null, - 'created-at': '2019-12-26T14:20:30.872Z', + packageId: null, + createdAt: '2019-12-26T14:20:30.872Z', code: 3, level: NotificationLevel.Info, title: 'SSH Key Removed', @@ -818,8 +357,8 @@ export module Mock { }, { id: 4, - 'package-id': 'bitcoind', - 'created-at': '2019-12-26T14:20:30.872Z', + packageId: 'bitcoind', + createdAt: '2019-12-26T14:20:30.872Z', code: 4, level: NotificationLevel.Error, title: 'Service Crashed', @@ -844,7 +383,7 @@ export module Mock { }, }, memory: { - 'percentage-used': { + percentageUsed: { value: '30.7', unit: '%', }, @@ -860,29 +399,29 @@ export module Mock { value: '8784.97', unit: 'MiB', }, - 'zram-total': { + zramTotal: { value: '7992.00', unit: 'MiB', }, - 'zram-available': { + zramAvailable: { value: '7882.50', unit: 'MiB', }, - 'zram-used': { + zramUsed: { value: '109.50', unit: 'MiB', }, }, cpu: { - 'percentage-used': { + percentageUsed: { value: '8.4', unit: '%', }, - 'user-space': { + userSpace: { 
value: '7.0', unit: '%', }, - 'kernel-space': { + kernelSpace: { value: '1.4', unit: '%', }, @@ -908,7 +447,7 @@ export module Mock { value: '992.59', unit: 'GB', }, - 'percentage-used': { + percentageUsed: { value: '46.4', unit: '%', }, @@ -970,15 +509,15 @@ export module Mock { current: 'b7b1a9cef4284f00af9e9dda6e676177', sessions: { '9513226517c54ddd8107d6d7b9d8aed7': { - 'last-active': '2021-07-14T20:49:17.774Z', - 'user-agent': 'AppleWebKit/{WebKit Rev} (KHTML, like Gecko)', + lastActive: '2021-07-14T20:49:17.774Z', + userAgent: 'AppleWebKit/{WebKit Rev} (KHTML, like Gecko)', metadata: { platforms: ['iphone', 'mobileweb', 'mobile', 'ios'], }, }, b7b1a9cef4284f00af9e9dda6e676177: { - 'last-active': '2021-06-14T20:49:17.774Z', - 'user-agent': + lastActive: '2021-06-14T20:49:17.774Z', + userAgent: 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0', metadata: { platforms: ['desktop'], @@ -997,13 +536,13 @@ export module Mock { export const SshKeys: RR.GetSSHKeysRes = [ { - 'created-at': new Date().toISOString(), + createdAt: new Date().toISOString(), alg: 'ed25519', hostname: 'Matt Key', fingerprint: '28:d2:7e:78:61:b4:bf:g2:de:24:15:96:4e:d4:15:53', }, { - 'created-at': new Date().toISOString(), + createdAt: new Date().toISOString(), alg: 'ed25519', hostname: 'Aiden Key', fingerprint: '12:f8:7e:78:61:b4:bf:e2:de:24:15:96:4e:d4:72:53', @@ -1011,7 +550,7 @@ export module Mock { ] export const SshKey: RR.AddSSHKeyRes = { - 'created-at': new Date().toISOString(), + createdAt: new Date().toISOString(), alg: 'ed25519', hostname: 'Lucy Key', fingerprint: '44:44:7e:78:61:b4:bf:g2:de:24:15:96:4e:d4:15:53', @@ -1025,7 +564,7 @@ export module Mock { }, connected: 'Goosers', country: 'US', - 'available-wifi': [ + availableWifi: [ { ssid: 'Goosers a billion', strength: 40, @@ -1051,13 +590,13 @@ export module Mock { path: '/Desktop/startos-backups', username: 'TestUser', mountable: false, - 'embassy-os': { + startOs: { version: '0.3.0', full: true, - 'password-hash': + passwordHash: // password is asdfasdf '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': '', + wrappedKey: '', }, }, // 'ftcvewdnkemfksdm': { @@ -1068,7 +607,7 @@ export module Mock { // used: 0, // model: 'Evo SATA 2.5', // vendor: 'Samsung', - // 'embassy-os': null, + // startOs: null, // }, csgashbdjkasnd: { type: 'cifs', @@ -1076,7 +615,7 @@ export module Mock { path: '/Desktop/startos-backups-2', username: 'TestUser', mountable: true, - 'embassy-os': null, + startOs: null, }, powjefhjbnwhdva: { type: 'disk', @@ -1086,13 +625,13 @@ export module Mock { used: 100000000000, model: null, vendor: 'SSK', - 'embassy-os': { + startOs: { version: '0.3.0', full: true, // password is asdfasdf - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'wrapped-key': '', + wrappedKey: '', }, }, } @@ -1100,17 +639,17 @@ export module Mock { export const BackupInfo: RR.GetBackupInfoRes = { version: '0.3.0', timestamp: new Date().toISOString(), - 'package-backups': { + packageBackups: { bitcoind: { title: 'Bitcoin Core', version: '0.21.0', - 'os-version': '0.3.0', + osVersion: '0.3.0', timestamp: new Date().toISOString(), }, 'btc-rpc-proxy': { title: 'Bitcoin Proxy', version: '0.2.2', - 'os-version': '0.3.0', + osVersion: '0.3.0', timestamp: new Date().toISOString(), }, }, @@ -1545,17 +1084,7 @@ export module Mock { warning: 'Careful changing this', }, variants: { - internal: { - 'lan-address': { - 
name: 'LAN Address', - type: 'pointer', - subtype: 'package', - target: 'lan-address', - 'package-id': 'bitcoind', - description: 'the lan address', - interface: 'asdf', - }, - }, + internal: {}, external: { 'emergency-contact': { name: 'Emergency Contact', @@ -1867,165 +1396,644 @@ export module Mock { export const MockDependencyConfig = MockConfig - export const bitcoind: PackageDataEntry = { - state: PackageState.Installed, - 'static-files': { - license: '/public/package-data/bitcoind/0.20.0/LICENSE.md', - icon: '/assets/img/service-icons/bitcoind.svg', - instructions: '/public/package-data/bitcoind/0.20.0/INSTRUCTIONS.md', - }, - manifest: MockManifestBitcoind, - installed: { + export const bitcoind: PackageDataEntry = { + stateInfo: { + state: 'installed', manifest: MockManifestBitcoind, - 'last-backup': null, - status: { - configured: true, - main: { - status: PackageMainStatus.Running, - started: new Date().toISOString(), - health: {}, - }, - 'dependency-config-errors': {}, + }, + icon: '/assets/img/service-icons/bitcoind.svg', + lastBackup: null, + status: { + configured: true, + main: { + status: 'running', + started: new Date().toISOString(), + health: {}, }, - 'interface-addresses': { - ui: { - 'tor-address': 'bitcoind-ui-address.onion', - 'lan-address': 'bitcoind-ui-address.local', - }, - rpc: { - 'tor-address': 'bitcoind-rpc-address.onion', - 'lan-address': 'bitcoind-rpc-address.local', + dependencyConfigErrors: {}, + }, + actions: {}, // @TODO need mocks + serviceInterfaces: { + ui: { + id: 'ui', + hasPrimary: false, + disabled: false, + masked: false, + name: 'Web UI', + description: + 'A launchable web app for you to interact with your Bitcoin node', + type: 'ui', + addressInfo: { + username: null, + hostId: 'abcdefg', + bindOptions: { + scheme: 'http', + preferredExternalPort: 80, + addSsl: { + // addXForwardedHeaders: false, + preferredExternalPort: 443, + scheme: 'https', + alpn: { specified: ['http/1.1', 'h2'] }, + }, + secure: null, + }, + suffix: '', + }, + hostInfo: { + id: 'abcdefg', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: null, + sslPort: 1234, + }, + }, + { + kind: 'onion', + hostname: { + value: 'bitcoin-ui-address.onion', + port: 80, + sslPort: 443, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: null, + sslPort: 1234, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: null, + sslPort: 1234, + }, + }, + ], }, - p2p: { - 'tor-address': 'bitcoind-p2p-address.onion', - 'lan-address': 'bitcoind-p2p-address.local', + }, + rpc: { + id: 'rpc', + hasPrimary: false, + disabled: false, + masked: false, + name: 'RPC', + description: + 'Used by dependent services and client wallets for connecting to your node', + type: 'api', + addressInfo: { + username: null, + hostId: 'bcdefgh', + bindOptions: { + scheme: 'http', + preferredExternalPort: 80, + addSsl: { + // addXForwardedHeaders: false, + preferredExternalPort: 443, + scheme: 'https', + alpn: { specified: ['http/1.1'] }, + }, + secure: null, + }, + suffix: '', + }, + hostInfo: { + id: 'bcdefgh', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: null, + sslPort: 2345, + }, + }, 
+ { + kind: 'onion', + hostname: { + value: 'bitcoin-rpc-address.onion', + port: 80, + sslPort: 443, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: null, + sslPort: 2345, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: null, + sslPort: 2345, + }, + }, + ], }, }, - 'system-pointers': [], - 'current-dependents': { - lnd: { - pointers: [], - 'health-checks': [], + p2p: { + id: 'p2p', + hasPrimary: true, + disabled: false, + masked: false, + name: 'P2P', + description: + 'Used for connecting to other nodes on the Bitcoin network', + type: 'p2p', + addressInfo: { + username: null, + hostId: 'cdefghi', + bindOptions: { + scheme: 'bitcoin', + preferredExternalPort: 8333, + addSsl: null, + secure: { + ssl: false, + }, + }, + suffix: '', + }, + hostInfo: { + id: 'cdefghi', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: 3456, + sslPort: null, + }, + }, + { + kind: 'onion', + hostname: { + value: 'bitcoin-p2p-address.onion', + port: 8333, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: 3456, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: 3456, + sslPort: null, + }, + }, + ], }, }, - 'current-dependencies': {}, - 'dependency-info': {}, - 'marketplace-url': 'https://registry.start9.com/', - 'developer-key': 'developer-key', }, - 'install-progress': undefined, + currentDependencies: {}, + hosts: {}, + storeExposedDependents: [], + marketplaceUrl: 'https://registry.start9.com/', + developerKey: 'developer-key', } - export const bitcoinProxy: PackageDataEntry = { - state: PackageState.Installed, - 'static-files': { - license: '/public/package-data/btc-rpc-proxy/0.20.0/LICENSE.md', - icon: '/assets/img/service-icons/btc-rpc-proxy.png', - instructions: '/public/package-data/btc-rpc-proxy/0.20.0/INSTRUCTIONS.md', - }, - manifest: MockManifestBitcoinProxy, - installed: { - 'last-backup': null, - status: { - configured: false, - main: { - status: PackageMainStatus.Stopped, - }, - 'dependency-config-errors': {}, - }, + export const bitcoinProxy: PackageDataEntry = { + stateInfo: { + state: 'installed', manifest: MockManifestBitcoinProxy, - 'interface-addresses': { - rpc: { - 'tor-address': 'bitcoinproxy-rpc-address.onion', - 'lan-address': 'bitcoinproxy-rpc-address.local', - }, - }, - 'system-pointers': [], - 'current-dependents': { - lnd: { - pointers: [], - 'health-checks': [], - }, - }, - 'current-dependencies': { - bitcoind: { - pointers: [], - 'health-checks': [], - }, + }, + icon: '/assets/img/service-icons/btc-rpc-proxy.png', + lastBackup: null, + status: { + configured: false, + main: { + status: 'stopped', }, - 'dependency-info': { - bitcoind: { - title: Mock.MockManifestBitcoind.title, - icon: 'assets/img/service-icons/bitcoind.svg', + dependencyConfigErrors: {}, + }, + actions: {}, + serviceInterfaces: { + ui: { + id: 'ui', + hasPrimary: false, + disabled: false, + masked: false, + name: 'Web UI', + description: 'A launchable web app for Bitcoin Proxy', + type: 'ui', + addressInfo: { + username: null, + hostId: 'hijklmnop', + bindOptions: 
{ + scheme: 'http', + preferredExternalPort: 80, + addSsl: { + // addXForwardedHeaders: false, + preferredExternalPort: 443, + scheme: 'https', + alpn: { specified: ['http/1.1', 'h2'] }, + }, + secure: { + ssl: true, + }, + }, + suffix: '', + }, + hostInfo: { + id: 'hijklmnop', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: null, + sslPort: 4567, + }, + }, + { + kind: 'onion', + hostname: { + value: 'proxy-ui-address.onion', + port: 80, + sslPort: 443, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: null, + sslPort: 4567, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: null, + sslPort: 4567, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'wlan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: null, + sslPort: 4567, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'wlan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.7', + port: null, + sslPort: 4567, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'wlan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: null, + sslPort: 4567, + }, + }, + ], }, }, - 'marketplace-url': 'https://registry.start9.com/', - 'developer-key': 'developer-key', }, - 'install-progress': undefined, + currentDependencies: { + bitcoind: { + title: Mock.MockManifestBitcoind.title, + icon: 'assets/img/service-icons/bitcoind.svg', + kind: 'running', + registryUrl: '', + versionSpec: '>=26.0.0', + healthChecks: [], + }, + }, + hosts: {}, + storeExposedDependents: [], + marketplaceUrl: 'https://registry.start9.com/', + developerKey: 'developer-key', } - export const lnd: PackageDataEntry = { - state: PackageState.Installed, - 'static-files': { - license: '/public/package-data/lnd/0.11.0/LICENSE.md', - icon: '/assets/img/service-icons/lnd.png', - instructions: '/public/package-data/lnd/0.11.0/INSTRUCTIONS.md', + export const lnd: PackageDataEntry = { + stateInfo: { + state: 'installed', + manifest: MockManifestLnd, }, - manifest: MockManifestLnd, - installed: { - 'last-backup': null, - status: { - configured: true, - main: { - status: PackageMainStatus.Stopped, - }, - 'dependency-config-errors': { - 'btc-rpc-proxy': 'Username not found', - }, + icon: '/assets/img/service-icons/lnd.png', + lastBackup: null, + status: { + configured: true, + main: { + status: 'stopped', }, - manifest: MockManifestLnd, - 'interface-addresses': { - rpc: { - 'tor-address': 'lnd-rpc-address.onion', - 'lan-address': 'lnd-rpc-address.local', - }, - grpc: { - 'tor-address': 'lnd-grpc-address.onion', - 'lan-address': 'lnd-grpc-address.local', - }, + dependencyConfigErrors: { + 'btc-rpc-proxy': 'Username not found', }, - 'system-pointers': [], - 'current-dependents': {}, - 'current-dependencies': { - bitcoind: { - pointers: [], - 'health-checks': [], - }, - 'btc-rpc-proxy': { - pointers: [], - 'health-checks': [], + }, + actions: {}, + serviceInterfaces: { + grpc: { + id: 'grpc', + hasPrimary: false, + disabled: false, + masked: false, + name: 'GRPC', + description: + 'Used by dependent services and client wallets for connecting to your node', + type: 'api', + addressInfo: { + username: null, + hostId: 'qrstuv', + bindOptions: { + scheme: 'grpc', + 
preferredExternalPort: 10009, + addSsl: null, + secure: { + ssl: true, + }, + }, + suffix: '', + }, + hostInfo: { + id: 'qrstuv', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: 5678, + sslPort: null, + }, + }, + { + kind: 'onion', + hostname: { + value: 'lnd-grpc-address.onion', + port: 10009, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: 5678, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: 5678, + sslPort: null, + }, + }, + ], }, }, - 'dependency-info': { - bitcoind: { - title: Mock.MockManifestBitcoind.title, - icon: 'assets/img/service-icons/bitcoind.svg', + lndconnect: { + id: 'lndconnect', + hasPrimary: false, + disabled: false, + masked: true, + name: 'LND Connect', + description: + 'Used by client wallets adhering to LND Connect protocol to connect to your node', + type: 'api', + addressInfo: { + username: null, + hostId: 'qrstuv', + bindOptions: { + scheme: 'lndconnect', + preferredExternalPort: 10009, + addSsl: null, + secure: { + ssl: true, + }, + }, + suffix: 'cert=askjdfbjadnaskjnd&macaroon=ksjbdfnhjasbndjksand', + }, + hostInfo: { + id: 'qrstuv', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: 5678, + sslPort: null, + }, + }, + { + kind: 'onion', + hostname: { + value: 'lnd-grpc-address.onion', + port: 10009, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: 5678, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: 5678, + sslPort: null, + }, + }, + ], }, - 'btc-rpc-proxy': { - title: Mock.MockManifestBitcoinProxy.title, - icon: 'assets/img/service-icons/btc-rpc-proxy.png', + }, + p2p: { + id: 'p2p', + hasPrimary: true, + disabled: false, + masked: false, + name: 'P2P', + description: + 'Used for connecting to other nodes on the Bitcoin network', + type: 'p2p', + addressInfo: { + username: null, + hostId: 'rstuvw', + bindOptions: { + scheme: null, + preferredExternalPort: 9735, + addSsl: null, + secure: { + ssl: true, + }, + }, + suffix: '', + }, + hostInfo: { + id: 'rstuvw', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: 6789, + sslPort: null, + }, + }, + { + kind: 'onion', + hostname: { + value: 'lnd-p2p-address.onion', + port: 9735, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: 6789, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: 6789, + sslPort: null, + }, + }, + ], }, }, - 'marketplace-url': 'https://registry.start9.com/', - 'developer-key': 'developer-key', }, - 'install-progress': undefined, + currentDependencies: { + bitcoind: { + title: Mock.MockManifestBitcoind.title, + icon: 
'assets/img/service-icons/bitcoind.svg', + kind: 'running', + registryUrl: 'https://registry.start9.com', + versionSpec: '>=26.0.0', + healthChecks: [], + }, + 'btc-rpc-proxy': { + title: Mock.MockManifestBitcoinProxy.title, + icon: 'assets/img/service-icons/btc-rpc-proxy.png', + kind: 'exists', + registryUrl: 'https://community-registry.start9.com', + versionSpec: '>2.0.0', // @TODO + }, + }, + hosts: {}, + storeExposedDependents: [], + marketplaceUrl: 'https://registry.start9.com/', + developerKey: 'developer-key', } - export const LocalPkgs: { [key: string]: PackageDataEntry } = { - bitcoind: bitcoind, - 'btc-rpc-proxy': bitcoinProxy, - lnd: lnd, - } + export const LocalPkgs: { [key: string]: PackageDataEntry } = + { + bitcoind: bitcoind, + 'btc-rpc-proxy': bitcoinProxy, + lnd: lnd, + } } diff --git a/web/projects/ui/src/app/services/api/api.types.ts b/web/projects/ui/src/app/services/api/api.types.ts index 9d804caf95..43edea08e8 100644 --- a/web/projects/ui/src/app/services/api/api.types.ts +++ b/web/projects/ui/src/app/services/api/api.types.ts @@ -2,12 +2,10 @@ import { Dump, Revision } from 'patch-db-client' import { MarketplacePkg, StoreInfo } from '@start9labs/marketplace' import { PackagePropertiesVersioned } from 'src/app/util/properties.util' import { ConfigSpec } from 'src/app/pkg-config/config-types' -import { - DataModel, - HealthCheckResult, - Manifest, -} from 'src/app/services/patch-db/data-model' +import { DataModel } from 'src/app/services/patch-db/data-model' import { StartOSDiskInfo, LogsRes, ServerLogsReq } from '@start9labs/shared' +import { HealthCheckResult } from '../../../../../../../core/startos/bindings/HealthCheckResult' +import { Manifest } from '../../../../../../../core/startos/bindings/Manifest' export module RR { // DB @@ -31,8 +29,8 @@ export module RR { export type LogoutRes = null export type ResetPasswordReq = { - 'old-password': string - 'new-password': string + oldPassword: string + newPassword: string } // auth.reset-password export type ResetPasswordRes = null @@ -52,14 +50,14 @@ export module RR { export type FollowServerLogsReq = { limit?: number } // server.logs.follow & server.kernel-logs.follow export type FollowServerLogsRes = { - 'start-cursor': string + startCursor: string guid: string } export type GetServerMetricsReq = {} // server.metrics export type GetServerMetricsRes = Metrics - export type UpdateServerReq = { 'marketplace-url': string } // server.update + export type UpdateServerReq = { marketplaceUrl: string } // server.update export type UpdateServerRes = 'updating' | 'no-updates' export type RestartServerReq = {} // server.restart @@ -72,16 +70,11 @@ export module RR { export type SystemRebuildRes = null export type ResetTorReq = { - 'wipe-state': boolean + wipeState: boolean reason: string } // net.tor.reset export type ResetTorRes = null - export type ToggleZramReq = { - enable: boolean - } // server.experimental.zram - export type ToggleZramRes = null - // sessions export type GetSessionsReq = {} // sessions.list @@ -120,7 +113,7 @@ export module RR { connected: string | null country: string | null ethernet: boolean - 'available-wifi': AvailableWifi[] + availableWifi: AvailableWifi[] } export type AddWifiReq = { @@ -169,14 +162,14 @@ export module RR { export type RemoveBackupTargetReq = { id: string } // backup.target.cifs.remove export type RemoveBackupTargetRes = null - export type GetBackupInfoReq = { 'target-id': string; password: string } // backup.target.info + export type GetBackupInfoReq = { targetId: string; 
password: string } // backup.target.info export type GetBackupInfoRes = BackupInfo export type CreateBackupReq = { // backup.create - 'target-id': string - 'package-ids': string[] - 'old-password': string | null + targetId: string + packageIds: string[] + oldPassword: string | null password: string } export type CreateBackupRes = null @@ -198,9 +191,9 @@ export module RR { export type InstallPackageReq = { id: string - 'version-spec'?: string - 'version-priority'?: 'min' | 'max' - 'marketplace-url': string + versionSpec?: string + versionPriority?: 'min' | 'max' + marketplaceUrl: string } // package.install export type InstallPackageRes = null @@ -216,15 +209,15 @@ export module RR { export type RestorePackagesReq = { // package.backup.restore ids: string[] - 'target-id': string - 'old-password': string | null + targetId: string + oldPassword: string | null password: string } export type RestorePackagesRes = null export type ExecutePackageActionReq = { id: string - 'action-id': string + actionId: string input?: object } // package.action export type ExecutePackageActionRes = ActionResponse @@ -242,12 +235,12 @@ export module RR { export type UninstallPackageRes = null export type DryConfigureDependencyReq = { - 'dependency-id': string - 'dependent-id': string + dependencyId: string + dependentId: string } // package.dependency.configure.dry export type DryConfigureDependencyRes = { - 'old-config': object - 'new-config': object + oldConfig: object + newConfig: object spec: ConfigSpec } @@ -259,10 +252,10 @@ export module RR { // marketplace - export type GetMarketplaceInfoReq = { 'server-id': string } + export type GetMarketplaceInfoReq = { serverId: string } export type GetMarketplaceInfoRes = StoreInfo - export type GetMarketplaceEosReq = { 'server-id': string } + export type GetMarketplaceEosReq = { serverId: string } export type GetMarketplaceEosRes = MarketplaceEOS export type GetMarketplacePackagesReq = { @@ -271,7 +264,7 @@ export module RR { category?: string query?: string page?: number - 'per-page'?: number + perPage?: number } export type GetMarketplacePackagesRes = MarketplacePkg[] @@ -282,7 +275,7 @@ export module RR { export interface MarketplaceEOS { version: string headline: string - 'release-notes': { [version: string]: string } + releaseNotes: { [version: string]: string } } export interface Breakages { @@ -312,23 +305,23 @@ export interface Metrics { } memory: { total: MetricData - 'percentage-used': MetricData + percentageUsed: MetricData used: MetricData available: MetricData - 'zram-total': MetricData - 'zram-used': MetricData - 'zram-available': MetricData + zramTotal: MetricData + zramUsed: MetricData + zramAvailable: MetricData } cpu: { - 'percentage-used': MetricData + percentageUsed: MetricData idle: MetricData - 'user-space': MetricData - 'kernel-space': MetricData + userSpace: MetricData + kernelSpace: MetricData wait: MetricData } disk: { capacity: MetricData - 'percentage-used': MetricData + percentageUsed: MetricData used: MetricData available: MetricData } @@ -342,8 +335,8 @@ export interface Metric { } export interface Session { - 'last-active': string - 'user-agent': string + lastActive: string + userAgent: string metadata: SessionMetadata } @@ -378,7 +371,7 @@ export interface DiskBackupTarget { label: string | null capacity: number used: number | null - 'embassy-os': StartOSDiskInfo | null + startOs: StartOSDiskInfo | null } export interface CifsBackupTarget { @@ -387,7 +380,7 @@ export interface CifsBackupTarget { path: string username: string 
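With the RR request and response types above switching from kebab-case string keys to camelCase identifiers, call sites no longer need quoted property names. A minimal, hypothetical call site against the renamed fields (values are placeholders; `api` is assumed to be an injected ApiService, and import paths follow the repo's src/app aliases):

// Hypothetical usage of the renamed request shape; the target id and
// passwords are placeholder values, not real configuration.
import { RR } from 'src/app/services/api/api.types'
import { ApiService } from 'src/app/services/api/embassy-api.service'

async function backUpEverything(api: ApiService): Promise<void> {
  const req: RR.CreateBackupReq = {
    targetId: 'cifs-target-1', // was 'target-id'
    packageIds: ['bitcoind', 'lnd'], // was 'package-ids'
    oldPassword: null, // was 'old-password'
    password: 'correct horse battery staple',
  }
  await api.createBackup(req)
}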
mountable: boolean - 'embassy-os': StartOSDiskInfo | null + startOs: StartOSDiskInfo | null } export type RecoverySource = DiskRecoverySource | CifsRecoverySource @@ -408,7 +401,7 @@ export interface CifsRecoverySource { export interface BackupInfo { version: string timestamp: string - 'package-backups': { + packageBackups: { [id: string]: PackageBackupInfo } } @@ -416,7 +409,7 @@ export interface BackupInfo { export interface PackageBackupInfo { title: string version: string - 'os-version': string + osVersion: string timestamp: string } @@ -425,7 +418,7 @@ export interface ServerSpecs { } export interface SSHKey { - 'created-at': string + createdAt: string alg: string hostname: string fingerprint: string @@ -435,8 +428,8 @@ export type ServerNotifications = ServerNotification[] export interface ServerNotification { id: number - 'package-id': string | null - 'created-at': string + packageId: string | null + createdAt: string code: T level: NotificationLevel title: string diff --git a/web/projects/ui/src/app/services/api/embassy-api.service.ts b/web/projects/ui/src/app/services/api/embassy-api.service.ts index 17ce2d9d6b..3f1d9881d1 100644 --- a/web/projects/ui/src/app/services/api/embassy-api.service.ts +++ b/web/projects/ui/src/app/services/api/embassy-api.service.ts @@ -97,8 +97,6 @@ export abstract class ApiService { abstract resetTor(params: RR.ResetTorReq): Promise - abstract toggleZram(params: RR.ToggleZramReq): Promise - // marketplace URLs abstract marketplaceProxy( diff --git a/web/projects/ui/src/app/services/api/embassy-live-api.service.ts b/web/projects/ui/src/app/services/api/embassy-live-api.service.ts index 18d47e2ce3..12b6f1d1a2 100644 --- a/web/projects/ui/src/app/services/api/embassy-live-api.service.ts +++ b/web/projects/ui/src/app/services/api/embassy-live-api.service.ts @@ -158,7 +158,7 @@ export class LiveApiService extends ApiService { async updateServer(url?: string): Promise { const params = { - 'marketplace-url': url || this.config.marketplace.start9, + marketplaceUrl: url || this.config.marketplace.start9, } return this.rpcRequest({ method: 'server.update', params }) } @@ -189,10 +189,6 @@ export class LiveApiService extends ApiService { return this.rpcRequest({ method: 'net.tor.reset', params }) } - async toggleZram(params: RR.ToggleZramReq): Promise { - return this.rpcRequest({ method: 'server.experimental.zram', params }) - } - // marketplace URLs async marketplaceProxy( @@ -209,7 +205,7 @@ export class LiveApiService extends ApiService { async getEos(): Promise { const { id } = await getServerInfo(this.patch) - const qp: RR.GetMarketplaceEosReq = { 'server-id': id } + const qp: RR.GetMarketplaceEosReq = { serverId: id } return this.marketplaceProxy( '/eos/v0/latest', diff --git a/web/projects/ui/src/app/services/api/embassy-mock-api.service.ts b/web/projects/ui/src/app/services/api/embassy-mock-api.service.ts index c98a4bd87b..5cbc841efc 100644 --- a/web/projects/ui/src/app/services/api/embassy-mock-api.service.ts +++ b/web/projects/ui/src/app/services/api/embassy-mock-api.service.ts @@ -10,11 +10,10 @@ import { } from 'patch-db-client' import { DataModel, - InstallProgress, + InstallingState, PackageDataEntry, - PackageMainStatus, - PackageState, - ServerStatus, + StateInfo, + UpdatingState, } from 'src/app/services/patch-db/data-model' import { CifsBackupTarget, RR } from './api.types' import { parsePropertiesPermissive } from 'src/app/util/properties.util' @@ -38,15 +37,37 @@ import { WebSocketSubjectConfig } from 'rxjs/webSocket' import { AuthService } 
from '../auth.service' import { ConnectionService } from '../connection.service' import { StoreInfo } from '@start9labs/marketplace' - -const PROGRESS: InstallProgress = { - size: 120, - downloaded: 0, - 'download-complete': false, - validated: 0, - 'validation-complete': false, - unpacked: 0, - 'unpack-complete': false, +import { FullProgress } from '../../../../../../../core/startos/bindings/FullProgress' +import { ServerStatus } from '../../../../../../../core/startos/bindings/ServerStatus' + +const PROGRESS: FullProgress = { + overall: { + done: 0, + total: 120, + }, + phases: [ + { + name: 'Downloading', + progress: { + done: 0, + total: 40, + }, + }, + { + name: 'Validating', + progress: { + done: 0, + total: 40, + }, + }, + { + name: 'Installing', + progress: { + done: 0, + total: 40, + }, + }, + ], } @Injectable() @@ -193,8 +214,8 @@ export class MockApiService extends ApiService { return { entries, - 'start-cursor': 'startCursor', - 'end-cursor': 'endCursor', + startCursor: 'start-cursor', + endCursor: 'end-cursor', } } @@ -206,8 +227,8 @@ export class MockApiService extends ApiService { return { entries, - 'start-cursor': 'startCursor', - 'end-cursor': 'endCursor', + startCursor: 'start-cursor', + endCursor: 'end-cursor', } } @@ -217,8 +238,8 @@ export class MockApiService extends ApiService { return { entries, - 'start-cursor': 'startCursor', - 'end-cursor': 'endCursor', + startCursor: 'startCursor', + endCursor: 'end-cursor', } } @@ -227,7 +248,7 @@ export class MockApiService extends ApiService { ): Promise { await pauseFor(2000) return { - 'start-cursor': 'start-cursor', + startCursor: 'start-cursor', guid: '7251d5be-645f-4362-a51b-3a85be92b31e', } } @@ -237,7 +258,7 @@ export class MockApiService extends ApiService { ): Promise { await pauseFor(2000) return { - 'start-cursor': 'start-cursor', + startCursor: 'start-cursor', guid: '7251d5be-645f-4362-a51b-3a85be92b31e', } } @@ -247,7 +268,7 @@ export class MockApiService extends ApiService { ): Promise { await pauseFor(2000) return { - 'start-cursor': 'start-cursor', + startCursor: 'start-cursor', guid: '7251d5be-645f-4362-a51b-3a85be92b31e', } } @@ -289,7 +310,7 @@ export class MockApiService extends ApiService { const patch = [ { op: PatchOp.REPLACE, - path: '/server-info/status-info/update-progress', + path: '/serverInfo/statusInfo/updateProgress', value: initialProgress, }, ] @@ -306,7 +327,7 @@ export class MockApiService extends ApiService { const patch = [ { op: PatchOp.REPLACE, - path: '/server-info/status-info/restarting', + path: '/serverInfo/statusInfo/restarting', value: true, }, ] @@ -316,7 +337,7 @@ export class MockApiService extends ApiService { const patch2 = [ { op: PatchOp.REPLACE, - path: '/server-info/status-info/restarting', + path: '/serverInfo/statusInfo/restarting', value: false, }, ] @@ -334,7 +355,7 @@ export class MockApiService extends ApiService { const patch = [ { op: PatchOp.REPLACE, - path: '/server-info/status-info/shutting-down', + path: '/serverInfo/statusInfo/shuttingDown', value: true, }, ] @@ -344,7 +365,7 @@ export class MockApiService extends ApiService { const patch2 = [ { op: PatchOp.REPLACE, - path: '/server-info/status-info/shutting-down', + path: '/serverInfo/statusInfo/shuttingDown', value: false, }, ] @@ -370,20 +391,6 @@ export class MockApiService extends ApiService { return null } - async toggleZram(params: RR.ToggleZramReq): Promise { - await pauseFor(2000) - const patch = [ - { - op: PatchOp.REPLACE, - path: '/server-info/zram', - value: params.enable, - }, - ] - 
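The mock PROGRESS constant above and the updateProgress simulation later in this file lean on the generated FullProgress binding, which is not reproduced in this diff. Inferred from how the mock reads and writes it, the shape is roughly the following sketch; the file under core/startos/bindings remains authoritative:

// Sketch only: shape inferred from the mock's reads and writes of FullProgress.
// The generated binding in core/startos/bindings/FullProgress.ts is the source of truth.
type ProgressSketch =
  | boolean // true once a phase (or the overall bar) is complete
  | { done: number; total: number | null } // determinate progress

interface FullProgressSketch {
  overall: ProgressSketch
  phases: { name: string; progress: ProgressSketch }[]
}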
this.mockRevision(patch) - - return null - } - // marketplace URLs async marketplaceProxy( @@ -430,7 +437,7 @@ export class MockApiService extends ApiService { const patch = [ { op: PatchOp.REPLACE, - path: '/server-info/unread-notification-count', + path: '/serverInfo/unreadNotificationCount', value: 0, }, ] @@ -520,7 +527,7 @@ export class MockApiService extends ApiService { path: path.replace(/\\/g, '/'), username, mountable: true, - 'embassy-os': null, + startOs: null, }, } } @@ -556,18 +563,18 @@ export class MockApiService extends ApiService { async createBackup(params: RR.CreateBackupReq): Promise { await pauseFor(2000) - const path = '/server-info/status-info/backup-progress' - const ids = params['package-ids'] + const path = '/serverInfo/statusInfo/backupProgress' + const ids = params.packageIds setTimeout(async () => { for (let i = 0; i < ids.length; i++) { const id = ids[i] - const appPath = `/package-data/${id}/installed/status/main/status` + const appPath = `/packageData/${id}/status/main/status` const appPatch = [ { op: PatchOp.REPLACE, path: appPath, - value: PackageMainStatus.BackingUp, + value: 'backingUp', }, ] this.mockRevision(appPatch) @@ -577,7 +584,7 @@ export class MockApiService extends ApiService { this.mockRevision([ { ...appPatch[0], - value: PackageMainStatus.Stopped, + value: 'stopped', }, ]) this.mockRevision([ @@ -646,8 +653,8 @@ export class MockApiService extends ApiService { } return { entries, - 'start-cursor': 'startCursor', - 'end-cursor': 'endCursor', + startCursor: 'startCursor', + endCursor: 'end-cursor', } } @@ -656,7 +663,7 @@ export class MockApiService extends ApiService { ): Promise { await pauseFor(2000) return { - 'start-cursor': 'start-cursor', + startCursor: 'start-cursor', guid: '7251d5be-645f-4362-a51b-3a85be92b31e', } } @@ -670,15 +677,28 @@ export class MockApiService extends ApiService { this.updateProgress(params.id) }, 1000) - const patch: Operation[] = [ + const patch: Operation< + PackageDataEntry + >[] = [ { op: PatchOp.ADD, - path: `/package-data/${params.id}`, + path: `/packageData/${params.id}`, value: { ...Mock.LocalPkgs[params.id], - // state: PackageState.Installing, - state: PackageState.Updating, - 'install-progress': { ...PROGRESS }, + stateInfo: { + // if installing + // state: PackageState.Installing, + + // if updating + state: 'updating', + manifest: mockPatchData.packageData[params.id].stateInfo.manifest!, + + // both + installingInfo: { + newManifest: Mock.LocalPkgs[params.id].stateInfo.manifest, + progress: PROGRESS, + }, + }, }, }, ] @@ -711,7 +731,7 @@ export class MockApiService extends ApiService { const patch = [ { op: PatchOp.REPLACE, - path: `/package-data/${params.id}/installed/status/configured`, + path: `/packageData/${params.id}/status/configured`, value: true, }, ] @@ -731,12 +751,16 @@ export class MockApiService extends ApiService { return { op: PatchOp.ADD, - path: `/package-data/${id}`, + path: `/packageData/${id}`, value: { ...Mock.LocalPkgs[id], - state: PackageState.Restoring, - 'install-progress': { ...PROGRESS }, - installed: undefined, + stateInfo: { + state: 'restoring', + installingInfo: { + newManifest: Mock.LocalPkgs[id].stateInfo.manifest!, + progress: PROGRESS, + }, + }, }, } }) @@ -754,7 +778,7 @@ export class MockApiService extends ApiService { } async startPackage(params: RR.StartPackageReq): Promise { - const path = `/package-data/${params.id}/installed/status/main` + const path = `/packageData/${params.id}/status/main` await pauseFor(2000) @@ -763,7 +787,7 @@ export class 
MockApiService extends ApiService { { op: PatchOp.REPLACE, path: path + '/status', - value: PackageMainStatus.Running, + value: 'running', }, { op: PatchOp.REPLACE, @@ -823,7 +847,7 @@ export class MockApiService extends ApiService { { op: PatchOp.REPLACE, path: path + '/status', - value: PackageMainStatus.Starting, + value: 'starting', }, ] @@ -837,14 +861,14 @@ export class MockApiService extends ApiService { ): Promise { // first enact stop await pauseFor(2000) - const path = `/package-data/${params.id}/installed/status/main` + const path = `/packageData/${params.id}/status/main` setTimeout(async () => { const patch2: Operation[] = [ { op: PatchOp.REPLACE, path: path + '/status', - value: PackageMainStatus.Starting, + value: 'starting', }, { op: PatchOp.ADD, @@ -860,7 +884,7 @@ export class MockApiService extends ApiService { { op: PatchOp.REPLACE, path: path + '/status', - value: PackageMainStatus.Running, + value: 'running', }, { op: PatchOp.REMOVE, @@ -897,7 +921,7 @@ export class MockApiService extends ApiService { { op: PatchOp.REPLACE, path: path + '/status', - value: PackageMainStatus.Restarting, + value: 'restarting', }, { op: PatchOp.REPLACE, @@ -913,14 +937,16 @@ export class MockApiService extends ApiService { async stopPackage(params: RR.StopPackageReq): Promise { await pauseFor(2000) - const path = `/package-data/${params.id}/installed/status/main` + const path = `/packageData/${params.id}/status/main` setTimeout(() => { const patch2 = [ { op: PatchOp.REPLACE, - path: path + '/status', - value: PackageMainStatus.Stopped, + path: path, + value: { + status: 'stopped', + }, }, ] this.mockRevision(patch2) @@ -929,13 +955,11 @@ export class MockApiService extends ApiService { const patch = [ { op: PatchOp.REPLACE, - path: path + '/status', - value: PackageMainStatus.Stopping, - }, - { - op: PatchOp.REPLACE, - path: path + '/health', - value: {}, + path: path, + value: { + status: 'stopping', + timeout: '35s', + }, }, ] @@ -953,7 +977,7 @@ export class MockApiService extends ApiService { const patch2: RemoveOperation[] = [ { op: PatchOp.REMOVE, - path: `/package-data/${params.id}`, + path: `/packageData/${params.id}`, }, ] this.mockRevision(patch2) @@ -962,8 +986,8 @@ export class MockApiService extends ApiService { const patch = [ { op: PatchOp.REPLACE, - path: `/package-data/${params.id}/state`, - value: PackageState.Removing, + path: `/packageData/${params.id}/stateInfo/state`, + value: 'removing', }, ] @@ -977,8 +1001,8 @@ export class MockApiService extends ApiService { ): Promise { await pauseFor(2000) return { - 'old-config': Mock.MockConfig, - 'new-config': Mock.MockDependencyConfig, + oldConfig: Mock.MockConfig, + newConfig: Mock.MockDependencyConfig, spec: Mock.ConfigSpec, } } @@ -991,55 +1015,100 @@ export class MockApiService extends ApiService { } private async updateProgress(id: string): Promise { - const progress = { ...PROGRESS } - const phases = [ - { progress: 'downloaded', completion: 'download-complete' }, - { progress: 'validated', completion: 'validation-complete' }, - { progress: 'unpacked', completion: 'unpack-complete' }, - ] as const - - for (let phase of phases) { - let i = progress[phase.progress] - const size = progress?.size || 0 - while (i < size) { - await pauseFor(250) - i = Math.min(i + 5, size) - progress[phase.progress] = i - - if (i === progress.size) { - progress[phase.completion] = true - } + const progress = JSON.parse(JSON.stringify(PROGRESS)) + + for (let [i, phase] of progress.phases.entries()) { + if (typeof phase.progress !== 'object' 
|| !phase.progress.total) { + await pauseFor(2000) - const patch = [ + const patches: Operation[] = [ { op: PatchOp.REPLACE, - path: `/package-data/${id}/install-progress`, - value: { ...progress }, + path: `/packageData/${id}/stateInfo/installingInfo/progress/phases/${i}/progress`, + value: true, }, ] - this.mockRevision(patch) + + // overall + if (typeof progress.overall === 'object' && progress.overall.total) { + const step = progress.overall.total / progress.phases.length + + progress.overall.done += step + + patches.push({ + op: PatchOp.REPLACE, + path: `/packageData/${id}/stateInfo/installingInfo/progress/overall/done`, + value: progress.overall.done, + }) + } + + this.mockRevision(patches) + } else { + const step = phase.progress.total / 4 + + while (phase.progress.done < phase.progress.total) { + await pauseFor(500) + + phase.progress.done += step + + const patches: Operation[] = [ + { + op: PatchOp.REPLACE, + path: `/packageData/${id}/stateInfo/installingInfo/progress/phases/${i}/progress/done`, + value: phase.progress.done, + }, + ] + + // overall + if (typeof progress.overall === 'object' && progress.overall.total) { + const step = progress.overall.total / progress.phases.length / 4 + + progress.overall.done += step + + patches.push({ + op: PatchOp.REPLACE, + path: `/packageData/${id}/stateInfo/installingInfo/progress/overall/done`, + value: progress.overall.done, + }) + } + + this.mockRevision(patches) + + if (phase.progress.done === phase.progress.total) { + await pauseFor(250) + this.mockRevision([ + { + op: PatchOp.REPLACE, + path: `/packageData/${id}/stateInfo/installingInfo/progress/phases/${i}/progress`, + value: true, + }, + ]) + } + } } } - setTimeout(() => { - const patch2: Operation[] = [ - { - op: PatchOp.REPLACE, - path: `/package-data/${id}/state`, - value: PackageState.Installed, - }, - { - op: PatchOp.ADD, - path: `/package-data/${id}/installed`, - value: { ...Mock.LocalPkgs[id].installed }, - }, - { - op: PatchOp.REMOVE, - path: `/package-data/${id}/install-progress`, + await pauseFor(1000) + this.mockRevision([ + { + op: PatchOp.REPLACE, + path: `/packageData/${id}/stateInfo/installingInfo/progress/overall`, + value: true, + }, + ]) + + await pauseFor(1000) + const patch2: Operation[] = [ + { + op: PatchOp.REPLACE, + path: `/packageData/${id}/stateInfo`, + value: { + state: 'installed', + manifest: Mock.LocalPkgs[id].stateInfo.manifest, }, - ] - this.mockRevision(patch2) - }, 1000) + }, + ] + this.mockRevision(patch2) } private async updateOSProgress() { @@ -1049,7 +1118,7 @@ export class MockApiService extends ApiService { const patch0 = [ { op: PatchOp.REPLACE, - path: `/server-info/status-info/update-progress/size`, + path: `/serverInfo/statusInfo/updateProgress/size`, value: size, }, ] @@ -1061,7 +1130,7 @@ export class MockApiService extends ApiService { const patch = [ { op: PatchOp.REPLACE, - path: `/server-info/status-info/update-progress/downloaded`, + path: `/serverInfo/statusInfo/updateProgress/downloaded`, value: downloaded, }, ] @@ -1071,22 +1140,22 @@ export class MockApiService extends ApiService { const patch2 = [ { op: PatchOp.REPLACE, - path: `/server-info/status-info/update-progress/downloaded`, + path: `/serverInfo/statusInfo/updateProgress/downloaded`, value: size, }, ] this.mockRevision(patch2) setTimeout(async () => { - const patch3: Operation[] = [ + const patch3: Operation[] = [ { op: PatchOp.REPLACE, - path: '/server-info/status', - value: ServerStatus.Updated, + path: '/serverInfo/statusInfo/updated', + value: true, }, { op: 
PatchOp.REMOVE, - path: '/server-info/status-info/update-progress', + path: '/serverInfo/statusInfo/updateProgress', }, ] this.mockRevision(patch3) @@ -1095,8 +1164,8 @@ export class MockApiService extends ApiService { const patch4 = [ { op: PatchOp.REPLACE, - path: '/server-info/status', - value: ServerStatus.Running, + path: '/serverInfo/status', + value: 'running', }, ] this.mockRevision(patch4) @@ -1105,7 +1174,7 @@ export class MockApiService extends ApiService { const patch6 = [ { op: PatchOp.REPLACE, - path: '/server-info/status-info', + path: '/serverInfo/statusInfo', value: Mock.ServerUpdated, }, ] diff --git a/web/projects/ui/src/app/services/api/mock-patch.ts b/web/projects/ui/src/app/services/api/mock-patch.ts index 1dc7abd669..5d876cf30d 100644 --- a/web/projects/ui/src/app/services/api/mock-patch.ts +++ b/web/projects/ui/src/app/services/api/mock-patch.ts @@ -1,17 +1,11 @@ -import { - DataModel, - DockerIoFormat, - HealthResult, - PackageMainStatus, - PackageState, -} from 'src/app/services/patch-db/data-model' +import { DataModel } from 'src/app/services/patch-db/data-model' import { Mock } from './api.fixures' import { BUILT_IN_WIDGETS } from '../../pages/widgets/built-in/widgets' export const mockPatchData: DataModel = { ui: { name: `Matt's Server`, - 'ack-welcome': '1.0.0', + ackWelcome: '1.0.0', theme: 'Dark', widgets: BUILT_IN_WIDGETS.filter( ({ id }) => @@ -21,8 +15,8 @@ export const mockPatchData: DataModel = { id === 'metrics', ), marketplace: { - 'selected-url': 'https://registry.start9.com/', - 'known-hosts': { + selectedUrl: 'https://registry.start9.com/', + knownHosts: { 'https://registry.start9.com/': { name: 'Start9 Registry', }, @@ -32,651 +26,584 @@ export const mockPatchData: DataModel = { }, }, }, - dev: {}, gaming: { snake: { - 'high-score': 0, + highScore: 0, }, }, - 'ack-instructions': {}, + ackInstructions: {}, }, - 'server-info': { + serverInfo: { + arch: 'x86_64', + onionAddress: 'myveryownspecialtoraddress', id: 'abcdefgh', version: '0.3.5.1', - 'last-backup': new Date(new Date().valueOf() - 604800001).toISOString(), - 'lan-address': 'https://adjective-noun.local', - 'tor-address': 'https://myveryownspecialtoraddress.onion', - 'ip-info': { + lastBackup: new Date(new Date().valueOf() - 604800001).toISOString(), + lanAddress: 'https://adjective-noun.local', + torAddress: 'https://myveryownspecialtoraddress.onion', + ipInfo: { eth0: { ipv4: '10.0.0.1', + ipv4Range: '10.0.0.1/24', ipv6: null, + ipv6Range: null, }, wlan0: { ipv4: '10.0.90.12', + ipv4Range: '10.0.90.12/24', ipv6: 'FE80:CD00:0000:0CDE:1257:0000:211E:729CD', + ipv6Range: 'FE80:CD00:0000:0CDE:1257:0000:211E:729CD/64', }, }, - 'last-wifi-region': null, - 'unread-notification-count': 4, + lastWifiRegion: null, + unreadNotificationCount: 4, // password is asdfasdf - 'password-hash': + passwordHash: '$argon2d$v=19$m=1024,t=1,p=1$YXNkZmFzZGZhc2RmYXNkZg$Ceev1I901G6UwU+hY0sHrFZ56D+o+LNJ', - 'eos-version-compat': '>=0.3.0 <=0.3.0.1', - 'status-info': { - 'backup-progress': null, + eosVersionCompat: '>=0.3.0 <=0.3.0.1', + statusInfo: { + backupProgress: null, updated: false, - 'update-progress': null, + updateProgress: null, restarting: false, - 'shutting-down': false, + shuttingDown: false, }, hostname: 'random-words', pubkey: 'npub1sg6plzptd64u62a878hep2kev88swjh3tw00gjsfl8f237lmu63q0uf63m', - 'ca-fingerprint': 'SHA-256: 63 2B 11 99 44 40 17 DF 37 FC C3 DF 0F 3D 15', - 'ntp-synced': false, - zram: false, + caFingerprint: 'SHA-256: 63 2B 11 99 44 40 17 DF 37 FC C3 DF 0F 3D 15', + ntpSynced: false, 
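Because the data model keys are now camelCase, PatchDB watch paths and JSON-patch paths change in lockstep. A small illustrative watcher against the renamed keys, mirroring the pattern the services further down this diff already use (`patch` is assumed to be an injected PatchDB<DataModel>):

// Illustrative only: watches the renamed keys with the same PatchDB API
// used elsewhere in this diff ('serverInfo', 'statusInfo', 'backupProgress').
import { map } from 'rxjs/operators'
import { PatchDB } from 'patch-db-client'
import { DataModel } from 'src/app/services/patch-db/data-model'

function watchBackupInProgress(patch: PatchDB<DataModel>) {
  return patch
    .watch$('serverInfo', 'statusInfo', 'backupProgress')
    .pipe(map(progress => !!progress))
}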
platform: 'x86_64-nonfree', + zram: true, + governor: 'performance', + wifi: { + ssids: [], + selected: null, + connected: null, + }, }, - 'package-data': { + packageData: { bitcoind: { - state: PackageState.Installed, - 'static-files': { - license: '/public/package-data/bitcoind/0.20.0/LICENSE.md', - icon: '/assets/img/service-icons/bitcoind.svg', - instructions: '/public/package-data/bitcoind/0.20.0/INSTRUCTIONS.md', - }, - manifest: { - id: 'bitcoind', - title: 'Bitcoin Core', - version: '0.20.0', - 'git-hash': 'abcdefgh', - description: { - short: 'A Bitcoin full node by Bitcoin Core.', - long: 'Bitcoin is a decentralized consensus protocol and settlement network.', - }, - 'release-notes': 'Taproot, Schnorr, and more.', - assets: { - icon: 'icon.png', - license: 'LICENSE.md', - instructions: 'INSTRUCTIONS.md', - docker_images: 'image.tar', - assets: './assets', - scripts: './scripts', - }, - license: 'MIT', - 'wrapper-repo': 'https://github.com/start9labs/bitcoind-wrapper', - 'upstream-repo': 'https://github.com/bitcoin/bitcoin', - 'support-site': 'https://bitcoin.org', - 'marketing-site': 'https://bitcoin.org', - 'donation-url': 'https://start9.com', - alerts: { - install: 'Bitcoin can take over a week to sync.', - uninstall: - 'Chain state will be lost, as will any funds stored on your Bitcoin Core waller that have not been backed up.', - restore: null, - start: 'Starting Bitcoin is good for your health.', - stop: null, + stateInfo: { + state: 'installed', + manifest: { + ...Mock.MockManifestBitcoind, + version: '0.20.0', }, + }, + icon: '/assets/img/service-icons/bitcoind.svg', + lastBackup: null, + status: { + configured: true, main: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': '.49m', - }, - 'health-checks': { - 'chain-state': { - name: 'Chain State', - }, - 'ephemeral-health-check': { - name: 'Ephemeral Health Check', - }, - 'p2p-interface': { - name: 'P2P Interface', - 'success-message': 'the health check ran succesfully', - }, - 'rpc-interface': { - name: 'RPC Interface', - }, - 'unnecessary-health-check': { - name: 'Unneccessary Health Check', - }, - } as any, - config: { - get: {}, - set: {}, - } as any, - volumes: {}, - 'min-os-version': '0.2.12', - interfaces: { - ui: { - name: 'Node Visualizer', - description: - 'Web application for viewing information about your node and the Bitcoin network.', - ui: true, - 'tor-config': { - 'port-mapping': {}, + status: 'running', + started: '2021-06-14T20:49:17.774Z', + health: { + 'ephemeral-health-check': { + name: 'Ephemeral Health Check', + result: 'starting', + message: null, }, - 'lan-config': {}, - protocols: [], - }, - rpc: { - name: 'RPC', - description: - 'Used by wallets to interact with your Bitcoin Core node.', - ui: false, - 'tor-config': { - 'port-mapping': {}, + 'chain-state': { + name: 'Chain State', + result: 'loading', + message: 'Bitcoin is syncing from genesis', }, - 'lan-config': {}, - protocols: [], - }, - p2p: { - name: 'P2P', - description: - 'Used by other Bitcoin nodes to communicate and interact with your node.', - ui: false, - 'tor-config': { - 'port-mapping': {}, + 'p2p-interface': { + name: 'P2P', + result: 'success', + message: 'Health check successful', + }, + 'rpc-interface': { + name: 'RPC', + result: 'failure', + message: 'RPC interface unreachable.', + }, + 'unnecessary-health-check': { + name: 'Unnecessary Health Check', + result: 'disabled', + message: null, }, 
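The health map above now stores a display name, a result kind, and an optional message per check. A throwaway helper, shown only to illustrate the assumed entry shape (the real HealthCheckResult binding may differ in detail):

// Illustrative helper; the entry shape is read off the mock above and is
// assumed, not copied from the generated HealthCheckResult binding.
function summarizeHealth(
  health: Record<string, { name: string; result: string; message: string | null }>,
): string[] {
  return Object.values(health).map(h =>
    h.message ? `${h.name}: ${h.result} (${h.message})` : `${h.name}: ${h.result}`,
  )
}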
- 'lan-config': {}, - protocols: [], - }, - }, - backup: { - create: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - restore: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, }, }, - migrations: null, - actions: { - resync: { - name: 'Resync Blockchain', - description: - 'Use this to resync the Bitcoin blockchain from genesis', - warning: 'This will take a couple of days.', - 'allowed-statuses': [ - PackageMainStatus.Running, - PackageMainStatus.Stopped, - ], - implementation: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - 'input-spec': { - reason: { - type: 'string', - name: 'Re-sync Reason', - description: - 'Your reason for re-syncing. Why are you doing this?', - nullable: false, - masked: false, - copyable: false, - pattern: '^[a-zA-Z]+$', - 'pattern-description': 'Must contain only letters.', + dependencyConfigErrors: {}, + }, + actions: {}, // @TODO + serviceInterfaces: { + ui: { + id: 'ui', + hasPrimary: false, + disabled: false, + masked: false, + name: 'Web UI', + description: + 'A launchable web app for you to interact with your Bitcoin node', + type: 'ui', + addressInfo: { + username: null, + hostId: 'abcdefg', + bindOptions: { + scheme: 'http', + preferredExternalPort: 80, + addSsl: { + // addXForwardedHeaders: false, + preferredExternalPort: 443, + scheme: 'https', + alpn: { specified: ['http/1.1', 'h2'] }, }, - name: { - type: 'string', - name: 'Your Name', - description: 'Tell the class your name.', - nullable: true, - masked: false, - copyable: false, - warning: 'You may loose all your money by providing your name.', + secure: null, + }, + suffix: '', + }, + hostInfo: { + id: 'abcdefg', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: null, + sslPort: 1234, + }, }, - notifications: { - name: 'Notification Preferences', - type: 'list', - subtype: 'enum', - description: 'how you want to be notified', - range: '[1,3]', - default: ['email'], - spec: { - 'value-names': { - email: 'Email', - text: 'Text', - call: 'Call', - push: 'Push', - webhook: 'Webhook', - }, - values: ['email', 'text', 'call', 'push', 'webhook'], + { + kind: 'onion', + hostname: { + value: 'bitcoin-ui-address.onion', + port: 80, + sslPort: 443, }, }, - 'days-ago': { - type: 'number', - name: 'Days Ago', - description: 'Number of days to re-sync.', - nullable: false, - default: 100, - range: '[0, 9999]', - integral: true, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: null, + sslPort: 1234, + }, }, - 'top-speed': { - type: 'number', - name: 'Top Speed', - description: 'The fastest you can possibly run.', - nullable: false, - range: '[-1000, 1000]', - integral: false, - units: 'm/s', + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: null, + sslPort: 1234, + }, }, - testnet: { - name: 'Testnet', - type: 'boolean', - description: - '
  • determines whether your node is running on testnet or mainnet
', - warning: 'Chain will have to resync!', - default: false, + ], + }, + }, + rpc: { + id: 'rpc', + hasPrimary: false, + disabled: false, + masked: false, + name: 'RPC', + description: + 'Used by dependent services and client wallets for connecting to your node', + type: 'api', + addressInfo: { + username: null, + hostId: 'bcdefgh', + bindOptions: { + scheme: 'http', + preferredExternalPort: 80, + addSsl: { + // addXForwardedHeaders: false, + preferredExternalPort: 443, + scheme: 'https', + alpn: { specified: ['http/1.1'] }, }, - randomEnum: { - name: 'Random Enum', - type: 'enum', - 'value-names': { - null: 'Null', - good: 'Good', - bad: 'Bad', - ugly: 'Ugly', + secure: null, + }, + suffix: '', + }, + hostInfo: { + id: 'bcdefgh', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: null, + sslPort: 2345, }, - default: 'null', - description: 'This is not even real.', - warning: 'Be careful changing this!', - values: ['null', 'good', 'bad', 'ugly'], }, - 'emergency-contact': { - name: 'Emergency Contact', - type: 'object', - description: 'The person to contact in case of emergency.', - spec: { - name: { - type: 'string', - name: 'Name', - nullable: false, - masked: false, - copyable: false, - pattern: '^[a-zA-Z]+$', - 'pattern-description': 'Must contain only letters.', - }, - email: { - type: 'string', - name: 'Email', - nullable: false, - masked: false, - copyable: true, - }, + { + kind: 'onion', + hostname: { + value: 'bitcoin-rpc-address.onion', + port: 80, + sslPort: 443, }, }, - ips: { - name: 'Whitelist IPs', - type: 'list', - subtype: 'string', - description: - 'external ip addresses that are authorized to access your Bitcoin node', - warning: - 'Any IP you allow here will have RPC access to your Bitcoin node.', - range: '[1,10]', - default: ['192.168.1.1'], - spec: { - pattern: '^[0-9]{1,3}([,.][0-9]{1,3})?$', - 'pattern-description': 'Must be a valid IP address', - masked: false, - copyable: false, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: null, + sslPort: 2345, }, }, - bitcoinNode: { - type: 'union', - default: 'internal', - tag: { - id: 'type', - 'variant-names': { - internal: 'Internal', - external: 'External', - }, - name: 'Bitcoin Node Settings', - description: 'The node settings', - warning: 'Careful changing this', - }, - variants: { - internal: { - 'lan-address': { - name: 'LAN Address', - type: 'pointer', - subtype: 'package', - target: 'lan-address', - 'package-id': 'bitcoind', - description: 'the lan address', - interface: '', - }, - 'friendly-name': { - name: 'Friendly Name', - type: 'string', - description: 'the lan address', - nullable: true, - masked: false, - copyable: false, - }, - }, - external: { - 'public-domain': { - name: 'Public Domain', - type: 'string', - description: 'the public address of the node', - nullable: false, - default: 'bitcoinnode.com', - pattern: '.*', - 'pattern-description': 'anything', - masked: false, - copyable: true, - }, - }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: null, + sslPort: 2345, }, }, - }, + ], }, }, - dependencies: {}, - }, - installed: { - manifest: { - ...Mock.MockManifestBitcoind, - version: '0.20.0', - }, - 'last-backup': null, - status: { - configured: true, - main: { - status: PackageMainStatus.Running, - 
started: '2021-06-14T20:49:17.774Z', - health: { - 'ephemeral-health-check': { - result: HealthResult.Starting, + p2p: { + id: 'p2p', + hasPrimary: true, + disabled: false, + masked: false, + name: 'P2P', + description: + 'Used for connecting to other nodes on the Bitcoin network', + type: 'p2p', + addressInfo: { + username: null, + hostId: 'cdefghi', + bindOptions: { + scheme: 'bitcoin', + preferredExternalPort: 8333, + addSsl: null, + secure: { + ssl: false, }, - 'chain-state': { - result: HealthResult.Loading, - message: 'Bitcoin is syncing from genesis', + }, + suffix: '', + }, + hostInfo: { + id: 'cdefghi', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: 3456, + sslPort: null, + }, }, - 'p2p-interface': { - result: HealthResult.Success, + { + kind: 'onion', + hostname: { + value: 'bitcoin-p2p-address.onion', + port: 8333, + sslPort: null, + }, }, - 'rpc-interface': { - result: HealthResult.Failure, - error: 'RPC interface unreachable.', + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: 3456, + sslPort: null, + }, }, - 'unnecessary-health-check': { - result: HealthResult.Disabled, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: 3456, + sslPort: null, + }, }, - }, - }, - 'dependency-config-errors': {}, - }, - 'interface-addresses': { - ui: { - 'tor-address': 'bitcoind-ui-address.onion', - 'lan-address': 'bitcoind-ui-address.local', - }, - rpc: { - 'tor-address': 'bitcoind-rpc-address.onion', - 'lan-address': 'bitcoind-rpc-address.local', - }, - p2p: { - 'tor-address': 'bitcoind-p2p-address.onion', - 'lan-address': 'bitcoind-p2p-address.local', - }, - }, - 'system-pointers': [], - 'current-dependents': { - lnd: { - pointers: [], - 'health-checks': [], + ], }, }, - 'current-dependencies': {}, - 'dependency-info': {}, - 'marketplace-url': 'https://registry.start9.com/', - 'developer-key': 'developer-key', }, + currentDependencies: {}, + hosts: {}, + storeExposedDependents: [], + marketplaceUrl: 'https://registry.start9.com/', + developerKey: 'developer-key', }, lnd: { - state: PackageState.Installed, - 'static-files': { - license: '/public/package-data/lnd/0.11.1/LICENSE.md', - icon: '/assets/img/service-icons/lnd.png', - instructions: '/public/package-data/lnd/0.11.1/INSTRUCTIONS.md', - }, - manifest: { - id: 'lnd', - title: 'Lightning Network Daemon', - version: '0.11.0', - description: { - short: 'A bolt spec compliant client.', - long: 'More info about LND. More info about LND. 
More info about LND.', - }, - 'release-notes': 'Dual funded channels!', - assets: { - icon: 'icon.png', - license: 'LICENSE.md', - instructions: 'INSTRUCTIONS.md', - docker_images: 'image.tar', - assets: './assets', - scripts: './scripts', - }, - license: 'MIT', - 'wrapper-repo': 'https://github.com/start9labs/lnd-wrapper', - 'upstream-repo': 'https://github.com/lightningnetwork/lnd', - 'support-site': 'https://lightning.engineering/', - 'marketing-site': 'https://lightning.engineering/', - 'donation-url': null, - alerts: { - install: null, - uninstall: null, - restore: - 'If this is a duplicate instance of the same LND node, you may loose your funds.', - start: 'Starting LND is good for your health.', - stop: null, + stateInfo: { + state: 'installed', + manifest: { + ...Mock.MockManifestLnd, + version: '0.11.0', }, + }, + icon: '/assets/img/service-icons/lnd.png', + lastBackup: null, + status: { + configured: true, main: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': '0.5s', + status: 'stopped', }, - 'health-checks': {}, - config: { - get: null, - set: null, + dependencyConfigErrors: { + 'btc-rpc-proxy': 'This is a config unsatisfied error', }, - volumes: {}, - 'min-os-version': '0.2.12', - interfaces: { - rpc: { - name: 'RPC interface', - description: 'Good for connecting to your node at a distance.', - ui: true, - 'tor-config': { - 'port-mapping': {}, - }, - 'lan-config': { - '44': { + }, + actions: {}, + serviceInterfaces: { + grpc: { + id: 'grpc', + hasPrimary: false, + disabled: false, + masked: false, + name: 'GRPC', + description: + 'Used by dependent services and client wallets for connecting to your node', + type: 'api', + addressInfo: { + username: null, + hostId: 'qrstuv', + bindOptions: { + scheme: 'grpc', + preferredExternalPort: 10009, + addSsl: null, + secure: { ssl: true, - mapping: 33, }, }, - protocols: [], + suffix: '', }, - grpc: { - name: 'GRPC', - description: 'Certain wallet use grpc.', - ui: false, - 'tor-config': { - 'port-mapping': {}, - }, - 'lan-config': { - '66': { - ssl: true, - mapping: 55, + hostInfo: { + id: 'qrstuv', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: 5678, + sslPort: null, + }, }, - }, - protocols: [], - }, - }, - backup: { - create: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, - }, - restore: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, + { + kind: 'onion', + hostname: { + value: 'lnd-grpc-address.onion', + port: 10009, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: 5678, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: 5678, + sslPort: null, + }, + }, + ], }, }, - migrations: null, - actions: { - resync: { - name: 'Resync Network Graph', - description: 'Your node will resync its network graph.', - warning: 'This will take a couple hours.', - 'allowed-statuses': 
[PackageMainStatus.Running], - implementation: { - type: 'docker', - image: '', - system: true, - entrypoint: '', - args: [], - mounts: {}, - 'io-format': DockerIoFormat.Yaml, - inject: false, - 'shm-size': '', - 'sigterm-timeout': null, + lndconnect: { + id: 'lndconnect', + hasPrimary: false, + disabled: false, + masked: true, + name: 'LND Connect', + description: + 'Used by client wallets adhering to LND Connect protocol to connect to your node', + type: 'api', + addressInfo: { + username: null, + hostId: 'qrstuv', + bindOptions: { + scheme: 'lndconnect', + preferredExternalPort: 10009, + addSsl: null, + secure: { + ssl: true, + }, }, - 'input-spec': null, + suffix: 'cert=askjdfbjadnaskjnd&macaroon=ksjbdfnhjasbndjksand', + }, + hostInfo: { + id: 'qrstuv', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: 5678, + sslPort: null, + }, + }, + { + kind: 'onion', + hostname: { + value: 'lnd-grpc-address.onion', + port: 10009, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: 5678, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: 5678, + sslPort: null, + }, + }, + ], }, }, - dependencies: { - bitcoind: { - version: '=0.21.0', - description: 'LND needs bitcoin to live.', - requirement: { - type: 'opt-out', - how: 'You can use an external node from your server if you prefer.', + p2p: { + id: 'p2p', + hasPrimary: true, + disabled: false, + masked: false, + name: 'P2P', + description: + 'Used for connecting to other nodes on the Bitcoin network', + type: 'p2p', + addressInfo: { + username: null, + hostId: 'rstuvw', + bindOptions: { + scheme: null, + preferredExternalPort: 9735, + addSsl: null, + secure: { ssl: true }, }, - config: null, + suffix: '', }, - 'btc-rpc-proxy': { - version: '>=0.2.2', - description: - 'As long as Bitcoin is pruned, LND needs Bitcoin Proxy to fetch block over the P2P network.', - requirement: { - type: 'opt-in', - how: `To use Proxy's user management system, go to LND config and select Bitcoin Proxy under Bitcoin config.`, - }, - config: null, + hostInfo: { + id: 'rstuvw', + kind: 'multi', + hostnames: [ + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'local', + value: 'adjective-noun.local', + port: 6789, + sslPort: null, + }, + }, + { + kind: 'onion', + hostname: { + value: 'lnd-p2p-address.onion', + port: 9735, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv4', + value: '192.168.1.5', + port: 6789, + sslPort: null, + }, + }, + { + kind: 'ip', + networkInterfaceId: 'elan0', + public: false, + hostname: { + kind: 'ipv6', + value: '[2001:db8:85a3:8d3:1319:8a2e:370:7348]', + port: 6789, + sslPort: null, + }, + }, + ], }, }, }, - installed: { - manifest: { - ...Mock.MockManifestLnd, - version: '0.11.0', - }, - 'last-backup': null, - status: { - configured: true, - main: { - status: PackageMainStatus.Stopped, - }, - 'dependency-config-errors': { - 'btc-rpc-proxy': 'This is a config unsatisfied error', - }, - }, - 'interface-addresses': { - rpc: { - 'tor-address': 'lnd-rpc-address.onion', - 'lan-address': 'lnd-rpc-address.local', - }, - grpc: { - 'tor-address': 'lnd-grpc-address.onion', - 'lan-address': 
'lnd-grpc-address.local', - }, + currentDependencies: { + bitcoind: { + title: 'Bitcoin Core', + icon: 'assets/img/service-icons/bitcoind.svg', + kind: 'running', + registryUrl: 'https://registry.start9.com', + versionSpec: '>=26.0.0', + healthChecks: [], }, - 'system-pointers': [], - 'current-dependents': {}, - 'current-dependencies': { - bitcoind: { - pointers: [], - 'health-checks': [], - }, - 'btc-rpc-proxy': { - pointers: [], - 'health-checks': [], - }, - }, - 'dependency-info': { - bitcoind: { - title: 'Bitcoin Core', - icon: 'assets/img/service-icons/bitcoind.svg', - }, - 'btc-rpc-proxy': { - title: 'Bitcoin Proxy', - icon: 'assets/img/service-icons/btc-rpc-proxy.png', - }, + 'btc-rpc-proxy': { + title: 'Bitcoin Proxy', + icon: 'assets/img/service-icons/btc-rpc-proxy.png', + kind: 'running', + registryUrl: 'https://community-registry.start9.com', + versionSpec: '>2.0.0', + healthChecks: [], }, - 'marketplace-url': 'https://registry.start9.com/', - 'developer-key': 'developer-key', }, + hosts: {}, + storeExposedDependents: [], + marketplaceUrl: 'https://registry.start9.com/', + developerKey: 'developer-key', }, }, } diff --git a/web/projects/ui/src/app/services/config.service.ts b/web/projects/ui/src/app/services/config.service.ts index f8fef3d60b..eb53dd3854 100644 --- a/web/projects/ui/src/app/services/config.service.ts +++ b/web/projects/ui/src/app/services/config.service.ts @@ -1,12 +1,13 @@ import { DOCUMENT } from '@angular/common' import { Inject, Injectable } from '@angular/core' import { WorkspaceConfig } from '@start9labs/shared' -import { - InterfaceDef, - PackageDataEntry, - PackageMainStatus, - PackageState, -} from 'src/app/services/patch-db/data-model' +import { types } from '@start9labs/start-sdk' +import { PackageDataEntry } from 'src/app/services/patch-db/data-model' +import { PackageState } from '../../../../../../core/startos/bindings/PackageState' +import { MainStatus } from '../../../../../../core/startos/bindings/MainStatus' +import { ExportedOnionHostname } from '../../../../../../core/startos/bindings/ExportedOnionHostname' +import { ExportedIpHostname } from '../../../../../../core/startos/bindings/ExportedIpHostname' +import { ExportedHostnameInfo } from '../../../../../../core/startos/bindings/ExportedHostnameInfo' const { gitHash, @@ -45,10 +46,6 @@ export class ConfigService { : this.hostname.endsWith('.local') } - isTorHttp(): boolean { - return this.isTor() && !this.isHttps() - } - isLanHttp(): boolean { return !this.isTor() && !this.isLocalhost() && !this.isHttps() } @@ -58,23 +55,55 @@ export class ConfigService { } isLaunchable( - state: PackageState, - status: PackageMainStatus, - interfaces: Record, + state: PackageState['state'], + status: MainStatus['status'], ): boolean { - return ( - state === PackageState.Installed && - status === PackageMainStatus.Running && - hasUi(interfaces) - ) + return state === 'installed' && status === 'running' } - launchableURL(pkg: PackageDataEntry): string { - if (!this.isTor() && hasLocalUi(pkg.manifest.interfaces)) { - return `https://${lanUiAddress(pkg)}` + /** ${scheme}://${username}@${host}:${externalPort}${suffix} */ + launchableAddress(interfaces: PackageDataEntry['serviceInterfaces']): string { + const ui = Object.values(interfaces).find(i => i.type === 'ui') + + if (!ui) return '' + + const host = ui.hostInfo + const addressInfo = ui.addressInfo + const scheme = this.isHttps() ? 'https' : 'http' + const username = addressInfo.username ? 
addressInfo.username + '@' : '' + const suffix = addressInfo.suffix || '' + const url = new URL(`${scheme}://${username}placeholder${suffix}`) + + if (host.kind === 'multi') { + const onionHostname = host.hostnames.find(h => h.kind === 'onion') + ?.hostname as ExportedOnionHostname + + if (this.isTor() && onionHostname) { + url.hostname = onionHostname.value + } else { + const ipHostname = host.hostnames.find(h => h.kind === 'ip') + ?.hostname as ExportedIpHostname + + if (!ipHostname) return '' + + url.hostname = this.hostname + url.port = String(ipHostname.sslPort || ipHostname.port) + } } else { - return `http://${torUiAddress(pkg)}` + throw new Error('unimplemented') + const hostname = {} as ExportedHostnameInfo // host.hostname + + if (!hostname) return '' + + if (this.isTor() && hostname.kind === 'onion') { + url.hostname = (hostname.hostname as ExportedOnionHostname).value + } else { + url.hostname = this.hostname + url.port = String(hostname.hostname.sslPort || hostname.hostname.port) + } } + + return url.href } getHost(): string { @@ -92,54 +121,8 @@ export class ConfigService { } } -export function hasTorUi(interfaces: Record): boolean { - const int = getUiInterfaceValue(interfaces) - return !!int?.['tor-config'] -} - -export function hasLocalUi(interfaces: Record): boolean { - const int = getUiInterfaceValue(interfaces) - return !!int?.['lan-config'] -} - -export function torUiAddress({ - manifest, - installed, -}: PackageDataEntry): string { - const key = getUiInterfaceKey(manifest.interfaces) - return installed ? installed['interface-addresses'][key]['tor-address'] : '' -} - -export function lanUiAddress({ - manifest, - installed, -}: PackageDataEntry): string { - const key = getUiInterfaceKey(manifest.interfaces) - return installed ? installed['interface-addresses'][key]['lan-address'] : '' -} - -export function hasUi(interfaces: Record): boolean { - return hasTorUi(interfaces) || hasLocalUi(interfaces) -} - -export function removeProtocol(str: string): string { - if (str.startsWith('http://')) return str.slice(7) - if (str.startsWith('https://')) return str.slice(8) - return str -} - -export function removePort(str: string): string { - return str.split(':')[0] -} - -export function getUiInterfaceKey( - interfaces: Record, -): string { - return Object.keys(interfaces).find(key => interfaces[key].ui) || '' -} - -export function getUiInterfaceValue( - interfaces: Record, -): InterfaceDef | null { - return Object.values(interfaces).find(i => i.ui) || null +export function hasUi( + interfaces: PackageDataEntry['serviceInterfaces'], +): boolean { + return Object.values(interfaces).some(iface => iface.type === 'ui') } diff --git a/web/projects/ui/src/app/services/dep-error.service.ts b/web/projects/ui/src/app/services/dep-error.service.ts index 4762f2df18..fe145055b8 100644 --- a/web/projects/ui/src/app/services/dep-error.service.ts +++ b/web/projects/ui/src/app/services/dep-error.service.ts @@ -4,12 +4,11 @@ import { distinctUntilChanged, map, shareReplay } from 'rxjs/operators' import { PatchDB } from 'patch-db-client' import { DataModel, - HealthCheckResult, - HealthResult, - InstalledPackageDataEntry, - PackageMainStatus, + InstalledState, + PackageDataEntry, } from './patch-db/data-model' import * as deepEqual from 'fast-deep-equal' +import { isInstalled } from '../util/get-package-data' export type AllDependencyErrors = Record export type PkgDependencyErrors = Record @@ -18,7 +17,7 @@ export type PkgDependencyErrors = Record providedIn: 'root', }) export class DepErrorService 
{ - readonly depErrors$ = this.patch.watch$('package-data').pipe( + readonly depErrors$ = this.patch.watch$('packageData').pipe( map(pkgs => Object.keys(pkgs) .map(id => ({ @@ -51,86 +50,72 @@ export class DepErrorService { } private getDepErrors( - pkgs: DataModel['package-data'], + pkgs: DataModel['packageData'], pkgId: string, outerErrors: AllDependencyErrors, ): PkgDependencyErrors { - const pkgInstalled = pkgs[pkgId].installed + const pkg = pkgs[pkgId] - if (!pkgInstalled) return {} + if (!isInstalled(pkg)) return {} return currentDeps(pkgs, pkgId).reduce( (innerErrors, depId): PkgDependencyErrors => ({ ...innerErrors, - [depId]: this.getDepError(pkgs, pkgInstalled, depId, outerErrors), + [depId]: this.getDepError(pkgs, pkg, depId, outerErrors), }), {} as PkgDependencyErrors, ) } private getDepError( - pkgs: DataModel['package-data'], - pkgInstalled: InstalledPackageDataEntry, + pkgs: DataModel['packageData'], + pkg: PackageDataEntry, depId: string, outerErrors: AllDependencyErrors, ): DependencyError | null { - const depInstalled = pkgs[depId]?.installed + const dep = pkgs[depId] // not installed - if (!depInstalled) { + if (!dep || dep.stateInfo.state !== 'installed') { return { type: DependencyErrorType.NotInstalled, } } - const pkgManifest = pkgInstalled.manifest - const depManifest = depInstalled.manifest + const versionSpec = pkg.currentDependencies[depId].versionSpec + const depManifest = dep.stateInfo.manifest // incorrect version - if ( - !this.emver.satisfies( - depManifest.version, - pkgManifest.dependencies[depId].version, - ) - ) { + if (!this.emver.satisfies(depManifest.version, versionSpec)) { return { type: DependencyErrorType.IncorrectVersion, - expected: pkgManifest.dependencies[depId].version, + expected: versionSpec, received: depManifest.version, } } // invalid config - if ( - Object.values(pkgInstalled.status['dependency-config-errors']).some( - err => !!err, - ) - ) { + if (Object.values(pkg.status.dependencyConfigErrors).some(err => !!err)) { return { type: DependencyErrorType.ConfigUnsatisfied, } } - const depStatus = depInstalled.status.main.status + const depStatus = dep.status.main.status // not running - if ( - depStatus !== PackageMainStatus.Running && - depStatus !== PackageMainStatus.Starting - ) { + if (depStatus !== 'running' && depStatus !== 'starting') { return { type: DependencyErrorType.NotRunning, } } + const currentDep = pkg.currentDependencies[depId] + // health check failure - if (depStatus === PackageMainStatus.Running) { - for (let id of pkgInstalled['current-dependencies'][depId][ - 'health-checks' - ]) { - if ( - depInstalled.status.main.health[id]?.result !== HealthResult.Success - ) { + if (depStatus === 'running' && currentDep.kind === 'running') { + for (let id of currentDep.healthChecks) { + if (dep.status.main.health[id]?.result !== 'success') { return { type: DependencyErrorType.HealthChecksFailed, } @@ -153,14 +138,14 @@ export class DepErrorService { } } -function currentDeps(pkgs: DataModel['package-data'], id: string): string[] { - return Object.keys( - pkgs[id]?.installed?.['current-dependencies'] || {}, - ).filter(depId => depId !== id) +function currentDeps(pkgs: DataModel['packageData'], id: string): string[] { + return Object.keys(pkgs[id]?.currentDependencies || {}).filter( + depId => depId !== id, + ) } function dependencyDepth( - pkgs: DataModel['package-data'], + pkgs: DataModel['packageData'], id: string, depth = 0, ): number { diff --git a/web/projects/ui/src/app/services/eos.service.ts 
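The `getDepError` rewrite above walks a fixed ladder: not installed → wrong version (now checked against `currentDependencies[depId].versionSpec`) → unsatisfied config → not running → failed health checks. A self-contained sketch of that ladder with simplified local types (plain string literals stand in for the `DependencyErrorType` enum, and the config-error and `currentDep.kind === 'running'` details are omitted):

```ts
type DepError =
  | { type: 'notInstalled' }
  | { type: 'incorrectVersion'; expected: string; received: string }
  | { type: 'notRunning' }
  | { type: 'healthChecksFailed' }
  | null

// Trimmed snapshot of a dependency package, not the real bindings.
interface DepSnapshot {
  installed: boolean
  version: string
  mainStatus: 'running' | 'starting' | 'stopping' | 'stopped'
  health: Record<string, { result?: string }>
}

function checkDep(
  dep: DepSnapshot | undefined,
  versionSpec: string,
  requiredHealthChecks: string[],
  satisfies: (version: string, spec: string) => boolean, // e.g. Emver#satisfies
): DepError {
  if (!dep || !dep.installed) return { type: 'notInstalled' }

  if (!satisfies(dep.version, versionSpec)) {
    return { type: 'incorrectVersion', expected: versionSpec, received: dep.version }
  }

  if (dep.mainStatus !== 'running' && dep.mainStatus !== 'starting') {
    return { type: 'notRunning' }
  }

  if (dep.mainStatus === 'running') {
    for (const id of requiredHealthChecks) {
      if (dep.health[id]?.result !== 'success') return { type: 'healthChecksFailed' }
    }
  }

  return null
}
```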
b/web/projects/ui/src/app/services/eos.service.ts index 396c84223c..dcd1d0de04 100644 --- a/web/projects/ui/src/app/services/eos.service.ts +++ b/web/projects/ui/src/app/services/eos.service.ts @@ -15,13 +15,13 @@ export class EOSService { eos?: MarketplaceEOS updateAvailable$ = new BehaviorSubject(false) - readonly updating$ = this.patch.watch$('server-info', 'status-info').pipe( - map(status => !!status['update-progress'] || status.updated), + readonly updating$ = this.patch.watch$('serverInfo', 'statusInfo').pipe( + map(status => !!status.updateProgress || status.updated), distinctUntilChanged(), ) readonly backingUp$ = this.patch - .watch$('server-info', 'status-info', 'backup-progress') + .watch$('serverInfo', 'statusInfo', 'backupProgress') .pipe( map(obj => !!obj), distinctUntilChanged(), diff --git a/web/projects/ui/src/app/services/form.service.ts b/web/projects/ui/src/app/services/form.service.ts index 66d368b507..ee8f0f1365 100644 --- a/web/projects/ui/src/app/services/form.service.ts +++ b/web/projects/ui/src/app/services/form.service.ts @@ -89,7 +89,6 @@ export class FormService { UntypedFormGroup | UntypedFormArray | UntypedFormControl > = {} Object.entries(config).map(([key, spec]) => { - if (spec.type === 'pointer') return group[key] = this.getFormEntry(spec, current ? current[key] : undefined) }) return this.formBuilder.group(group, { validators }) diff --git a/web/projects/ui/src/app/services/marketplace.service.ts b/web/projects/ui/src/app/services/marketplace.service.ts index 9592e7da1b..30055df565 100644 --- a/web/projects/ui/src/app/services/marketplace.service.ts +++ b/web/projects/ui/src/app/services/marketplace.service.ts @@ -19,7 +19,11 @@ import { } from 'rxjs' import { RR } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' -import { DataModel, UIStore } from 'src/app/services/patch-db/data-model' +import { + DataModel, + UIMarketplaceData, + UIStore, +} from 'src/app/services/patch-db/data-model' import { PatchDB } from 'patch-db-client' import { catchError, @@ -39,9 +43,9 @@ import { ClientStorageService } from './client-storage.service' @Injectable() export class MarketplaceService implements AbstractMarketplaceService { private readonly knownHosts$: Observable = this.patch - .watch$('ui', 'marketplace', 'known-hosts') + .watch$('ui', 'marketplace', 'knownHosts') .pipe( - map(hosts => { + map((hosts: UIMarketplaceData['knownHosts']) => { const { start9, community } = this.config.marketplace let arr = [ toStoreIdentity(start9, hosts[start9]), @@ -73,8 +77,8 @@ export class MarketplaceService implements AbstractMarketplaceService { private readonly selectedHost$: Observable = this.patch .watch$('ui', 'marketplace') .pipe( - distinctUntilKeyChanged('selected-url'), - map(({ 'selected-url': url, 'known-hosts': hosts }) => + distinctUntilKeyChanged('selectedUrl'), + map(({ selectedUrl: url, knownHosts: hosts }) => toStoreIdentity(url, hosts[url]), ), shareReplay({ bufferSize: 1, refCount: true }), @@ -171,9 +175,9 @@ export class MarketplaceService implements AbstractMarketplaceService { ): Observable { return this.patch.watch$('ui', 'marketplace').pipe( switchMap(uiMarketplace => { - const url = optionalUrl || uiMarketplace['selected-url'] + const url = optionalUrl || uiMarketplace.selectedUrl - if (version !== '*' || !uiMarketplace['known-hosts'][url]) { + if (version !== '*' || !uiMarketplace.knownHosts[url]) { return this.fetchPackage$(id, version, url) } @@ -206,18 +210,18 @@ export class 
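The `serverInfo`/`statusInfo` renames only change the `watch$` paths; the derived booleans behave as before. A runnable sketch with a plain `BehaviorSubject` standing in for `patch.watch$` and a trimmed `StatusInfo` shape (assumed, not the generated binding):

```ts
import { BehaviorSubject } from 'rxjs'
import { distinctUntilChanged, map } from 'rxjs/operators'

// Trimmed stand-in for the object returned by patch.watch$('serverInfo', 'statusInfo').
interface StatusInfo {
  updated: boolean
  updateProgress: object | null
  backupProgress: object | null
}

const statusInfo$ = new BehaviorSubject<StatusInfo>({
  updated: false,
  updateProgress: null,
  backupProgress: null,
})

// Mirrors updating$: true while an update is downloading or already applied.
const updating$ = statusInfo$.pipe(
  map(s => !!s.updateProgress || s.updated),
  distinctUntilChanged(),
)

// Mirrors backingUp$: true while a backup job is in progress.
const backingUp$ = statusInfo$.pipe(
  map(s => !!s.backupProgress),
  distinctUntilChanged(),
)

updating$.subscribe(v => console.log('updating:', v))
backingUp$.subscribe(v => console.log('backing up:', v))
```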
MarketplaceService implements AbstractMarketplaceService { ): Promise { const params: RR.InstallPackageReq = { id, - 'version-spec': `=${version}`, - 'marketplace-url': url, + versionSpec: `=${version}`, + marketplaceUrl: url, } await this.api.installPackage(params) } fetchInfo$(url: string): Observable { - return this.patch.watch$('server-info').pipe( + return this.patch.watch$('serverInfo').pipe( take(1), switchMap(serverInfo => { - const qp: RR.GetMarketplaceInfoReq = { 'server-id': serverInfo.id } + const qp: RR.GetMarketplaceInfoReq = { serverId: serverInfo.id } return this.api.marketplaceProxy( '/package/v0/info', qp, @@ -276,7 +280,7 @@ export class MarketplaceService implements AbstractMarketplaceService { const qp: RR.GetMarketplacePackagesReq = { ...params, page: 1, - 'per-page': 100, + perPage: 100, } if (qp.ids) qp.ids = JSON.stringify(qp.ids) @@ -306,7 +310,7 @@ export class MarketplaceService implements AbstractMarketplaceService { ): Promise { if (oldName !== newName) { this.api.setDbValue( - ['marketplace', 'known-hosts', url, 'name'], + ['marketplace', 'knownHosts', url, 'name'], newName, ) } diff --git a/web/projects/ui/src/app/services/patch-data.service.ts b/web/projects/ui/src/app/services/patch-data.service.ts index 23ee159252..5372a665cd 100644 --- a/web/projects/ui/src/app/services/patch-data.service.ts +++ b/web/projects/ui/src/app/services/patch-data.service.ts @@ -25,7 +25,7 @@ export class PatchDataService extends Observable { // check for updates to eOS and services this.checkForUpdates() // show eos welcome message - this.showEosWelcome(ui['ack-welcome']) + this.showEosWelcome(ui.ackWelcome) }), share(), ) @@ -60,7 +60,7 @@ export class PatchDataService extends Observable { }) modal.onWillDismiss().then(() => { this.embassyApi - .setDbValue(['ack-welcome'], this.config.version) + .setDbValue(['ackWelcome'], this.config.version) .catch() }) diff --git a/web/projects/ui/src/app/services/patch-db/data-model.ts b/web/projects/ui/src/app/services/patch-db/data-model.ts index e4ea729d9b..b2446892e0 100644 --- a/web/projects/ui/src/app/services/patch-db/data-model.ts +++ b/web/projects/ui/src/app/services/patch-db/data-model.ts @@ -1,25 +1,27 @@ -import { ConfigSpec } from 'src/app/pkg-config/config-types' import { Url } from '@start9labs/shared' -import { MarketplaceManifest } from '@start9labs/marketplace' -import { BasicInfo } from 'src/app/pages/developer-routes/developer-menu/form-info' - -export interface DataModel { - 'server-info': ServerInfo - 'package-data': { [id: string]: PackageDataEntry } +import { types } from '@start9labs/start-sdk' +import { ActionMetadata } from '@start9labs/start-sdk/cjs/sdk/lib/types' +import { Public } from '../../../../../../../core/startos/bindings/Public' +import { PackageDataEntry as PDE } from '../../../../../../../core/startos/bindings/PackageDataEntry' +import { FullProgress } from '../../../../../../../core/startos/bindings/FullProgress' +import { Manifest } from '../../../../../../../core/startos/bindings/Manifest' +type ServiceInterfaceWithHostInfo = types.ServiceInterfaceWithHostInfo + +export type DataModel = Public & { ui: UIData + packageData: Record } export interface UIData { name: string | null - 'ack-welcome': string // eOS emver + ackWelcome: string // eOS emver marketplace: UIMarketplaceData - dev: DevData gaming: { snake: { - 'high-score': number + highScore: number } } - 'ack-instructions': Record + ackInstructions: Record theme: string widgets: readonly Widget[] } @@ -38,8 +40,8 @@ export interface Widget { 
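The new `DataModel` above intersects the generated `Public` bindings with UI-only state, so `watch$('ui', …)` and `watch$('packageData', …)` stay type-safe. A minimal sketch of that pattern, using placeholder shapes rather than the real bindings imported from `core/startos/bindings`:

```ts
// Placeholder shapes only — the real code imports Public, PackageDataEntry, etc.
interface GeneratedPublic {
  serverInfo: { id: string; ntpSynced: boolean }
}

interface UIData {
  name: string | null
  ackWelcome: string
}

interface PackageDataEntry {
  stateInfo: { state: string }
}

type DataModel = GeneratedPublic & {
  ui: UIData
  packageData: Record<string, PackageDataEntry>
}

// Both the generated branch and the UI-only branches type-check on one model:
const model: DataModel = {
  serverInfo: { id: 'abc123', ntpSynced: true },
  ui: { name: null, ackWelcome: '0.3.6' },
  packageData: {},
}
```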
} export interface UIMarketplaceData { - 'selected-url': string - 'known-hosts': { + selectedUrl: string + knownHosts: { 'https://registry.start9.com/': UIStore 'https://community-registry.start9.com/': UIStore [url: string]: UIStore @@ -50,327 +52,31 @@ export interface UIStore { name?: string } -export interface DevData { - [id: string]: DevProjectData -} - -export interface DevProjectData { - name: string - instructions: string - config: string - 'basic-info'?: BasicInfo -} - -export interface ServerInfo { - id: string - version: string - 'last-backup': string | null - 'lan-address': Url - 'tor-address': Url - 'ip-info': IpInfo - 'last-wifi-region': string | null - 'unread-notification-count': number - 'status-info': ServerStatusInfo - 'eos-version-compat': string - 'password-hash': string - hostname: string - pubkey: string - 'ca-fingerprint': string - 'ntp-synced': boolean - zram: boolean - platform: string -} - -export interface IpInfo { - [iface: string]: { - ipv4: string | null - ipv6: string | null - } -} - -export interface ServerStatusInfo { - 'backup-progress': null | { - [packageId: string]: { - complete: boolean - } - } - updated: boolean - 'update-progress': { size: number | null; downloaded: number } | null - restarting: boolean - 'shutting-down': boolean +export type PackageDataEntry = PDE & { + stateInfo: T } -export enum ServerStatus { - Running = 'running', - Updated = 'updated', - BackingUp = 'backing-up', -} +export type StateInfo = InstalledState | InstallingState | UpdatingState -export interface PackageDataEntry { - state: PackageState - 'static-files': { - license: Url - instructions: Url - icon: Url - } +export type InstalledState = { + state: 'installed' | 'removing' manifest: Manifest - installed?: InstalledPackageDataEntry // exists when: installed, updating - 'install-progress'?: InstallProgress // exists when: installing, updating + installingInfo?: undefined } -export enum PackageState { - Installing = 'installing', - Installed = 'installed', - Updating = 'updating', - Removing = 'removing', - Restoring = 'restoring', +export type InstallingState = { + state: 'installing' | 'restoring' + installingInfo: InstallingInfo + manifest?: undefined } -export interface InstalledPackageDataEntry { - status: Status +export type UpdatingState = { + state: 'updating' + installingInfo: InstallingInfo manifest: Manifest - 'last-backup': string | null - 'system-pointers': any[] - 'current-dependents': { [id: string]: CurrentDependencyInfo } - 'current-dependencies': { [id: string]: CurrentDependencyInfo } - 'dependency-info': { - [id: string]: { - title: string - icon: Url - } - } - 'interface-addresses': { - [id: string]: { 'tor-address': string; 'lan-address': string } - } - 'marketplace-url': string | null - 'developer-key': string -} - -export interface CurrentDependencyInfo { - pointers: any[] - 'health-checks': string[] // array of health check IDs -} - -export interface Manifest extends MarketplaceManifest { - assets: { - license: string // filename - instructions: string // filename - icon: string // filename - docker_images: string // filename - assets: string // path to assets folder - scripts: string // path to scripts folder - } - main: ActionImpl - 'health-checks': Record< - string, - ActionImpl & { name: string; 'success-message': string | null } - > - config: ConfigActions | null - volumes: Record - 'min-os-version': string - interfaces: Record - backup: BackupActions - migrations: Migrations | null - actions: Record -} - -export interface DependencyConfig { 
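`StateInfo` is now a discriminated union, with `manifest?: undefined` / `installingInfo?: undefined` keeping the absent property visible to the type checker. A small illustration of how it narrows, using trimmed placeholder types; this mirrors the `getManifest` helper added later in this diff:

```ts
// Trimmed placeholders for the generated Manifest / FullProgress bindings.
type Manifest = { id: string; title: string; version: string }
type InstallingInfo = { progress: unknown; newManifest: Manifest }

type StateInfo =
  | { state: 'installed' | 'removing'; manifest: Manifest; installingInfo?: undefined }
  | { state: 'installing' | 'restoring'; installingInfo: InstallingInfo; manifest?: undefined }
  | { state: 'updating'; manifest: Manifest; installingInfo: InstallingInfo }

// installed/removing/updating carry the current manifest;
// installing/restoring only know the incoming one.
function manifestOf(stateInfo: StateInfo): Manifest {
  if (stateInfo.state === 'installing' || stateInfo.state === 'restoring') {
    return stateInfo.installingInfo.newManifest
  }
  return stateInfo.manifest
}
```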
- check: ActionImpl - 'auto-configure': ActionImpl -} - -export interface ActionImpl { - type: 'docker' - image: string - system: boolean - entrypoint: string - args: string[] - mounts: { [id: string]: string } - 'io-format': DockerIoFormat | null - inject: boolean - 'shm-size': string - 'sigterm-timeout': string | null -} - -export enum DockerIoFormat { - Json = 'json', - Yaml = 'yaml', - Cbor = 'cbor', - Toml = 'toml', -} - -export interface ConfigActions { - get: ActionImpl | null - set: ActionImpl | null -} - -export type Volume = VolumeData - -export interface VolumeData { - type: VolumeType.Data - readonly: boolean -} - -export interface VolumeAssets { - type: VolumeType.Assets -} - -export interface VolumePointer { - type: VolumeType.Pointer - 'package-id': string - 'volume-id': string - path: string - readonly: boolean -} - -export interface VolumeCertificate { - type: VolumeType.Certificate - 'interface-id': string -} - -export interface VolumeBackup { - type: VolumeType.Backup - readonly: boolean -} - -export enum VolumeType { - Data = 'data', - Assets = 'assets', - Pointer = 'pointer', - Certificate = 'certificate', - Backup = 'backup', -} - -export interface InterfaceDef { - name: string - description: string - 'tor-config': TorConfig | null - 'lan-config': LanConfig | null - ui: boolean - protocols: string[] -} - -export interface TorConfig { - 'port-mapping': { [port: number]: number } -} - -export type LanConfig = { - [port: number]: { ssl: boolean; mapping: number } -} - -export interface BackupActions { - create: ActionImpl - restore: ActionImpl -} - -export interface Migrations { - from: { [versionRange: string]: ActionImpl } - to: { [versionRange: string]: ActionImpl } -} - -export interface Action { - name: string - description: string - warning: string | null - implementation: ActionImpl - 'allowed-statuses': (PackageMainStatus.Stopped | PackageMainStatus.Running)[] - 'input-spec': ConfigSpec | null -} - -export interface Status { - configured: boolean - main: MainStatus - 'dependency-config-errors': { [id: string]: string | null } -} - -export type MainStatus = - | MainStatusStopped - | MainStatusStopping - | MainStatusStarting - | MainStatusRunning - | MainStatusBackingUp - | MainStatusRestarting - -export interface MainStatusStopped { - status: PackageMainStatus.Stopped -} - -export interface MainStatusStopping { - status: PackageMainStatus.Stopping -} - -export interface MainStatusStarting { - status: PackageMainStatus.Starting - restarting: boolean -} - -export interface MainStatusRunning { - status: PackageMainStatus.Running - started: string // UTC date string - health: { [id: string]: HealthCheckResult } -} - -export interface MainStatusBackingUp { - status: PackageMainStatus.BackingUp - started: string | null // UTC date string -} - -export interface MainStatusRestarting { - status: PackageMainStatus.Restarting -} - -export enum PackageMainStatus { - Starting = 'starting', - Running = 'running', - Stopping = 'stopping', - Stopped = 'stopped', - BackingUp = 'backing-up', - Restarting = 'restarting', -} - -export type HealthCheckResult = - | HealthCheckResultStarting - | HealthCheckResultLoading - | HealthCheckResultDisabled - | HealthCheckResultSuccess - | HealthCheckResultFailure - -export enum HealthResult { - Starting = 'starting', - Loading = 'loading', - Disabled = 'disabled', - Success = 'success', - Failure = 'failure', -} - -export interface HealthCheckResultStarting { - result: HealthResult.Starting -} - -export interface HealthCheckResultDisabled { - 
result: HealthResult.Disabled -} - -export interface HealthCheckResultSuccess { - result: HealthResult.Success -} - -export interface HealthCheckResultLoading { - result: HealthResult.Loading - message: string -} - -export interface HealthCheckResultFailure { - result: HealthResult.Failure - error: string } -export interface InstallProgress { - readonly size: number | null - readonly downloaded: number - readonly 'download-complete': boolean - readonly validated: number - readonly 'validation-complete': boolean - readonly unpacked: number - readonly 'unpack-complete': boolean +export type InstallingInfo = { + progress: FullProgress + newManifest: Manifest } diff --git a/web/projects/ui/src/app/services/pkg-status-rendering.service.ts b/web/projects/ui/src/app/services/pkg-status-rendering.service.ts index 28c58a8092..d95a5ccdc1 100644 --- a/web/projects/ui/src/app/services/pkg-status-rendering.service.ts +++ b/web/projects/ui/src/app/services/pkg-status-rendering.service.ts @@ -1,12 +1,7 @@ import { isEmptyObject } from '@start9labs/shared' -import { - MainStatusStarting, - PackageDataEntry, - PackageMainStatus, - PackageState, - Status, -} from 'src/app/services/patch-db/data-model' +import { PackageDataEntry } from 'src/app/services/patch-db/data-model' import { PkgDependencyErrors } from './dep-error.service' +import { Status } from '../../../../../../core/startos/bindings/Status' export interface PackageStatus { primary: PrimaryStatus @@ -22,15 +17,12 @@ export function renderPkgStatus( let dependency: DependencyStatus | null = null let health: HealthStatus | null = null - if (pkg.state === PackageState.Installed && pkg.installed) { - primary = getPrimaryStatus(pkg.installed.status) + if (pkg.stateInfo.state === 'installed') { + primary = getPrimaryStatus(pkg.status) dependency = getDependencyStatus(depErrors) - health = getHealthStatus( - pkg.installed.status, - !isEmptyObject(pkg.manifest['health-checks']), - ) + health = getHealthStatus(pkg.status) } else { - primary = pkg.state as string as PrimaryStatus + primary = pkg.stateInfo.state as string as PrimaryStatus } return { primary, dependency, health } @@ -39,7 +31,7 @@ export function renderPkgStatus( function getPrimaryStatus(status: Status): PrimaryStatus { if (!status.configured) { return PrimaryStatus.NeedsConfig - } else if ((status.main as MainStatusStarting).restarting) { + } else if (status.main.status === 'restarting') { return PrimaryStatus.Restarting } else { return status.main.status as any as PrimaryStatus @@ -52,29 +44,26 @@ function getDependencyStatus(depErrors: PkgDependencyErrors): DependencyStatus { : DependencyStatus.Satisfied } -function getHealthStatus( - status: Status, - hasHealthChecks: boolean, -): HealthStatus | null { - if (status.main.status !== PackageMainStatus.Running || !status.main.health) { +function getHealthStatus(status: Status): HealthStatus | null { + if (status.main.status !== 'running' || !status.main.health) { return null } const values = Object.values(status.main.health) - if (values.some(h => h.result === 'failure')) { - return HealthStatus.Failure + if (values.some(h => !h.result)) { + return HealthStatus.Waiting } - if (!values.length && hasHealthChecks) { - return HealthStatus.Waiting + if (values.some(h => h.result === 'failure')) { + return HealthStatus.Failure } if (values.some(h => h.result === 'loading')) { return HealthStatus.Loading } - if (values.some(h => !h.result || h.result === 'starting')) { + if (values.some(h => h.result === 'starting')) { return HealthStatus.Starting 
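`getHealthStatus` above now rolls up the per-check results in priority order, reporting a missing `result` as Waiting ahead of any explicit failure. A standalone sketch of that ordering; the final fall-through value is an assumption, since the tail of the function falls outside this hunk:

```ts
type HealthResult = 'starting' | 'loading' | 'success' | 'failure'

// Local values for illustration; the service returns HealthStatus enum members.
type HealthStatusValue = 'waiting' | 'failure' | 'loading' | 'starting' | 'healthy'

function rollUpHealth(
  health: Record<string, { result?: HealthResult }>,
): HealthStatusValue {
  const values = Object.values(health)
  if (values.some(h => !h.result)) return 'waiting' // no report yet
  if (values.some(h => h.result === 'failure')) return 'failure'
  if (values.some(h => h.result === 'loading')) return 'loading'
  if (values.some(h => h.result === 'starting')) return 'starting'
  return 'healthy' // assumed fall-through
}
```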
} diff --git a/web/projects/ui/src/app/services/time-service.ts b/web/projects/ui/src/app/services/time-service.ts index 641144dae9..3b58da7444 100644 --- a/web/projects/ui/src/app/services/time-service.ts +++ b/web/projects/ui/src/app/services/time-service.ts @@ -32,7 +32,7 @@ export class TimeService { readonly now$ = combineLatest([ this.time$, - this.patch.watch$('server-info', 'ntp-synced'), + this.patch.watch$('serverInfo', 'ntpSynced'), ]).pipe( map(([time, synced]) => ({ value: time.now, diff --git a/web/projects/ui/src/app/services/ui-launcher.service.ts b/web/projects/ui/src/app/services/ui-launcher.service.ts index 55559bcd3c..badbc7a3ea 100644 --- a/web/projects/ui/src/app/services/ui-launcher.service.ts +++ b/web/projects/ui/src/app/services/ui-launcher.service.ts @@ -12,7 +12,11 @@ export class UiLauncherService { private readonly config: ConfigService, ) {} - launch(pkg: PackageDataEntry): void { - this.windowRef.open(this.config.launchableURL(pkg), '_blank', 'noreferrer') + launch(interfaces: PackageDataEntry['serviceInterfaces']): void { + this.windowRef.open( + this.config.launchableAddress(interfaces), + '_blank', + 'noreferrer', + ) } } diff --git a/web/projects/ui/src/app/types/progress-data.ts b/web/projects/ui/src/app/types/progress-data.ts deleted file mode 100644 index a05e475a15..0000000000 --- a/web/projects/ui/src/app/types/progress-data.ts +++ /dev/null @@ -1,7 +0,0 @@ -export interface ProgressData { - totalProgress: number - downloadProgress: number - validateProgress: number - unpackProgress: number - isComplete: boolean -} diff --git a/web/projects/ui/src/app/util/dry-update.ts b/web/projects/ui/src/app/util/dry-update.ts index 9b7ba44a3b..2d4d1aa101 100644 --- a/web/projects/ui/src/app/util/dry-update.ts +++ b/web/projects/ui/src/app/util/dry-update.ts @@ -1,17 +1,18 @@ import { Emver } from '@start9labs/shared' import { DataModel } from '../services/patch-db/data-model' +import { getManifest } from './get-package-data' export function dryUpdate( { id, version }: { id: string; version: string }, - pkgs: DataModel['package-data'], + pkgs: DataModel['packageData'], emver: Emver, ): string[] { return Object.values(pkgs) .filter( pkg => - Object.keys(pkg.installed?.['current-dependencies'] || {}).some( + Object.keys(pkg.currentDependencies || {}).some( pkgId => pkgId === id, - ) && !emver.satisfies(version, pkg.manifest.dependencies[id].version), + ) && !emver.satisfies(version, pkg.currentDependencies[id].versionSpec), ) - .map(pkg => pkg.manifest.title) + .map(pkg => getManifest(pkg).title) } diff --git a/web/projects/ui/src/app/util/get-package-data.ts b/web/projects/ui/src/app/util/get-package-data.ts index 0645f3b3c3..08fbdd2df2 100644 --- a/web/projects/ui/src/app/util/get-package-data.ts +++ b/web/projects/ui/src/app/util/get-package-data.ts @@ -1,19 +1,59 @@ import { PatchDB } from 'patch-db-client' import { DataModel, + InstalledState, + InstallingState, PackageDataEntry, + UpdatingState, } from 'src/app/services/patch-db/data-model' import { firstValueFrom } from 'rxjs' +import { Manifest } from '../../../../../../core/startos/bindings/Manifest' export async function getPackage( patch: PatchDB, id: string, ): Promise { - return firstValueFrom(patch.watch$('package-data', id)) + return firstValueFrom(patch.watch$('packageData', id)) } export async function getAllPackages( patch: PatchDB, -): Promise { - return firstValueFrom(patch.watch$('package-data')) +): Promise { + return firstValueFrom(patch.watch$('packageData')) +} + +export function 
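`dryUpdate` above now reads the dependent's declared range from `currentDependencies[id].versionSpec` instead of the old manifest field. A self-contained sketch of the same check, with `satisfies` standing in for `Emver#satisfies` and a trimmed package shape:

```ts
// Trimmed shape; the real function resolves the title via getManifest(pkg).
interface PkgLike {
  title: string
  currentDependencies: Record<string, { versionSpec: string }>
}

function dryUpdateSketch(
  id: string,
  version: string,
  pkgs: PkgLike[],
  satisfies: (version: string, spec: string) => boolean, // e.g. Emver#satisfies
): string[] {
  return pkgs
    .filter(
      pkg =>
        !!pkg.currentDependencies[id] &&
        !satisfies(version, pkg.currentDependencies[id].versionSpec),
    )
    .map(pkg => pkg.title)
}

// e.g. dryUpdateSketch('bitcoind', '27.0.0', Object.values(allPkgs),
//   (v, s) => emver.satisfies(v, s))
```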
getManifest(pkg: PackageDataEntry): Manifest { + if (isInstalled(pkg) || isRemoving(pkg)) return pkg.stateInfo.manifest + + return (pkg.stateInfo as InstallingState).installingInfo.newManifest +} + +export function isInstalled( + pkg: PackageDataEntry, +): pkg is PackageDataEntry { + return pkg.stateInfo.state === 'installed' +} + +export function isRemoving( + pkg: PackageDataEntry, +): pkg is PackageDataEntry { + return pkg.stateInfo.state === 'removing' +} + +export function isInstalling( + pkg: PackageDataEntry, +): pkg is PackageDataEntry { + return pkg.stateInfo.state === 'installing' +} + +export function isRestoring( + pkg: PackageDataEntry, +): pkg is PackageDataEntry { + return pkg.stateInfo.state === 'restoring' +} + +export function isUpdating( + pkg: PackageDataEntry, +): pkg is PackageDataEntry { + return pkg.stateInfo.state === 'updating' } diff --git a/web/projects/ui/src/app/util/get-package-info.ts b/web/projects/ui/src/app/util/get-package-info.ts index 14901f2010..936ed99547 100644 --- a/web/projects/ui/src/app/util/get-package-info.ts +++ b/web/projects/ui/src/app/util/get-package-info.ts @@ -7,9 +7,7 @@ import { renderPkgStatus, StatusRendering, } from '../services/pkg-status-rendering.service' -import { ProgressData } from 'src/app/types/progress-data' import { Subscription } from 'rxjs' -import { packageLoadingProgress } from './package-loading-progress' import { PkgDependencyErrors } from '../services/dep-error.service' export function getPackageInfo( @@ -23,7 +21,6 @@ export function getPackageInfo( entry, primaryRendering, primaryStatus: statuses.primary, - installProgress: packageLoadingProgress(entry['install-progress']), error: statuses.health === HealthStatus.Failure || statuses.dependency === DependencyStatus.Warning, @@ -40,7 +37,6 @@ export interface PkgInfo { entry: PackageDataEntry primaryRendering: StatusRendering primaryStatus: PrimaryStatus - installProgress: ProgressData | null error: boolean warning: boolean transitioning: boolean diff --git a/web/projects/ui/src/app/util/get-server-info.ts b/web/projects/ui/src/app/util/get-server-info.ts index 4500c4305f..ba4bf0dbf2 100644 --- a/web/projects/ui/src/app/util/get-server-info.ts +++ b/web/projects/ui/src/app/util/get-server-info.ts @@ -1,9 +1,10 @@ import { PatchDB } from 'patch-db-client' -import { DataModel, ServerInfo } from 'src/app/services/patch-db/data-model' +import { DataModel } from 'src/app/services/patch-db/data-model' import { firstValueFrom } from 'rxjs' +import { ServerInfo } from '../../../../../../core/startos/bindings/ServerInfo' export async function getServerInfo( patch: PatchDB, ): Promise { - return firstValueFrom(patch.watch$('server-info')) + return firstValueFrom(patch.watch$('serverInfo')) } diff --git a/web/projects/ui/src/app/util/has-deps.ts b/web/projects/ui/src/app/util/has-deps.ts index 94b3fa4cdd..079990d1cb 100644 --- a/web/projects/ui/src/app/util/has-deps.ts +++ b/web/projects/ui/src/app/util/has-deps.ts @@ -1,7 +1,8 @@ import { PackageDataEntry } from '../services/patch-db/data-model' -export function hasCurrentDeps(pkg: PackageDataEntry): boolean { - return !!Object.keys(pkg.installed?.['current-dependents'] || {}).filter( - depId => depId !== pkg.manifest.id, - ).length +export function hasCurrentDeps( + id: string, + pkgs: Record, +): boolean { + return !!Object.values(pkgs).some(pkg => !!pkg.currentDependencies[id]) } diff --git a/web/projects/ui/src/app/util/package-loading-progress.ts b/web/projects/ui/src/app/util/package-loading-progress.ts deleted file 
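`hasCurrentDeps` above inverts the old lookup: rather than reading `current-dependents` off the package itself, it scans every package's `currentDependencies` for a reference to the given id. A minimal sketch:

```ts
// Trimmed shape for illustration.
interface PkgWithDeps {
  currentDependencies: Record<string, unknown>
}

function hasCurrentDepsSketch(
  id: string,
  pkgs: Record<string, PkgWithDeps>,
): boolean {
  return Object.values(pkgs).some(pkg => !!pkg.currentDependencies[id])
}

// hasCurrentDepsSketch('bitcoind', {
//   lnd: { currentDependencies: { bitcoind: {} } },
// }) === true
```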
mode 100644 index 3c699dfd38..0000000000 --- a/web/projects/ui/src/app/util/package-loading-progress.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { isEmptyObject } from '@start9labs/shared' -import { ProgressData } from 'src/app/types/progress-data' -import { InstallProgress } from '../services/patch-db/data-model' - -export function packageLoadingProgress( - loadData?: InstallProgress, -): ProgressData | null { - if (!loadData || isEmptyObject(loadData)) { - return null - } - - let { - downloaded, - validated, - unpacked, - size, - 'download-complete': downloadComplete, - 'validation-complete': validationComplete, - 'unpack-complete': unpackComplete, - } = loadData - - // only permit 100% when "complete" == true - size = size || 0 - downloaded = downloadComplete ? size : Math.max(downloaded - 1, 0) - validated = validationComplete ? size : Math.max(validated - 1, 0) - unpacked = unpackComplete ? size : Math.max(unpacked - 1, 0) - - const downloadWeight = 1 - const validateWeight = 0.2 - const unpackWeight = 0.7 - - const numerator = Math.floor( - downloadWeight * downloaded + - validateWeight * validated + - unpackWeight * unpacked, - ) - - const denominator = Math.floor( - size * (downloadWeight + validateWeight + unpackWeight), - ) - const totalProgress = Math.floor((100 * numerator) / denominator) - - return { - totalProgress, - downloadProgress: Math.floor((100 * downloaded) / size), - validateProgress: Math.floor((100 * validated) / size), - unpackProgress: Math.floor((100 * unpacked) / size), - isComplete: downloadComplete && validationComplete && unpackComplete, - } -} diff --git a/web/projects/ui/src/app/util/properties.util.ts b/web/projects/ui/src/app/util/properties.util.ts index 0d1d7e6f45..5afb4bd8b5 100644 --- a/web/projects/ui/src/app/util/properties.util.ts +++ b/web/projects/ui/src/app/util/properties.util.ts @@ -12,8 +12,6 @@ import matches, { arrayOf, } from 'ts-matches' -type ValidVersion = 1 | 2 - type PropertiesV1 = typeof matchPropertiesV1._TYPE type PackagePropertiesV1 = PropertiesV1[] type PackagePropertiesV2 = {