diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 507a81210..7489c0405 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -42,16 +42,14 @@ jobs:
       - name: Download Python Wheels and Publish Builder Image
         run: |
-          docker buildx build --platform amd64 --build-arg ARCH=amd64 -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${{ steps.get_version.outputs.version-without-v }} --push -f src/leapfrogai_sdk/Dockerfile .
-          docker buildx build --platform arm64 --build-arg ARCH=arm64 -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${{ steps.get_version.outputs.version-without-v }} --push -f src/leapfrogai_sdk/Dockerfile .
+          docker buildx build --platform amd64,arm64 -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${{ steps.get_version.outputs.version-without-v }} --push -f src/leapfrogai_sdk/Dockerfile .

       - name: Install Zarf
         uses: defenseunicorns/setup-zarf@f95763914e20e493bb5d45d63e30e17138f981d6 # v1.0.0

       - name: Build and Publish API
         run: |
-          docker buildx build --platform amd64 --build-arg ARCH=amd64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-api:${{ steps.get_version.outputs.version-without-v }} --push -f packages/api/Dockerfile .
-          docker buildx build --platform arm64 --build-arg ARCH=arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-api:${{ steps.get_version.outputs.version-without-v }} --push -f packages/api/Dockerfile .
+          docker buildx build --platform amd64,arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-api:${{ steps.get_version.outputs.version-without-v }} --push -f packages/api/Dockerfile .
           docker buildx build --platform amd64,arm64 -t ghcr.io/defenseunicorns/leapfrogai/api-migrations:${{ steps.get_version.outputs.version-without-v }} --push -f Dockerfile.migrations --build-arg="MIGRATIONS_DIR=packages/api/supabase/migrations" .

           zarf package create packages/api --set=LEAPFROGAI_IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture amd64 --confirm
@@ -65,7 +63,7 @@ jobs:
       - name: Build and Publish UI
         run: |
-          docker buildx build --platform amd64,arm64 -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-ui:${{ steps.get_version.outputs.version-without-v }} --push -f src/leapfrogai_ui/Dockerfile .
+          docker buildx build --platform amd64,arm64 -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-ui:${{ steps.get_version.outputs.version-without-v }} --push src/leapfrogai_ui
           docker buildx build --platform amd64,arm64 -t ghcr.io/defenseunicorns/leapfrogai/ui-migrations:${{ steps.get_version.outputs.version-without-v }} --push -f Dockerfile.migrations --build-arg="MIGRATIONS_DIR=src/leapfrogai_ui/supabase/migrations" .

           zarf package create packages/ui --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture amd64 --confirm
@@ -89,8 +87,7 @@ jobs:
       - name: Build and Publish repeater
         run: |
-          docker buildx build --platform amd64 --build-arg ARCH=amd64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/repeater:${{ steps.get_version.outputs.version-without-v }} --push -f packages/repeater/Dockerfile .
-          docker buildx build --platform arm64 --build-arg ARCH=arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/repeater:${{ steps.get_version.outputs.version-without-v }} --push -f packages/repeater/Dockerfile .
+          docker buildx build --platform amd64,arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/repeater:${{ steps.get_version.outputs.version-without-v }} --push -f packages/repeater/Dockerfile .

           zarf package create packages/repeater --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture amd64 --confirm
           zarf package create packages/repeater --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture arm64 --confirm
@@ -103,8 +100,7 @@ jobs:
       - name: Build and Publish llama
         run: |
-          docker buildx build --platform amd64 --build-arg ARCH=amd64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/llama-cpp-python:${{ steps.get_version.outputs.version-without-v }} --push -f packages/llama-cpp-python/Dockerfile .
-          docker buildx build --platform arm64 --build-arg ARCH=arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/llama-cpp-python:${{ steps.get_version.outputs.version-without-v }} --push -f packages/llama-cpp-python/Dockerfile .
+          docker buildx build --platform amd64,arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/llama-cpp-python:${{ steps.get_version.outputs.version-without-v }} --push -f packages/llama-cpp-python/Dockerfile .

           zarf package create packages/llama-cpp-python --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture amd64 --confirm
           zarf package create packages/llama-cpp-python --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture arm64 --confirm
@@ -128,8 +124,7 @@ jobs:
       - name: Build and Publish Text-Embeddings
         run: |
-          docker buildx build --platform amd64 --build-arg ARCH=amd64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/text-embeddings:${{ steps.get_version.outputs.version-without-v }} --push -f packages/text-embeddings/Dockerfile .
-          docker buildx build --platform arm64 --build-arg ARCH=arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/text-embeddings:${{ steps.get_version.outputs.version-without-v }} --push -f packages/text-embeddings/Dockerfile .
+          docker buildx build --platform amd64,arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/text-embeddings:${{ steps.get_version.outputs.version-without-v }} --push -f packages/text-embeddings/Dockerfile .

           zarf package create packages/text-embeddings --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture amd64 --confirm
           zarf package create packages/text-embeddings --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture arm64 --confirm
@@ -142,8 +137,7 @@ jobs:
       - name: Build and Publish whisper
         run: |
-          docker buildx build --platform amd64 --build-arg ARCH=amd64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/whisper:${{ steps.get_version.outputs.version-without-v }} --push -f packages/whisper/Dockerfile .
-          docker buildx build --platform arm64 --build-arg ARCH=arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/whisper:${{ steps.get_version.outputs.version-without-v }} --push -f packages/whisper/Dockerfile .
+          docker buildx build --platform amd64,arm64 --build-arg LOCAL_VERSION=${{ steps.get_version.outputs.version-without-v }} -t ghcr.io/defenseunicorns/leapfrogai/whisper:${{ steps.get_version.outputs.version-without-v }} --push -f packages/whisper/Dockerfile .

           zarf package create packages/whisper --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture amd64 --confirm
           zarf package create packages/whisper --set=IMAGE_VERSION=${{ steps.get_version.outputs.version-without-v }} --architecture arm64 --confirm
diff --git a/Makefile b/Makefile
index 331571d1f..e83e76d5f 100644
--- a/Makefile
+++ b/Makefile
@@ -36,14 +36,14 @@ local-registry: ## Start up a local container registry. Errors in this target ar
 	-docker run -d -p ${REG_PORT}:5000 --restart=always --name registry registry:2

 sdk-wheel: ## build wheels for the leapfrogai_sdk package as a dependency for other lfai components
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} -f src/leapfrogai_sdk/Dockerfile .
+	docker build --platform=linux/${ARCH} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} -f src/leapfrogai_sdk/Dockerfile .

-docker-supabase: local-registry
+docker-supabase:
 	## Build the migration container for this version of the supabase package
 	docker build -t ghcr.io/defenseunicorns/leapfrogai/supabase-migrations:${LOCAL_VERSION} -f Dockerfile.migrations --build-arg="MIGRATIONS_DIR=packages/supabase/migrations" .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/supabase-migrations:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/supabase-migrations:${LOCAL_VERSION}

-build-supabase: docker-supabase
+build-supabase: local-registry docker-supabase
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/supabase-migrations:${LOCAL_VERSION}

 	## Build the Zarf package
@@ -51,98 +51,98 @@ build-supabase: docker-supabase
 docker-api: local-registry sdk-wheel
 	## Build the API image (and tag it for the local registry)
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-api:${LOCAL_VERSION} -f packages/api/Dockerfile .
+	docker build --platform=linux/${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-api:${LOCAL_VERSION} -f packages/api/Dockerfile .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/leapfrogai-api:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/leapfrogai-api:${LOCAL_VERSION}

 	## Build the migration container for this version of the API
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} -t ghcr.io/defenseunicorns/leapfrogai/api-migrations:${LOCAL_VERSION} -f Dockerfile.migrations --build-arg="MIGRATIONS_DIR=packages/api/supabase/migrations" .
+	docker build --platform=linux/${ARCH} -t ghcr.io/defenseunicorns/leapfrogai/api-migrations:${LOCAL_VERSION} -f Dockerfile.migrations --build-arg="MIGRATIONS_DIR=packages/api/supabase/migrations" .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/api-migrations:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/api-migrations:${LOCAL_VERSION}

-build-api: docker-api ## Build the leapfrogai_api container and Zarf package
+build-api: local-registry docker-api ## Build the leapfrogai_api container and Zarf package
 	## Push the images to the local registry (Zarf is super slow if the image is only in the local daemon)
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/leapfrogai-api:${LOCAL_VERSION}
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/api-migrations:${LOCAL_VERSION}

 	## Build the Zarf package
-	uds zarf package create packages/api -o packages/api --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set LEAPFROGAI_IMAGE_VERSION=${LOCAL_VERSION} --confirm
+	uds zarf package create packages/api -a ${ARCH} -o packages/api --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set LEAPFROGAI_IMAGE_VERSION=${LOCAL_VERSION} --confirm

-docker-ui: local-registry
+docker-ui:
 	## Build the UI image (and tag it for the local registry)
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-ui:${LOCAL_VERSION} src/leapfrogai_ui
+	docker build --platform=linux/${ARCH} -t ghcr.io/defenseunicorns/leapfrogai/leapfrogai-ui:${LOCAL_VERSION} src/leapfrogai_ui
 	docker tag ghcr.io/defenseunicorns/leapfrogai/leapfrogai-ui:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/leapfrogai-ui:${LOCAL_VERSION}

 	## Build the migration container for the version of the UI
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} -t ghcr.io/defenseunicorns/leapfrogai/ui-migrations:${LOCAL_VERSION} -f Dockerfile.migrations --build-arg="MIGRATIONS_DIR=src/leapfrogai_ui/supabase/migrations" .
+	docker build --platform=linux/${ARCH} -t ghcr.io/defenseunicorns/leapfrogai/ui-migrations:${LOCAL_VERSION} -f Dockerfile.migrations --build-arg="MIGRATIONS_DIR=src/leapfrogai_ui/supabase/migrations" .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/ui-migrations:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/ui-migrations:${LOCAL_VERSION}

-build-ui: docker-ui ## Build the leapfrogai_ui container and Zarf package
+build-ui: local-registry docker-ui ## Build the leapfrogai_ui container and Zarf package
 	## Push the image to the local registry (Zarf is super slow if the image is only in the local daemon)
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/leapfrogai-ui:${LOCAL_VERSION}
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/ui-migrations:${LOCAL_VERSION}

 	## Build the Zarf package
-	uds zarf package create packages/ui -o packages/ui --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm
+	uds zarf package create packages/ui -a ${ARCH} -o packages/ui --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm

-docker-llama-cpp-python: local-registry sdk-wheel
+docker-llama-cpp-python: sdk-wheel
 	## Build the image (and tag it for the local registry)
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/llama-cpp-python:${LOCAL_VERSION} -f packages/llama-cpp-python/Dockerfile .
+	docker build --platform=linux/${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/llama-cpp-python:${LOCAL_VERSION} -f packages/llama-cpp-python/Dockerfile .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/llama-cpp-python:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/llama-cpp-python:${LOCAL_VERSION}

-build-llama-cpp-python: docker-llama-cpp-python ## Build the llama-cpp-python (cpu) container and Zarf package
+build-llama-cpp-python: local-registry docker-llama-cpp-python ## Build the llama-cpp-python (cpu) container and Zarf package
 	## Push the image to the local registry (Zarf is super slow if the image is only in the local daemon)
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/llama-cpp-python:${LOCAL_VERSION}

 	## Build the Zarf package
-	uds zarf package create packages/llama-cpp-python -o packages/llama-cpp-python --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm
+	uds zarf package create packages/llama-cpp-python -a ${ARCH} -o packages/llama-cpp-python --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm

-docker-vllm: local-registry sdk-wheel
+docker-vllm: sdk-wheel
 	## Build the image (and tag it for the local registry)
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/vllm:${LOCAL_VERSION} -f packages/vllm/Dockerfile .
+	docker build --platform=linux/${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/vllm:${LOCAL_VERSION} -f packages/vllm/Dockerfile .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/vllm:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/vllm:${LOCAL_VERSION}

-build-vllm: docker-vllm ## Build the vllm container and Zarf package
+build-vllm: local-registry docker-vllm ## Build the vllm container and Zarf package
 	## Push the image to the local registry (Zarf is super slow if the image is only in the local daemon)
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/vllm:${LOCAL_VERSION}

 	## Build the Zarf package
-	uds zarf package create packages/vllm -o packages/vllm --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm
+	uds zarf package create packages/vllm -a ${ARCH} -o packages/vllm --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm

-docker-text-embeddings: local-registry sdk-wheel
+docker-text-embeddings: sdk-wheel
 	## Build the image (and tag it for the local registry)
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/text-embeddings:${LOCAL_VERSION} -f packages/text-embeddings/Dockerfile .
+	docker build --platform=linux/${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/text-embeddings:${LOCAL_VERSION} -f packages/text-embeddings/Dockerfile .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/text-embeddings:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/text-embeddings:${LOCAL_VERSION}

-build-text-embeddings: docker-text-embeddings ## Build the text-embeddings container and Zarf package
+build-text-embeddings: local-registry docker-text-embeddings ## Build the text-embeddings container and Zarf package
 	## Push the image to the local registry (Zarf is super slow if the image is only in the local daemon)
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/text-embeddings:${LOCAL_VERSION}

 	## Build the Zarf package
-	uds zarf package create packages/text-embeddings -o packages/text-embeddings --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm
+	uds zarf package create packages/text-embeddings -a ${ARCH} -o packages/text-embeddings --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm

-docker-whisper: local-registry sdk-wheel
+docker-whisper: sdk-wheel
 	## Build the image (and tag it for the local registry)
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/whisper:${LOCAL_VERSION} -f packages/whisper/Dockerfile .
+	docker build --platform=linux/${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/whisper:${LOCAL_VERSION} -f packages/whisper/Dockerfile .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/whisper:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/whisper:${LOCAL_VERSION}

-build-whisper: docker-whisper ## Build the whisper container and zarf package
+build-whisper: local-registry docker-whisper ## Build the whisper container and zarf package
 	## Push the image to the local registry (Zarf is super slow if the image is only in the local daemon)
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/whisper:${LOCAL_VERSION}

 	## Build the Zarf package
-	uds zarf package create packages/whisper -o packages/whisper --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm
+	uds zarf package create packages/whisper -a ${ARCH} -o packages/whisper --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm

-docker-repeater: local-registry sdk-wheel
+docker-repeater: sdk-wheel
 	## Build the image (and tag it for the local registry)
-	docker build --platform=linux/${ARCH} --build-arg ARCH=${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/repeater:${LOCAL_VERSION} -f packages/repeater/Dockerfile .
+	docker build --platform=linux/${ARCH} --build-arg LOCAL_VERSION=${LOCAL_VERSION} -t ghcr.io/defenseunicorns/leapfrogai/repeater:${LOCAL_VERSION} -f packages/repeater/Dockerfile .
 	docker tag ghcr.io/defenseunicorns/leapfrogai/repeater:${LOCAL_VERSION} localhost:${REG_PORT}/defenseunicorns/leapfrogai/repeater:${LOCAL_VERSION}

-build-repeater: docker-repeater ## Build the repeater container and zarf package
+build-repeater: local-registry docker-repeater ## Build the repeater container and zarf package
 	## Push the image to the local registry (Zarf is super slow if the image is only in the local daemon)
 	docker push localhost:${REG_PORT}/defenseunicorns/leapfrogai/repeater:${LOCAL_VERSION}

 	## Build the Zarf package
-	uds zarf package create packages/repeater -o packages/repeater --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm
+	uds zarf package create packages/repeater -a ${ARCH} -o packages/repeater --registry-override=ghcr.io=localhost:${REG_PORT} --insecure --set IMAGE_VERSION=${LOCAL_VERSION} --confirm

 build-cpu: build-supabase build-api build-ui build-llama-cpp-python build-text-embeddings build-whisper ## Build all zarf packages for a cpu-enabled deployment of LFAI

diff --git a/packages/api/Dockerfile b/packages/api/Dockerfile
index 1cd10d1fe..4bd36c4ad 100644
--- a/packages/api/Dockerfile
+++ b/packages/api/Dockerfile
@@ -1,8 +1,7 @@
-ARG ARCH
 ARG LOCAL_VERSION
-FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} as sdk
+FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} AS sdk

-FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev-${ARCH} as builder
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev AS builder
 ARG SDK_DEST=src/leapfrogai_sdk/build
 USER root
 WORKDIR /leapfrogai
@@ -18,7 +17,7 @@ RUN rm -f packages/api/build/*.whl
 RUN python -m pip wheel src/leapfrogai_api -w packages/api/build --find-links=${SDK_DEST}
 RUN pip install packages/api/build/leapfrogai_api*.whl --no-index --find-links=packages/api/build/

-FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-${ARCH}
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11

 ENV PATH="/leapfrogai/.venv/bin:$PATH"
 WORKDIR /leapfrogai
diff --git a/packages/llama-cpp-python/Dockerfile b/packages/llama-cpp-python/Dockerfile
index 63a55c2a0..63d5c09aa 100644
--- a/packages/llama-cpp-python/Dockerfile
+++ b/packages/llama-cpp-python/Dockerfile
@@ -1,9 +1,8 @@
-ARG ARCH
 ARG LOCAL_VERSION
-FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} as sdk
+FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} AS sdk

 # hardened and slim python w/ developer tools image
-FROM --platform=$BUILDPLATFORM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev as builder
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev AS builder
 ARG SDK_DEST=src/leapfrogai_sdk/build
 USER root
 WORKDIR /leapfrogai
@@ -36,7 +35,7 @@ RUN python -m pip wheel packages/llama-cpp-python -w packages/llama-cpp-python/b
 RUN pip install packages/llama-cpp-python/build/lfai_llama_cpp_python*.whl --no-index --find-links=packages/llama-cpp-python/build/

 # hardened and slim python image
-FROM --platform=$BUILDPLATFORM ghcr.io/defenseunicorns/leapfrogai/python:3.11
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11

 ENV PATH="/leapfrogai/.venv/bin:$PATH"
diff --git a/packages/repeater/Dockerfile b/packages/repeater/Dockerfile
index 1dcca8f0a..4d58f46a6 100644
--- a/packages/repeater/Dockerfile
+++ b/packages/repeater/Dockerfile
@@ -1,9 +1,8 @@
-ARG ARCH
 ARG LOCAL_VERSION
-FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} as sdk
+FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} AS sdk

 # hardened and slim python w/ developer tools image
-FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev-${ARCH} as builder
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev AS builder
 ARG SDK_DEST=src/leapfrogai_sdk/build
 USER root
 WORKDIR /leapfrogai
@@ -20,7 +19,7 @@ RUN python -m pip wheel packages/repeater -w packages/repeater/build --find-link
 RUN pip install packages/repeater/build/lfai_repeater*.whl --no-index --find-links=packages/repeater/build/

 # hardened and slim python image
-FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-${ARCH}
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11

 ENV PATH="/leapfrogai/.venv/bin:$PATH"
diff --git a/packages/text-embeddings/Dockerfile b/packages/text-embeddings/Dockerfile
index 0e4d320e3..9c1e76f5b 100644
--- a/packages/text-embeddings/Dockerfile
+++ b/packages/text-embeddings/Dockerfile
@@ -1,9 +1,8 @@
-ARG ARCH
 ARG LOCAL_VERSION
-FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} as sdk
+FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} AS sdk

 # hardened and slim python w/ developer tools image
-FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev-${ARCH} as builder
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev AS builder
 ARG SDK_DEST=src/leapfrogai_sdk/build
 USER root
 WORKDIR /leapfrogai
@@ -32,7 +31,7 @@ COPY packages/text-embeddings/scripts/model_download.py scripts/model_download.p
 RUN REPO_ID=${REPO_ID} REVISION=${REVISION} python scripts/model_download.py

 # hardened and slim python image
-FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-${ARCH}
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11

 ENV PATH="/leapfrogai/.venv/bin:$PATH"
diff --git a/packages/whisper/Dockerfile b/packages/whisper/Dockerfile
index 7d04955fc..1686c8d3c 100644
--- a/packages/whisper/Dockerfile
+++ b/packages/whisper/Dockerfile
@@ -1,8 +1,7 @@
-ARG ARCH
 ARG LOCAL_VERSION
-FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} as sdk
+FROM ghcr.io/defenseunicorns/leapfrogai/leapfrogai-sdk:${LOCAL_VERSION} AS sdk

-FROM --platform=$BUILDPLATFORM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev as builder
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev AS builder
 USER root

 ARG SDK_DEST=src/leapfrogai_sdk/build
@@ -30,7 +29,7 @@ RUN pip install packages/whisper/build/lfai_whisper*.whl --no-index --find-links
 FROM cgr.dev/chainguard/ffmpeg:latest as ffmpeg

 # hardened and slim python image
-FROM --platform=$BUILDPLATFORM ghcr.io/defenseunicorns/leapfrogai/python:3.11
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11

 ENV PATH="/leapfrogai/.venv/bin:$PATH"
diff --git a/src/leapfrogai_sdk/Dockerfile b/src/leapfrogai_sdk/Dockerfile
index 4a9ec424a..5fa4b6346 100644
--- a/src/leapfrogai_sdk/Dockerfile
+++ b/src/leapfrogai_sdk/Dockerfile
@@ -1,5 +1,4 @@
-ARG ARCH=amd64
-FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev-${ARCH} as builder
+FROM ghcr.io/defenseunicorns/leapfrogai/python:3.11-dev AS builder
 ARG SDK_DEST=src/leapfrogai_sdk/build

 USER root
@@ -10,4 +9,4 @@ COPY ./src/leapfrogai_sdk /leapfrogai/src/leapfrogai_sdk
 RUN python -m venv .venv
 ENV PATH="/leapfrogai/.venv/bin:$PATH"
 RUN rm -f ${SDK_DEST}/*.whl
-RUN python -m pip wheel src/leapfrogai_sdk -w ${SDK_DEST}
\ No newline at end of file
+RUN python -m pip wheel src/leapfrogai_sdk -w ${SDK_DEST}