From 5c1eb36365404f2526e5909287280a2a0d1b9363 Mon Sep 17 00:00:00 2001
From: Liora Milbaum
Date: Thu, 28 Mar 2024 20:14:29 +0200
Subject: [PATCH] cleanup

Signed-off-by: Liora Milbaum
---
 .devcontainer/Containerfile                 |  2 +-
 .github/workflows/llamacpp_python.yaml      | 48 ---------------------
 model_servers/llamacpp_python/base/Makefile |  4 +-
 3 files changed, 3 insertions(+), 51 deletions(-)
 delete mode 100644 .github/workflows/llamacpp_python.yaml

diff --git a/.devcontainer/Containerfile b/.devcontainer/Containerfile
index 1fca87ae..9efee96e 100644
--- a/.devcontainer/Containerfile
+++ b/.devcontainer/Containerfile
@@ -4,6 +4,6 @@ USER root
 
 COPY requirements-test.txt .
 
-RUN dnf install -y python3.11 python3-pip buildah git && \
+RUN dnf install -y python3.11 python3-pip buildah git make && \
     dnf clean all && \
     pip3 install -r requirements-test.txt
diff --git a/.github/workflows/llamacpp_python.yaml b/.github/workflows/llamacpp_python.yaml
deleted file mode 100644
index 7a8dcd75..00000000
--- a/.github/workflows/llamacpp_python.yaml
+++ /dev/null
@@ -1,48 +0,0 @@
-name: llamacpp_python
-
-on:
-  pull_request:
-    branches:
-      - main
-  push:
-    branches:
-      - main
-
-env:
-  REGISTRY: ghcr.io
-  IMAGE_NAME: ${{ github.repository_owner }}/playground
-
-jobs:
-  build-and-push-image:
-    runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      packages: write
-    services:
-      registry:
-        image: registry:2.8.3
-        ports:
-          - 5000:5000
-    steps:
-      - uses: actions/checkout@v4.1.1
-
-      - name: Login to ghcr
-        uses: docker/login-action@v3.1.0
-        with:
-          registry: ${{ env.REGISTRY }}
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Buildah Action
-        uses: redhat-actions/buildah-build@v2.13
-        with:
-          image: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
-          tags: latest
-          containerfiles: ./model_servers/llamacpp_python/base/Containerfile
-          context: model_servers/llamacpp_python/
-
-      - name: Set up Python
-        uses: actions/setup-python@v5.0.0
-
-      - name: Run tests
-        run: make -f model_servers/llamacpp_python/base/Makefile test
diff --git a/model_servers/llamacpp_python/base/Makefile b/model_servers/llamacpp_python/base/Makefile
index e4761424..92a1aad7 100644
--- a/model_servers/llamacpp_python/base/Makefile
+++ b/model_servers/llamacpp_python/base/Makefile
@@ -1,6 +1,6 @@
 .PHONY: build
 build:
-	podman build -f Containerfile -t ghcr.io/ai-lab-recipes/playground --format docker .
+	podman build -f Containerfile -t ghcr.io/ai-lab-recipes/model_servers .
 
 models/llama-2-7b-chat.Q5_K_S.gguf:
 	curl -s -S -L -f https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_S.gguf -z $@ -o $@.tmp && mv -f $@.tmp $@ 2>/dev/null || rm -f $@.tmp $@
@@ -11,7 +11,7 @@ install:
 
 .PHONY: run
 run: models/llama-2-7b-chat.Q5_K_S.gguf install
-	podman run -it -d -p 8001:8001 -v ./models:/locallm/models:ro,Z -e MODEL_PATH=models/llama-2-7b-chat.Q5_K_S.gguf -e HOST=0.0.0.0 -e PORT=8001 --net=host ghcr.io/redhat-et/playground
+	podman run -it -d -p 8001:8001 -v ./models:/locallm/models:ro,Z -e MODEL_PATH=models/llama-2-7b-chat.Q5_K_S.gguf -e HOST=0.0.0.0 -e PORT=8001 --net=host ghcr.io/redhat-et/model_servers
 
 .PHONY: test
 test: models/llama-2-7b-chat.Q5_K_S.gguf install
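
With the workflow removed, the Makefile targets become the entry point for builds and tests. A minimal local sketch, assuming podman, make, and curl are installed and the commands are run from the Makefile's directory (the invocation paths here are an assumption; the deleted workflow instead called make -f from the repository root):

    cd model_servers/llamacpp_python/base
    make build   # builds the renamed image ghcr.io/ai-lab-recipes/model_servers
    make run     # fetches llama-2-7b-chat.Q5_K_S.gguf and serves it on port 8001
    make test    # fetches the model and installs test dependencies before the tests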