Fix 0.10.1 cherry-pick merge issue (kserve#2999)
* Run e2e tests with release branch

Signed-off-by: Dan Sun <[email protected]>

* Fix merge issue

Signed-off-by: Dan Sun <[email protected]>

* Update release branch name

Signed-off-by: Dan Sun <[email protected]>

* Free up disk space

Signed-off-by: Dan Sun <[email protected]>

* Fix disk space issue

Signed-off-by: Dan Sun <[email protected]>

* Prune docker images

Signed-off-by: Dan Sun <[email protected]>

* Use standard socket for single process (kserve#3000)

Signed-off-by: Dan Sun <[email protected]>

* Use cpu images for torch

Signed-off-by: Dan Sun <[email protected]>

* Update version to 0.10.2

Signed-off-by: Dan Sun <[email protected]>

* Use buildx for images

Signed-off-by: Dan Sun <[email protected]>

* Fix Dockerfile

Signed-off-by: Dan Sun <[email protected]>

---------

Signed-off-by: Dan Sun <[email protected]>
yuzisun authored Jun 21, 2023
1 parent ea96e46 commit c8f0999
Showing 16 changed files with 67 additions and 37 deletions.
28 changes: 28 additions & 0 deletions .github/actions/minikube-setup/action.yml
@@ -4,6 +4,23 @@ description: 'Sets up minikube on the github runner'
runs:
  using: "composite"
  steps:
+    - name: Free up disk space
+      shell: bash
+      run: |
+        echo "Disk usage before cleanup:"
+        df -h
+        # remove non-essential tools and libraries, see:
+        # https://github.com/actions/runner-images/issues/2840#issuecomment-790492173
+        sudo rm -rf /opt/ghc
+        sudo rm -rf /usr/share/dotnet
+        sudo rm -rf /usr/local/share/boost
+        # delete libraries for Android (12G), CodeQL (5.3G), PowerShell (1.3G), Swift (1.7G)
+        sudo rm -rf /usr/local/lib/android
+        sudo rm -rf "${AGENT_TOOLSDIRECTORY}/CodeQL"
+        sudo rm -rf /usr/local/share/powershell
+        sudo rm -rf /usr/share/swift
+        echo "Disk usage after cleanup:"
+        df -h
    - name: Setup Minikube
      uses: manusa/[email protected]
      with:
@@ -12,12 +29,23 @@ runs:
        driver: 'none'
        start args: --wait-timeout=120s
        github token: ${{ env.GITHUB_TOKEN }}
+    - name: Install kubectl
+      uses: azure/setup-kubectl@v3
+      with:
+        version: 'v1.23.0'
    - name: Setup port-forward
      shell: bash
      run: sudo apt-get install -y conntrack socat
    - name: Check Kubernetes pods
      shell: bash
      run: kubectl get pods -n kube-system
+    - name: Prune docker images
+      shell: bash
+      run: |
+        echo "Pruning docker images"
+        docker image prune -a -f
+        docker system df
+        df -h
    - name: Setup KServe dependencies
      shell: bash
      run: |
2 changes: 1 addition & 1 deletion .github/workflows/e2e-test.yml
@@ -2,7 +2,7 @@ name: E2E Tests

on:
  pull_request:
-    branches: [master]
+    branches: [master, release*]
    paths:
      - '**'
      - '!.github/**'
2 changes: 1 addition & 1 deletion .github/workflows/python-test.yml
@@ -2,7 +2,7 @@ name: Python package

on:
  push:
-    branches: [ master ]
+    branches: [ master, release* ]
  pull_request:
    branches: []
jobs:
2 changes: 1 addition & 1 deletion charts/kserve-crd/Chart.yaml
@@ -1,6 +1,6 @@
apiVersion: v1
name: kserve-crd
-version: v0.10.1
+version: v0.10.2
description: Helm chart for deploying kserve crds
keywords:
- kserve
2 changes: 1 addition & 1 deletion charts/kserve-resources/values.yaml
@@ -1,5 +1,5 @@
kserve:
-  version: &defaultVersion v0.10.1
+  version: &defaultVersion v0.10.2
  modelmeshVersion: &defaultModelMeshVersion v0.10.0
  agent:
    image: kserve/agent
2 changes: 1 addition & 1 deletion python/VERSION
@@ -1 +1 @@
-0.10.1
+0.10.2
2 changes: 1 addition & 1 deletion python/custom_model.Dockerfile
@@ -7,7 +7,7 @@ COPY VERSION VERSION
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -e ./kserve

COPY custom_model custom_model
-RUN pip install -r ./custom_model/requirements.txt
+RUN pip install -r ./custom_model/requirements.txt -f https://download.pytorch.org/whl/torch_stable.html

RUN useradd kserve -m -u 1000 -d /home/kserve
USER 1000
3 changes: 2 additions & 1 deletion python/custom_model/requirements.txt
@@ -1,2 +1,3 @@
kserve
-torchvision
+torch==2.0.0+cpu
+torchvision==0.15.1+cpu
3 changes: 2 additions & 1 deletion python/custom_model_grpc.Dockerfile
@@ -5,8 +5,9 @@ COPY third_party third_party
COPY kserve kserve
COPY VERSION VERSION
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -e ./kserve
-RUN pip install --no-cache-dir torch==1.13.0+cpu torchvision==0.14.0+cpu -f https://download.pytorch.org/whl/torch_stable.html

COPY custom_model custom_model
+RUN pip install -r ./custom_model/requirements.txt -f https://download.pytorch.org/whl/torch_stable.html

RUN useradd kserve -m -u 1000 -d /home/kserve
USER 1000
2 changes: 1 addition & 1 deletion python/custom_transformer.Dockerfile
@@ -7,7 +7,7 @@ COPY VERSION VERSION
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -e ./kserve

COPY custom_transformer custom_transformer
-RUN pip install --no-cache-dir -e ./custom_transformer
+RUN pip install --no-cache-dir -e ./custom_transformer -f https://download.pytorch.org/whl/torch_stable.html

RUN useradd kserve -m -u 1000 -d /home/kserve
USER 1000
3 changes: 2 additions & 1 deletion python/custom_transformer/requirements.txt
@@ -1,3 +1,4 @@
kserve
-torchvision
+torch==2.0.0+cpu
+torchvision==0.15.1+cpu
pillow==9.3.0
2 changes: 1 addition & 1 deletion python/custom_transformer_grpc.Dockerfile
@@ -7,7 +7,7 @@ COPY VERSION VERSION
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -e ./kserve

COPY custom_transformer custom_transformer
-RUN pip install --no-cache-dir -e ./custom_transformer
+RUN pip install --no-cache-dir -e ./custom_transformer -f https://download.pytorch.org/whl/torch_stable.html

RUN useradd kserve -m -u 1000 -d /home/kserve
USER 1000
8 changes: 4 additions & 4 deletions python/kserve/kserve/model.py
@@ -34,10 +34,10 @@
ModelInferResponse)
from .protocol.infer_type import InferRequest, InferResponse

PREDICTOR_URL_FORMAT = "{0}://{1}/v1/models/{2}:predict"
EXPLAINER_URL_FORMAT = "{0}://{1}/v1/models/{2}:explain"
PREDICTOR_V2_URL_FORMAT = "{0}://{1}/v2/models/{2}/infer"
EXPLAINER_V2_URL_FORMAT = "{0}://{1}/v2/models/{2}/explain"
PREDICTOR_URL_FORMAT = "http://{0}/v1/models/{1}:predict"
EXPLAINER_URL_FORMAT = "http://{0}/v1/models/{1}:explain"
PREDICTOR_V2_URL_FORMAT = "http://{0}/v2/models/{1}/infer"
EXPLAINER_V2_URL_FORMAT = "http://{0}/v2/models/{1}/explain"


class ModelType(Enum):
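For reference, the four format strings above previously took a protocol placeholder as their first argument; after this change the scheme is fixed to http and callers supply only the predictor/explainer host and the model name. Below is a minimal, hypothetical illustration of how such a two-placeholder format string is expanded — the helper and variable names are illustrative and are not the actual call sites in kserve/model.py:

```python
# Hypothetical illustration of the new two-placeholder URL formats.
PREDICTOR_URL_FORMAT = "http://{0}/v1/models/{1}:predict"
PREDICTOR_V2_URL_FORMAT = "http://{0}/v2/models/{1}/infer"


def predict_url(predictor_host: str, model_name: str, v2: bool = False) -> str:
    """Build the data-plane predict URL for a given host and model."""
    fmt = PREDICTOR_V2_URL_FORMAT if v2 else PREDICTOR_URL_FORMAT
    return fmt.format(predictor_host, model_name)


print(predict_url("sklearn-iris-predictor.default.svc.cluster.local", "sklearn-iris"))
# http://sklearn-iris-predictor.default.svc.cluster.local/v1/models/sklearn-iris:predict
```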
19 changes: 9 additions & 10 deletions python/kserve/kserve/model_server.py
@@ -150,29 +150,28 @@ def start(self, models: Union[List[Model], Dict[str, Deployment]]) -> None:
            concurrent.futures.ThreadPoolExecutor(max_workers=self.max_asyncio_workers))

        async def serve():
-            serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-            serversocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-            serversocket.bind(('0.0.0.0', self.http_port))
-            serversocket.listen(5)
-
            logger.info(f"Starting uvicorn with {self.workers} workers")
            loop = asyncio.get_event_loop()
            for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGQUIT]:
                loop.add_signal_handler(
                    sig, lambda s=sig: asyncio.create_task(self.stop(sig=s))
                )
-            self._rest_server = UvicornServer(self.http_port, [serversocket],
-                                              self.dataplane, self.model_repository_extension,
-                                              self.enable_docs_url,
-                                              log_config=self.log_config,
-                                              access_log_format=self.access_log_format)
            if self.workers == 1:
+                self._rest_server = UvicornServer(self.http_port, [],
+                                                  self.dataplane, self.model_repository_extension,
+                                                  self.enable_docs_url,
+                                                  log_config=self.log_config,
+                                                  access_log_format=self.access_log_format)
                await self._rest_server.run()
            else:
                # Since py38 MacOS/Windows defaults to use spawn for starting multiprocessing.
                # https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
                # Spawn does not work with FastAPI/uvicorn in multiprocessing mode, use fork for multiprocessing
                # https://github.com/tiangolo/fastapi/issues/1586
+                serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                serversocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+                serversocket.bind(('0.0.0.0', self.http_port))
+                serversocket.listen(5)
                multiprocessing.set_start_method('fork')
                server = UvicornServer(self.http_port, [serversocket],
                                       self.dataplane, self.model_repository_extension,
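The restructured serve() above is the core of kserve#3000: with a single worker, the REST server no longer pre-binds its own listener and instead lets uvicorn open the standard socket, while the multi-worker path keeps a pre-bound SO_REUSEADDR socket so the forked workers share one listener. The sketch below illustrates that pattern outside of KServe; the FastAPI app, port, and worker count are illustrative stand-ins, not KServe's actual UvicornServer wrapper.

```python
import multiprocessing
import socket

import uvicorn
from fastapi import FastAPI

app = FastAPI()
HTTP_PORT = 8080  # illustrative port
WORKERS = 1       # set > 1 to exercise the fork/shared-socket path


def run(sockets=None):
    config = uvicorn.Config(app, host="0.0.0.0", port=HTTP_PORT)
    server = uvicorn.Server(config)
    # With sockets=None uvicorn binds the standard socket itself;
    # with a pre-bound socket list it reuses the shared listener.
    server.run(sockets=sockets)


if __name__ == "__main__":
    if WORKERS == 1:
        run()  # single process: no manual bind, uvicorn owns the socket
    else:
        serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        serversocket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        serversocket.bind(("0.0.0.0", HTTP_PORT))
        serversocket.listen(5)
        # spawn does not work with FastAPI/uvicorn here; fork shares the bound fd
        multiprocessing.set_start_method("fork")
        procs = [multiprocessing.Process(target=run, args=([serversocket],))
                 for _ in range(WORKERS)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()
```

Fork (rather than the spawn default on macOS/Windows) is needed in the multi-worker branch because the already-bound socket file descriptor and the constructed app object must be inherited by the child processes.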
2 changes: 1 addition & 1 deletion python/kserve/kserve/protocol/rest/server.py
@@ -170,7 +170,7 @@ def run_sync(self):
        asyncio.run(server.serve(sockets=self.sockets))

    async def run(self):
-        await self.server.serve(sockets=self.sockets)
+        await self.server.serve()

    async def stop(self, sig: Optional[int] = None):
        if self.server:
22 changes: 11 additions & 11 deletions test/scripts/gh-actions/build-server-runtimes.sh
@@ -44,33 +44,33 @@ IMAGE_TRANSFORMER_IMG=kserve/image-transformer:${GITHUB_SHA}
pushd python >/dev/null
if [[ " ${types[*]} " =~ "predictor" ]]; then
  echo "Building Sklearn image"
-  docker build -t ${SKLEARN_IMG} -f sklearn.Dockerfile .
+  docker buildx build -t ${SKLEARN_IMG} -f sklearn.Dockerfile .
  echo "Building XGB image"
-  docker build -t ${XGB_IMG} -f xgb.Dockerfile .
+  docker buildx build -t ${XGB_IMG} -f xgb.Dockerfile .
  echo "Building LGB image"
-  docker build -t ${LGB_IMG} -f lgb.Dockerfile .
+  docker buildx build -t ${LGB_IMG} -f lgb.Dockerfile .
  echo "Building PMML image"
-  docker build -t ${PMML_IMG} -f pmml.Dockerfile .
+  docker buildx build -t ${PMML_IMG} -f pmml.Dockerfile .
  echo "Building Paddle image"
-  docker build -t ${PADDLE_IMG} -f paddle.Dockerfile .
+  docker buildx build -t ${PADDLE_IMG} -f paddle.Dockerfile .
  echo "Building Custom model gRPC image"
-  docker build -t ${CUSTOM_MODEL_GRPC} -f custom_model_grpc.Dockerfile .
+  docker buildx build -t ${CUSTOM_MODEL_GRPC} -f custom_model_grpc.Dockerfile .
  echo "Building image transformer gRPC image"
-  docker build -t ${CUSTOM_TRANSFORMER_GRPC} -f custom_transformer_grpc.Dockerfile .
+  docker buildx build -t ${CUSTOM_TRANSFORMER_GRPC} -f custom_transformer_grpc.Dockerfile .
fi

if [[ " ${types[*]} " =~ "explainer" ]]; then
  echo "Building Alibi image"
-  docker build -t ${ALIBI_IMG} -f alibiexplainer.Dockerfile .
+  docker buildx build -t ${ALIBI_IMG} -f alibiexplainer.Dockerfile .
  echo "Building AIX image"
-  docker build -t ${AIX_IMG} -f aixexplainer.Dockerfile .
+  docker buildx build -t ${AIX_IMG} -f aixexplainer.Dockerfile .
  echo "Building ART explainer image"
-  docker build -t ${ART_IMG} -f artexplainer.Dockerfile .
+  docker buildx build -t ${ART_IMG} -f artexplainer.Dockerfile .
fi

if [[ " ${types[*]} " =~ "transformer" ]]; then
  echo "Building Image transformer image"
-  docker build -t ${IMAGE_TRANSFORMER_IMG} -f custom_transformer.Dockerfile .
+  docker buildx build -t ${IMAGE_TRANSFORMER_IMG} -f custom_transformer.Dockerfile .
fi

popd
