feat(ray): add helper functions for ray model (#40)
Because

- `Instill Model` needs support for constructing a compatible `ray` model

This commit

- adds helper functions for constructing a `ray` model

Resolves INS-2497
Resolves INS-2498
heiruwu authored Nov 1, 2023
1 parent be350b3 commit 14c1a53
Showing 8 changed files with 736 additions and 55 deletions.
2 changes: 1 addition & 1 deletion .pylint.ini
@@ -7,7 +7,7 @@ extension-pkg-whitelist=

# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,protogen
ignore=CVS,protogen,protobufs

# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
3 changes: 3 additions & 0 deletions instill/configuration/__init__.py
@@ -6,6 +6,9 @@
import yaml
from pydantic import BaseModel

CLOUD_RAY_ADDRESS = "ray://core_ray_server:10001"
CORE_RAY_ADDRESS = "ray://ray_server:10001"

CONFIG_DIR = Path(
os.getenv(
"INSTILL_SYSTEM_CONFIG_PATH",
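
For context, here is a minimal sketch (not part of this diff) of how the two new address constants might be used: `ray.init` accepts a `ray://host:port` URI for Ray Client connections, and the import path assumes the constants land in `instill.configuration` as shown above.

# Sketch only: connect a Ray Client using the address constants added above.
import ray

from instill.configuration import CORE_RAY_ADDRESS

# Ray Client accepts a "ray://<host>:<port>" URI such as CORE_RAY_ADDRESS.
ray.init(address=CORE_RAY_ADDRESS)
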
44 changes: 44 additions & 0 deletions instill/helpers/protobufs/ray_pb2.py

Generated file; diff not rendered by default.

256 changes: 256 additions & 0 deletions instill/helpers/protobufs/ray_pb2.pyi
@@ -0,0 +1,256 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys

if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

@typing_extensions.final
class ModelReadyRequest(google.protobuf.message.Message):
"""ModelReadyRequest represents a request to check if a model is ready"""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

NAME_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
name: builtins.str
"""model id"""
version: builtins.str
"""model tag verion"""
def __init__(
self,
*,
name: builtins.str = ...,
version: builtins.str = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "version", b"version"]) -> None: ...

global___ModelReadyRequest = ModelReadyRequest
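
As a quick illustration (not from this commit), constructing this message from the generated module; the model name and version below are made up:

from instill.helpers.protobufs import ray_pb2

# Hypothetical readiness check payload for a deployed model.
ready_request = ray_pb2.ModelReadyRequest(name="yolov7", version="v0.1.0")
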

@typing_extensions.final
class ModelReadyResponse(google.protobuf.message.Message):
"""ModelReadyResponse represents a response to check if a model is ready"""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

READY_FIELD_NUMBER: builtins.int
ready: builtins.bool
"""whether the model is ready or not"""
def __init__(
self,
*,
ready: builtins.bool = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["ready", b"ready"]) -> None: ...

global___ModelReadyResponse = ModelReadyResponse

@typing_extensions.final
class ModelMetadataRequest(google.protobuf.message.Message):
"""ModelMetadataRequest represents a request to get the model metadata"""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

NAME_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
name: builtins.str
"""model id"""
version: builtins.str
"""model tag verion"""
def __init__(
self,
*,
name: builtins.str = ...,
version: builtins.str = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "version", b"version"]) -> None: ...

global___ModelMetadataRequest = ModelMetadataRequest

@typing_extensions.final
class InferTensor(google.protobuf.message.Message):
"""tensor for inference"""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

NAME_FIELD_NUMBER: builtins.int
DATATYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
name: builtins.str
"""tensor name."""
datatype: builtins.str
"""tensor data type."""
@property
def shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""tensor shape."""
def __init__(
self,
*,
name: builtins.str = ...,
datatype: builtins.str = ...,
shape: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["datatype", b"datatype", "name", b"name", "shape", b"shape"]) -> None: ...

global___InferTensor = InferTensor
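
A small usage sketch, assuming a single FP32 image tensor (the tensor name, datatype string, and shape are illustrative only):

from instill.helpers.protobufs import ray_pb2

# Describe one input tensor: its name, element type, and shape.
input_tensor = ray_pb2.InferTensor(
    name="input",
    datatype="FP32",
    shape=[1, 3, 640, 640],
)
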

@typing_extensions.final
class ModelMetadataResponse(google.protobuf.message.Message):
"""ModelMetadataResponse represents a response to get the model metadata"""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

@typing_extensions.final
class TensorMetadata(google.protobuf.message.Message):
"""metadata for a tensor"""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

NAME_FIELD_NUMBER: builtins.int
DATATYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
name: builtins.str
"""tensor name"""
datatype: builtins.str
"""tensor data type"""
@property
def shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""tensor shape"""
def __init__(
self,
*,
name: builtins.str = ...,
datatype: builtins.str = ...,
shape: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["datatype", b"datatype", "name", b"name", "shape", b"shape"]) -> None: ...

NAME_FIELD_NUMBER: builtins.int
VERSIONS_FIELD_NUMBER: builtins.int
FRAMEWORK_FIELD_NUMBER: builtins.int
INPUTS_FIELD_NUMBER: builtins.int
OUTPUTS_FIELD_NUMBER: builtins.int
name: builtins.str
"""model name"""
@property
def versions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""model tag version"""
framework: builtins.str
"""model inference framework"""
@property
def inputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModelMetadataResponse.TensorMetadata]:
"""model inputs"""
@property
def outputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModelMetadataResponse.TensorMetadata]:
"""model outputs"""
def __init__(
self,
*,
name: builtins.str = ...,
versions: collections.abc.Iterable[builtins.str] | None = ...,
framework: builtins.str = ...,
inputs: collections.abc.Iterable[global___ModelMetadataResponse.TensorMetadata] | None = ...,
outputs: collections.abc.Iterable[global___ModelMetadataResponse.TensorMetadata] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["framework", b"framework", "inputs", b"inputs", "name", b"name", "outputs", b"outputs", "versions", b"versions"]) -> None: ...

global___ModelMetadataResponse = ModelMetadataResponse
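
A hedged sketch of populating this message on the serving side; every literal value (model name, framework, tensor names, shapes) is invented for illustration:

from instill.helpers.protobufs import ray_pb2

# Advertise one input and one output tensor for a hypothetical model.
metadata = ray_pb2.ModelMetadataResponse(
    name="yolov7",
    versions=["v0.1.0"],
    framework="onnx",
    inputs=[
        ray_pb2.ModelMetadataResponse.TensorMetadata(
            name="input", datatype="FP32", shape=[1, 3, 640, 640]
        )
    ],
    outputs=[
        ray_pb2.ModelMetadataResponse.TensorMetadata(
            name="output", datatype="FP32", shape=[1, 25200, 85]
        )
    ],
)
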

@typing_extensions.final
class ModelInferRequest(google.protobuf.message.Message):
"""ModelInferRequest represents a request for model inference"""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

@typing_extensions.final
class InferRequestedOutputTensor(google.protobuf.message.Message):
"""An output tensor requested for an inference request."""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

NAME_FIELD_NUMBER: builtins.int
name: builtins.str
"""tensor name."""
def __init__(
self,
*,
name: builtins.str = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name"]) -> None: ...

MODEL_NAME_FIELD_NUMBER: builtins.int
MODEL_VERSION_FIELD_NUMBER: builtins.int
INPUTS_FIELD_NUMBER: builtins.int
OUTPUTS_FIELD_NUMBER: builtins.int
RAW_INPUT_CONTENTS_FIELD_NUMBER: builtins.int
model_name: builtins.str
"""name of the model to use for inferencing."""
model_version: builtins.str
"""model tag version"""
@property
def inputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InferTensor]:
"""input tensors for the inference."""
@property
def outputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModelInferRequest.InferRequestedOutputTensor]:
"""The requested output tensors for the inference. Optional, if not
specified all outputs specified in the model config will be
returned.
"""
@property
def raw_input_contents(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]:
"""raw input contents"""
def __init__(
self,
*,
model_name: builtins.str = ...,
model_version: builtins.str = ...,
inputs: collections.abc.Iterable[global___InferTensor] | None = ...,
outputs: collections.abc.Iterable[global___ModelInferRequest.InferRequestedOutputTensor] | None = ...,
raw_input_contents: collections.abc.Iterable[builtins.bytes] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["inputs", b"inputs", "model_name", b"model_name", "model_version", b"model_version", "outputs", b"outputs", "raw_input_contents", b"raw_input_contents"]) -> None: ...

global___ModelInferRequest = ModelInferRequest
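
A minimal sketch of assembling an inference request, serializing a NumPy array into raw_input_contents; the model name, tensor name, and array contents are assumptions, not values from this commit:

import numpy as np

from instill.helpers.protobufs import ray_pb2

# Serialize the input array as raw bytes and describe it with an InferTensor.
image = np.zeros((1, 3, 640, 640), dtype=np.float32)
infer_request = ray_pb2.ModelInferRequest(
    model_name="yolov7",
    model_version="v0.1.0",
    inputs=[
        ray_pb2.InferTensor(name="input", datatype="FP32", shape=list(image.shape))
    ],
    raw_input_contents=[image.tobytes()],
)
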

@typing_extensions.final
class ModelInferResponse(google.protobuf.message.Message):
"""ModelInferResponse represents a response for model inference"""

DESCRIPTOR: google.protobuf.descriptor.Descriptor

MODEL_NAME_FIELD_NUMBER: builtins.int
MODEL_VERSION_FIELD_NUMBER: builtins.int
OUTPUTS_FIELD_NUMBER: builtins.int
RAW_OUTPUT_CONTENTS_FIELD_NUMBER: builtins.int
model_name: builtins.str
"""name of the model to use for inferencing."""
model_version: builtins.str
"""model tag version"""
@property
def outputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InferTensor]:
"""output tensors"""
@property
def raw_output_contents(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]:
"""raw output contents"""
def __init__(
self,
*,
model_name: builtins.str = ...,
model_version: builtins.str = ...,
outputs: collections.abc.Iterable[global___InferTensor] | None = ...,
raw_output_contents: collections.abc.Iterable[builtins.bytes] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["model_name", b"model_name", "model_version", b"model_version", "outputs", b"outputs", "raw_output_contents", b"raw_output_contents"]) -> None: ...

global___ModelInferResponse = ModelInferResponse
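
And a matching sketch for reading the response: the raw bytes are decoded with NumPy using the shape reported in outputs (the response values here are fabricated so the snippet stands alone):

import numpy as np

from instill.helpers.protobufs import ray_pb2

# Fabricated response with one FP32 output tensor of shape (1, 4).
infer_response = ray_pb2.ModelInferResponse(
    model_name="yolov7",
    model_version="v0.1.0",
    outputs=[ray_pb2.InferTensor(name="output", datatype="FP32", shape=[1, 4])],
    raw_output_contents=[np.zeros((1, 4), dtype=np.float32).tobytes()],
)

# Decode the raw bytes back into an array using the advertised shape.
output = np.frombuffer(
    infer_response.raw_output_contents[0], dtype=np.float32
).reshape(tuple(infer_response.outputs[0].shape))
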