Commit
feat(ray): add help functions for ray model (#40)
Because
- provide support for constructing compatible `ray` model for `Instill Model`

This commit
- add helper functions for constructing `ray` model

Resolves INS-2497
Resolves INS-2498
Showing 9 changed files with 626 additions and 60 deletions.
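The stubs added in this commit define a `ray.serve.RayService` gRPC interface with three unary RPCs (`ModelReady`, `ModelMetadata`, `ModelInfer`); a compatible `ray` model ultimately has to answer these calls. Below is a minimal sketch of a servicer built only from the generated classes shown in the diffs further down. The class, method, and field names come from those stubs; everything else (the `ray_pb2_grpc` module name, the port, the dummy model values) is an illustrative assumption, not part of this commit.

    # Sketch: implement the generated ray.serve.RayService interface.
    # Assumes the generated modules are importable as ray_pb2 / ray_pb2_grpc
    # (standard protoc naming; the module filenames are not visible in this excerpt).
    from concurrent import futures

    import grpc
    import ray_pb2
    import ray_pb2_grpc


    class DummyModelServicer(ray_pb2_grpc.RayServiceServicer):
        def ModelReady(self, request, context):
            # Report the model as ready regardless of the requested name/version.
            return ray_pb2.ModelReadyResponse(ready=True)

        def ModelMetadata(self, request, context):
            # Advertise a single FP32 tensor as both input and output (dummy values).
            tensor = ray_pb2.ModelMetadataResponse.TensorMetadata(
                name="input", datatype="FP32", shape=[1, 3])
            return ray_pb2.ModelMetadataResponse(
                name=request.name, versions=["1"], framework="python",
                inputs=[tensor], outputs=[tensor])

        def ModelInfer(self, request, context):
            # Echo the raw input bytes back as the output payload.
            return ray_pb2.ModelInferResponse(
                model_name=request.model_name,
                model_version=request.model_version,
                outputs=request.inputs,
                raw_output_contents=request.raw_input_contents)


    if __name__ == "__main__":
        server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
        ray_pb2_grpc.add_RayServiceServicer_to_server(DummyModelServicer(), server)
        server.add_insecure_port("[::]:8001")  # port chosen arbitrarily for the sketch
        server.start()
        server.wait_for_termination()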
@@ -0,0 +1,92 @@
from google.protobuf.internal import containers as _containers
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union

DESCRIPTOR: _descriptor.FileDescriptor

class ModelReadyRequest(_message.Message):
    __slots__ = ["name", "version"]
    NAME_FIELD_NUMBER: _ClassVar[int]
    VERSION_FIELD_NUMBER: _ClassVar[int]
    name: str
    version: str
    def __init__(self, name: _Optional[str] = ..., version: _Optional[str] = ...) -> None: ...

class ModelReadyResponse(_message.Message):
    __slots__ = ["ready"]
    READY_FIELD_NUMBER: _ClassVar[int]
    ready: bool
    def __init__(self, ready: bool = ...) -> None: ...

class ModelMetadataRequest(_message.Message):
    __slots__ = ["name", "version"]
    NAME_FIELD_NUMBER: _ClassVar[int]
    VERSION_FIELD_NUMBER: _ClassVar[int]
    name: str
    version: str
    def __init__(self, name: _Optional[str] = ..., version: _Optional[str] = ...) -> None: ...

class InferTensor(_message.Message):
    __slots__ = ["name", "datatype", "shape"]
    NAME_FIELD_NUMBER: _ClassVar[int]
    DATATYPE_FIELD_NUMBER: _ClassVar[int]
    SHAPE_FIELD_NUMBER: _ClassVar[int]
    name: str
    datatype: str
    shape: _containers.RepeatedScalarFieldContainer[int]
    def __init__(self, name: _Optional[str] = ..., datatype: _Optional[str] = ..., shape: _Optional[_Iterable[int]] = ...) -> None: ...

class ModelMetadataResponse(_message.Message):
    __slots__ = ["name", "versions", "framework", "inputs", "outputs"]
    class TensorMetadata(_message.Message):
        __slots__ = ["name", "datatype", "shape"]
        NAME_FIELD_NUMBER: _ClassVar[int]
        DATATYPE_FIELD_NUMBER: _ClassVar[int]
        SHAPE_FIELD_NUMBER: _ClassVar[int]
        name: str
        datatype: str
        shape: _containers.RepeatedScalarFieldContainer[int]
        def __init__(self, name: _Optional[str] = ..., datatype: _Optional[str] = ..., shape: _Optional[_Iterable[int]] = ...) -> None: ...
    NAME_FIELD_NUMBER: _ClassVar[int]
    VERSIONS_FIELD_NUMBER: _ClassVar[int]
    FRAMEWORK_FIELD_NUMBER: _ClassVar[int]
    INPUTS_FIELD_NUMBER: _ClassVar[int]
    OUTPUTS_FIELD_NUMBER: _ClassVar[int]
    name: str
    versions: _containers.RepeatedScalarFieldContainer[str]
    framework: str
    inputs: _containers.RepeatedCompositeFieldContainer[ModelMetadataResponse.TensorMetadata]
    outputs: _containers.RepeatedCompositeFieldContainer[ModelMetadataResponse.TensorMetadata]
    def __init__(self, name: _Optional[str] = ..., versions: _Optional[_Iterable[str]] = ..., framework: _Optional[str] = ..., inputs: _Optional[_Iterable[_Union[ModelMetadataResponse.TensorMetadata, _Mapping]]] = ..., outputs: _Optional[_Iterable[_Union[ModelMetadataResponse.TensorMetadata, _Mapping]]] = ...) -> None: ...

class ModelInferRequest(_message.Message):
    __slots__ = ["model_name", "model_version", "inputs", "outputs", "raw_input_contents"]
    class InferRequestedOutputTensor(_message.Message):
        __slots__ = ["name"]
        NAME_FIELD_NUMBER: _ClassVar[int]
        name: str
        def __init__(self, name: _Optional[str] = ...) -> None: ...
    MODEL_NAME_FIELD_NUMBER: _ClassVar[int]
    MODEL_VERSION_FIELD_NUMBER: _ClassVar[int]
    INPUTS_FIELD_NUMBER: _ClassVar[int]
    OUTPUTS_FIELD_NUMBER: _ClassVar[int]
    RAW_INPUT_CONTENTS_FIELD_NUMBER: _ClassVar[int]
    model_name: str
    model_version: str
    inputs: _containers.RepeatedCompositeFieldContainer[InferTensor]
    outputs: _containers.RepeatedCompositeFieldContainer[ModelInferRequest.InferRequestedOutputTensor]
    raw_input_contents: _containers.RepeatedScalarFieldContainer[bytes]
    def __init__(self, model_name: _Optional[str] = ..., model_version: _Optional[str] = ..., inputs: _Optional[_Iterable[_Union[InferTensor, _Mapping]]] = ..., outputs: _Optional[_Iterable[_Union[ModelInferRequest.InferRequestedOutputTensor, _Mapping]]] = ..., raw_input_contents: _Optional[_Iterable[bytes]] = ...) -> None: ...

class ModelInferResponse(_message.Message):
    __slots__ = ["model_name", "model_version", "outputs", "raw_output_contents"]
    MODEL_NAME_FIELD_NUMBER: _ClassVar[int]
    MODEL_VERSION_FIELD_NUMBER: _ClassVar[int]
    OUTPUTS_FIELD_NUMBER: _ClassVar[int]
    RAW_OUTPUT_CONTENTS_FIELD_NUMBER: _ClassVar[int]
    model_name: str
    model_version: str
    outputs: _containers.RepeatedCompositeFieldContainer[InferTensor]
    raw_output_contents: _containers.RepeatedScalarFieldContainer[bytes]
    def __init__(self, model_name: _Optional[str] = ..., model_version: _Optional[str] = ..., outputs: _Optional[_Iterable[_Union[InferTensor, _Mapping]]] = ..., raw_output_contents: _Optional[_Iterable[bytes]] = ...) -> None: ...
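The message stubs above resemble the Triton/KServe-v2 style of inference protocol: tensor names, datatypes, and shapes travel in `InferTensor` entries, while the actual tensor data is carried out-of-band as serialized bytes in `raw_input_contents` / `raw_output_contents`. A small sketch of building a `ModelInferRequest` for a single FP32 tensor, assuming the compiled module is importable as `ray_pb2` (the generated gRPC code below imports it under that name); the model name and values are illustrative only.

    # Sketch: build a ModelInferRequest for one FP32 tensor of shape [1, 3].
    import struct

    import ray_pb2

    values = [0.1, 0.2, 0.3]
    request = ray_pb2.ModelInferRequest(
        model_name="my-model",  # hypothetical model name
        model_version="1",
        inputs=[ray_pb2.InferTensor(name="input", datatype="FP32", shape=[1, 3])],
        # Tensor data is passed as packed little-endian float32 bytes.
        raw_input_contents=[struct.pack(f"<{len(values)}f", *values)],
    )
    print(request.inputs[0].shape)  # [1, 3]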
@@ -0,0 +1,141 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

import ray_pb2 as ray__pb2


class RayServiceStub(object):
    """Ray service for internal process
    """

    def __init__(self, channel):
        """Constructor.
        Args:
            channel: A grpc.Channel.
        """
        self.ModelReady = channel.unary_unary(
                '/ray.serve.RayService/ModelReady',
                request_serializer=ray__pb2.ModelReadyRequest.SerializeToString,
                response_deserializer=ray__pb2.ModelReadyResponse.FromString,
                )
        self.ModelMetadata = channel.unary_unary(
                '/ray.serve.RayService/ModelMetadata',
                request_serializer=ray__pb2.ModelMetadataRequest.SerializeToString,
                response_deserializer=ray__pb2.ModelMetadataResponse.FromString,
                )
        self.ModelInfer = channel.unary_unary(
                '/ray.serve.RayService/ModelInfer',
                request_serializer=ray__pb2.ModelInferRequest.SerializeToString,
                response_deserializer=ray__pb2.ModelInferResponse.FromString,
                )


class RayServiceServicer(object):
    """Ray service for internal process
    """

    def ModelReady(self, request, context):
        """ModelReady method receives a ModelReadyRequest message and
        returns a ModelReadyResponse
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ModelMetadata(self, request, context):
        """ModelMetadata method receives a ModelMetadataRequest message and
        returns a ModelMetadataResponse
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ModelInfer(self, request, context):
        """ModelInfer method receives a ModelInferRequest message and
        returns a ModelInferResponse
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_RayServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'ModelReady': grpc.unary_unary_rpc_method_handler(
                    servicer.ModelReady,
                    request_deserializer=ray__pb2.ModelReadyRequest.FromString,
                    response_serializer=ray__pb2.ModelReadyResponse.SerializeToString,
            ),
            'ModelMetadata': grpc.unary_unary_rpc_method_handler(
                    servicer.ModelMetadata,
                    request_deserializer=ray__pb2.ModelMetadataRequest.FromString,
                    response_serializer=ray__pb2.ModelMetadataResponse.SerializeToString,
            ),
            'ModelInfer': grpc.unary_unary_rpc_method_handler(
                    servicer.ModelInfer,
                    request_deserializer=ray__pb2.ModelInferRequest.FromString,
                    response_serializer=ray__pb2.ModelInferResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'ray.serve.RayService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class RayService(object):
    """Ray service for internal process
    """

    @staticmethod
    def ModelReady(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ray.serve.RayService/ModelReady',
            ray__pb2.ModelReadyRequest.SerializeToString,
            ray__pb2.ModelReadyResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ModelMetadata(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ray.serve.RayService/ModelMetadata',
            ray__pb2.ModelMetadataRequest.SerializeToString,
            ray__pb2.ModelMetadataResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ModelInfer(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/ray.serve.RayService/ModelInfer',
            ray__pb2.ModelInferRequest.SerializeToString,
            ray__pb2.ModelInferResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
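With the client classes above, talking to a running model is a standard unary gRPC round trip through `RayServiceStub`. A minimal sketch follows; the target address and the `ray_pb2_grpc` module name are assumptions and not part of this diff.

    # Sketch: query readiness and metadata through the generated RayServiceStub.
    import grpc
    import ray_pb2
    import ray_pb2_grpc

    with grpc.insecure_channel("localhost:8001") as channel:  # address is assumed
        stub = ray_pb2_grpc.RayServiceStub(channel)

        ready = stub.ModelReady(
            ray_pb2.ModelReadyRequest(name="my-model", version="1"))
        print("ready:", ready.ready)

        meta = stub.ModelMetadata(
            ray_pb2.ModelMetadataRequest(name="my-model", version="1"))
        for tensor in meta.inputs:
            print(tensor.name, tensor.datatype, list(tensor.shape))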