
Commit

generate grpc protos
sakoush committed Dec 13, 2024
1 parent d16c33b commit c73e426
Showing 4 changed files with 109 additions and 15 deletions.
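
For context, these stubs are regenerated from the .proto definitions with grpcio-tools; the GRPC_GENERATED_VERSION = "1.67.1" constants added below indicate the compiler version used for this commit. A minimal sketch of such a regeneration step follows — the proto file names and directories are assumptions for illustration, not taken from this commit:

# Sketch only: regenerate the MLServer gRPC stubs with grpcio-tools.
# The proto file names and directories below are assumed, not part of this commit.
from grpc_tools import protoc

for proto in ("dataplane.proto", "model_repository.proto"):
    protoc.main(
        [
            "grpc_tools.protoc",
            "-I./proto",                          # assumed location of the .proto files
            "--python_out=./mlserver/grpc",       # writes *_pb2.py
            "--grpc_python_out=./mlserver/grpc",  # writes *_pb2_grpc.py
            f"./proto/{proto}",
        ]
    )
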
35 changes: 23 additions & 12 deletions mlserver/grpc/dataplane_pb2.py

Some generated files are not rendered by default.

46 changes: 46 additions & 0 deletions mlserver/grpc/dataplane_pb2_grpc.py
@@ -1,9 +1,32 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from . import dataplane_pb2 as dataplane__pb2

GRPC_GENERATED_VERSION = "1.67.1"
GRPC_VERSION = grpc.__version__
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower

    _version_not_supported = first_version_is_lower(
        GRPC_VERSION, GRPC_GENERATED_VERSION
    )
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    raise RuntimeError(
        f"The grpc package installed is at version {GRPC_VERSION},"
        + f" but the generated code in dataplane_pb2_grpc.py depends on"
        + f" grpcio>={GRPC_GENERATED_VERSION}."
        + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}"
        + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}."
    )


class GRPCInferenceServiceStub(object):
"""
@@ -21,51 +44,61 @@ def __init__(self, channel):
"/inference.GRPCInferenceService/ServerLive",
request_serializer=dataplane__pb2.ServerLiveRequest.SerializeToString,
response_deserializer=dataplane__pb2.ServerLiveResponse.FromString,
_registered_method=True,
)
self.ServerReady = channel.unary_unary(
"/inference.GRPCInferenceService/ServerReady",
request_serializer=dataplane__pb2.ServerReadyRequest.SerializeToString,
response_deserializer=dataplane__pb2.ServerReadyResponse.FromString,
_registered_method=True,
)
self.ModelReady = channel.unary_unary(
"/inference.GRPCInferenceService/ModelReady",
request_serializer=dataplane__pb2.ModelReadyRequest.SerializeToString,
response_deserializer=dataplane__pb2.ModelReadyResponse.FromString,
_registered_method=True,
)
self.ServerMetadata = channel.unary_unary(
"/inference.GRPCInferenceService/ServerMetadata",
request_serializer=dataplane__pb2.ServerMetadataRequest.SerializeToString,
response_deserializer=dataplane__pb2.ServerMetadataResponse.FromString,
_registered_method=True,
)
self.ModelMetadata = channel.unary_unary(
"/inference.GRPCInferenceService/ModelMetadata",
request_serializer=dataplane__pb2.ModelMetadataRequest.SerializeToString,
response_deserializer=dataplane__pb2.ModelMetadataResponse.FromString,
_registered_method=True,
)
self.ModelInfer = channel.unary_unary(
"/inference.GRPCInferenceService/ModelInfer",
request_serializer=dataplane__pb2.ModelInferRequest.SerializeToString,
response_deserializer=dataplane__pb2.ModelInferResponse.FromString,
_registered_method=True,
)
self.ModelStreamInfer = channel.stream_stream(
"/inference.GRPCInferenceService/ModelStreamInfer",
request_serializer=dataplane__pb2.ModelInferRequest.SerializeToString,
response_deserializer=dataplane__pb2.ModelInferResponse.FromString,
_registered_method=True,
)
self.RepositoryIndex = channel.unary_unary(
"/inference.GRPCInferenceService/RepositoryIndex",
request_serializer=dataplane__pb2.RepositoryIndexRequest.SerializeToString,
response_deserializer=dataplane__pb2.RepositoryIndexResponse.FromString,
_registered_method=True,
)
self.RepositoryModelLoad = channel.unary_unary(
"/inference.GRPCInferenceService/RepositoryModelLoad",
request_serializer=dataplane__pb2.RepositoryModelLoadRequest.SerializeToString,
response_deserializer=dataplane__pb2.RepositoryModelLoadResponse.FromString,
_registered_method=True,
)
self.RepositoryModelUnload = channel.unary_unary(
"/inference.GRPCInferenceService/RepositoryModelUnload",
request_serializer=dataplane__pb2.RepositoryModelUnloadRequest.SerializeToString,
response_deserializer=dataplane__pb2.RepositoryModelUnloadResponse.FromString,
_registered_method=True,
)


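The regenerated stub keeps the same call surface, so existing clients are unaffected; the _registered_method=True argument is an internal hint that newer grpcio-tools emits for registered-method call paths. A minimal client sketch against this stub follows — the localhost address and port 8081 are assumptions, adjust them to your MLServer deployment:

# Sketch only: calling the regenerated dataplane stub from a client.
# Assumes an MLServer instance serving gRPC on localhost:8081.
import grpc

from mlserver.grpc import dataplane_pb2, dataplane_pb2_grpc

with grpc.insecure_channel("localhost:8081") as channel:
    stub = dataplane_pb2_grpc.GRPCInferenceServiceStub(channel)
    live = stub.ServerLive(dataplane_pb2.ServerLiveRequest())
    print(live.live)  # True when the server reports itself as live
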
@@ -193,6 +226,9 @@ def add_GRPCInferenceServiceServicer_to_server(servicer, server):
"inference.GRPCInferenceService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
server.add_registered_method_handlers(
"inference.GRPCInferenceService", rpc_method_handlers
)


# This class is part of an EXPERIMENTAL API.
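
Besides the generic handler, the servicer registration helper now also calls server.add_registered_method_handlers, which is only available in recent grpcio releases — one reason the generated code carries the version guard at the top of the file. Below is a sketch of wiring a servicer into a server with this helper; the servicer is a placeholder implementation for illustration, not MLServer's actual one:

# Sketch only: serving the dataplane API with the regenerated module.
# The servicer implementation and port are placeholders for illustration.
from concurrent import futures

import grpc

from mlserver.grpc import dataplane_pb2, dataplane_pb2_grpc


class DummyDataPlane(dataplane_pb2_grpc.GRPCInferenceServiceServicer):
    def ServerLive(self, request, context):
        # Minimal handler: always report the server as live.
        return dataplane_pb2.ServerLiveResponse(live=True)


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
dataplane_pb2_grpc.add_GRPCInferenceServiceServicer_to_server(DummyDataPlane(), server)
server.add_insecure_port("[::]:8081")
server.start()
server.wait_for_termination()
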
@@ -229,6 +265,7 @@ def ServerLive(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -258,6 +295,7 @@ def ServerReady(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -287,6 +325,7 @@ def ModelReady(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -316,6 +355,7 @@ def ServerMetadata(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -345,6 +385,7 @@ def ModelMetadata(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -374,6 +415,7 @@ def ModelInfer(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -403,6 +445,7 @@ def ModelStreamInfer(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -432,6 +475,7 @@ def RepositoryIndex(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -461,6 +505,7 @@ def RepositoryModelLoad(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -490,4 +535,5 @@ def RepositoryModelUnload(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
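
The remaining hunks in this file add _registered_method=True to the static methods of the experimental GRPCInferenceService convenience class, which gRPC generates for one-shot calls without an explicit stub. For reference, a sketch of that call form — the target address is a placeholder, and the API is marked experimental by gRPC, so it may change between releases:

# Sketch only: one-shot call through the experimental convenience API.
# "localhost:8081" is a placeholder target.
from mlserver.grpc import dataplane_pb2, dataplane_pb2_grpc

response = dataplane_pb2_grpc.GRPCInferenceService.ServerReady(
    dataplane_pb2.ServerReadyRequest(),
    "localhost:8081",
    insecure=True,  # open an insecure channel just for this call
)
print(response.ready)
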
11 changes: 8 additions & 3 deletions mlserver/grpc/model_repository_pb2.py

Some generated files are not rendered by default.

32 changes: 32 additions & 0 deletions mlserver/grpc/model_repository_pb2_grpc.py
@@ -1,9 +1,32 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from . import model_repository_pb2 as model__repository__pb2

GRPC_GENERATED_VERSION = "1.67.1"
GRPC_VERSION = grpc.__version__
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower

    _version_not_supported = first_version_is_lower(
        GRPC_VERSION, GRPC_GENERATED_VERSION
    )
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    raise RuntimeError(
        f"The grpc package installed is at version {GRPC_VERSION},"
        + f" but the generated code in model_repository_pb2_grpc.py depends on"
        + f" grpcio>={GRPC_GENERATED_VERSION}."
        + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}"
        + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}."
    )


class ModelRepositoryServiceStub(object):
"""Missing associated documentation comment in .proto file."""
@@ -18,16 +41,19 @@ def __init__(self, channel):
"/inference.model_repository.ModelRepositoryService/RepositoryIndex",
request_serializer=model__repository__pb2.RepositoryIndexRequest.SerializeToString,
response_deserializer=model__repository__pb2.RepositoryIndexResponse.FromString,
_registered_method=True,
)
self.RepositoryModelLoad = channel.unary_unary(
"/inference.model_repository.ModelRepositoryService/RepositoryModelLoad",
request_serializer=model__repository__pb2.RepositoryModelLoadRequest.SerializeToString,
response_deserializer=model__repository__pb2.RepositoryModelLoadResponse.FromString,
_registered_method=True,
)
self.RepositoryModelUnload = channel.unary_unary(
"/inference.model_repository.ModelRepositoryService/RepositoryModelUnload",
request_serializer=model__repository__pb2.RepositoryModelUnloadRequest.SerializeToString,
response_deserializer=model__repository__pb2.RepositoryModelUnloadResponse.FromString,
_registered_method=True,
)


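The model repository stub receives the same treatment. A minimal client sketch for listing the repository index — the address, port, and response field names follow the usual V2/Triton repository API and are assumptions here:

# Sketch only: listing the model repository index via the regenerated stub.
# Assumes an MLServer instance serving gRPC on localhost:8081.
import grpc

from mlserver.grpc import model_repository_pb2, model_repository_pb2_grpc

with grpc.insecure_channel("localhost:8081") as channel:
    stub = model_repository_pb2_grpc.ModelRepositoryServiceStub(channel)
    index = stub.RepositoryIndex(model_repository_pb2.RepositoryIndexRequest(ready=False))
    for model in index.models:
        print(model.name, model.state)
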
@@ -75,6 +101,9 @@ def add_ModelRepositoryServiceServicer_to_server(servicer, server):
"inference.model_repository.ModelRepositoryService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
server.add_registered_method_handlers(
"inference.model_repository.ModelRepositoryService", rpc_method_handlers
)


# This class is part of an EXPERIMENTAL API.
@@ -108,6 +137,7 @@ def RepositoryIndex(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -137,6 +167,7 @@ def RepositoryModelLoad(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)

@staticmethod
@@ -166,4 +197,5 @@ def RepositoryModelUnload(
wait_for_ready,
timeout,
metadata,
_registered_method=True,
)
