Merge pull request #943 from Vafilor/feat/model.deployment
feat: Inference Service API
@@ -763,6 +763,114 @@
        ]
      }
    },
    "/apis/v1beta1/{namespace}/inferenceservice": {
      "post": {
        "operationId": "CreateInferenceService",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/GetInferenceServiceResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "body",
            "in": "body",
            "required": true,
            "schema": {
              "$ref": "#/definitions/CreateInferenceServiceRequest"
            }
          }
        ],
        "tags": [
          "InferenceService"
        ]
      }
    },
    "/apis/v1beta1/{namespace}/inferenceservice/{name}": {
      "get": {
        "operationId": "GetInferenceService",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/GetInferenceServiceResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "name",
            "in": "path",
            "required": true,
            "type": "string"
          }
        ],
        "tags": [
          "InferenceService"
        ]
      },
      "delete": {
        "operationId": "DeleteInferenceService",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "properties": {}
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "name",
            "in": "path",
            "required": true,
            "type": "string"
          }
        ],
        "tags": [
          "InferenceService"
        ]
      }
    },
    "/apis/v1beta1/{namespace}/secrets": {
      "get": {
        "operationId": "ListSecrets",
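Taken together, the three routes above are a minimal REST surface for inference services: create, inspect, delete. For orientation, a sketch of calling the create endpoint from Go is below; the host, namespace, payload values, and missing auth headers are all placeholders, not part of this diff.

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "net/http"
    )

    func main() {
        // Body shaped like the CreateInferenceServiceRequest definition further down.
        payload, _ := json.Marshal(map[string]interface{}{
            "name": "example-model", // hypothetical service name
            "predictor": map[string]string{
                "name":       "tensorflow",             // hypothetical predictor name
                "storageUri": "s3://bucket/model-path", // hypothetical model location
            },
        })

        // "onepanel.example.com" and the "default" namespace are placeholders.
        url := "https://onepanel.example.com/apis/v1beta1/default/inferenceservice"
        resp, err := http.Post(url, "application/json", bytes.NewReader(payload))
        if err != nil {
            panic(err)
        }
        defer resp.Body.Close()
        // On success the server replies 200 with a GetInferenceServiceResponse body.
        fmt.Println(resp.Status)
    }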
@@ -3260,6 +3368,43 @@
        }
      }
    },
    "Container": {
      "type": "object",
      "properties": {
        "image": {
          "type": "string"
        },
        "name": {
          "type": "string"
        },
        "env": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/Env"
          }
        }
      }
    },
    "CreateInferenceServiceRequest": {
      "type": "object",
      "properties": {
        "namespace": {
          "type": "string"
        },
        "name": {
          "type": "string"
        },
        "defaultTransformerImage": {
          "type": "string"
        },
        "predictor": {
          "$ref": "#/definitions/InferenceServicePredictor"
        },
        "transformer": {
          "$ref": "#/definitions/InferenceServiceTransformer"
        }
      }
    },
    "CreateWorkflowExecutionBody": {
      "type": "object",
      "properties": {
@@ -3362,6 +3507,17 @@
        }
      }
    },
    "Env": {
      "type": "object",
      "properties": {
        "name": {
          "type": "string"
        },
        "value": {
          "type": "string"
        }
      }
    },
    "File": {
      "type": "object",
      "properties": {
@@ -3431,6 +3587,23 @@
        }
      }
    },
    "GetInferenceServiceResponse": {
      "type": "object",
      "properties": {
        "ready": {
          "type": "boolean"
        },
        "conditions": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/InferenceServiceCondition"
          }
        },
        "predictUrl": {
          "type": "string"
        }
      }
    },
    "GetLabelsResponse": {
      "type": "object",
      "properties": {
@@ -3497,6 +3670,72 @@
        }
      }
    },
    "InferenceServiceCondition": {
      "type": "object",
      "properties": {
        "lastTransitionTime": {
          "type": "string"
        },
        "status": {
          "type": "string"
        },
        "type": {
          "type": "string"
        }
      }
    },
    "InferenceServicePredictor": {
      "type": "object",
      "properties": {
        "name": {
          "type": "string"
        },
        "runtimeVersion": {
          "type": "string"
        },
        "storageUri": {
          "type": "string"
        },
        "nodeSelector": {
          "type": "string"
        },
        "minCpu": {
          "type": "string"
        },
        "minMemory": {
          "type": "string"
        },
        "maxCpu": {
          "type": "string"
        },
        "maxMemory": {
          "type": "string"
        }
      }
    },
    "InferenceServiceTransformer": {
      "type": "object",
      "properties": {
        "containers": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/Container"
          }
        },
        "minCpu": {
          "type": "string"
        },
        "minMemory": {
          "type": "string"
        },
        "maxCpu": {
          "type": "string"
        },
        "maxMemory": {
          "type": "string"
        }
      }
    },
    "IsAuthorized": {
      "type": "object",
      "properties": {
api/gen/inference_service.pb.go (new file, 999 lines)
@@ -0,0 +1,999 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
//     protoc-gen-go v1.25.0
//     protoc        v3.14.0
// source: inference_service.proto

package gen

import (
    proto "github.com/golang/protobuf/proto"
    _ "google.golang.org/genproto/googleapis/api/annotations"
    protoreflect "google.golang.org/protobuf/reflect/protoreflect"
    protoimpl "google.golang.org/protobuf/runtime/protoimpl"
    emptypb "google.golang.org/protobuf/types/known/emptypb"
    reflect "reflect"
    sync "sync"
)

const (
    // Verify that this generated code is sufficiently up-to-date.
    _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
    // Verify that runtime/protoimpl is sufficiently up-to-date.
    _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4

type InferenceServiceIdentifier struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
    Name      string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
}

func (x *InferenceServiceIdentifier) Reset() {
    *x = InferenceServiceIdentifier{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[0]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *InferenceServiceIdentifier) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServiceIdentifier) ProtoMessage() {}

func (x *InferenceServiceIdentifier) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[0]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use InferenceServiceIdentifier.ProtoReflect.Descriptor instead.
func (*InferenceServiceIdentifier) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{0}
}

func (x *InferenceServiceIdentifier) GetNamespace() string {
    if x != nil {
        return x.Namespace
    }
    return ""
}

func (x *InferenceServiceIdentifier) GetName() string {
    if x != nil {
        return x.Name
    }
    return ""
}

type Env struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Name  string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}

func (x *Env) Reset() {
    *x = Env{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[1]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *Env) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*Env) ProtoMessage() {}

func (x *Env) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[1]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use Env.ProtoReflect.Descriptor instead.
func (*Env) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{1}
}

func (x *Env) GetName() string {
    if x != nil {
        return x.Name
    }
    return ""
}

func (x *Env) GetValue() string {
    if x != nil {
        return x.Value
    }
    return ""
}

type Container struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"`
    Name  string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
    Env   []*Env `protobuf:"bytes,3,rep,name=env,proto3" json:"env,omitempty"`
}

func (x *Container) Reset() {
    *x = Container{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[2]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *Container) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*Container) ProtoMessage() {}

func (x *Container) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[2]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use Container.ProtoReflect.Descriptor instead.
func (*Container) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{2}
}

func (x *Container) GetImage() string {
    if x != nil {
        return x.Image
    }
    return ""
}

func (x *Container) GetName() string {
    if x != nil {
        return x.Name
    }
    return ""
}

func (x *Container) GetEnv() []*Env {
    if x != nil {
        return x.Env
    }
    return nil
}
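// The Env and Container messages mirror their swagger definitions above. A quick
// composition sketch within this package (image and values are illustrative only):
//
//     container := &Container{
//         Image: "docker.io/example/transformer:latest", // placeholder image
//         Name:  "transformer",
//         Env: []*Env{
//             {Name: "STORAGE_URI", Value: "s3://bucket/model-path"},
//         },
//     }
//     _ = container.GetEnv() // generated getters are nil-safe on the receiver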
type InferenceServiceTransformer struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Containers []*Container `protobuf:"bytes,1,rep,name=containers,proto3" json:"containers,omitempty"`
    MinCpu     string       `protobuf:"bytes,2,opt,name=minCpu,proto3" json:"minCpu,omitempty"`
    MinMemory  string       `protobuf:"bytes,3,opt,name=minMemory,proto3" json:"minMemory,omitempty"`
    MaxCpu     string       `protobuf:"bytes,4,opt,name=maxCpu,proto3" json:"maxCpu,omitempty"`
    MaxMemory  string       `protobuf:"bytes,5,opt,name=maxMemory,proto3" json:"maxMemory,omitempty"`
}

func (x *InferenceServiceTransformer) Reset() {
    *x = InferenceServiceTransformer{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[3]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *InferenceServiceTransformer) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServiceTransformer) ProtoMessage() {}

func (x *InferenceServiceTransformer) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[3]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use InferenceServiceTransformer.ProtoReflect.Descriptor instead.
func (*InferenceServiceTransformer) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{3}
}

func (x *InferenceServiceTransformer) GetContainers() []*Container {
    if x != nil {
        return x.Containers
    }
    return nil
}

func (x *InferenceServiceTransformer) GetMinCpu() string {
    if x != nil {
        return x.MinCpu
    }
    return ""
}

func (x *InferenceServiceTransformer) GetMinMemory() string {
    if x != nil {
        return x.MinMemory
    }
    return ""
}

func (x *InferenceServiceTransformer) GetMaxCpu() string {
    if x != nil {
        return x.MaxCpu
    }
    return ""
}

func (x *InferenceServiceTransformer) GetMaxMemory() string {
    if x != nil {
        return x.MaxMemory
    }
    return ""
}

type InferenceServicePredictor struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Name           string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    RuntimeVersion string `protobuf:"bytes,2,opt,name=runtimeVersion,proto3" json:"runtimeVersion,omitempty"`
    StorageUri     string `protobuf:"bytes,3,opt,name=storageUri,proto3" json:"storageUri,omitempty"`
    NodeSelector   string `protobuf:"bytes,4,opt,name=nodeSelector,proto3" json:"nodeSelector,omitempty"`
    MinCpu         string `protobuf:"bytes,5,opt,name=minCpu,proto3" json:"minCpu,omitempty"`
    MinMemory      string `protobuf:"bytes,6,opt,name=minMemory,proto3" json:"minMemory,omitempty"`
    MaxCpu         string `protobuf:"bytes,7,opt,name=maxCpu,proto3" json:"maxCpu,omitempty"`
    MaxMemory      string `protobuf:"bytes,8,opt,name=maxMemory,proto3" json:"maxMemory,omitempty"`
}

func (x *InferenceServicePredictor) Reset() {
    *x = InferenceServicePredictor{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[4]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *InferenceServicePredictor) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServicePredictor) ProtoMessage() {}

func (x *InferenceServicePredictor) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[4]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use InferenceServicePredictor.ProtoReflect.Descriptor instead.
func (*InferenceServicePredictor) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{4}
}

func (x *InferenceServicePredictor) GetName() string {
    if x != nil {
        return x.Name
    }
    return ""
}

func (x *InferenceServicePredictor) GetRuntimeVersion() string {
    if x != nil {
        return x.RuntimeVersion
    }
    return ""
}

func (x *InferenceServicePredictor) GetStorageUri() string {
    if x != nil {
        return x.StorageUri
    }
    return ""
}

func (x *InferenceServicePredictor) GetNodeSelector() string {
    if x != nil {
        return x.NodeSelector
    }
    return ""
}

func (x *InferenceServicePredictor) GetMinCpu() string {
    if x != nil {
        return x.MinCpu
    }
    return ""
}

func (x *InferenceServicePredictor) GetMinMemory() string {
    if x != nil {
        return x.MinMemory
    }
    return ""
}

func (x *InferenceServicePredictor) GetMaxCpu() string {
    if x != nil {
        return x.MaxCpu
    }
    return ""
}

func (x *InferenceServicePredictor) GetMaxMemory() string {
    if x != nil {
        return x.MaxMemory
    }
    return ""
}

type CreateInferenceServiceRequest struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Namespace               string                       `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
    Name                    string                       `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
    DefaultTransformerImage string                       `protobuf:"bytes,3,opt,name=defaultTransformerImage,proto3" json:"defaultTransformerImage,omitempty"`
    Predictor               *InferenceServicePredictor   `protobuf:"bytes,4,opt,name=predictor,proto3" json:"predictor,omitempty"`
    Transformer             *InferenceServiceTransformer `protobuf:"bytes,5,opt,name=transformer,proto3" json:"transformer,omitempty"`
}

func (x *CreateInferenceServiceRequest) Reset() {
    *x = CreateInferenceServiceRequest{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[5]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *CreateInferenceServiceRequest) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*CreateInferenceServiceRequest) ProtoMessage() {}

func (x *CreateInferenceServiceRequest) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[5]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use CreateInferenceServiceRequest.ProtoReflect.Descriptor instead.
func (*CreateInferenceServiceRequest) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{5}
}

func (x *CreateInferenceServiceRequest) GetNamespace() string {
    if x != nil {
        return x.Namespace
    }
    return ""
}

func (x *CreateInferenceServiceRequest) GetName() string {
    if x != nil {
        return x.Name
    }
    return ""
}

func (x *CreateInferenceServiceRequest) GetDefaultTransformerImage() string {
    if x != nil {
        return x.DefaultTransformerImage
    }
    return ""
}

func (x *CreateInferenceServiceRequest) GetPredictor() *InferenceServicePredictor {
    if x != nil {
        return x.Predictor
    }
    return nil
}

func (x *CreateInferenceServiceRequest) GetTransformer() *InferenceServiceTransformer {
    if x != nil {
        return x.Transformer
    }
    return nil
}
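// Putting the request types together, a caller might build a create request like
// the sketch below. All field values are illustrative; the min/max resource strings
// presumably follow the Kubernetes quantity convention (e.g. "2Gi"), though this
// diff does not state that explicitly.
//
//     req := &CreateInferenceServiceRequest{
//         Namespace: "default",
//         Name:      "example-model",
//         Predictor: &InferenceServicePredictor{
//             Name:       "tensorflow",             // hypothetical predictor name
//             StorageUri: "s3://bucket/model-path", // hypothetical model location
//             MinCpu:     "1",
//             MinMemory:  "2Gi",
//             MaxCpu:     "2",
//             MaxMemory:  "4Gi",
//         },
//         Transformer: &InferenceServiceTransformer{
//             Containers: []*Container{
//                 {Image: "docker.io/example/transformer:latest", Name: "transformer"},
//             },
//         },
//     }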
type DeployModelResponse struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Status string `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"`
}

func (x *DeployModelResponse) Reset() {
    *x = DeployModelResponse{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[6]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *DeployModelResponse) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*DeployModelResponse) ProtoMessage() {}

func (x *DeployModelResponse) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[6]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use DeployModelResponse.ProtoReflect.Descriptor instead.
func (*DeployModelResponse) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{6}
}

func (x *DeployModelResponse) GetStatus() string {
    if x != nil {
        return x.Status
    }
    return ""
}

type InferenceServiceCondition struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    LastTransitionTime string `protobuf:"bytes,1,opt,name=lastTransitionTime,proto3" json:"lastTransitionTime,omitempty"`
    Status             string `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"`
    Type               string `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"`
}

func (x *InferenceServiceCondition) Reset() {
    *x = InferenceServiceCondition{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[7]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *InferenceServiceCondition) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServiceCondition) ProtoMessage() {}

func (x *InferenceServiceCondition) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[7]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use InferenceServiceCondition.ProtoReflect.Descriptor instead.
func (*InferenceServiceCondition) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{7}
}

func (x *InferenceServiceCondition) GetLastTransitionTime() string {
    if x != nil {
        return x.LastTransitionTime
    }
    return ""
}

func (x *InferenceServiceCondition) GetStatus() string {
    if x != nil {
        return x.Status
    }
    return ""
}

func (x *InferenceServiceCondition) GetType() string {
    if x != nil {
        return x.Type
    }
    return ""
}

type GetInferenceServiceResponse struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Ready      bool                         `protobuf:"varint,1,opt,name=ready,proto3" json:"ready,omitempty"`
    Conditions []*InferenceServiceCondition `protobuf:"bytes,2,rep,name=conditions,proto3" json:"conditions,omitempty"`
    PredictUrl string                       `protobuf:"bytes,3,opt,name=predictUrl,proto3" json:"predictUrl,omitempty"`
}

func (x *GetInferenceServiceResponse) Reset() {
    *x = GetInferenceServiceResponse{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[8]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *GetInferenceServiceResponse) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*GetInferenceServiceResponse) ProtoMessage() {}

func (x *GetInferenceServiceResponse) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[8]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use GetInferenceServiceResponse.ProtoReflect.Descriptor instead.
func (*GetInferenceServiceResponse) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{8}
}

func (x *GetInferenceServiceResponse) GetReady() bool {
    if x != nil {
        return x.Ready
    }
    return false
}

func (x *GetInferenceServiceResponse) GetConditions() []*InferenceServiceCondition {
    if x != nil {
        return x.Conditions
    }
    return nil
}

func (x *GetInferenceServiceResponse) GetPredictUrl() string {
    if x != nil {
        return x.PredictUrl
    }
    return ""
}
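// GetInferenceServiceResponse is the response for both create and get (see the
// swagger diff above), so a caller can poll it until the service reports ready.
// A sketch assuming an InferenceServiceClient from the generated gRPC bindings
// (the same client type the gateway code later in this diff uses); "default" and
// "example-model" are placeholders:
//
//     func checkReady(ctx context.Context, client InferenceServiceClient) error {
//         resp, err := client.GetInferenceService(ctx, &InferenceServiceIdentifier{
//             Namespace: "default",
//             Name:      "example-model",
//         })
//         if err != nil {
//             return err
//         }
//         if resp.GetReady() {
//             fmt.Println("predict endpoint:", resp.GetPredictUrl())
//             return nil
//         }
//         // Not ready yet: surface the latest conditions for debugging.
//         for _, c := range resp.GetConditions() {
//             fmt.Printf("%s=%s (last transition %s)\n", c.GetType(), c.GetStatus(), c.GetLastTransitionTime())
//         }
//         return nil
//     }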
type InferenceServiceEndpoints struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Predict string `protobuf:"bytes,1,opt,name=predict,proto3" json:"predict,omitempty"`
}

func (x *InferenceServiceEndpoints) Reset() {
    *x = InferenceServiceEndpoints{}
    if protoimpl.UnsafeEnabled {
        mi := &file_inference_service_proto_msgTypes[9]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *InferenceServiceEndpoints) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServiceEndpoints) ProtoMessage() {}

func (x *InferenceServiceEndpoints) ProtoReflect() protoreflect.Message {
    mi := &file_inference_service_proto_msgTypes[9]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use InferenceServiceEndpoints.ProtoReflect.Descriptor instead.
func (*InferenceServiceEndpoints) Descriptor() ([]byte, []int) {
    return file_inference_service_proto_rawDescGZIP(), []int{9}
}

func (x *InferenceServiceEndpoints) GetPredict() string {
    if x != nil {
        return x.Predict
    }
    return ""
}

var File_inference_service_proto protoreflect.FileDescriptor

var file_inference_service_proto_rawDesc = []byte{
    0x0a, 0x17, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x73, 0x65, 0x72, 0x76,
    0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c,
    0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74,
    0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f,
    0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d,
    0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4e, 0x0a, 0x1a, 0x49, 0x6e, 0x66,
    0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65,
    0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73,
    0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65,
    0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20,
    0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x2f, 0x0a, 0x03, 0x45, 0x6e, 0x76,
    0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
    0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
    0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x51, 0x0a, 0x09, 0x43, 0x6f,
    0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65,
    0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x12, 0x0a,
    0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
    0x65, 0x12, 0x1a, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x08,
    0x2e, 0x61, 0x70, 0x69, 0x2e, 0x45, 0x6e, 0x76, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x22, 0xb9, 0x01,
    0x0a, 0x1b, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69,
    0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x12, 0x2e, 0x0a,
    0x0a, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28,
    0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65,
    0x72, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x12, 0x16, 0x0a,
    0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70, 0x75, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d,
    0x69, 0x6e, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f,
    0x72, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d,
    0x6f, 0x72, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x18, 0x04, 0x20,
    0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d,
    0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
    0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x22, 0x87, 0x02, 0x0a, 0x19, 0x49, 0x6e,
    0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72,
    0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18,
    0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x72,
    0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20,
    0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x56, 0x65, 0x72, 0x73,
    0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x55, 0x72,
    0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65,
    0x55, 0x72, 0x69, 0x12, 0x22, 0x0a, 0x0c, 0x6e, 0x6f, 0x64, 0x65, 0x53, 0x65, 0x6c, 0x65, 0x63,
    0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6e, 0x6f, 0x64, 0x65, 0x53,
    0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70,
    0x75, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70, 0x75, 0x12,
    0x1c, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x06, 0x20, 0x01,
    0x28, 0x09, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x16, 0x0a,
    0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d,
    0x61, 0x78, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f,
    0x72, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d,
    0x6f, 0x72, 0x79, 0x22, 0x8d, 0x02, 0x0a, 0x1d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e,
    0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65,
    0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
    0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
    0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
    0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x17, 0x64, 0x65, 0x66, 0x61, 0x75,
    0x6c, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x49, 0x6d, 0x61,
    0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c,
    0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x49, 0x6d, 0x61, 0x67,
    0x65, 0x12, 0x3c, 0x0a, 0x09, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04,
    0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72,
    0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72, 0x65, 0x64, 0x69,
    0x63, 0x74, 0x6f, 0x72, 0x52, 0x09, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x12,
    0x42, 0x0a, 0x0b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x18, 0x05,
    0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72,
    0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73,
    0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x52, 0x0b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72,
    0x6d, 0x65, 0x72, 0x22, 0x2d, 0x0a, 0x13, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x4d, 0x6f, 0x64,
    0x65, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74,
    0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74,
    0x75, 0x73, 0x22, 0x77, 0x0a, 0x19, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
    0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12,
    0x2e, 0x0a, 0x12, 0x6c, 0x61, 0x73, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f,
    0x6e, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x6c, 0x61, 0x73,
    0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x12,
    0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
    0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18,
    0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x93, 0x01, 0x0a, 0x1b,
    0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76,
    0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72,
    0x65, 0x61, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x72, 0x65, 0x61, 0x64,
    0x79, 0x12, 0x3e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18,
    0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65,
    0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x64,
    0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e,
    0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x55, 0x72, 0x6c, 0x18,
    0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x55, 0x72,
    0x6c, 0x22, 0x35, 0x0a, 0x19, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65,
    0x72, 0x76, 0x69, 0x63, 0x65, 0x45, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x18,
    0x0a, 0x07, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
    0x07, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x32, 0xcf, 0x03, 0x0a, 0x10, 0x49, 0x6e, 0x66,
    0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x95, 0x01,
    0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63,
    0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43,
    0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65,
    0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61,
    0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
    0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x35,
    0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2f, 0x22, 0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31,
    0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
    0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72, 0x76, 0x69,
    0x63, 0x65, 0x3a, 0x01, 0x2a, 0x12, 0x93, 0x01, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66,
    0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1f, 0x2e,
    0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72,
    0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x1a, 0x20,
    0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63,
    0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
    0x22, 0x39, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x33, 0x12, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
    0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
    0x63, 0x65, 0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72,
    0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x8c, 0x01, 0x0a, 0x16,
    0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
    0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66,
    0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65,
    0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
    0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22,
    0x39, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x33, 0x2a, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76,
    0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
    0x65, 0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72, 0x76,
    0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69,
    0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65,
    0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e,
    0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}

var (
    file_inference_service_proto_rawDescOnce sync.Once
    file_inference_service_proto_rawDescData = file_inference_service_proto_rawDesc
)

func file_inference_service_proto_rawDescGZIP() []byte {
    file_inference_service_proto_rawDescOnce.Do(func() {
        file_inference_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_inference_service_proto_rawDescData)
    })
    return file_inference_service_proto_rawDescData
}

var file_inference_service_proto_msgTypes = make([]protoimpl.MessageInfo, 10)
var file_inference_service_proto_goTypes = []interface{}{
    (*InferenceServiceIdentifier)(nil),    // 0: api.InferenceServiceIdentifier
    (*Env)(nil),                           // 1: api.Env
    (*Container)(nil),                     // 2: api.Container
    (*InferenceServiceTransformer)(nil),   // 3: api.InferenceServiceTransformer
    (*InferenceServicePredictor)(nil),     // 4: api.InferenceServicePredictor
    (*CreateInferenceServiceRequest)(nil), // 5: api.CreateInferenceServiceRequest
    (*DeployModelResponse)(nil),           // 6: api.DeployModelResponse
    (*InferenceServiceCondition)(nil),     // 7: api.InferenceServiceCondition
    (*GetInferenceServiceResponse)(nil),   // 8: api.GetInferenceServiceResponse
    (*InferenceServiceEndpoints)(nil),     // 9: api.InferenceServiceEndpoints
    (*emptypb.Empty)(nil),                 // 10: google.protobuf.Empty
}
var file_inference_service_proto_depIdxs = []int32{
    1,  // 0: api.Container.env:type_name -> api.Env
    2,  // 1: api.InferenceServiceTransformer.containers:type_name -> api.Container
    4,  // 2: api.CreateInferenceServiceRequest.predictor:type_name -> api.InferenceServicePredictor
    3,  // 3: api.CreateInferenceServiceRequest.transformer:type_name -> api.InferenceServiceTransformer
    7,  // 4: api.GetInferenceServiceResponse.conditions:type_name -> api.InferenceServiceCondition
    5,  // 5: api.InferenceService.CreateInferenceService:input_type -> api.CreateInferenceServiceRequest
    0,  // 6: api.InferenceService.GetInferenceService:input_type -> api.InferenceServiceIdentifier
    0,  // 7: api.InferenceService.DeleteInferenceService:input_type -> api.InferenceServiceIdentifier
    8,  // 8: api.InferenceService.CreateInferenceService:output_type -> api.GetInferenceServiceResponse
    8,  // 9: api.InferenceService.GetInferenceService:output_type -> api.GetInferenceServiceResponse
    10, // 10: api.InferenceService.DeleteInferenceService:output_type -> google.protobuf.Empty
    8,  // [8:11] is the sub-list for method output_type
    5,  // [5:8] is the sub-list for method input_type
    5,  // [5:5] is the sub-list for extension type_name
    5,  // [5:5] is the sub-list for extension extendee
    0,  // [0:5] is the sub-list for field type_name
}

func init() { file_inference_service_proto_init() }
func file_inference_service_proto_init() {
    if File_inference_service_proto != nil {
        return
    }
    if !protoimpl.UnsafeEnabled {
        file_inference_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*InferenceServiceIdentifier); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*Env); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*Container); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*InferenceServiceTransformer); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*InferenceServicePredictor); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*CreateInferenceServiceRequest); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*DeployModelResponse); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*InferenceServiceCondition); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*GetInferenceServiceResponse); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_inference_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*InferenceServiceEndpoints); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
    }
    type x struct{}
    out := protoimpl.TypeBuilder{
        File: protoimpl.DescBuilder{
            GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
            RawDescriptor: file_inference_service_proto_rawDesc,
            NumEnums:      0,
            NumMessages:   10,
            NumExtensions: 0,
            NumServices:   1,
        },
        GoTypes:           file_inference_service_proto_goTypes,
        DependencyIndexes: file_inference_service_proto_depIdxs,
        MessageInfos:      file_inference_service_proto_msgTypes,
    }.Build()
    File_inference_service_proto = out.File
    file_inference_service_proto_rawDesc = nil
    file_inference_service_proto_goTypes = nil
    file_inference_service_proto_depIdxs = nil
}
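// The init machinery above registers the file descriptor with the protobuf
// runtime, which is what makes the ordinary proto APIs work on these types.
// A small round-trip sketch using the proto package (either the legacy
// github.com/golang/protobuf/proto imported above or google.golang.org/protobuf/proto):
//
//     msg := &InferenceServiceIdentifier{Namespace: "default", Name: "example-model"}
//     raw, err := proto.Marshal(msg) // resolves through the descriptor registered in init()
//     if err != nil {
//         panic(err)
//     }
//     var out InferenceServiceIdentifier
//     if err := proto.Unmarshal(raw, &out); err != nil {
//         panic(err)
//     }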
api/gen/inference_service.pb.gw.go (new file, 439 lines)
@@ -0,0 +1,439 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: inference_service.proto

/*
Package gen is a reverse proxy.

It translates gRPC into RESTful JSON APIs.
*/
package gen

import (
    "context"
    "io"
    "net/http"

    "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
    "github.com/grpc-ecosystem/grpc-gateway/v2/utilities"
    "google.golang.org/grpc"
    "google.golang.org/grpc/codes"
    "google.golang.org/grpc/grpclog"
    "google.golang.org/grpc/metadata"
    "google.golang.org/grpc/status"
    "google.golang.org/protobuf/proto"
)

// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = metadata.Join

func request_InferenceService_CreateInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq CreateInferenceServiceRequest
    var metadata runtime.ServerMetadata

    newReader, berr := utilities.IOReaderFactory(req.Body)
    if berr != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
    }
    if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
    }

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    msg, err := client.CreateInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
    return msg, metadata, err

}

func local_request_InferenceService_CreateInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq CreateInferenceServiceRequest
    var metadata runtime.ServerMetadata

    newReader, berr := utilities.IOReaderFactory(req.Body)
    if berr != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
    }
    if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
    }

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    msg, err := server.CreateInferenceService(ctx, &protoReq)
    return msg, metadata, err

}

func request_InferenceService_GetInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq InferenceServiceIdentifier
    var metadata runtime.ServerMetadata

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    val, ok = pathParams["name"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
    }

    protoReq.Name, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
    }

    msg, err := client.GetInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
    return msg, metadata, err

}

func local_request_InferenceService_GetInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq InferenceServiceIdentifier
    var metadata runtime.ServerMetadata

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    val, ok = pathParams["name"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
    }

    protoReq.Name, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
    }

    msg, err := server.GetInferenceService(ctx, &protoReq)
    return msg, metadata, err

}

func request_InferenceService_DeleteInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq InferenceServiceIdentifier
    var metadata runtime.ServerMetadata

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    val, ok = pathParams["name"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
    }

    protoReq.Name, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
    }

    msg, err := client.DeleteInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
    return msg, metadata, err

}

func local_request_InferenceService_DeleteInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq InferenceServiceIdentifier
    var metadata runtime.ServerMetadata

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    val, ok = pathParams["name"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
    }

    protoReq.Name, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
    }

    msg, err := server.DeleteInferenceService(ctx, &protoReq)
    return msg, metadata, err

}
// RegisterInferenceServiceHandlerServer registers the http handlers for service InferenceService to "mux".
|
||||
// UnaryRPC :call InferenceServiceServer directly.
|
||||
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
|
||||
// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterInferenceServiceHandlerFromEndpoint instead.
|
||||
func RegisterInferenceServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server InferenceServiceServer) error {
|
||||
|
||||
mux.Handle("POST", pattern_InferenceService_CreateInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(req.Context())
|
||||
defer cancel()
|
||||
var stream runtime.ServerTransportStream
|
||||
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
|
||||
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
|
||||
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/CreateInferenceService")
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := local_request_InferenceService_CreateInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
|
||||
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_InferenceService_CreateInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
mux.Handle("GET", pattern_InferenceService_GetInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(req.Context())
|
||||
defer cancel()
|
||||
var stream runtime.ServerTransportStream
|
||||
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
|
||||
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
|
||||
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/GetInferenceService")
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := local_request_InferenceService_GetInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
|
||||
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_InferenceService_GetInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
mux.Handle("DELETE", pattern_InferenceService_DeleteInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(req.Context())
|
||||
defer cancel()
|
||||
var stream runtime.ServerTransportStream
|
||||
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
|
||||
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
|
||||
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/DeleteInferenceService")
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := local_request_InferenceService_DeleteInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
|
||||
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_InferenceService_DeleteInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// RegisterInferenceServiceHandlerFromEndpoint is same as RegisterInferenceServiceHandler but
|
||||
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
|
||||
func RegisterInferenceServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
|
||||
conn, err := grpc.Dial(endpoint, opts...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
if err != nil {
|
||||
if cerr := conn.Close(); cerr != nil {
|
||||
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
|
||||
}
|
||||
return
|
||||
}
|
||||
go func() {
|
||||
<-ctx.Done()
|
||||
if cerr := conn.Close(); cerr != nil {
|
||||
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
|
||||
}
|
||||
}()
|
||||
}()
|
||||
|
||||
return RegisterInferenceServiceHandler(ctx, mux, conn)
|
||||
}
|
||||
|
||||
// RegisterInferenceServiceHandler registers the http handlers for service InferenceService to "mux".
|
||||
// The handlers forward requests to the grpc endpoint over "conn".
|
||||
func RegisterInferenceServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
|
||||
return RegisterInferenceServiceHandlerClient(ctx, mux, NewInferenceServiceClient(conn))
|
||||
}
|
||||
|
||||
// RegisterInferenceServiceHandlerClient registers the http handlers for service InferenceService
|
||||
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "InferenceServiceClient".
|
||||
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "InferenceServiceClient"
|
||||
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
|
||||
// "InferenceServiceClient" to call the correct interceptors.
|
||||
func RegisterInferenceServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client InferenceServiceClient) error {
|
||||
|
||||
mux.Handle("POST", pattern_InferenceService_CreateInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(req.Context())
|
||||
defer cancel()
|
||||
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
|
||||
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/CreateInferenceService")
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := request_InferenceService_CreateInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_InferenceService_CreateInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
mux.Handle("GET", pattern_InferenceService_GetInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(req.Context())
|
||||
defer cancel()
|
||||
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
|
||||
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/GetInferenceService")
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := request_InferenceService_GetInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_InferenceService_GetInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
mux.Handle("DELETE", pattern_InferenceService_DeleteInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
|
||||
ctx, cancel := context.WithCancel(req.Context())
|
||||
defer cancel()
|
||||
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
|
||||
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/DeleteInferenceService")
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
resp, md, err := request_InferenceService_DeleteInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
|
||||
ctx = runtime.NewServerMetadataContext(ctx, md)
|
||||
if err != nil {
|
||||
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
|
||||
return
|
||||
}
|
||||
|
||||
forward_InferenceService_DeleteInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
|
||||
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
pattern_InferenceService_CreateInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3}, []string{"apis", "v1beta1", "namespace", "inferenceservice"}, ""))
|
||||
|
||||
pattern_InferenceService_GetInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "inferenceservice", "name"}, ""))
|
||||
|
||||
pattern_InferenceService_DeleteInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "inferenceservice", "name"}, ""))
|
||||
)
|
||||
|
||||
var (
|
||||
forward_InferenceService_CreateInferenceService_0 = runtime.ForwardResponseMessage
|
||||
|
||||
forward_InferenceService_GetInferenceService_0 = runtime.ForwardResponseMessage
|
||||
|
||||
forward_InferenceService_DeleteInferenceService_0 = runtime.ForwardResponseMessage
|
||||
)
|
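For orientation, a minimal, self-contained sketch of how the registration helpers above are typically wired up: a grpc-gateway ServeMux dials the gRPC endpoint and serves the three HTTP routes. The addresses, ports, the WithInsecure dial option, and the import alias are illustrative assumptions, not part of this commit.

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc"

	api "github.com/onepanelio/core/api/gen"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	mux := runtime.NewServeMux()
	// Assumption: plaintext gRPC server listening locally.
	opts := []grpc.DialOption{grpc.WithInsecure()}
	if err := api.RegisterInferenceServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
		log.Fatal(err)
	}

	// Every HTTP request matching the patterns above is proxied to the gRPC server.
	log.Fatal(http.ListenAndServe(":8888", mux))
}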
api/gen/inference_service_grpc.pb.go (new file, 170 lines)
@@ -0,0 +1,170 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.

package gen

import (
	context "context"
	grpc "google.golang.org/grpc"
	codes "google.golang.org/grpc/codes"
	status "google.golang.org/grpc/status"
	emptypb "google.golang.org/protobuf/types/known/emptypb"
)

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion7

// InferenceServiceClient is the client API for InferenceService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type InferenceServiceClient interface {
	CreateInferenceService(ctx context.Context, in *CreateInferenceServiceRequest, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error)
	GetInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error)
	DeleteInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*emptypb.Empty, error)
}

type inferenceServiceClient struct {
	cc grpc.ClientConnInterface
}

func NewInferenceServiceClient(cc grpc.ClientConnInterface) InferenceServiceClient {
	return &inferenceServiceClient{cc}
}

func (c *inferenceServiceClient) CreateInferenceService(ctx context.Context, in *CreateInferenceServiceRequest, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error) {
	out := new(GetInferenceServiceResponse)
	err := c.cc.Invoke(ctx, "/api.InferenceService/CreateInferenceService", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *inferenceServiceClient) GetInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error) {
	out := new(GetInferenceServiceResponse)
	err := c.cc.Invoke(ctx, "/api.InferenceService/GetInferenceService", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *inferenceServiceClient) DeleteInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*emptypb.Empty, error) {
	out := new(emptypb.Empty)
	err := c.cc.Invoke(ctx, "/api.InferenceService/DeleteInferenceService", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// InferenceServiceServer is the server API for InferenceService service.
// All implementations must embed UnimplementedInferenceServiceServer
// for forward compatibility
type InferenceServiceServer interface {
	CreateInferenceService(context.Context, *CreateInferenceServiceRequest) (*GetInferenceServiceResponse, error)
	GetInferenceService(context.Context, *InferenceServiceIdentifier) (*GetInferenceServiceResponse, error)
	DeleteInferenceService(context.Context, *InferenceServiceIdentifier) (*emptypb.Empty, error)
	mustEmbedUnimplementedInferenceServiceServer()
}

// UnimplementedInferenceServiceServer must be embedded to have forward compatible implementations.
type UnimplementedInferenceServiceServer struct {
}

func (UnimplementedInferenceServiceServer) CreateInferenceService(context.Context, *CreateInferenceServiceRequest) (*GetInferenceServiceResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreateInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) GetInferenceService(context.Context, *InferenceServiceIdentifier) (*GetInferenceServiceResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) DeleteInferenceService(context.Context, *InferenceServiceIdentifier) (*emptypb.Empty, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeleteInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) mustEmbedUnimplementedInferenceServiceServer() {}

// UnsafeInferenceServiceServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to InferenceServiceServer will
// result in compilation errors.
type UnsafeInferenceServiceServer interface {
	mustEmbedUnimplementedInferenceServiceServer()
}

func RegisterInferenceServiceServer(s grpc.ServiceRegistrar, srv InferenceServiceServer) {
	s.RegisterService(&_InferenceService_serviceDesc, srv)
}

func _InferenceService_CreateInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(CreateInferenceServiceRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(InferenceServiceServer).CreateInferenceService(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.InferenceService/CreateInferenceService",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(InferenceServiceServer).CreateInferenceService(ctx, req.(*CreateInferenceServiceRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _InferenceService_GetInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(InferenceServiceIdentifier)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(InferenceServiceServer).GetInferenceService(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.InferenceService/GetInferenceService",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(InferenceServiceServer).GetInferenceService(ctx, req.(*InferenceServiceIdentifier))
	}
	return interceptor(ctx, in, info, handler)
}

func _InferenceService_DeleteInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(InferenceServiceIdentifier)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(InferenceServiceServer).DeleteInferenceService(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.InferenceService/DeleteInferenceService",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(InferenceServiceServer).DeleteInferenceService(ctx, req.(*InferenceServiceIdentifier))
	}
	return interceptor(ctx, in, info, handler)
}

var _InferenceService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "api.InferenceService",
	HandlerType: (*InferenceServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "CreateInferenceService",
			Handler:    _InferenceService_CreateInferenceService_Handler,
		},
		{
			MethodName: "GetInferenceService",
			Handler:    _InferenceService_GetInferenceService_Handler,
		},
		{
			MethodName: "DeleteInferenceService",
			Handler:    _InferenceService_DeleteInferenceService_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "inference_service.proto",
}
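A minimal sketch of calling the generated client above directly over gRPC; the address, namespace, and service name are placeholders, not values from this commit.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"

	api "github.com/onepanelio/core/api/gen"
)

func main() {
	conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := api.NewInferenceServiceClient(conn)
	resp, err := client.GetInferenceService(context.Background(), &api.InferenceServiceIdentifier{
		Namespace: "onepanel",
		Name:      "my-model",
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("ready:", resp.Ready, "predictUrl:", resp.PredictUrl)
}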
api/proto/inference_service.proto (new file, 92 lines)
@@ -0,0 +1,92 @@
syntax = "proto3";

package api;
option go_package = "github.com/onepanelio/core/api/gen";

import "google/api/annotations.proto";
import "google/protobuf/empty.proto";

service InferenceService {
    rpc CreateInferenceService (CreateInferenceServiceRequest) returns (GetInferenceServiceResponse) {
        option (google.api.http) = {
            post: "/apis/v1beta1/{namespace}/inferenceservice"
            body: "*"
        };
    }

    rpc GetInferenceService (InferenceServiceIdentifier) returns (GetInferenceServiceResponse) {
        option (google.api.http) = {
            get: "/apis/v1beta1/{namespace}/inferenceservice/{name}"
        };
    }

    rpc DeleteInferenceService (InferenceServiceIdentifier) returns (google.protobuf.Empty) {
        option (google.api.http) = {
            delete: "/apis/v1beta1/{namespace}/inferenceservice/{name}"
        };
    }
}

message InferenceServiceIdentifier {
    string namespace = 1;
    string name = 2;
}

message Env {
    string name = 1;
    string value = 2;
}

message Container {
    string image = 1;
    string name = 2;
    repeated Env env = 3;
}

message InferenceServiceTransformer {
    repeated Container containers = 1;
    string minCpu = 2;
    string minMemory = 3;
    string maxCpu = 4;
    string maxMemory = 5;
}

message InferenceServicePredictor {
    string name = 1;
    string runtimeVersion = 2;
    string storageUri = 3;
    string nodeSelector = 4;
    string minCpu = 5;
    string minMemory = 6;
    string maxCpu = 7;
    string maxMemory = 8;
}

message CreateInferenceServiceRequest {
    string namespace = 1;
    string name = 2;
    string defaultTransformerImage = 3;

    InferenceServicePredictor predictor = 4;
    InferenceServiceTransformer transformer = 5;
}

message DeployModelResponse {
    string status = 1;
}

message InferenceServiceCondition {
    string lastTransitionTime = 1;
    string status = 2;
    string type = 3;
}

message GetInferenceServiceResponse {
    bool ready = 1;
    repeated InferenceServiceCondition conditions = 2;
    string predictUrl = 3;
}

message InferenceServiceEndpoints {
    string predict = 1;
}
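The HTTP bindings above expose CreateInferenceService as POST /apis/v1beta1/{namespace}/inferenceservice with the request message as the JSON body (namespace is taken from the path). A sketch of the resulting REST call; the host, token, and field values are placeholders.

package main

import (
	"bytes"
	"fmt"
	"log"
	"net/http"
)

func main() {
	body := []byte(`{
		"name": "my-model",
		"predictor": {
			"name": "tensorflow",
			"storageUri": "s3://my-bucket/models/my-model"
		}
	}`)

	req, err := http.NewRequest("POST", "http://localhost:8888/apis/v1beta1/onepanel/inferenceservice", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Header forwarded to gRPC metadata by customHeaderMatcher in main.go below.
	req.Header.Set("onepanel-auth-token", "<token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}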
main.go (4 lines changed)
@@ -158,6 +158,7 @@ func startRPCServer(db *v1.DB, kubeConfig *v1.Config, sysConfig v1.SystemConfig,
	api.RegisterConfigServiceServer(s, server.NewConfigServer())
	api.RegisterServiceServiceServer(s, server.NewServiceServer())
	api.RegisterFileServiceServer(s, server.NewFileServer())
	api.RegisterInferenceServiceServer(s, server.NewInferenceService())

	go func() {
		if err := s.Serve(lis); err != nil {

@@ -194,6 +195,7 @@ func startHTTPProxy() {
	registerHandler(api.RegisterConfigServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
	registerHandler(api.RegisterServiceServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
	registerHandler(api.RegisterFileServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
	registerHandler(api.RegisterInferenceServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)

	log.Printf("Starting HTTP proxy on port %v", *httpPort)

@@ -286,6 +288,8 @@ func customHeaderMatcher(key string) (string, bool) {
	switch lowerCaseKey {
	case "onepanel-auth-token":
		return lowerCaseKey, true
	case "onepanel-access-token":
		return lowerCaseKey, true
	case "cookie":
		return lowerCaseKey, true
	default:
pkg/inference_service.go (new file, 100 lines)
@@ -0,0 +1,100 @@
package v1

import (
	"fmt"
	"github.com/onepanelio/core/pkg/util"
	"google.golang.org/grpc/codes"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/apimachinery/pkg/util/json"
	"k8s.io/client-go/kubernetes/scheme"
	"k8s.io/client-go/rest"
	"strings"
)

func modelRestClient() (*rest.RESTClient, error) {
	config := *NewConfig()
	config.GroupVersion = &schema.GroupVersion{Group: "serving.kubeflow.org", Version: "v1beta1"}
	config.APIPath = "/apis"
	config.NegotiatedSerializer = scheme.Codecs.WithoutConversion()

	return rest.RESTClientFor(&config)
}

// CreateInferenceService creates an InferenceService with KFServing
func (c *Client) CreateInferenceService(deployment *InferenceService) error {
	resource := deployment.ToResource()

	data, err := json.Marshal(resource)
	if err != nil {
		return err
	}

	restClient, err := modelRestClient()
	if err != nil {
		return err
	}

	err = restClient.Post().
		Namespace(deployment.Namespace).
		Name(deployment.Name).
		Resource(inferenceServiceResource).
		Body(data).
		Do().
		Error()

	if err != nil && strings.Contains(err.Error(), "already exists") {
		return util.NewUserError(codes.AlreadyExists, fmt.Sprintf("InferenceService with name '%v' already exists", deployment.Name))
	}

	return err
}

// GetModelStatus returns the model's status
func (c *Client) GetModelStatus(namespace, name string) (*InferenceServiceStatus, error) {
	restClient, err := modelRestClient()
	if err != nil {
		return nil, err
	}

	result := &k8sModel{}

	err = restClient.Get().
		Namespace(namespace).
		Name(name).
		Resource(inferenceServiceResource).
		Do().
		Into(result)

	if err != nil && strings.Contains(err.Error(), "not found") {
		return nil, util.NewUserError(codes.NotFound, "not found")
	}

	predictURL := result.Status.URL
	suffixIndex := strings.LastIndex(result.Status.Address.URL, "cluster.local")
	if suffixIndex >= 0 {
		predictURL += result.Status.Address.URL[suffixIndex+13:]
	}

	status := &InferenceServiceStatus{
		Conditions: result.Status.Conditions,
		Ready:      result.Status.Ready(),
		PredictURL: predictURL,
	}

	return status, err
}

// DeleteModel deletes the model
func (c *Client) DeleteModel(namespace, name string) error {
	restClient, err := modelRestClient()
	if err != nil {
		return err
	}

	return restClient.Delete().
		Namespace(namespace).
		Name(name).
		Resource(inferenceServiceResource).
		Do().
		Error()
}
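A self-contained illustration of the predict URL splice in GetModelStatus above: the offset 13 is len("cluster.local"), so everything after the cluster-internal suffix on the address URL (typically the model's predict path) is appended to the public status URL. The example values are invented.

package main

import (
	"fmt"
	"strings"
)

func main() {
	statusURL := "http://my-model.onepanel.example.com"
	addressURL := "http://my-model.onepanel.svc.cluster.local/v1/models/my-model:predict"

	predictURL := statusURL
	if i := strings.LastIndex(addressURL, "cluster.local"); i >= 0 {
		// len("cluster.local") == 13, matching suffixIndex+13 above.
		predictURL += addressURL[i+len("cluster.local"):]
	}

	fmt.Println(predictURL)
	// Output: http://my-model.onepanel.example.com/v1/models/my-model:predict
}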
pkg/inference_service_types.go (new file, 157 lines)
@@ -0,0 +1,157 @@
package v1

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"time"
)

const inferenceServiceResource = "InferenceServices"

// MachineResources are the cpu/memory limits
type MachineResources struct {
	CPU    string `json:"cpu,omitempty"`
	Memory string `json:"memory,omitempty"`
}

// Resources represent the machine resource requests/limits
type Resources struct {
	Limits   *MachineResources `json:"limits,omitempty"`
	Requests *MachineResources `json:"requests,omitempty"`
}

// Predictor contains information on what type of predictor we are using, and what resources it has available
type Predictor struct {
	Name           string            `json:"-"`
	RuntimeVersion string            `json:"runtimeVersion,omitempty"`
	StorageURI     string            `json:"storageUri"`
	Resources      *Resources        `json:"resources,omitempty"`
	NodeSelector   map[string]string `json:"nodeSelector,omitempty"`
}

// SetResources will set the cpu/memory requests/limits for the predictor. Empty strings are ignored
func (p *Predictor) SetResources(minCPU, maxCPU, minMemory, maxMemory string) {
	if minCPU == "" && maxCPU == "" && minMemory == "" && maxMemory == "" {
		return
	}

	p.Resources = &Resources{}
	if minCPU != "" || minMemory != "" {
		p.Resources.Requests = &MachineResources{
			CPU:    minCPU,
			Memory: minMemory,
		}
	}

	if maxCPU != "" || maxMemory != "" {
		p.Resources.Limits = &MachineResources{
			CPU:    maxCPU,
			Memory: maxMemory,
		}
	}
}

// SetNodeSelector will set the node selector to the input label: selector value
func (p *Predictor) SetNodeSelector(label, selector string) {
	p.NodeSelector = map[string]string{
		label: selector,
	}
}

// Env is a name/value environment variable
type Env struct {
	Name  string `json:"name"`
	Value string `json:"value"`
}

// TransformerContainer is a container specific to a Transformer
type TransformerContainer struct {
	Image     string     `json:"image"`
	Name      string     `json:"name"`
	Env       []Env      `json:"env"`
	Resources *Resources `json:"resources,omitempty"`
}

// Transformer is a unit that can convert model input and output to different formats in json
type Transformer struct {
	Containers []TransformerContainer `json:"containers"`
}

// InferenceService represents the information necessary to deploy an inference service
type InferenceService struct {
	Name      string
	Namespace string

	Transformer *Transformer
	Predictor   *Predictor
}

// InferenceServiceStatus represents information about an InferenceService
type InferenceServiceStatus struct {
	Ready      bool
	Conditions []inferenceServiceCondition
	PredictURL string
}

type inferenceServiceCondition struct {
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Status             string    `json:"status"`
	Type               string    `json:"type"`
}

type inferenceServiceAddress struct {
	URL string `json:"url"`
}

type inferenceServiceStatus struct {
	Conditions []inferenceServiceCondition `json:"conditions"`
	Address    inferenceServiceAddress    `json:"address"`
	URL        string                     `json:"url"`
}

// Ready returns true if there is a condition called Ready: true.
func (m *inferenceServiceStatus) Ready() bool {
	for _, condition := range m.Conditions {
		if condition.Type == "Ready" && condition.Status == "True" {
			return true
		}
	}

	return false
}

type k8sModel struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata" protobuf:"bytes,1,opt,name=metadata"`
	Status            inferenceServiceStatus `json:"status,omitempty"`
}

// DeepCopyObject is a stub to support the interface
func (k k8sModel) DeepCopyObject() runtime.Object {
	panic("implement me")
}

// ToResource converts the InferenceService into a KFServing spec
func (m *InferenceService) ToResource() interface{} {
	spec := map[string]interface{}{
		"predictor": map[string]interface{}{
			m.Predictor.Name: m.Predictor,
		},
	}

	if m.Transformer != nil {
		spec["transformer"] = m.Transformer
	}

	resource := map[string]interface{}{
		"apiVersion": "serving.kubeflow.org/v1beta1",
		"kind":       "InferenceService",
		"metadata": map[string]string{
			"namespace": m.Namespace,
			"name":      m.Name,
		},
		"spec": spec,
	}

	return resource
}
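A standalone sketch of the resource shape ToResource produces: the KFServing spec keys the predictor block by the predictor's name (hence the `json:"-"` tag on Predictor.Name). The namespace, name, and storage URI here are invented example values.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Mirrors the map built by ToResource for a predictor named "tensorflow".
	resource := map[string]interface{}{
		"apiVersion": "serving.kubeflow.org/v1beta1",
		"kind":       "InferenceService",
		"metadata":   map[string]string{"namespace": "onepanel", "name": "my-model"},
		"spec": map[string]interface{}{
			"predictor": map[string]interface{}{
				"tensorflow": map[string]string{"storageUri": "s3://my-bucket/models/my-model"},
			},
		},
	}

	out, _ := json.MarshalIndent(resource, "", "  ")
	fmt.Println(string(out))
}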
@@ -66,6 +66,10 @@ func getBearerToken(ctx context.Context) (*string, bool) {
		return &t, true
	}

	for _, t := range md.Get("onepanel-access-token") {
		return &t, true
	}

	log.WithFields(log.Fields{
		"Method": "getBearerToken",
	}).Error("Unable to get BearerToken:", md)
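This hunk makes getBearerToken also accept an "onepanel-access-token" metadata key. A small sketch of attaching that key to an outgoing gRPC call so the server-side lookup finds it; the token value is a placeholder.

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/metadata"
)

func main() {
	// Attach the access token to the outgoing context of a gRPC call.
	ctx := metadata.AppendToOutgoingContext(context.Background(), "onepanel-access-token", "<token>")

	md, _ := metadata.FromOutgoingContext(ctx)
	fmt.Println(md.Get("onepanel-access-token"))
}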
server/inferenceservice_server.go (new file, 204 lines)
@@ -0,0 +1,204 @@
package server

import (
	"context"
	"fmt"
	"github.com/golang/protobuf/ptypes/empty"
	api "github.com/onepanelio/core/api/gen"
	v1 "github.com/onepanelio/core/pkg"
	"github.com/onepanelio/core/pkg/util"
	"github.com/onepanelio/core/server/auth"
	"google.golang.org/grpc/codes"
	"time"
)

// InferenceServiceServer is an implementation of the grpc InferenceServiceServer
type InferenceServiceServer struct {
	api.UnimplementedInferenceServiceServer
}

// NewInferenceService creates a new InferenceServiceServer
func NewInferenceService() *InferenceServiceServer {
	return &InferenceServiceServer{}
}

// CreateInferenceService deploys an inference service
func (s *InferenceServiceServer) CreateInferenceService(ctx context.Context, req *api.CreateInferenceServiceRequest) (*api.GetInferenceServiceResponse, error) {
	client := getClient(ctx)
	allowed, err := auth.IsAuthorized(client, req.Namespace, "create", "serving.kubeflow.org", "inferenceservices", "")
	if err != nil || !allowed {
		return nil, err
	}

	if req.Predictor.Name == "" {
		return nil, util.NewUserError(codes.InvalidArgument, "missing key 'predictor.name'")
	}

	if req.Predictor.StorageUri == "" {
		return nil, util.NewUserError(codes.InvalidArgument, "missing key 'predictor.storageUri'")
	}

	if req.DefaultTransformerImage != "" && req.Transformer != nil {
		return nil, util.NewUserError(codes.InvalidArgument, "must set either defaultTransformerImage or transformer, but not both")
	}

	model := &v1.InferenceService{
		Name:      req.Name,
		Namespace: req.Namespace,
		Predictor: &v1.Predictor{
			Name:       req.Predictor.Name,
			StorageURI: req.Predictor.StorageUri,
		},
	}

	model.Predictor.RuntimeVersion = req.Predictor.RuntimeVersion
	model.Predictor.SetResources(req.Predictor.MinCpu, req.Predictor.MaxCpu, req.Predictor.MinMemory, req.Predictor.MaxMemory)
	if req.Predictor.NodeSelector != "" {
		sysConfig, err := client.GetSystemConfig()
		if err != nil {
			return nil, err
		}
		nodePoolLabel := sysConfig.NodePoolLabel()
		if nodePoolLabel == nil {
			return nil, fmt.Errorf("applicationNodePoolLabel not set")
		}
		model.Predictor.SetNodeSelector(*nodePoolLabel, req.Predictor.NodeSelector)
	}

	if req.Transformer != nil {
		model.Transformer = &v1.Transformer{}

		for i, container := range req.Transformer.Containers {
			modelContainer := v1.TransformerContainer{
				Image: container.Image,
			}

			if container.Name == "" {
				modelContainer.Name = fmt.Sprintf("kfserving-container-%v", i)
			} else {
				modelContainer.Name = container.Name
			}

			modelContainer.Resources = &v1.Resources{
				Requests: &v1.MachineResources{
					CPU:    req.Transformer.MinCpu,
					Memory: req.Transformer.MinMemory,
				},
				Limits: &v1.MachineResources{
					CPU:    req.Transformer.MaxCpu,
					Memory: req.Transformer.MaxMemory,
				},
			}

			for _, env := range container.Env {
				modelContainer.Env = append(modelContainer.Env, v1.Env{
					Name:  env.Name,
					Value: env.Value,
				})
			}

			if len(container.Env) == 0 {
				modelContainer.Env = []v1.Env{
					{
						Name:  "STORAGE_URI",
						Value: req.Predictor.StorageUri,
					},
					{
						Name:  "model",
						Value: req.Name,
					},
				}
			}

			model.Transformer.Containers = append(model.Transformer.Containers, modelContainer)
		}
	} else if req.DefaultTransformerImage != "" {
		model.Transformer = &v1.Transformer{
			Containers: []v1.TransformerContainer{
				{
					Image: req.DefaultTransformerImage,
					Name:  "kfserving-container",
					Env: []v1.Env{
						{
							Name:  "STORAGE_URI",
							Value: req.Predictor.StorageUri,
						},
						{
							Name:  "model",
							Value: req.Name,
						},
					},
				},
			},
		}
	}

	err = client.CreateInferenceService(model)
	if err != nil {
		return nil, err
	}

	status, err := client.GetModelStatus(req.Namespace, req.Name)
	if err != nil {
		return nil, err
	}

	apiConditions := make([]*api.InferenceServiceCondition, len(status.Conditions))
	for i := range status.Conditions {
		condition := status.Conditions[i]
		apiConditions[i] = &api.InferenceServiceCondition{
			LastTransitionTime: condition.LastTransitionTime.Format(time.RFC3339),
			Status:             condition.Status,
			Type:               condition.Type,
		}
	}

	return &api.GetInferenceServiceResponse{
		Ready:      status.Ready,
		Conditions: apiConditions,
		PredictUrl: status.PredictURL,
	}, nil
}

// GetInferenceService returns the status of an inferenceservice
func (s *InferenceServiceServer) GetInferenceService(ctx context.Context, req *api.InferenceServiceIdentifier) (*api.GetInferenceServiceResponse, error) {
	client := getClient(ctx)
	allowed, err := auth.IsAuthorized(client, req.Namespace, "get", "serving.kubeflow.org", "inferenceservices", req.Name)
	if err != nil || !allowed {
		return nil, err
	}

	status, err := client.GetModelStatus(req.Namespace, req.Name)
	if err != nil {
		return nil, err
	}

	apiConditions := make([]*api.InferenceServiceCondition, len(status.Conditions))
	for i := range status.Conditions {
		condition := status.Conditions[i]
		apiConditions[i] = &api.InferenceServiceCondition{
			LastTransitionTime: condition.LastTransitionTime.Format(time.RFC3339),
			Status:             condition.Status,
			Type:               condition.Type,
		}
	}

	return &api.GetInferenceServiceResponse{
		Ready:      status.Ready,
		Conditions: apiConditions,
		PredictUrl: status.PredictURL,
	}, nil
}

// DeleteInferenceService deletes an inference service
func (s *InferenceServiceServer) DeleteInferenceService(ctx context.Context, req *api.InferenceServiceIdentifier) (*empty.Empty, error) {
	client := getClient(ctx)
	allowed, err := auth.IsAuthorized(client, req.Namespace, "delete", "serving.kubeflow.org", "inferenceservices", req.Name)
	if err != nil || !allowed {
		return nil, err
	}

	err = client.DeleteModel(req.Namespace, req.Name)

	return &empty.Empty{}, err
}
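To tie the pieces together, a sketch of an end-to-end create call against this server: when only defaultTransformerImage is set, the handler above builds a single transformer container with the STORAGE_URI and model env vars. The address, namespace, image, and storage URI are placeholders.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"

	api "github.com/onepanelio/core/api/gen"
)

func main() {
	conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := api.NewInferenceServiceClient(conn)
	resp, err := client.CreateInferenceService(context.Background(), &api.CreateInferenceServiceRequest{
		Namespace: "onepanel",
		Name:      "my-model",
		Predictor: &api.InferenceServicePredictor{
			Name:       "tensorflow",
			StorageUri: "s3://my-bucket/models/my-model",
		},
		// Mutually exclusive with Transformer, per the validation above.
		DefaultTransformerImage: "onepanel/example-transformer:latest",
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("ready:", resp.Ready, "predictUrl:", resp.PredictUrl)
}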