Mirror of https://github.com/onepanelio/onepanel.git (synced 2025-11-02 19:04:00 +08:00)

Compare commits
79 Commits: v0.20.0-rc ... v1.0.2
| Author | SHA1 | Date |
|---|---|---|
| | c984ff34d5 | |
| | 5283b7beb6 | |
| | 2cddf4a88a | |
| | dd3d7f6632 | |
| | 4d1aff5c5b | |
| | 719613ecd4 | |
| | 09b854a434 | |
| | 493ca51682 | |
| | 6407c2a7b4 | |
| | 62896b2f52 | |
| | d934163fc8 | |
| | e991102d85 | |
| | 467f7f71dd | |
| | f1c0f0d31e | |
| | 11fc055ee3 | |
| | d9c79370e9 | |
| | 98f78d453a | |
| | 700b3bd512 | |
| | 3abdc54d3c | |
| | f570a710ba | |
| | c922b708fc | |
| | fc9669d757 | |
| | 8eeb90d3f1 | |
| | c25dfce84f | |
| | 5705bfa47f | |
| | 44a78effed | |
| | a8985a7878 | |
| | 69006309b4 | |
| | 22b3d984ec | |
| | 4528927463 | |
| | edf7a30f64 | |
| | 51fb86e3fe | |
| | d8e0e5c968 | |
| | 75719caec9 | |
| | 147c937252 | |
| | e0f3f81563 | |
| | 0021249464 | |
| | 7235951ec2 | |
| | f843074a3f | |
| | 8e6ef8d3eb | |
| | d226028b33 | |
| | 82585d1011 | |
| | 193dbe156e | |
| | 5ebccbd811 | |
| | 023fb50046 | |
| | abd8d3cde0 | |
| | 64d6dde1aa | |
| | d0d68470dd | |
| | 6f8e3f56e7 | |
| | 2b47ad7092 | |
| | 66e2418424 | |
| | 5b6979302e | |
| | afb98c295b | |
| | 1fb0d10b7c | |
| | c4438bfe0d | |
| | 8329706f22 | |
| | 09be35b2d6 | |
| | e4d83903c7 | |
| | 69bc6e3df1 | |
| | bfee6c2e34 | |
| | 47e03d7e7c | |
| | bcf78b54a0 | |
| | 96b8f522b3 | |
| | 98766cdc41 | |
| | 378850f591 | |
| | e27361466f | |
| | daabf17078 | |
| | 73385ad779 | |
| | c92c848134 | |
| | 82424605f6 | |
| | 9f05ab150a | |
| | 81de77d88b | |
| | ea47eaf49d | |
| | 42e99f0ac4 | |
| | ae702c474c | |
| | cfd63a3ef9 | |
| | 1b2d5623b4 | |
| | 86895a9dfe | |
| | ec94a13cd9 | |
4  .github/workflows/push_dev_branch.yaml  vendored
@@ -1,6 +1,10 @@
name: Publish dev docker image
on:
  push:
    paths-ignore:
      - LICENSE
      - ".github/**"
      - "*.md"
    branches:
      - master
jobs:

@@ -12,6 +12,7 @@ FROM golang:1.15.5
COPY --from=builder /go/bin/core .
COPY --from=builder /go/src/db ./db
COPY --from=builder /go/bin/goose .
COPY --from=builder /go/src/manifest ./manifest

EXPOSE 8888
EXPOSE 8887
16  README.md
@@ -1,6 +1,6 @@
<img width="200px" src="img/logo.png">

[](https://github.com/onepanelio/core/releases)
[](https://pypi.org/project/onepanel-sdk/)
@@ -10,9 +10,10 @@
[](https://landscape.lfai.foundation/?selected=onepanel)
[](https://opensource.org/licenses/Apache-2.0)

The open and extensible integrated development environment (IDE) for computer vision with built-in modules for model building, automated labeling, data processing, model training, hyperparameter tuning and workflow orchestration.
## End-to-end computer vision platform
Label, build, train, tune, deploy and automate in a unified platform that runs on any cloud and on-premises.

<img width="100%" src="img/onepanel.gif">
https://user-images.githubusercontent.com/1211823/116489376-afc60000-a849-11eb-8e8b-b0c64c07c144.mp4

## Why Onepanel?
<img width="100%" src="img/features.png">
@@ -20,9 +21,6 @@ The open and extensible integrated development environment (IDE) for computer vi
## Quick start
See [quick start guide](https://docs.onepanel.ai/docs/getting-started/quickstart) to get started.

## Online demo
For a quick look at some features see this shared, read-only [online demo](https://onepanel.typeform.com/to/kQfDX5Vf?product=github).

## Community
To submit a feature request, report a bug or documentation issue, please open a GitHub [pull request](https://github.com/onepanelio/core/pulls) or [issue](https://github.com/onepanelio/core/issues).

@@ -36,12 +34,12 @@ See [contribution guide](https://docs.onepanel.ai/docs/getting-started/contribut
## Acknowledgments
Onepanel seamlessly integrates the following open source projects under the hood:

[Argo](https://github.com/argoproj/argo) | [Couler](https://github.com/couler-proj/couler) | [CVAT](https://github.com/opencv/cvat) | [JupyterLab](https://github.com/jupyterlab/jupyterlab) | [NNI](https://github.com/microsoft/nni)
[Argo](https://github.com/argoproj/argo-workflows) | [Couler](https://github.com/couler-proj/couler) | [CVAT](https://github.com/opencv/cvat) | [JupyterLab](https://github.com/jupyterlab/jupyterlab) | [NNI](https://github.com/microsoft/nni)

We are grateful for the support these communities provide and do our best to contribute back as much as possible.

## License
Onepanel is licensed under [Apache 2.0](https://github.com/onepanelio/core/blob/master/LICENSE).

## For organizations
Visit our [website](https://www.onepanel.ai/) for more information on support options and enterprise solution.
## Enterprise support
Need enterprise features and support? Visit our [website](https://www.onepanel.ai/) for more information.
@@ -3,7 +3,7 @@
  "info": {
    "title": "Onepanel",
    "description": "Onepanel API",
    "version": "0.19.0",
    "version": "1.0.2",
    "contact": {
      "name": "Onepanel project",
      "url": "https://github.com/onepanelio/core"
@@ -22,6 +22,36 @@
    "application/octet-stream"
  ],
  "paths": {
    "/apis/v1beta/service/{name}": {
      "get": {
        "operationId": "HasService",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/HasServiceResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "name",
            "in": "path",
            "required": true,
            "type": "string"
          }
        ],
        "tags": [
          "ServiceService"
        ]
      }
    },
    "/apis/v1beta/{namespace}/field/workflow_executions/{fieldName}": {
      "get": {
        "operationId": "ListWorkflowExecutionsField",
@@ -647,6 +677,200 @@
        ]
      }
    },
    "/apis/v1beta1/{namespace}/files/list/{path}": {
      "get": {
        "operationId": "ListFiles",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/ListFilesResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "path",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "page",
            "in": "query",
            "required": false,
            "type": "integer",
            "format": "int32"
          },
          {
            "name": "perPage",
            "in": "query",
            "required": false,
            "type": "integer",
            "format": "int32"
          }
        ],
        "tags": [
          "FileService"
        ]
      }
    },
    "/apis/v1beta1/{namespace}/files/presigned-url/{key}": {
      "get": {
        "operationId": "GetObjectDownloadPresignedURL",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/GetPresignedUrlResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "key",
            "in": "path",
            "required": true,
            "type": "string"
          }
        ],
        "tags": [
          "FileService"
        ]
      }
    },
    "/apis/v1beta1/{namespace}/inferenceservice": {
      "post": {
        "operationId": "CreateInferenceService",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/GetInferenceServiceResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "body",
            "in": "body",
            "required": true,
            "schema": {
              "$ref": "#/definitions/CreateInferenceServiceRequest"
            }
          }
        ],
        "tags": [
          "InferenceService"
        ]
      }
    },
    "/apis/v1beta1/{namespace}/inferenceservice/{name}": {
      "get": {
        "operationId": "GetInferenceService",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/GetInferenceServiceResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "name",
            "in": "path",
            "required": true,
            "type": "string"
          }
        ],
        "tags": [
          "InferenceService"
        ]
      },
      "delete": {
        "operationId": "DeleteInferenceService",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "properties": {}
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "name",
            "in": "path",
            "required": true,
            "type": "string"
          }
        ],
        "tags": [
          "InferenceService"
        ]
      }
    },
    "/apis/v1beta1/{namespace}/secrets": {
      "get": {
        "operationId": "ListSecrets",
@@ -1245,48 +1469,6 @@
        ]
      }
    },
    "/apis/v1beta1/{namespace}/workflow_executions/{uid}/artifacts/{key}": {
      "get": {
        "operationId": "GetArtifact",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/ArtifactResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "uid",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "key",
            "in": "path",
            "required": true,
            "type": "string"
          }
        ],
        "tags": [
          "WorkflowService"
        ]
      }
    },
    "/apis/v1beta1/{namespace}/workflow_executions/{uid}/cron_start_statistics": {
      "post": {
        "operationId": "CronStartWorkflowExecutionStatistic",
@@ -1331,48 +1513,6 @@
        ]
      }
    },
    "/apis/v1beta1/{namespace}/workflow_executions/{uid}/files/{path}": {
      "get": {
        "operationId": "ListFiles",
        "responses": {
          "200": {
            "description": "A successful response.",
            "schema": {
              "$ref": "#/definitions/ListFilesResponse"
            }
          },
          "default": {
            "description": "An unexpected error response.",
            "schema": {
              "$ref": "#/definitions/google.rpc.Status"
            }
          }
        },
        "parameters": [
          {
            "name": "namespace",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "uid",
            "in": "path",
            "required": true,
            "type": "string"
          },
          {
            "name": "path",
            "in": "path",
            "required": true,
            "type": "string"
          }
        ],
        "tags": [
          "WorkflowService"
        ]
      }
    },
    "/apis/v1beta1/{namespace}/workflow_executions/{uid}/metric": {
      "post": {
        "operationId": "AddWorkflowExecutionMetrics",
@@ -3228,12 +3368,40 @@
      }
    }
  },
  "ArtifactResponse": {
  "Container": {
    "type": "object",
    "properties": {
      "data": {
        "type": "string",
        "format": "byte"
      "image": {
        "type": "string"
      },
      "name": {
        "type": "string"
      },
      "env": {
        "type": "array",
        "items": {
          "$ref": "#/definitions/Env"
        }
      }
    }
  },
  "CreateInferenceServiceRequest": {
    "type": "object",
    "properties": {
      "namespace": {
        "type": "string"
      },
      "name": {
        "type": "string"
      },
      "defaultTransformerImage": {
        "type": "string"
      },
      "predictor": {
        "$ref": "#/definitions/InferenceServicePredictor"
      },
      "transformer": {
        "$ref": "#/definitions/InferenceServiceTransformer"
      }
    }
  },
@@ -3282,6 +3450,9 @@
      "items": {
        "$ref": "#/definitions/KeyValue"
      }
    },
    "captureNode": {
      "type": "boolean"
    }
  }
},
@@ -3336,6 +3507,17 @@
    }
  }
},
"Env": {
  "type": "object",
  "properties": {
    "name": {
      "type": "string"
    },
    "value": {
      "type": "string"
    }
  }
},
"File": {
  "type": "object",
  "properties": {
@@ -3405,6 +3587,23 @@
    }
  }
},
"GetInferenceServiceResponse": {
  "type": "object",
  "properties": {
    "ready": {
      "type": "boolean"
    },
    "conditions": {
      "type": "array",
      "items": {
        "$ref": "#/definitions/InferenceServiceCondition"
      }
    },
    "predictUrl": {
      "type": "string"
    }
  }
},
"GetLabelsResponse": {
  "type": "object",
  "properties": {
@@ -3424,6 +3623,18 @@
    }
  }
},
"GetPresignedUrlResponse": {
  "type": "object",
  "properties": {
    "url": {
      "type": "string"
    },
    "size": {
      "type": "string",
      "format": "int64"
    }
  }
},
"GetWorkflowExecutionMetricsResponse": {
  "type": "object",
  "properties": {
@@ -3451,6 +3662,80 @@
    }
  }
},
"HasServiceResponse": {
  "type": "object",
  "properties": {
    "hasService": {
      "type": "boolean"
    }
  }
},
"InferenceServiceCondition": {
  "type": "object",
  "properties": {
    "lastTransitionTime": {
      "type": "string"
    },
    "status": {
      "type": "string"
    },
    "type": {
      "type": "string"
    }
  }
},
"InferenceServicePredictor": {
  "type": "object",
  "properties": {
    "name": {
      "type": "string"
    },
    "runtimeVersion": {
      "type": "string"
    },
    "storageUri": {
      "type": "string"
    },
    "nodeSelector": {
      "type": "string"
    },
    "minCpu": {
      "type": "string"
    },
    "minMemory": {
      "type": "string"
    },
    "maxCpu": {
      "type": "string"
    },
    "maxMemory": {
      "type": "string"
    }
  }
},
"InferenceServiceTransformer": {
  "type": "object",
  "properties": {
    "containers": {
      "type": "array",
      "items": {
        "$ref": "#/definitions/Container"
      }
    },
    "minCpu": {
      "type": "string"
    },
    "minMemory": {
      "type": "string"
    },
    "maxCpu": {
      "type": "string"
    },
    "maxMemory": {
      "type": "string"
    }
  }
},
"IsAuthorized": {
  "type": "object",
  "properties": {
@@ -3556,6 +3841,22 @@
"ListFilesResponse": {
  "type": "object",
  "properties": {
    "count": {
      "type": "integer",
      "format": "int32"
    },
    "totalCount": {
      "type": "integer",
      "format": "int32"
    },
    "page": {
      "type": "integer",
      "format": "int32"
    },
    "pages": {
      "type": "integer",
      "format": "int32"
    },
    "files": {
      "type": "array",
      "items": {
@@ -3883,6 +4184,9 @@
  "properties": {
    "name": {
      "type": "string"
    },
    "sourceName": {
      "type": "string"
    }
  }
},
@@ -4195,6 +4499,9 @@
    "items": {
      "$ref": "#/definitions/Parameter"
    }
  },
  "description": {
    "type": "string"
  }
}
},
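To make the new FileService routes concrete, here is a minimal sketch of calling the ListFiles endpoint over plain HTTP. The host, port, namespace, and path values are illustrative assumptions (the port matches the Dockerfile's EXPOSE 8888 above, but the deployed address may differ):

```go
package main

import (
    "fmt"
    "io"
    "net/http"
)

func main() {
    // Path parameters follow /apis/v1beta1/{namespace}/files/list/{path};
    // page and perPage are the optional query parameters declared above.
    // "default" and "data" are hypothetical namespace/path values.
    url := "http://localhost:8888/apis/v1beta1/default/files/list/data?page=1&perPage=20"
    resp, err := http.Get(url)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(body)) // JSON shaped like ListFilesResponse
}
```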
580  api/gen/files.pb.go  Normal file
@@ -0,0 +1,580 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// 	protoc-gen-go v1.25.0
// 	protoc        v3.14.0
// source: files.proto

package gen

import (
    proto "github.com/golang/protobuf/proto"
    _ "google.golang.org/genproto/googleapis/api/annotations"
    protoreflect "google.golang.org/protobuf/reflect/protoreflect"
    protoimpl "google.golang.org/protobuf/runtime/protoimpl"
    reflect "reflect"
    sync "sync"
)

const (
    // Verify that this generated code is sufficiently up-to-date.
    _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
    // Verify that runtime/protoimpl is sufficiently up-to-date.
    _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4

type File struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Path         string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
    Name         string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
    Extension    string `protobuf:"bytes,3,opt,name=extension,proto3" json:"extension,omitempty"`
    Size         int64  `protobuf:"varint,4,opt,name=size,proto3" json:"size,omitempty"`
    ContentType  string `protobuf:"bytes,5,opt,name=contentType,proto3" json:"contentType,omitempty"`
    LastModified string `protobuf:"bytes,6,opt,name=lastModified,proto3" json:"lastModified,omitempty"`
    Directory    bool   `protobuf:"varint,7,opt,name=directory,proto3" json:"directory,omitempty"`
}

func (x *File) Reset() {
    *x = File{}
    if protoimpl.UnsafeEnabled {
        mi := &file_files_proto_msgTypes[0]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *File) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*File) ProtoMessage() {}

func (x *File) ProtoReflect() protoreflect.Message {
    mi := &file_files_proto_msgTypes[0]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use File.ProtoReflect.Descriptor instead.
func (*File) Descriptor() ([]byte, []int) {
    return file_files_proto_rawDescGZIP(), []int{0}
}

func (x *File) GetPath() string {
    if x != nil {
        return x.Path
    }
    return ""
}

func (x *File) GetName() string {
    if x != nil {
        return x.Name
    }
    return ""
}

func (x *File) GetExtension() string {
    if x != nil {
        return x.Extension
    }
    return ""
}

func (x *File) GetSize() int64 {
    if x != nil {
        return x.Size
    }
    return 0
}

func (x *File) GetContentType() string {
    if x != nil {
        return x.ContentType
    }
    return ""
}

func (x *File) GetLastModified() string {
    if x != nil {
        return x.LastModified
    }
    return ""
}

func (x *File) GetDirectory() bool {
    if x != nil {
        return x.Directory
    }
    return false
}

type ListFilesRequest struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
    Path      string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"`
    Page      int32  `protobuf:"varint,3,opt,name=page,proto3" json:"page,omitempty"`
    PerPage   int32  `protobuf:"varint,4,opt,name=perPage,proto3" json:"perPage,omitempty"`
}

func (x *ListFilesRequest) Reset() {
    *x = ListFilesRequest{}
    if protoimpl.UnsafeEnabled {
        mi := &file_files_proto_msgTypes[1]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *ListFilesRequest) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*ListFilesRequest) ProtoMessage() {}

func (x *ListFilesRequest) ProtoReflect() protoreflect.Message {
    mi := &file_files_proto_msgTypes[1]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use ListFilesRequest.ProtoReflect.Descriptor instead.
func (*ListFilesRequest) Descriptor() ([]byte, []int) {
    return file_files_proto_rawDescGZIP(), []int{1}
}

func (x *ListFilesRequest) GetNamespace() string {
    if x != nil {
        return x.Namespace
    }
    return ""
}

func (x *ListFilesRequest) GetPath() string {
    if x != nil {
        return x.Path
    }
    return ""
}

func (x *ListFilesRequest) GetPage() int32 {
    if x != nil {
        return x.Page
    }
    return 0
}

func (x *ListFilesRequest) GetPerPage() int32 {
    if x != nil {
        return x.PerPage
    }
    return 0
}

type ListFilesResponse struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Count      int32   `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"`
    TotalCount int32   `protobuf:"varint,2,opt,name=totalCount,proto3" json:"totalCount,omitempty"`
    Page       int32   `protobuf:"varint,3,opt,name=page,proto3" json:"page,omitempty"`
    Pages      int32   `protobuf:"varint,4,opt,name=pages,proto3" json:"pages,omitempty"`
    Files      []*File `protobuf:"bytes,5,rep,name=files,proto3" json:"files,omitempty"`
    ParentPath string  `protobuf:"bytes,6,opt,name=parentPath,proto3" json:"parentPath,omitempty"`
}

func (x *ListFilesResponse) Reset() {
    *x = ListFilesResponse{}
    if protoimpl.UnsafeEnabled {
        mi := &file_files_proto_msgTypes[2]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *ListFilesResponse) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*ListFilesResponse) ProtoMessage() {}

func (x *ListFilesResponse) ProtoReflect() protoreflect.Message {
    mi := &file_files_proto_msgTypes[2]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use ListFilesResponse.ProtoReflect.Descriptor instead.
func (*ListFilesResponse) Descriptor() ([]byte, []int) {
    return file_files_proto_rawDescGZIP(), []int{2}
}

func (x *ListFilesResponse) GetCount() int32 {
    if x != nil {
        return x.Count
    }
    return 0
}

func (x *ListFilesResponse) GetTotalCount() int32 {
    if x != nil {
        return x.TotalCount
    }
    return 0
}

func (x *ListFilesResponse) GetPage() int32 {
    if x != nil {
        return x.Page
    }
    return 0
}

func (x *ListFilesResponse) GetPages() int32 {
    if x != nil {
        return x.Pages
    }
    return 0
}

func (x *ListFilesResponse) GetFiles() []*File {
    if x != nil {
        return x.Files
    }
    return nil
}

func (x *ListFilesResponse) GetParentPath() string {
    if x != nil {
        return x.ParentPath
    }
    return ""
}

type GetObjectPresignedUrlRequest struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
    Key       string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"`
}

func (x *GetObjectPresignedUrlRequest) Reset() {
    *x = GetObjectPresignedUrlRequest{}
    if protoimpl.UnsafeEnabled {
        mi := &file_files_proto_msgTypes[3]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *GetObjectPresignedUrlRequest) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*GetObjectPresignedUrlRequest) ProtoMessage() {}

func (x *GetObjectPresignedUrlRequest) ProtoReflect() protoreflect.Message {
    mi := &file_files_proto_msgTypes[3]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use GetObjectPresignedUrlRequest.ProtoReflect.Descriptor instead.
func (*GetObjectPresignedUrlRequest) Descriptor() ([]byte, []int) {
    return file_files_proto_rawDescGZIP(), []int{3}
}

func (x *GetObjectPresignedUrlRequest) GetNamespace() string {
    if x != nil {
        return x.Namespace
    }
    return ""
}

func (x *GetObjectPresignedUrlRequest) GetKey() string {
    if x != nil {
        return x.Key
    }
    return ""
}

type GetPresignedUrlResponse struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    Url  string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"`
    Size int64  `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"`
}

func (x *GetPresignedUrlResponse) Reset() {
    *x = GetPresignedUrlResponse{}
    if protoimpl.UnsafeEnabled {
        mi := &file_files_proto_msgTypes[4]
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        ms.StoreMessageInfo(mi)
    }
}

func (x *GetPresignedUrlResponse) String() string {
    return protoimpl.X.MessageStringOf(x)
}

func (*GetPresignedUrlResponse) ProtoMessage() {}

func (x *GetPresignedUrlResponse) ProtoReflect() protoreflect.Message {
    mi := &file_files_proto_msgTypes[4]
    if protoimpl.UnsafeEnabled && x != nil {
        ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
        if ms.LoadMessageInfo() == nil {
            ms.StoreMessageInfo(mi)
        }
        return ms
    }
    return mi.MessageOf(x)
}

// Deprecated: Use GetPresignedUrlResponse.ProtoReflect.Descriptor instead.
func (*GetPresignedUrlResponse) Descriptor() ([]byte, []int) {
    return file_files_proto_rawDescGZIP(), []int{4}
}

func (x *GetPresignedUrlResponse) GetUrl() string {
    if x != nil {
        return x.Url
    }
    return ""
}

func (x *GetPresignedUrlResponse) GetSize() int64 {
    if x != nil {
        return x.Size
    }
    return 0
}

var File_files_proto protoreflect.FileDescriptor

var file_files_proto_rawDesc = []byte{
    0x0a, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61,
    0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61,
    0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
    0x22, 0xc4, 0x01, 0x0a, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74,
    0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a,
    0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
    0x65, 0x12, 0x1c, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03,
    0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12,
    0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73,
    0x69, 0x7a, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x54, 0x79,
    0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e,
    0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x6c, 0x61, 0x73, 0x74, 0x4d, 0x6f, 0x64,
    0x69, 0x66, 0x69, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6c, 0x61, 0x73,
    0x74, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72,
    0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x64, 0x69,
    0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x22, 0x72, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x46,
    0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e,
    0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
    0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74,
    0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a,
    0x04, 0x70, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67,
    0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x65, 0x72, 0x50, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01,
    0x28, 0x05, 0x52, 0x07, 0x70, 0x65, 0x72, 0x50, 0x61, 0x67, 0x65, 0x22, 0xb4, 0x01, 0x0a, 0x11,
    0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
    0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05,
    0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c,
    0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x6f, 0x74,
    0x61, 0x6c, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67, 0x65, 0x18,
    0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x70,
    0x61, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x70, 0x61, 0x67, 0x65,
    0x73, 0x12, 0x1f, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b,
    0x32, 0x09, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x05, 0x66, 0x69, 0x6c,
    0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x74, 0x68,
    0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x50, 0x61,
    0x74, 0x68, 0x22, 0x4e, 0x0a, 0x1c, 0x47, 0x65, 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x50,
    0x72, 0x65, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x55, 0x72, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65,
    0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18,
    0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
    0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b,
    0x65, 0x79, 0x22, 0x3f, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x50, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e,
    0x65, 0x64, 0x55, 0x72, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x10, 0x0a,
    0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12,
    0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73,
    0x69, 0x7a, 0x65, 0x32, 0xa4, 0x02, 0x0a, 0x0b, 0x46, 0x69, 0x6c, 0x65, 0x53, 0x65, 0x72, 0x76,
    0x69, 0x63, 0x65, 0x12, 0xa0, 0x01, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63,
    0x74, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x50, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e,
    0x65, 0x64, 0x55, 0x52, 0x4c, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x4f,
    0x62, 0x6a, 0x65, 0x63, 0x74, 0x50, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x55, 0x72,
    0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47,
    0x65, 0x74, 0x50, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x55, 0x72, 0x6c, 0x52, 0x65,
    0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x3e, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x38, 0x12, 0x36,
    0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e,
    0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2f,
    0x70, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x2d, 0x75, 0x72, 0x6c, 0x2f, 0x7b, 0x6b,
    0x65, 0x79, 0x3d, 0x2a, 0x2a, 0x7d, 0x12, 0x72, 0x0a, 0x09, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69,
    0x6c, 0x65, 0x73, 0x12, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69,
    0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69,
    0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
    0x73, 0x65, 0x22, 0x36, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, 0x12, 0x2e, 0x2f, 0x61, 0x70, 0x69,
    0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73,
    0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2f, 0x6c, 0x69, 0x73, 0x74,
    0x2f, 0x7b, 0x70, 0x61, 0x74, 0x68, 0x3d, 0x2a, 0x2a, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69,
    0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65,
    0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e,
    0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}

var (
    file_files_proto_rawDescOnce sync.Once
    file_files_proto_rawDescData = file_files_proto_rawDesc
)

func file_files_proto_rawDescGZIP() []byte {
    file_files_proto_rawDescOnce.Do(func() {
        file_files_proto_rawDescData = protoimpl.X.CompressGZIP(file_files_proto_rawDescData)
    })
    return file_files_proto_rawDescData
}

var file_files_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
var file_files_proto_goTypes = []interface{}{
    (*File)(nil),                         // 0: api.File
    (*ListFilesRequest)(nil),             // 1: api.ListFilesRequest
    (*ListFilesResponse)(nil),            // 2: api.ListFilesResponse
    (*GetObjectPresignedUrlRequest)(nil), // 3: api.GetObjectPresignedUrlRequest
    (*GetPresignedUrlResponse)(nil),      // 4: api.GetPresignedUrlResponse
}
var file_files_proto_depIdxs = []int32{
    0, // 0: api.ListFilesResponse.files:type_name -> api.File
    3, // 1: api.FileService.GetObjectDownloadPresignedURL:input_type -> api.GetObjectPresignedUrlRequest
    1, // 2: api.FileService.ListFiles:input_type -> api.ListFilesRequest
    4, // 3: api.FileService.GetObjectDownloadPresignedURL:output_type -> api.GetPresignedUrlResponse
    2, // 4: api.FileService.ListFiles:output_type -> api.ListFilesResponse
    3, // [3:5] is the sub-list for method output_type
    1, // [1:3] is the sub-list for method input_type
    1, // [1:1] is the sub-list for extension type_name
    1, // [1:1] is the sub-list for extension extendee
    0, // [0:1] is the sub-list for field type_name
}

func init() { file_files_proto_init() }
func file_files_proto_init() {
    if File_files_proto != nil {
        return
    }
    if !protoimpl.UnsafeEnabled {
        file_files_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*File); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_files_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*ListFilesRequest); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_files_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*ListFilesResponse); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_files_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*GetObjectPresignedUrlRequest); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
        file_files_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
            switch v := v.(*GetPresignedUrlResponse); i {
            case 0:
                return &v.state
            case 1:
                return &v.sizeCache
            case 2:
                return &v.unknownFields
            default:
                return nil
            }
        }
    }
    type x struct{}
    out := protoimpl.TypeBuilder{
        File: protoimpl.DescBuilder{
            GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
            RawDescriptor: file_files_proto_rawDesc,
            NumEnums:      0,
            NumMessages:   5,
            NumExtensions: 0,
            NumServices:   1,
        },
        GoTypes:           file_files_proto_goTypes,
        DependencyIndexes: file_files_proto_depIdxs,
        MessageInfos:      file_files_proto_msgTypes,
    }.Build()
    File_files_proto = out.File
    file_files_proto_rawDesc = nil
    file_files_proto_goTypes = nil
    file_files_proto_depIdxs = nil
}
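The generated message types above follow protoc-gen-go's nil-safe getter convention: a getter called on a nil message returns the zero value rather than panicking. A small standalone sketch (not part of the generated file), assuming the module path github.com/onepanelio/core/api/gen encoded in the descriptor above:

```go
package main

import (
    "fmt"

    gen "github.com/onepanelio/core/api/gen"
)

func main() {
    // Construct a response by hand; field values here are made up.
    resp := &gen.ListFilesResponse{
        Count:      2,
        TotalCount: 10,
        Page:       1,
        Pages:      5,
        Files: []*gen.File{
            {Name: "model.onnx", Size: 1024},
            {Name: "checkpoints", Directory: true},
        },
    }
    fmt.Println(resp.GetCount(), resp.GetPages()) // 2 5

    // Getters are safe on a nil receiver: they return the zero value.
    var missing *gen.ListFilesResponse
    fmt.Println(missing.GetCount()) // 0, no panic
}
```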
342  api/gen/files.pb.gw.go  Normal file
@@ -0,0 +1,342 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: files.proto

/*
Package gen is a reverse proxy.

It translates gRPC into RESTful JSON APIs.
*/
package gen

import (
    "context"
    "io"
    "net/http"

    "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
    "github.com/grpc-ecosystem/grpc-gateway/v2/utilities"
    "google.golang.org/grpc"
    "google.golang.org/grpc/codes"
    "google.golang.org/grpc/grpclog"
    "google.golang.org/grpc/metadata"
    "google.golang.org/grpc/status"
    "google.golang.org/protobuf/proto"
)

// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = metadata.Join

func request_FileService_GetObjectDownloadPresignedURL_0(ctx context.Context, marshaler runtime.Marshaler, client FileServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq GetObjectPresignedUrlRequest
    var metadata runtime.ServerMetadata

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    val, ok = pathParams["key"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "key")
    }

    protoReq.Key, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "key", err)
    }

    msg, err := client.GetObjectDownloadPresignedURL(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
    return msg, metadata, err

}

func local_request_FileService_GetObjectDownloadPresignedURL_0(ctx context.Context, marshaler runtime.Marshaler, server FileServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq GetObjectPresignedUrlRequest
    var metadata runtime.ServerMetadata

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    val, ok = pathParams["key"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "key")
    }

    protoReq.Key, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "key", err)
    }

    msg, err := server.GetObjectDownloadPresignedURL(ctx, &protoReq)
    return msg, metadata, err

}

var (
    filter_FileService_ListFiles_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0, "path": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}}
)

func request_FileService_ListFiles_0(ctx context.Context, marshaler runtime.Marshaler, client FileServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq ListFilesRequest
    var metadata runtime.ServerMetadata

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    val, ok = pathParams["path"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "path")
    }

    protoReq.Path, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "path", err)
    }

    if err := req.ParseForm(); err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
    }
    if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FileService_ListFiles_0); err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
    }

    msg, err := client.ListFiles(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
    return msg, metadata, err

}

func local_request_FileService_ListFiles_0(ctx context.Context, marshaler runtime.Marshaler, server FileServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq ListFilesRequest
    var metadata runtime.ServerMetadata

    var (
        val string
        ok  bool
        err error
        _   = err
    )

    val, ok = pathParams["namespace"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
    }

    protoReq.Namespace, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
    }

    val, ok = pathParams["path"]
    if !ok {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "path")
    }

    protoReq.Path, err = runtime.String(val)
    if err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "path", err)
    }

    if err := req.ParseForm(); err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
    }
    if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FileService_ListFiles_0); err != nil {
        return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
    }

    msg, err := server.ListFiles(ctx, &protoReq)
    return msg, metadata, err

}

// RegisterFileServiceHandlerServer registers the http handlers for service FileService to "mux".
// UnaryRPC :call FileServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterFileServiceHandlerFromEndpoint instead.
func RegisterFileServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server FileServiceServer) error {

    mux.Handle("GET", pattern_FileService_GetObjectDownloadPresignedURL_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
        var stream runtime.ServerTransportStream
        ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
        inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
        rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.FileService/GetObjectDownloadPresignedURL")
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }
        resp, md, err := local_request_FileService_GetObjectDownloadPresignedURL_0(rctx, inboundMarshaler, server, req, pathParams)
        md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
        ctx = runtime.NewServerMetadataContext(ctx, md)
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }

        forward_FileService_GetObjectDownloadPresignedURL_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

    })

    mux.Handle("GET", pattern_FileService_ListFiles_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
        var stream runtime.ServerTransportStream
        ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
        inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
        rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.FileService/ListFiles")
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }
        resp, md, err := local_request_FileService_ListFiles_0(rctx, inboundMarshaler, server, req, pathParams)
        md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
        ctx = runtime.NewServerMetadataContext(ctx, md)
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }

        forward_FileService_ListFiles_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

    })

    return nil
}

// RegisterFileServiceHandlerFromEndpoint is same as RegisterFileServiceHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterFileServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
    conn, err := grpc.Dial(endpoint, opts...)
    if err != nil {
        return err
    }
    defer func() {
        if err != nil {
            if cerr := conn.Close(); cerr != nil {
                grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
            }
            return
        }
        go func() {
            <-ctx.Done()
            if cerr := conn.Close(); cerr != nil {
                grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
            }
        }()
    }()

    return RegisterFileServiceHandler(ctx, mux, conn)
}

// RegisterFileServiceHandler registers the http handlers for service FileService to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterFileServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
    return RegisterFileServiceHandlerClient(ctx, mux, NewFileServiceClient(conn))
}

// RegisterFileServiceHandlerClient registers the http handlers for service FileService
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "FileServiceClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "FileServiceClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "FileServiceClient" to call the correct interceptors.
func RegisterFileServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client FileServiceClient) error {

    mux.Handle("GET", pattern_FileService_GetObjectDownloadPresignedURL_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
        inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
        rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.FileService/GetObjectDownloadPresignedURL")
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }
        resp, md, err := request_FileService_GetObjectDownloadPresignedURL_0(rctx, inboundMarshaler, client, req, pathParams)
        ctx = runtime.NewServerMetadataContext(ctx, md)
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }

        forward_FileService_GetObjectDownloadPresignedURL_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

    })

    mux.Handle("GET", pattern_FileService_ListFiles_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
        inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
        rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.FileService/ListFiles")
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }
        resp, md, err := request_FileService_ListFiles_0(rctx, inboundMarshaler, client, req, pathParams)
        ctx = runtime.NewServerMetadataContext(ctx, md)
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }

        forward_FileService_ListFiles_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

    })

    return nil
}

var (
    pattern_FileService_GetObjectDownloadPresignedURL_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 3, 0, 4, 1, 5, 5}, []string{"apis", "v1beta1", "namespace", "files", "presigned-url", "key"}, ""))

    pattern_FileService_ListFiles_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 3, 0, 4, 1, 5, 5}, []string{"apis", "v1beta1", "namespace", "files", "list", "path"}, ""))
)

var (
    forward_FileService_GetObjectDownloadPresignedURL_0 = runtime.ForwardResponseMessage

    forward_FileService_ListFiles_0 = runtime.ForwardResponseMessage
)
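The generated gateway above exposes exactly the two HTTP routes declared in the swagger spec and proxies them to gRPC. A hedged sketch of wiring it up, assuming the gRPC server listens on :8887 and the HTTP proxy on :8888 (matching the Dockerfile's EXPOSE lines; the actual deployment may differ):

```go
package main

import (
    "context"
    "log"
    "net/http"

    "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
    "google.golang.org/grpc"

    gen "github.com/onepanelio/core/api/gen"
)

func main() {
    ctx, cancel := context.WithCancel(context.Background())
    defer cancel()

    mux := runtime.NewServeMux()
    opts := []grpc.DialOption{grpc.WithInsecure()} // plaintext for local use only

    // Registers GET /apis/v1beta1/{namespace}/files/list/{path=**} and
    // GET /apis/v1beta1/{namespace}/files/presigned-url/{key=**}, dialing
    // the gRPC endpoint and closing the connection when ctx is done.
    if err := gen.RegisterFileServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
        log.Fatal(err)
    }
    log.Fatal(http.ListenAndServe(":8888", mux))
}
```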
133  api/gen/files_grpc.pb.go  Normal file
@@ -0,0 +1,133 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.

package gen

import (
	context "context"
	grpc "google.golang.org/grpc"
	codes "google.golang.org/grpc/codes"
	status "google.golang.org/grpc/status"
)

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion7

// FileServiceClient is the client API for FileService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type FileServiceClient interface {
	GetObjectDownloadPresignedURL(ctx context.Context, in *GetObjectPresignedUrlRequest, opts ...grpc.CallOption) (*GetPresignedUrlResponse, error)
	ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error)
}

type fileServiceClient struct {
	cc grpc.ClientConnInterface
}

func NewFileServiceClient(cc grpc.ClientConnInterface) FileServiceClient {
	return &fileServiceClient{cc}
}

func (c *fileServiceClient) GetObjectDownloadPresignedURL(ctx context.Context, in *GetObjectPresignedUrlRequest, opts ...grpc.CallOption) (*GetPresignedUrlResponse, error) {
	out := new(GetPresignedUrlResponse)
	err := c.cc.Invoke(ctx, "/api.FileService/GetObjectDownloadPresignedURL", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *fileServiceClient) ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error) {
	out := new(ListFilesResponse)
	err := c.cc.Invoke(ctx, "/api.FileService/ListFiles", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// FileServiceServer is the server API for FileService service.
// All implementations must embed UnimplementedFileServiceServer
// for forward compatibility
type FileServiceServer interface {
	GetObjectDownloadPresignedURL(context.Context, *GetObjectPresignedUrlRequest) (*GetPresignedUrlResponse, error)
	ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error)
	mustEmbedUnimplementedFileServiceServer()
}

// UnimplementedFileServiceServer must be embedded to have forward compatible implementations.
type UnimplementedFileServiceServer struct {
}

func (UnimplementedFileServiceServer) GetObjectDownloadPresignedURL(context.Context, *GetObjectPresignedUrlRequest) (*GetPresignedUrlResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetObjectDownloadPresignedURL not implemented")
}
func (UnimplementedFileServiceServer) ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ListFiles not implemented")
}
func (UnimplementedFileServiceServer) mustEmbedUnimplementedFileServiceServer() {}

// UnsafeFileServiceServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to FileServiceServer will
// result in compilation errors.
type UnsafeFileServiceServer interface {
	mustEmbedUnimplementedFileServiceServer()
}

func RegisterFileServiceServer(s grpc.ServiceRegistrar, srv FileServiceServer) {
	s.RegisterService(&_FileService_serviceDesc, srv)
}

func _FileService_GetObjectDownloadPresignedURL_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetObjectPresignedUrlRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(FileServiceServer).GetObjectDownloadPresignedURL(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.FileService/GetObjectDownloadPresignedURL",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(FileServiceServer).GetObjectDownloadPresignedURL(ctx, req.(*GetObjectPresignedUrlRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _FileService_ListFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ListFilesRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(FileServiceServer).ListFiles(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.FileService/ListFiles",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(FileServiceServer).ListFiles(ctx, req.(*ListFilesRequest))
	}
	return interceptor(ctx, in, info, handler)
}

var _FileService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "api.FileService",
	HandlerType: (*FileServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "GetObjectDownloadPresignedURL",
			Handler:    _FileService_GetObjectDownloadPresignedURL_Handler,
		},
		{
			MethodName: "ListFiles",
			Handler:    _FileService_ListFiles_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "files.proto",
}
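Editor's note: a minimal sketch of how a concrete server might satisfy this generated interface; embedding UnimplementedFileServiceServer keeps the implementation forward compatible, as the generated comments above note. The import path comes from the package's go_package option; the port and the empty response are placeholders, not part of the generated code:

package main

import (
	"context"
	"net"

	"google.golang.org/grpc"

	gen "github.com/onepanelio/core/api/gen"
)

// fileServer embeds UnimplementedFileServiceServer so that methods added to
// FileService later do not break compilation.
type fileServer struct {
	gen.UnimplementedFileServiceServer
}

func (s *fileServer) ListFiles(ctx context.Context, in *gen.ListFilesRequest) (*gen.ListFilesResponse, error) {
	// Real listing logic would go here; an empty response keeps the sketch compilable.
	return &gen.ListFilesResponse{}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":8887") // the port is an assumption
	if err != nil {
		panic(err)
	}
	s := grpc.NewServer()
	gen.RegisterFileServiceServer(s, &fileServer{})
	if err := s.Serve(lis); err != nil {
		panic(err)
	}
}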
999
api/gen/inference_service.pb.go
Normal file
@@ -0,0 +1,999 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// 	protoc-gen-go v1.25.0
// 	protoc        v3.14.0
// source: inference_service.proto

package gen

import (
	proto "github.com/golang/protobuf/proto"
	_ "google.golang.org/genproto/googleapis/api/annotations"
	protoreflect "google.golang.org/protobuf/reflect/protoreflect"
	protoimpl "google.golang.org/protobuf/runtime/protoimpl"
	emptypb "google.golang.org/protobuf/types/known/emptypb"
	reflect "reflect"
	sync "sync"
)

const (
	// Verify that this generated code is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
	// Verify that runtime/protoimpl is sufficiently up-to-date.
	_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)

// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4

type InferenceServiceIdentifier struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
	Name      string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
}

func (x *InferenceServiceIdentifier) Reset() {
	*x = InferenceServiceIdentifier{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *InferenceServiceIdentifier) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServiceIdentifier) ProtoMessage() {}

func (x *InferenceServiceIdentifier) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InferenceServiceIdentifier.ProtoReflect.Descriptor instead.
func (*InferenceServiceIdentifier) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{0}
}

func (x *InferenceServiceIdentifier) GetNamespace() string {
	if x != nil {
		return x.Namespace
	}
	return ""
}

func (x *InferenceServiceIdentifier) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

type Env struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Name  string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}

func (x *Env) Reset() {
	*x = Env{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Env) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Env) ProtoMessage() {}

func (x *Env) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Env.ProtoReflect.Descriptor instead.
func (*Env) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{1}
}

func (x *Env) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *Env) GetValue() string {
	if x != nil {
		return x.Value
	}
	return ""
}

type Container struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"`
	Name  string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
	Env   []*Env `protobuf:"bytes,3,rep,name=env,proto3" json:"env,omitempty"`
}

func (x *Container) Reset() {
	*x = Container{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *Container) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*Container) ProtoMessage() {}

func (x *Container) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use Container.ProtoReflect.Descriptor instead.
func (*Container) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{2}
}

func (x *Container) GetImage() string {
	if x != nil {
		return x.Image
	}
	return ""
}

func (x *Container) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *Container) GetEnv() []*Env {
	if x != nil {
		return x.Env
	}
	return nil
}

type InferenceServiceTransformer struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Containers []*Container `protobuf:"bytes,1,rep,name=containers,proto3" json:"containers,omitempty"`
	MinCpu     string       `protobuf:"bytes,2,opt,name=minCpu,proto3" json:"minCpu,omitempty"`
	MinMemory  string       `protobuf:"bytes,3,opt,name=minMemory,proto3" json:"minMemory,omitempty"`
	MaxCpu     string       `protobuf:"bytes,4,opt,name=maxCpu,proto3" json:"maxCpu,omitempty"`
	MaxMemory  string       `protobuf:"bytes,5,opt,name=maxMemory,proto3" json:"maxMemory,omitempty"`
}

func (x *InferenceServiceTransformer) Reset() {
	*x = InferenceServiceTransformer{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *InferenceServiceTransformer) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServiceTransformer) ProtoMessage() {}

func (x *InferenceServiceTransformer) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InferenceServiceTransformer.ProtoReflect.Descriptor instead.
func (*InferenceServiceTransformer) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{3}
}

func (x *InferenceServiceTransformer) GetContainers() []*Container {
	if x != nil {
		return x.Containers
	}
	return nil
}

func (x *InferenceServiceTransformer) GetMinCpu() string {
	if x != nil {
		return x.MinCpu
	}
	return ""
}

func (x *InferenceServiceTransformer) GetMinMemory() string {
	if x != nil {
		return x.MinMemory
	}
	return ""
}

func (x *InferenceServiceTransformer) GetMaxCpu() string {
	if x != nil {
		return x.MaxCpu
	}
	return ""
}

func (x *InferenceServiceTransformer) GetMaxMemory() string {
	if x != nil {
		return x.MaxMemory
	}
	return ""
}

type InferenceServicePredictor struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Name           string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	RuntimeVersion string `protobuf:"bytes,2,opt,name=runtimeVersion,proto3" json:"runtimeVersion,omitempty"`
	StorageUri     string `protobuf:"bytes,3,opt,name=storageUri,proto3" json:"storageUri,omitempty"`
	NodeSelector   string `protobuf:"bytes,4,opt,name=nodeSelector,proto3" json:"nodeSelector,omitempty"`
	MinCpu         string `protobuf:"bytes,5,opt,name=minCpu,proto3" json:"minCpu,omitempty"`
	MinMemory      string `protobuf:"bytes,6,opt,name=minMemory,proto3" json:"minMemory,omitempty"`
	MaxCpu         string `protobuf:"bytes,7,opt,name=maxCpu,proto3" json:"maxCpu,omitempty"`
	MaxMemory      string `protobuf:"bytes,8,opt,name=maxMemory,proto3" json:"maxMemory,omitempty"`
}

func (x *InferenceServicePredictor) Reset() {
	*x = InferenceServicePredictor{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *InferenceServicePredictor) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServicePredictor) ProtoMessage() {}

func (x *InferenceServicePredictor) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InferenceServicePredictor.ProtoReflect.Descriptor instead.
func (*InferenceServicePredictor) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{4}
}

func (x *InferenceServicePredictor) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *InferenceServicePredictor) GetRuntimeVersion() string {
	if x != nil {
		return x.RuntimeVersion
	}
	return ""
}

func (x *InferenceServicePredictor) GetStorageUri() string {
	if x != nil {
		return x.StorageUri
	}
	return ""
}

func (x *InferenceServicePredictor) GetNodeSelector() string {
	if x != nil {
		return x.NodeSelector
	}
	return ""
}

func (x *InferenceServicePredictor) GetMinCpu() string {
	if x != nil {
		return x.MinCpu
	}
	return ""
}

func (x *InferenceServicePredictor) GetMinMemory() string {
	if x != nil {
		return x.MinMemory
	}
	return ""
}

func (x *InferenceServicePredictor) GetMaxCpu() string {
	if x != nil {
		return x.MaxCpu
	}
	return ""
}

func (x *InferenceServicePredictor) GetMaxMemory() string {
	if x != nil {
		return x.MaxMemory
	}
	return ""
}

type CreateInferenceServiceRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Namespace               string                       `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
	Name                    string                       `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
	DefaultTransformerImage string                       `protobuf:"bytes,3,opt,name=defaultTransformerImage,proto3" json:"defaultTransformerImage,omitempty"`
	Predictor               *InferenceServicePredictor   `protobuf:"bytes,4,opt,name=predictor,proto3" json:"predictor,omitempty"`
	Transformer             *InferenceServiceTransformer `protobuf:"bytes,5,opt,name=transformer,proto3" json:"transformer,omitempty"`
}

func (x *CreateInferenceServiceRequest) Reset() {
	*x = CreateInferenceServiceRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *CreateInferenceServiceRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*CreateInferenceServiceRequest) ProtoMessage() {}

func (x *CreateInferenceServiceRequest) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use CreateInferenceServiceRequest.ProtoReflect.Descriptor instead.
func (*CreateInferenceServiceRequest) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{5}
}

func (x *CreateInferenceServiceRequest) GetNamespace() string {
	if x != nil {
		return x.Namespace
	}
	return ""
}

func (x *CreateInferenceServiceRequest) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *CreateInferenceServiceRequest) GetDefaultTransformerImage() string {
	if x != nil {
		return x.DefaultTransformerImage
	}
	return ""
}

func (x *CreateInferenceServiceRequest) GetPredictor() *InferenceServicePredictor {
	if x != nil {
		return x.Predictor
	}
	return nil
}

func (x *CreateInferenceServiceRequest) GetTransformer() *InferenceServiceTransformer {
	if x != nil {
		return x.Transformer
	}
	return nil
}

type DeployModelResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Status string `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"`
}

func (x *DeployModelResponse) Reset() {
	*x = DeployModelResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[6]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *DeployModelResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*DeployModelResponse) ProtoMessage() {}

func (x *DeployModelResponse) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[6]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use DeployModelResponse.ProtoReflect.Descriptor instead.
func (*DeployModelResponse) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{6}
}

func (x *DeployModelResponse) GetStatus() string {
	if x != nil {
		return x.Status
	}
	return ""
}

type InferenceServiceCondition struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	LastTransitionTime string `protobuf:"bytes,1,opt,name=lastTransitionTime,proto3" json:"lastTransitionTime,omitempty"`
	Status             string `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"`
	Type               string `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"`
}

func (x *InferenceServiceCondition) Reset() {
	*x = InferenceServiceCondition{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[7]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *InferenceServiceCondition) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServiceCondition) ProtoMessage() {}

func (x *InferenceServiceCondition) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[7]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InferenceServiceCondition.ProtoReflect.Descriptor instead.
func (*InferenceServiceCondition) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{7}
}

func (x *InferenceServiceCondition) GetLastTransitionTime() string {
	if x != nil {
		return x.LastTransitionTime
	}
	return ""
}

func (x *InferenceServiceCondition) GetStatus() string {
	if x != nil {
		return x.Status
	}
	return ""
}

func (x *InferenceServiceCondition) GetType() string {
	if x != nil {
		return x.Type
	}
	return ""
}

type GetInferenceServiceResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Ready      bool                         `protobuf:"varint,1,opt,name=ready,proto3" json:"ready,omitempty"`
	Conditions []*InferenceServiceCondition `protobuf:"bytes,2,rep,name=conditions,proto3" json:"conditions,omitempty"`
	PredictUrl string                       `protobuf:"bytes,3,opt,name=predictUrl,proto3" json:"predictUrl,omitempty"`
}

func (x *GetInferenceServiceResponse) Reset() {
	*x = GetInferenceServiceResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[8]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *GetInferenceServiceResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*GetInferenceServiceResponse) ProtoMessage() {}

func (x *GetInferenceServiceResponse) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[8]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GetInferenceServiceResponse.ProtoReflect.Descriptor instead.
func (*GetInferenceServiceResponse) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{8}
}

func (x *GetInferenceServiceResponse) GetReady() bool {
	if x != nil {
		return x.Ready
	}
	return false
}

func (x *GetInferenceServiceResponse) GetConditions() []*InferenceServiceCondition {
	if x != nil {
		return x.Conditions
	}
	return nil
}

func (x *GetInferenceServiceResponse) GetPredictUrl() string {
	if x != nil {
		return x.PredictUrl
	}
	return ""
}

type InferenceServiceEndpoints struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Predict string `protobuf:"bytes,1,opt,name=predict,proto3" json:"predict,omitempty"`
}

func (x *InferenceServiceEndpoints) Reset() {
	*x = InferenceServiceEndpoints{}
	if protoimpl.UnsafeEnabled {
		mi := &file_inference_service_proto_msgTypes[9]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *InferenceServiceEndpoints) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*InferenceServiceEndpoints) ProtoMessage() {}

func (x *InferenceServiceEndpoints) ProtoReflect() protoreflect.Message {
	mi := &file_inference_service_proto_msgTypes[9]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InferenceServiceEndpoints.ProtoReflect.Descriptor instead.
func (*InferenceServiceEndpoints) Descriptor() ([]byte, []int) {
	return file_inference_service_proto_rawDescGZIP(), []int{9}
}

func (x *InferenceServiceEndpoints) GetPredict() string {
	if x != nil {
		return x.Predict
	}
	return ""
}

var File_inference_service_proto protoreflect.FileDescriptor

var file_inference_service_proto_rawDesc = []byte{
	0x0a, 0x17, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x73, 0x65, 0x72, 0x76,
	0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c,
	0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74,
	0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f,
	0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d,
	0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4e, 0x0a, 0x1a, 0x49, 0x6e, 0x66,
	0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65,
	0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73,
	0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65,
	0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20,
	0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x2f, 0x0a, 0x03, 0x45, 0x6e, 0x76,
	0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
	0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
	0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x51, 0x0a, 0x09, 0x43, 0x6f,
	0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65,
	0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x12, 0x0a,
	0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
	0x65, 0x12, 0x1a, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x08,
	0x2e, 0x61, 0x70, 0x69, 0x2e, 0x45, 0x6e, 0x76, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x22, 0xb9, 0x01,
	0x0a, 0x1b, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69,
	0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x12, 0x2e, 0x0a,
	0x0a, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28,
	0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65,
	0x72, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x12, 0x16, 0x0a,
	0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70, 0x75, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d,
	0x69, 0x6e, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f,
	0x72, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d,
	0x6f, 0x72, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x18, 0x04, 0x20,
	0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d,
	0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
	0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x22, 0x87, 0x02, 0x0a, 0x19, 0x49, 0x6e,
	0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72,
	0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18,
	0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x72,
	0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20,
	0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x56, 0x65, 0x72, 0x73,
	0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x55, 0x72,
	0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65,
	0x55, 0x72, 0x69, 0x12, 0x22, 0x0a, 0x0c, 0x6e, 0x6f, 0x64, 0x65, 0x53, 0x65, 0x6c, 0x65, 0x63,
	0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6e, 0x6f, 0x64, 0x65, 0x53,
	0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70,
	0x75, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70, 0x75, 0x12,
	0x1c, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x06, 0x20, 0x01,
	0x28, 0x09, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x16, 0x0a,
	0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d,
	0x61, 0x78, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f,
	0x72, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d,
	0x6f, 0x72, 0x79, 0x22, 0x8d, 0x02, 0x0a, 0x1d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e,
	0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65,
	0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
	0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
	0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
	0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x17, 0x64, 0x65, 0x66, 0x61, 0x75,
	0x6c, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x49, 0x6d, 0x61,
	0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c,
	0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x49, 0x6d, 0x61, 0x67,
	0x65, 0x12, 0x3c, 0x0a, 0x09, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04,
	0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72,
	0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72, 0x65, 0x64, 0x69,
	0x63, 0x74, 0x6f, 0x72, 0x52, 0x09, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x12,
	0x42, 0x0a, 0x0b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x18, 0x05,
	0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72,
	0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73,
	0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x52, 0x0b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72,
	0x6d, 0x65, 0x72, 0x22, 0x2d, 0x0a, 0x13, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x4d, 0x6f, 0x64,
	0x65, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74,
	0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74,
	0x75, 0x73, 0x22, 0x77, 0x0a, 0x19, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
	0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12,
	0x2e, 0x0a, 0x12, 0x6c, 0x61, 0x73, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f,
	0x6e, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x6c, 0x61, 0x73,
	0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x12,
	0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
	0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18,
	0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x93, 0x01, 0x0a, 0x1b,
	0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76,
	0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72,
	0x65, 0x61, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x72, 0x65, 0x61, 0x64,
	0x79, 0x12, 0x3e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18,
	0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65,
	0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x64,
	0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e,
	0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x55, 0x72, 0x6c, 0x18,
	0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x55, 0x72,
	0x6c, 0x22, 0x35, 0x0a, 0x19, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65,
	0x72, 0x76, 0x69, 0x63, 0x65, 0x45, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x18,
	0x0a, 0x07, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
	0x07, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x32, 0xcf, 0x03, 0x0a, 0x10, 0x49, 0x6e, 0x66,
	0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x95, 0x01,
	0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63,
	0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43,
	0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65,
	0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61,
	0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
	0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x35,
	0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2f, 0x22, 0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31,
	0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
	0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72, 0x76, 0x69,
	0x63, 0x65, 0x3a, 0x01, 0x2a, 0x12, 0x93, 0x01, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66,
	0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1f, 0x2e,
	0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72,
	0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x1a, 0x20,
	0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63,
	0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
	0x22, 0x39, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x33, 0x12, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
	0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
	0x63, 0x65, 0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72,
	0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x8c, 0x01, 0x0a, 0x16,
	0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
	0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66,
	0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65,
	0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
	0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22,
	0x39, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x33, 0x2a, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76,
	0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
	0x65, 0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72, 0x76,
	0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69,
	0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65,
	0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e,
	0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}

var (
	file_inference_service_proto_rawDescOnce sync.Once
	file_inference_service_proto_rawDescData = file_inference_service_proto_rawDesc
)

func file_inference_service_proto_rawDescGZIP() []byte {
	file_inference_service_proto_rawDescOnce.Do(func() {
		file_inference_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_inference_service_proto_rawDescData)
	})
	return file_inference_service_proto_rawDescData
}

var file_inference_service_proto_msgTypes = make([]protoimpl.MessageInfo, 10)
var file_inference_service_proto_goTypes = []interface{}{
	(*InferenceServiceIdentifier)(nil),    // 0: api.InferenceServiceIdentifier
	(*Env)(nil),                           // 1: api.Env
	(*Container)(nil),                     // 2: api.Container
	(*InferenceServiceTransformer)(nil),   // 3: api.InferenceServiceTransformer
	(*InferenceServicePredictor)(nil),     // 4: api.InferenceServicePredictor
	(*CreateInferenceServiceRequest)(nil), // 5: api.CreateInferenceServiceRequest
	(*DeployModelResponse)(nil),           // 6: api.DeployModelResponse
	(*InferenceServiceCondition)(nil),     // 7: api.InferenceServiceCondition
	(*GetInferenceServiceResponse)(nil),   // 8: api.GetInferenceServiceResponse
	(*InferenceServiceEndpoints)(nil),     // 9: api.InferenceServiceEndpoints
	(*emptypb.Empty)(nil),                 // 10: google.protobuf.Empty
}
var file_inference_service_proto_depIdxs = []int32{
	1,  // 0: api.Container.env:type_name -> api.Env
	2,  // 1: api.InferenceServiceTransformer.containers:type_name -> api.Container
	4,  // 2: api.CreateInferenceServiceRequest.predictor:type_name -> api.InferenceServicePredictor
	3,  // 3: api.CreateInferenceServiceRequest.transformer:type_name -> api.InferenceServiceTransformer
	7,  // 4: api.GetInferenceServiceResponse.conditions:type_name -> api.InferenceServiceCondition
	5,  // 5: api.InferenceService.CreateInferenceService:input_type -> api.CreateInferenceServiceRequest
	0,  // 6: api.InferenceService.GetInferenceService:input_type -> api.InferenceServiceIdentifier
	0,  // 7: api.InferenceService.DeleteInferenceService:input_type -> api.InferenceServiceIdentifier
	8,  // 8: api.InferenceService.CreateInferenceService:output_type -> api.GetInferenceServiceResponse
	8,  // 9: api.InferenceService.GetInferenceService:output_type -> api.GetInferenceServiceResponse
	10, // 10: api.InferenceService.DeleteInferenceService:output_type -> google.protobuf.Empty
	8,  // [8:11] is the sub-list for method output_type
	5,  // [5:8] is the sub-list for method input_type
	5,  // [5:5] is the sub-list for extension type_name
	5,  // [5:5] is the sub-list for extension extendee
	0,  // [0:5] is the sub-list for field type_name
}

func init() { file_inference_service_proto_init() }
func file_inference_service_proto_init() {
	if File_inference_service_proto != nil {
		return
	}
	if !protoimpl.UnsafeEnabled {
		file_inference_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*InferenceServiceIdentifier); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Env); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*Container); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*InferenceServiceTransformer); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*InferenceServicePredictor); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CreateInferenceServiceRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*DeployModelResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*InferenceServiceCondition); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GetInferenceServiceResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_inference_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*InferenceServiceEndpoints); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_inference_service_proto_rawDesc,
			NumEnums:      0,
			NumMessages:   10,
			NumExtensions: 0,
			NumServices:   1,
		},
		GoTypes:           file_inference_service_proto_goTypes,
		DependencyIndexes: file_inference_service_proto_depIdxs,
		MessageInfos:      file_inference_service_proto_msgTypes,
	}.Build()
	File_inference_service_proto = out.File
	file_inference_service_proto_rawDesc = nil
	file_inference_service_proto_goTypes = nil
	file_inference_service_proto_depIdxs = nil
}
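Editor's note: to make the message shapes above concrete, here is a hedged sketch of building a CreateInferenceServiceRequest. The field names come directly from the generated structs and the import path from the file's go_package option; every value is a placeholder, not taken from this repository:

package example

import gen "github.com/onepanelio/core/api/gen"

// newExampleRequest is illustrative only; all values below are hypothetical.
func newExampleRequest() *gen.CreateInferenceServiceRequest {
	return &gen.CreateInferenceServiceRequest{
		Namespace: "default",
		Name:      "my-model",
		Predictor: &gen.InferenceServicePredictor{
			Name:           "tensorflow",
			RuntimeVersion: "2.4.0",
			StorageUri:     "s3://my-bucket/exported-model",
			MinCpu:         "1",
			MaxMemory:      "2Gi",
		},
		Transformer: &gen.InferenceServiceTransformer{
			Containers: []*gen.Container{
				{
					Name:  "transformer",
					Image: "example/transformer:latest",
					Env:   []*gen.Env{{Name: "MODEL_NAME", Value: "my-model"}},
				},
			},
		},
	}
}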
439
api/gen/inference_service.pb.gw.go
Normal file
@@ -0,0 +1,439 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: inference_service.proto

/*
Package gen is a reverse proxy.

It translates gRPC into RESTful JSON APIs.
*/
package gen

import (
	"context"
	"io"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"github.com/grpc-ecosystem/grpc-gateway/v2/utilities"
	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/grpclog"
	"google.golang.org/grpc/metadata"
	"google.golang.org/grpc/status"
	"google.golang.org/protobuf/proto"
)

// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = metadata.Join

func request_InferenceService_CreateInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq CreateInferenceServiceRequest
	var metadata runtime.ServerMetadata

	newReader, berr := utilities.IOReaderFactory(req.Body)
	if berr != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
	}
	if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["namespace"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
	}

	protoReq.Namespace, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
	}

	msg, err := client.CreateInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err

}

func local_request_InferenceService_CreateInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq CreateInferenceServiceRequest
	var metadata runtime.ServerMetadata

	newReader, berr := utilities.IOReaderFactory(req.Body)
	if berr != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
	}
	if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["namespace"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
	}

	protoReq.Namespace, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
	}

	msg, err := server.CreateInferenceService(ctx, &protoReq)
	return msg, metadata, err

}

func request_InferenceService_GetInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq InferenceServiceIdentifier
	var metadata runtime.ServerMetadata

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["namespace"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
	}

	protoReq.Namespace, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
	}

	val, ok = pathParams["name"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
	}

	protoReq.Name, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
	}

	msg, err := client.GetInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err

}

func local_request_InferenceService_GetInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq InferenceServiceIdentifier
	var metadata runtime.ServerMetadata

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["namespace"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
	}

	protoReq.Namespace, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
	}

	val, ok = pathParams["name"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
	}

	protoReq.Name, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
	}

	msg, err := server.GetInferenceService(ctx, &protoReq)
	return msg, metadata, err

}

func request_InferenceService_DeleteInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq InferenceServiceIdentifier
	var metadata runtime.ServerMetadata

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["namespace"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
	}

	protoReq.Namespace, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
	}

	val, ok = pathParams["name"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
	}

	protoReq.Name, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
	}

	msg, err := client.DeleteInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err

}

func local_request_InferenceService_DeleteInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq InferenceServiceIdentifier
	var metadata runtime.ServerMetadata

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["namespace"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
	}

	protoReq.Namespace, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
	}

	val, ok = pathParams["name"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
	}

	protoReq.Name, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
	}

	msg, err := server.DeleteInferenceService(ctx, &protoReq)
	return msg, metadata, err

}

// RegisterInferenceServiceHandlerServer registers the http handlers for service InferenceService to "mux".
// UnaryRPC :call InferenceServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterInferenceServiceHandlerFromEndpoint instead.
func RegisterInferenceServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server InferenceServiceServer) error {

	mux.Handle("POST", pattern_InferenceService_CreateInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		var stream runtime.ServerTransportStream
		ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/CreateInferenceService")
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_InferenceService_CreateInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
		md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_InferenceService_CreateInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	mux.Handle("GET", pattern_InferenceService_GetInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		var stream runtime.ServerTransportStream
		ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/GetInferenceService")
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_InferenceService_GetInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
		md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_InferenceService_GetInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	mux.Handle("DELETE", pattern_InferenceService_DeleteInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		var stream runtime.ServerTransportStream
		ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/DeleteInferenceService")
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_InferenceService_DeleteInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
		md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_InferenceService_DeleteInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	return nil
}

||||
// RegisterInferenceServiceHandlerFromEndpoint is same as RegisterInferenceServiceHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterInferenceServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
	conn, err := grpc.Dial(endpoint, opts...)
	if err != nil {
		return err
	}
	defer func() {
		if err != nil {
			if cerr := conn.Close(); cerr != nil {
				grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
			}
			return
		}
		go func() {
			<-ctx.Done()
			if cerr := conn.Close(); cerr != nil {
				grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
			}
		}()
	}()

	return RegisterInferenceServiceHandler(ctx, mux, conn)
}

// RegisterInferenceServiceHandler registers the http handlers for service InferenceService to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterInferenceServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
	return RegisterInferenceServiceHandlerClient(ctx, mux, NewInferenceServiceClient(conn))
}
// RegisterInferenceServiceHandlerClient registers the http handlers for service InferenceService
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "InferenceServiceClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "InferenceServiceClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "InferenceServiceClient" to call the correct interceptors.
func RegisterInferenceServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client InferenceServiceClient) error {

	mux.Handle("POST", pattern_InferenceService_CreateInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/CreateInferenceService")
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := request_InferenceService_CreateInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_InferenceService_CreateInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	mux.Handle("GET", pattern_InferenceService_GetInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/GetInferenceService")
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := request_InferenceService_GetInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_InferenceService_GetInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	mux.Handle("DELETE", pattern_InferenceService_DeleteInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/DeleteInferenceService")
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := request_InferenceService_DeleteInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_InferenceService_DeleteInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	return nil
}
var (
	pattern_InferenceService_CreateInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3}, []string{"apis", "v1beta1", "namespace", "inferenceservice"}, ""))

	pattern_InferenceService_GetInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "inferenceservice", "name"}, ""))

	pattern_InferenceService_DeleteInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "inferenceservice", "name"}, ""))
)

var (
	forward_InferenceService_CreateInferenceService_0 = runtime.ForwardResponseMessage

	forward_InferenceService_GetInferenceService_0 = runtime.ForwardResponseMessage

	forward_InferenceService_DeleteInferenceService_0 = runtime.ForwardResponseMessage
)
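A hedged wiring sketch showing how these generated handlers are typically mounted behind an HTTP server; the grpc-gateway runtime import path and the ports are assumptions based on the generated code's conventions, not values taken from this diff.

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime" // assumed grpc-gateway v1 runtime package
	"google.golang.org/grpc"

	gen "github.com/onepanelio/core/api/gen"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithInsecure()}
	// "localhost:8887" is an assumed gRPC backend address for illustration.
	if err := gen.RegisterInferenceServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
		log.Fatal(err)
	}
	log.Fatal(http.ListenAndServe(":8888", mux))
}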
170 api/gen/inference_service_grpc.pb.go (new file)
@@ -0,0 +1,170 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.

package gen

import (
	context "context"
	grpc "google.golang.org/grpc"
	codes "google.golang.org/grpc/codes"
	status "google.golang.org/grpc/status"
	emptypb "google.golang.org/protobuf/types/known/emptypb"
)

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion7

// InferenceServiceClient is the client API for InferenceService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type InferenceServiceClient interface {
	CreateInferenceService(ctx context.Context, in *CreateInferenceServiceRequest, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error)
	GetInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error)
	DeleteInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*emptypb.Empty, error)
}

type inferenceServiceClient struct {
	cc grpc.ClientConnInterface
}

func NewInferenceServiceClient(cc grpc.ClientConnInterface) InferenceServiceClient {
	return &inferenceServiceClient{cc}
}

func (c *inferenceServiceClient) CreateInferenceService(ctx context.Context, in *CreateInferenceServiceRequest, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error) {
	out := new(GetInferenceServiceResponse)
	err := c.cc.Invoke(ctx, "/api.InferenceService/CreateInferenceService", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *inferenceServiceClient) GetInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error) {
	out := new(GetInferenceServiceResponse)
	err := c.cc.Invoke(ctx, "/api.InferenceService/GetInferenceService", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *inferenceServiceClient) DeleteInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*emptypb.Empty, error) {
	out := new(emptypb.Empty)
	err := c.cc.Invoke(ctx, "/api.InferenceService/DeleteInferenceService", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// InferenceServiceServer is the server API for InferenceService service.
// All implementations must embed UnimplementedInferenceServiceServer
// for forward compatibility
type InferenceServiceServer interface {
	CreateInferenceService(context.Context, *CreateInferenceServiceRequest) (*GetInferenceServiceResponse, error)
	GetInferenceService(context.Context, *InferenceServiceIdentifier) (*GetInferenceServiceResponse, error)
	DeleteInferenceService(context.Context, *InferenceServiceIdentifier) (*emptypb.Empty, error)
	mustEmbedUnimplementedInferenceServiceServer()
}

// UnimplementedInferenceServiceServer must be embedded to have forward compatible implementations.
type UnimplementedInferenceServiceServer struct {
}

func (UnimplementedInferenceServiceServer) CreateInferenceService(context.Context, *CreateInferenceServiceRequest) (*GetInferenceServiceResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method CreateInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) GetInferenceService(context.Context, *InferenceServiceIdentifier) (*GetInferenceServiceResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) DeleteInferenceService(context.Context, *InferenceServiceIdentifier) (*emptypb.Empty, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeleteInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) mustEmbedUnimplementedInferenceServiceServer() {}

// UnsafeInferenceServiceServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to InferenceServiceServer will
// result in compilation errors.
type UnsafeInferenceServiceServer interface {
	mustEmbedUnimplementedInferenceServiceServer()
}

func RegisterInferenceServiceServer(s grpc.ServiceRegistrar, srv InferenceServiceServer) {
	s.RegisterService(&_InferenceService_serviceDesc, srv)
}

func _InferenceService_CreateInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(CreateInferenceServiceRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(InferenceServiceServer).CreateInferenceService(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.InferenceService/CreateInferenceService",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(InferenceServiceServer).CreateInferenceService(ctx, req.(*CreateInferenceServiceRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _InferenceService_GetInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(InferenceServiceIdentifier)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(InferenceServiceServer).GetInferenceService(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.InferenceService/GetInferenceService",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(InferenceServiceServer).GetInferenceService(ctx, req.(*InferenceServiceIdentifier))
	}
	return interceptor(ctx, in, info, handler)
}

func _InferenceService_DeleteInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(InferenceServiceIdentifier)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(InferenceServiceServer).DeleteInferenceService(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.InferenceService/DeleteInferenceService",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(InferenceServiceServer).DeleteInferenceService(ctx, req.(*InferenceServiceIdentifier))
	}
	return interceptor(ctx, in, info, handler)
}

var _InferenceService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "api.InferenceService",
	HandlerType: (*InferenceServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "CreateInferenceService",
			Handler:    _InferenceService_CreateInferenceService_Handler,
		},
		{
			MethodName: "GetInferenceService",
			Handler:    _InferenceService_GetInferenceService_Handler,
		},
		{
			MethodName: "DeleteInferenceService",
			Handler:    _InferenceService_DeleteInferenceService_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "inference_service.proto",
}
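The generated interface above is satisfied by embedding UnimplementedInferenceServiceServer; what follows is a hypothetical implementation-and-registration sketch, not code from this diff. The package name and the delete logic are assumptions.

package server

import (
	"context"

	"google.golang.org/grpc"
	"google.golang.org/protobuf/types/known/emptypb"

	gen "github.com/onepanelio/core/api/gen"
)

// inferenceServer embeds the generated UnimplementedInferenceServiceServer,
// which satisfies mustEmbedUnimplementedInferenceServiceServer and keeps the
// type forward compatible when new RPCs are added to the proto.
type inferenceServer struct {
	gen.UnimplementedInferenceServiceServer
}

func (s *inferenceServer) DeleteInferenceService(ctx context.Context, req *gen.InferenceServiceIdentifier) (*emptypb.Empty, error) {
	// Delete the underlying inference resource for req.Namespace/req.Name here.
	return &emptypb.Empty{}, nil
}

func register(s *grpc.Server) {
	// *grpc.Server satisfies the grpc.ServiceRegistrar interface expected by
	// the generated RegisterInferenceServiceServer.
	gen.RegisterInferenceServiceServer(s, &inferenceServer{})
}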
api/gen/namespace.pb.go
@@ -220,7 +220,8 @@ type Namespace struct {
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Name       string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	SourceName string `protobuf:"bytes,2,opt,name=sourceName,proto3" json:"sourceName,omitempty"`
}

func (x *Namespace) Reset() {
@@ -262,6 +263,13 @@ func (x *Namespace) GetName() string {
	return ""
}

func (x *Namespace) GetSourceName() string {
	if x != nil {
		return x.SourceName
	}
	return ""
}

var File_namespace_proto protoreflect.FileDescriptor

var file_namespace_proto_rawDesc = []byte{
@@ -289,9 +297,11 @@ var file_namespace_proto_rawDesc = []byte{
	0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65,
	0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70,
	0x69, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x09, 0x6e, 0x61, 0x6d,
	0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x1f, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70,
	0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x3f, 0x0a, 0x09, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70,
	0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
	0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x32, 0xec, 0x01, 0x0a, 0x10, 0x4e, 0x61, 0x6d, 0x65,
	0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63,
	0x65, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x6f, 0x75,
	0x72, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x32, 0xec, 0x01, 0x0a, 0x10, 0x4e, 0x61, 0x6d, 0x65,
	0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x6b, 0x0a, 0x0e,
	0x4c, 0x69, 0x73, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x1a,
	0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
api/gen/services.pb.go
@@ -136,6 +136,100 @@ func (x *GetServiceRequest) GetName() string {
	return ""
}

type HasServiceRequest struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
}

func (x *HasServiceRequest) Reset() {
	*x = HasServiceRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_services_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *HasServiceRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*HasServiceRequest) ProtoMessage() {}

func (x *HasServiceRequest) ProtoReflect() protoreflect.Message {
	mi := &file_services_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use HasServiceRequest.ProtoReflect.Descriptor instead.
func (*HasServiceRequest) Descriptor() ([]byte, []int) {
	return file_services_proto_rawDescGZIP(), []int{2}
}

func (x *HasServiceRequest) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

type HasServiceResponse struct {
	state         protoimpl.MessageState
	sizeCache     protoimpl.SizeCache
	unknownFields protoimpl.UnknownFields

	HasService bool `protobuf:"varint,1,opt,name=hasService,proto3" json:"hasService,omitempty"`
}

func (x *HasServiceResponse) Reset() {
	*x = HasServiceResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_services_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

func (x *HasServiceResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

func (*HasServiceResponse) ProtoMessage() {}

func (x *HasServiceResponse) ProtoReflect() protoreflect.Message {
	mi := &file_services_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use HasServiceResponse.ProtoReflect.Descriptor instead.
func (*HasServiceResponse) Descriptor() ([]byte, []int) {
	return file_services_proto_rawDescGZIP(), []int{3}
}

func (x *HasServiceResponse) GetHasService() bool {
	if x != nil {
		return x.HasService
	}
	return false
}

type ListServicesRequest struct {
	state     protoimpl.MessageState
	sizeCache protoimpl.SizeCache
@@ -149,7 +243,7 @@ type ListServicesRequest struct {
func (x *ListServicesRequest) Reset() {
	*x = ListServicesRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_services_proto_msgTypes[2]
		mi := &file_services_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
@@ -162,7 +256,7 @@ func (x *ListServicesRequest) String() string {
func (*ListServicesRequest) ProtoMessage() {}

func (x *ListServicesRequest) ProtoReflect() protoreflect.Message {
	mi := &file_services_proto_msgTypes[2]
	mi := &file_services_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
@@ -175,7 +269,7 @@ func (x *ListServicesRequest) ProtoReflect() protoreflect.Message {

// Deprecated: Use ListServicesRequest.ProtoReflect.Descriptor instead.
func (*ListServicesRequest) Descriptor() ([]byte, []int) {
	return file_services_proto_rawDescGZIP(), []int{2}
	return file_services_proto_rawDescGZIP(), []int{4}
}

func (x *ListServicesRequest) GetNamespace() string {
@@ -214,7 +308,7 @@ type ListServicesResponse struct {
func (x *ListServicesResponse) Reset() {
	*x = ListServicesResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_services_proto_msgTypes[3]
		mi := &file_services_proto_msgTypes[5]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
@@ -227,7 +321,7 @@ func (x *ListServicesResponse) String() string {
func (*ListServicesResponse) ProtoMessage() {}

func (x *ListServicesResponse) ProtoReflect() protoreflect.Message {
	mi := &file_services_proto_msgTypes[3]
	mi := &file_services_proto_msgTypes[5]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
@@ -240,7 +334,7 @@ func (x *ListServicesResponse) ProtoReflect() protoreflect.Message {

// Deprecated: Use ListServicesResponse.ProtoReflect.Descriptor instead.
func (*ListServicesResponse) Descriptor() ([]byte, []int) {
	return file_services_proto_rawDescGZIP(), []int{3}
	return file_services_proto_rawDescGZIP(), []int{5}
}

func (x *ListServicesResponse) GetCount() int32 {
@@ -291,41 +385,53 @@ var file_services_proto_rawDesc = []byte{
	0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d,
	0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61,
	0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18,
	0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x63, 0x0a, 0x13, 0x4c,
	0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65,
	0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18,
	0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
	0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01,
	0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x12, 0x0a, 0x04,
	0x70, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65,
	0x22, 0xa0, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
	0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75,
	0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12,
	0x28, 0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28,
	0x0b, 0x32, 0x0c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52,
	0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67,
	0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x14, 0x0a,
	0x05, 0x70, 0x61, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x70, 0x61,
	0x67, 0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x43, 0x6f, 0x75, 0x6e,
	0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x43, 0x6f,
	0x75, 0x6e, 0x74, 0x32, 0xe6, 0x01, 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x53,
	0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x64, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72,
	0x76, 0x69, 0x63, 0x65, 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65,
	0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x61,
	0x70, 0x69, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x30, 0x82, 0xd3, 0xe4, 0x93,
	0x02, 0x2a, 0x12, 0x28, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
	0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x73, 0x65,
	0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x6e, 0x0a, 0x0c,
	0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x18, 0x2e, 0x61,
	0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52,
	0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73,
	0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
	0x65, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x12, 0x21, 0x2f, 0x61, 0x70, 0x69, 0x73,
	0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
	0x61, 0x63, 0x65, 0x7d, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x42, 0x24, 0x5a, 0x22,
	0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61,
	0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67,
	0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
	0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x27, 0x0a, 0x11, 0x48,
	0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
	0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
	0x6e, 0x61, 0x6d, 0x65, 0x22, 0x34, 0x0a, 0x12, 0x48, 0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69,
	0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x68, 0x61,
	0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a,
	0x68, 0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x63, 0x0a, 0x13, 0x4c, 0x69,
	0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
	0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01,
	0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12,
	0x1a, 0x0a, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
	0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70,
	0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x22,
	0xa0, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73,
	0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e,
	0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x28,
	0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b,
	0x32, 0x0c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x08,
	0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67, 0x65,
	0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05,
	0x70, 0x61, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x70, 0x61, 0x67,
	0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x43, 0x6f, 0x75, 0x6e, 0x74,
	0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x43, 0x6f, 0x75,
	0x6e, 0x74, 0x32, 0xca, 0x02, 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x53, 0x65,
	0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x64, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76,
	0x69, 0x63, 0x65, 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72,
	0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x61, 0x70,
	0x69, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x30, 0x82, 0xd3, 0xe4, 0x93, 0x02,
	0x2a, 0x12, 0x28, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
	0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x73, 0x65, 0x72,
	0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x6e, 0x0a, 0x0c, 0x4c,
	0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x18, 0x2e, 0x61, 0x70,
	0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65,
	0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74,
	0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
	0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x12, 0x21, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
	0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
	0x63, 0x65, 0x7d, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x62, 0x0a, 0x0a, 0x48,
	0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e,
	0x48, 0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
	0x74, 0x1a, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69,
	0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x23, 0x82, 0xd3, 0xe4, 0x93,
	0x02, 0x1d, 0x12, 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
	0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x42,
	0x24, 0x5a, 0x22, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e,
	0x65, 0x70, 0x61, 0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70,
	0x69, 0x2f, 0x67, 0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}

var (
@@ -340,21 +446,25 @@ func file_services_proto_rawDescGZIP() []byte {
	return file_services_proto_rawDescData
}

var file_services_proto_msgTypes = make([]protoimpl.MessageInfo, 4)
var file_services_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
var file_services_proto_goTypes = []interface{}{
	(*Service)(nil),              // 0: api.Service
	(*GetServiceRequest)(nil),    // 1: api.GetServiceRequest
	(*ListServicesRequest)(nil),  // 2: api.ListServicesRequest
	(*ListServicesResponse)(nil), // 3: api.ListServicesResponse
	(*HasServiceRequest)(nil),    // 2: api.HasServiceRequest
	(*HasServiceResponse)(nil),   // 3: api.HasServiceResponse
	(*ListServicesRequest)(nil),  // 4: api.ListServicesRequest
	(*ListServicesResponse)(nil), // 5: api.ListServicesResponse
}
var file_services_proto_depIdxs = []int32{
	0, // 0: api.ListServicesResponse.services:type_name -> api.Service
	1, // 1: api.ServiceService.GetService:input_type -> api.GetServiceRequest
	2, // 2: api.ServiceService.ListServices:input_type -> api.ListServicesRequest
	0, // 3: api.ServiceService.GetService:output_type -> api.Service
	3, // 4: api.ServiceService.ListServices:output_type -> api.ListServicesResponse
	3, // [3:5] is the sub-list for method output_type
	1, // [1:3] is the sub-list for method input_type
	4, // 2: api.ServiceService.ListServices:input_type -> api.ListServicesRequest
	2, // 3: api.ServiceService.HasService:input_type -> api.HasServiceRequest
	0, // 4: api.ServiceService.GetService:output_type -> api.Service
	5, // 5: api.ServiceService.ListServices:output_type -> api.ListServicesResponse
	3, // 6: api.ServiceService.HasService:output_type -> api.HasServiceResponse
	4, // [4:7] is the sub-list for method output_type
	1, // [1:4] is the sub-list for method input_type
	1, // [1:1] is the sub-list for extension type_name
	1, // [1:1] is the sub-list for extension extendee
	0, // [0:1] is the sub-list for field type_name
@@ -391,7 +501,7 @@ func file_services_proto_init() {
		}
	}
	file_services_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
		switch v := v.(*ListServicesRequest); i {
		switch v := v.(*HasServiceRequest); i {
		case 0:
			return &v.state
		case 1:
@@ -403,6 +513,30 @@ func file_services_proto_init() {
		}
	}
	file_services_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
		switch v := v.(*HasServiceResponse); i {
		case 0:
			return &v.state
		case 1:
			return &v.sizeCache
		case 2:
			return &v.unknownFields
		default:
			return nil
		}
	}
	file_services_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
		switch v := v.(*ListServicesRequest); i {
		case 0:
			return &v.state
		case 1:
			return &v.sizeCache
		case 2:
			return &v.unknownFields
		default:
			return nil
		}
	}
	file_services_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
		switch v := v.(*ListServicesResponse); i {
		case 0:
			return &v.state
@@ -421,7 +555,7 @@ func file_services_proto_init() {
		GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
		RawDescriptor: file_services_proto_rawDesc,
		NumEnums:      0,
		NumMessages:   4,
		NumMessages:   6,
		NumExtensions: 0,
		NumServices:   1,
	},
api/gen/services.pb.gw.go
@@ -173,6 +173,58 @@ func local_request_ServiceService_ListServices_0(ctx context.Context, marshaler

}

func request_ServiceService_HasService_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq HasServiceRequest
	var metadata runtime.ServerMetadata

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["name"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
	}

	protoReq.Name, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
	}

	msg, err := client.HasService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err

}

func local_request_ServiceService_HasService_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq HasServiceRequest
	var metadata runtime.ServerMetadata

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["name"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
	}

	protoReq.Name, err = runtime.String(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
	}

	msg, err := server.HasService(ctx, &protoReq)
	return msg, metadata, err

}

// RegisterServiceServiceHandlerServer registers the http handlers for service ServiceService to "mux".
// UnaryRPC :call ServiceServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
@@ -225,6 +277,29 @@ func RegisterServiceServiceHandlerServer(ctx context.Context, mux *runtime.Serve

	})

	mux.Handle("GET", pattern_ServiceService_HasService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		var stream runtime.ServerTransportStream
		ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ServiceService/HasService")
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_ServiceService_HasService_0(rctx, inboundMarshaler, server, req, pathParams)
		md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_ServiceService_HasService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	return nil
}

@@ -306,6 +381,26 @@ func RegisterServiceServiceHandlerClient(ctx context.Context, mux *runtime.Serve

	})

	mux.Handle("GET", pattern_ServiceService_HasService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.ServiceService/HasService")
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := request_ServiceService_HasService_0(rctx, inboundMarshaler, client, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_ServiceService_HasService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	return nil
}

@@ -313,10 +408,14 @@ var (
	pattern_ServiceService_GetService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "service", "name"}, ""))

	pattern_ServiceService_ListServices_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3}, []string{"apis", "v1beta1", "namespace", "service"}, ""))

	pattern_ServiceService_HasService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta", "service", "name"}, ""))
)

var (
	forward_ServiceService_GetService_0 = runtime.ForwardResponseMessage

	forward_ServiceService_ListServices_0 = runtime.ForwardResponseMessage

	forward_ServiceService_HasService_0 = runtime.ForwardResponseMessage
)
api/gen/services_grpc.pb.go
@@ -19,6 +19,7 @@ const _ = grpc.SupportPackageIsVersion7
type ServiceServiceClient interface {
	GetService(ctx context.Context, in *GetServiceRequest, opts ...grpc.CallOption) (*Service, error)
	ListServices(ctx context.Context, in *ListServicesRequest, opts ...grpc.CallOption) (*ListServicesResponse, error)
	HasService(ctx context.Context, in *HasServiceRequest, opts ...grpc.CallOption) (*HasServiceResponse, error)
}

type serviceServiceClient struct {
@@ -47,12 +48,22 @@ func (c *serviceServiceClient) ListServices(ctx context.Context, in *ListService
	return out, nil
}

func (c *serviceServiceClient) HasService(ctx context.Context, in *HasServiceRequest, opts ...grpc.CallOption) (*HasServiceResponse, error) {
	out := new(HasServiceResponse)
	err := c.cc.Invoke(ctx, "/api.ServiceService/HasService", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

// ServiceServiceServer is the server API for ServiceService service.
// All implementations must embed UnimplementedServiceServiceServer
// for forward compatibility
type ServiceServiceServer interface {
	GetService(context.Context, *GetServiceRequest) (*Service, error)
	ListServices(context.Context, *ListServicesRequest) (*ListServicesResponse, error)
	HasService(context.Context, *HasServiceRequest) (*HasServiceResponse, error)
	mustEmbedUnimplementedServiceServiceServer()
}

@@ -66,6 +77,9 @@ func (UnimplementedServiceServiceServer) GetService(context.Context, *GetService
func (UnimplementedServiceServiceServer) ListServices(context.Context, *ListServicesRequest) (*ListServicesResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ListServices not implemented")
}
func (UnimplementedServiceServiceServer) HasService(context.Context, *HasServiceRequest) (*HasServiceResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method HasService not implemented")
}
func (UnimplementedServiceServiceServer) mustEmbedUnimplementedServiceServiceServer() {}

// UnsafeServiceServiceServer may be embedded to opt out of forward compatibility for this service.
@@ -115,6 +129,24 @@ func _ServiceService_ListServices_Handler(srv interface{}, ctx context.Context,
	return interceptor(ctx, in, info, handler)
}

func _ServiceService_HasService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(HasServiceRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ServiceServiceServer).HasService(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/api.ServiceService/HasService",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(ServiceServiceServer).HasService(ctx, req.(*HasServiceRequest))
	}
	return interceptor(ctx, in, info, handler)
}

var _ServiceService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "api.ServiceService",
	HandlerType: (*ServiceServiceServer)(nil),
@@ -127,6 +159,10 @@ var _ServiceService_serviceDesc = grpc.ServiceDesc{
			MethodName: "ListServices",
			Handler:    _ServiceService_ListServices_Handler,
		},
		{
			MethodName: "HasService",
			Handler:    _ServiceService_HasService_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "services.proto",
File diff suppressed because it is too large
@@ -709,190 +709,6 @@ func local_request_WorkflowService_TerminateWorkflowExecution_0(ctx context.Cont
|
||||
|
||||
}
|
||||
|
||||
func request_WorkflowService_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
|
||||
var protoReq GetArtifactRequest
|
||||
var metadata runtime.ServerMetadata
|
||||
|
||||
var (
|
||||
val string
|
||||
ok bool
|
||||
err error
|
||||
_ = err
|
||||
)
|
||||
|
||||
val, ok = pathParams["namespace"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
|
||||
}
|
||||
|
||||
protoReq.Namespace, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
|
||||
}
|
||||
|
||||
val, ok = pathParams["uid"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
|
||||
}
|
||||
|
||||
protoReq.Uid, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
|
||||
}
|
||||
|
||||
val, ok = pathParams["key"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "key")
|
||||
}
|
||||
|
||||
protoReq.Key, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "key", err)
|
||||
}
|
||||
|
||||
msg, err := client.GetArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
|
||||
return msg, metadata, err
|
||||
|
||||
}
|
||||
|
||||
func local_request_WorkflowService_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, server WorkflowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
|
||||
var protoReq GetArtifactRequest
|
||||
var metadata runtime.ServerMetadata
|
||||
|
||||
var (
|
||||
val string
|
||||
ok bool
|
||||
err error
|
||||
_ = err
|
||||
)
|
||||
|
||||
val, ok = pathParams["namespace"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
|
||||
}
|
||||
|
||||
protoReq.Namespace, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
|
||||
}
|
||||
|
||||
val, ok = pathParams["uid"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
|
||||
}
|
||||
|
||||
protoReq.Uid, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
|
||||
}
|
||||
|
||||
val, ok = pathParams["key"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "key")
|
||||
}
|
||||
|
||||
protoReq.Key, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "key", err)
|
||||
}
|
||||
|
||||
msg, err := server.GetArtifact(ctx, &protoReq)
|
||||
return msg, metadata, err
|
||||
|
||||
}
|
||||
|
||||
func request_WorkflowService_ListFiles_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
|
||||
var protoReq ListFilesRequest
|
||||
var metadata runtime.ServerMetadata
|
||||
|
||||
var (
|
||||
val string
|
||||
ok bool
|
||||
err error
|
||||
_ = err
|
||||
)
|
||||
|
||||
val, ok = pathParams["namespace"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
|
||||
}
|
||||
|
||||
protoReq.Namespace, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
|
||||
}
|
||||
|
||||
val, ok = pathParams["uid"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
|
||||
}
|
||||
|
||||
protoReq.Uid, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
|
||||
}
|
||||
|
||||
val, ok = pathParams["path"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "path")
|
||||
}
|
||||
|
||||
protoReq.Path, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "path", err)
|
||||
}
|
||||
|
||||
msg, err := client.ListFiles(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
|
||||
return msg, metadata, err
|
||||
|
||||
}
|
||||
|
||||
func local_request_WorkflowService_ListFiles_0(ctx context.Context, marshaler runtime.Marshaler, server WorkflowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
|
||||
var protoReq ListFilesRequest
|
||||
var metadata runtime.ServerMetadata
|
||||
|
||||
var (
|
||||
val string
|
||||
ok bool
|
||||
err error
|
||||
_ = err
|
||||
)
|
||||
|
||||
val, ok = pathParams["namespace"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
|
||||
}
|
||||
|
||||
protoReq.Namespace, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
|
||||
}
|
||||
|
||||
val, ok = pathParams["uid"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
|
||||
}
|
||||
|
||||
protoReq.Uid, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
|
||||
}
|
||||
|
||||
val, ok = pathParams["path"]
|
||||
if !ok {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "path")
|
||||
}
|
||||
|
||||
protoReq.Path, err = runtime.String(val)
|
||||
if err != nil {
|
||||
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "path", err)
|
||||
}
|
||||
|
||||
msg, err := server.ListFiles(ctx, &protoReq)
|
||||
return msg, metadata, err
|
||||
|
||||
}

func request_WorkflowService_AddWorkflowExecutionStatistics_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
    var protoReq AddWorkflowExecutionStatisticRequest
    var metadata runtime.ServerMetadata
@@ -1609,52 +1425,6 @@ func RegisterWorkflowServiceHandlerServer(ctx context.Context, mux *runtime.Serv

    })

    mux.Handle("GET", pattern_WorkflowService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
        var stream runtime.ServerTransportStream
        ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
        inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
        rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.WorkflowService/GetArtifact")
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }
        resp, md, err := local_request_WorkflowService_GetArtifact_0(rctx, inboundMarshaler, server, req, pathParams)
        md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
        ctx = runtime.NewServerMetadataContext(ctx, md)
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }

        forward_WorkflowService_GetArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

    })

    mux.Handle("GET", pattern_WorkflowService_ListFiles_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
        var stream runtime.ServerTransportStream
        ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
        inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
        rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.WorkflowService/ListFiles")
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }
        resp, md, err := local_request_WorkflowService_ListFiles_0(rctx, inboundMarshaler, server, req, pathParams)
        md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
        ctx = runtime.NewServerMetadataContext(ctx, md)
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }

        forward_WorkflowService_ListFiles_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

    })

    mux.Handle("POST", pattern_WorkflowService_AddWorkflowExecutionStatistics_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
@@ -2034,46 +1804,6 @@ func RegisterWorkflowServiceHandlerClient(ctx context.Context, mux *runtime.Serv

    })

    mux.Handle("GET", pattern_WorkflowService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
        inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
        rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkflowService/GetArtifact")
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }
        resp, md, err := request_WorkflowService_GetArtifact_0(rctx, inboundMarshaler, client, req, pathParams)
        ctx = runtime.NewServerMetadataContext(ctx, md)
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }

        forward_WorkflowService_GetArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

    })

    mux.Handle("GET", pattern_WorkflowService_ListFiles_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
        inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
        rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkflowService/ListFiles")
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }
        resp, md, err := request_WorkflowService_ListFiles_0(rctx, inboundMarshaler, client, req, pathParams)
        ctx = runtime.NewServerMetadataContext(ctx, md)
        if err != nil {
            runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
            return
        }

        forward_WorkflowService_ListFiles_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

    })

    mux.Handle("POST", pattern_WorkflowService_AddWorkflowExecutionStatistics_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
        ctx, cancel := context.WithCancel(req.Context())
        defer cancel()
@@ -2218,10 +1948,6 @@ var (

    pattern_WorkflowService_TerminateWorkflowExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "terminate"}, ""))

    pattern_WorkflowService_GetArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 3, 0, 4, 1, 5, 6}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "artifacts", "key"}, ""))

    pattern_WorkflowService_ListFiles_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 3, 0, 4, 1, 5, 6}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "files", "path"}, ""))

    pattern_WorkflowService_AddWorkflowExecutionStatistics_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "statistics"}, ""))

    pattern_WorkflowService_CronStartWorkflowExecutionStatistic_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "cron_start_statistics"}, ""))
@@ -2256,10 +1982,6 @@ var (

    forward_WorkflowService_TerminateWorkflowExecution_0 = runtime.ForwardResponseMessage

    forward_WorkflowService_GetArtifact_0 = runtime.ForwardResponseMessage

    forward_WorkflowService_ListFiles_0 = runtime.ForwardResponseMessage

    forward_WorkflowService_AddWorkflowExecutionStatistics_0 = runtime.ForwardResponseMessage

    forward_WorkflowService_CronStartWorkflowExecutionStatistic_0 = runtime.ForwardResponseMessage
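The `pattern_*` variables encode each HTTP route (method, path segments, and which segments bind to request fields), and the `forward_*` variables choose how responses are written back. A minimal sketch of serving this generated plumbing, assuming the generated package lives at `github.com/onepanelio/core/api/gen` (the go_package visible in the descriptor bytes below) and that the usual generated `NewWorkflowServiceClient` constructor exists; both addresses are placeholders.

```go
package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"

	api "github.com/onepanelio/core/api/gen" // import path is an assumption
)

func main() {
	ctx := context.Background()

	// Dial the gRPC port and expose the REST mapping on the HTTP port.
	conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	mux := runtime.NewServeMux()
	if err := api.RegisterWorkflowServiceHandlerClient(ctx, mux, api.NewWorkflowServiceClient(conn)); err != nil {
		log.Fatal(err)
	}

	log.Fatal(http.ListenAndServe(":8888", mux))
}
```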

@@ -30,8 +30,6 @@ type WorkflowServiceClient interface {
    GetWorkflowExecutionMetrics(ctx context.Context, in *GetWorkflowExecutionMetricsRequest, opts ...grpc.CallOption) (*GetWorkflowExecutionMetricsResponse, error)
    ResubmitWorkflowExecution(ctx context.Context, in *ResubmitWorkflowExecutionRequest, opts ...grpc.CallOption) (*WorkflowExecution, error)
    TerminateWorkflowExecution(ctx context.Context, in *TerminateWorkflowExecutionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
    GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*ArtifactResponse, error)
    ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error)
    AddWorkflowExecutionStatistics(ctx context.Context, in *AddWorkflowExecutionStatisticRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
    CronStartWorkflowExecutionStatistic(ctx context.Context, in *CronStartWorkflowExecutionStatisticRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
    UpdateWorkflowExecutionStatus(ctx context.Context, in *UpdateWorkflowExecutionStatusRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
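On the pure-gRPC side the same methods are reachable through this generated client interface. A sketch, assuming the standard generated `NewWorkflowServiceClient` constructor, the `github.com/onepanelio/core/api/gen` import path, and that `ListFilesRequest` carries `Namespace`, `Uid` and `Path` to match the gateway path parameters above:

```go
package example

import (
	"context"
	"log"

	"google.golang.org/grpc"

	api "github.com/onepanelio/core/api/gen" // import path is an assumption
)

// listFiles calls WorkflowService.ListFiles over an existing gRPC connection.
// Field names mirror the gateway path parameters; values are placeholders.
func listFiles(ctx context.Context, conn *grpc.ClientConn) error {
	client := api.NewWorkflowServiceClient(conn)

	resp, err := client.ListFiles(ctx, &api.ListFilesRequest{
		Namespace: "default",
		Uid:       "my-workflow",
		Path:      "output/",
	})
	if err != nil {
		return err
	}

	log.Printf("ListFiles response: %+v", resp)
	return nil
}
```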
@@ -184,24 +182,6 @@ func (c *workflowServiceClient) TerminateWorkflowExecution(ctx context.Context,
    return out, nil
}

func (c *workflowServiceClient) GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*ArtifactResponse, error) {
    out := new(ArtifactResponse)
    err := c.cc.Invoke(ctx, "/api.WorkflowService/GetArtifact", in, out, opts...)
    if err != nil {
        return nil, err
    }
    return out, nil
}

func (c *workflowServiceClient) ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error) {
    out := new(ListFilesResponse)
    err := c.cc.Invoke(ctx, "/api.WorkflowService/ListFiles", in, out, opts...)
    if err != nil {
        return nil, err
    }
    return out, nil
}

func (c *workflowServiceClient) AddWorkflowExecutionStatistics(ctx context.Context, in *AddWorkflowExecutionStatisticRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
    out := new(emptypb.Empty)
    err := c.cc.Invoke(ctx, "/api.WorkflowService/AddWorkflowExecutionStatistics", in, out, opts...)
@@ -272,8 +252,6 @@ type WorkflowServiceServer interface {
    GetWorkflowExecutionMetrics(context.Context, *GetWorkflowExecutionMetricsRequest) (*GetWorkflowExecutionMetricsResponse, error)
    ResubmitWorkflowExecution(context.Context, *ResubmitWorkflowExecutionRequest) (*WorkflowExecution, error)
    TerminateWorkflowExecution(context.Context, *TerminateWorkflowExecutionRequest) (*emptypb.Empty, error)
    GetArtifact(context.Context, *GetArtifactRequest) (*ArtifactResponse, error)
    ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error)
    AddWorkflowExecutionStatistics(context.Context, *AddWorkflowExecutionStatisticRequest) (*emptypb.Empty, error)
    CronStartWorkflowExecutionStatistic(context.Context, *CronStartWorkflowExecutionStatisticRequest) (*emptypb.Empty, error)
    UpdateWorkflowExecutionStatus(context.Context, *UpdateWorkflowExecutionStatusRequest) (*emptypb.Empty, error)
@@ -317,12 +295,6 @@ func (UnimplementedWorkflowServiceServer) ResubmitWorkflowExecution(context.Cont
func (UnimplementedWorkflowServiceServer) TerminateWorkflowExecution(context.Context, *TerminateWorkflowExecutionRequest) (*emptypb.Empty, error) {
    return nil, status.Errorf(codes.Unimplemented, "method TerminateWorkflowExecution not implemented")
}
func (UnimplementedWorkflowServiceServer) GetArtifact(context.Context, *GetArtifactRequest) (*ArtifactResponse, error) {
    return nil, status.Errorf(codes.Unimplemented, "method GetArtifact not implemented")
}
func (UnimplementedWorkflowServiceServer) ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error) {
    return nil, status.Errorf(codes.Unimplemented, "method ListFiles not implemented")
}
func (UnimplementedWorkflowServiceServer) AddWorkflowExecutionStatistics(context.Context, *AddWorkflowExecutionStatisticRequest) (*emptypb.Empty, error) {
    return nil, status.Errorf(codes.Unimplemented, "method AddWorkflowExecutionStatistics not implemented")
}
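Embedding `UnimplementedWorkflowServiceServer` is what keeps hand-written servers forward-compatible: every method you do not override answers with `codes.Unimplemented`, as the stubs above show. A minimal sketch of overriding just `ListFiles`; the response is left empty because its fields are not shown in this diff, and the import path is an assumption.

```go
package example

import (
	"context"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	api "github.com/onepanelio/core/api/gen" // import path is an assumption
)

// workflowServer overrides only ListFiles; every other method inherits the
// codes.Unimplemented stub from the embedded struct.
type workflowServer struct {
	api.UnimplementedWorkflowServiceServer
}

func (s *workflowServer) ListFiles(ctx context.Context, req *api.ListFilesRequest) (*api.ListFilesResponse, error) {
	if req.GetUid() == "" {
		return nil, status.Errorf(codes.InvalidArgument, "uid is required")
	}
	// Real file lookup goes here; response fields are elided because they
	// are not part of this diff.
	return &api.ListFilesResponse{}, nil
}
```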
@@ -540,42 +512,6 @@ func _WorkflowService_TerminateWorkflowExecution_Handler(srv interface{}, ctx co
    return interceptor(ctx, in, info, handler)
}

func _WorkflowService_GetArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
    in := new(GetArtifactRequest)
    if err := dec(in); err != nil {
        return nil, err
    }
    if interceptor == nil {
        return srv.(WorkflowServiceServer).GetArtifact(ctx, in)
    }
    info := &grpc.UnaryServerInfo{
        Server:     srv,
        FullMethod: "/api.WorkflowService/GetArtifact",
    }
    handler := func(ctx context.Context, req interface{}) (interface{}, error) {
        return srv.(WorkflowServiceServer).GetArtifact(ctx, req.(*GetArtifactRequest))
    }
    return interceptor(ctx, in, info, handler)
}

func _WorkflowService_ListFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
    in := new(ListFilesRequest)
    if err := dec(in); err != nil {
        return nil, err
    }
    if interceptor == nil {
        return srv.(WorkflowServiceServer).ListFiles(ctx, in)
    }
    info := &grpc.UnaryServerInfo{
        Server:     srv,
        FullMethod: "/api.WorkflowService/ListFiles",
    }
    handler := func(ctx context.Context, req interface{}) (interface{}, error) {
        return srv.(WorkflowServiceServer).ListFiles(ctx, req.(*ListFilesRequest))
    }
    return interceptor(ctx, in, info, handler)
}
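Each `_Handler` above threads the call through an optional `grpc.UnaryServerInterceptor`, identifying itself via `FullMethod`. A sketch of a logging interceptor that would observe `/api.WorkflowService/GetArtifact` and `/api.WorkflowService/ListFiles` among the rest:

```go
package example

import (
	"context"
	"log"
	"time"

	"google.golang.org/grpc"
)

// loggingInterceptor times every unary RPC and logs its FullMethod,
// e.g. "/api.WorkflowService/ListFiles".
func loggingInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
	start := time.Now()
	resp, err := handler(ctx, req)
	log.Printf("%s took %s err=%v", info.FullMethod, time.Since(start), err)
	return resp, err
}

// Wired in at construction time:
//
//	srv := grpc.NewServer(grpc.UnaryInterceptor(loggingInterceptor))
```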

func _WorkflowService_AddWorkflowExecutionStatistics_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
    in := new(AddWorkflowExecutionStatisticRequest)
    if err := dec(in); err != nil {
@@ -720,14 +656,6 @@ var _WorkflowService_serviceDesc = grpc.ServiceDesc{
            MethodName: "TerminateWorkflowExecution",
            Handler:    _WorkflowService_TerminateWorkflowExecution_Handler,
        },
        {
            MethodName: "GetArtifact",
            Handler:    _WorkflowService_GetArtifact_Handler,
        },
        {
            MethodName: "ListFiles",
            Handler:    _WorkflowService_ListFiles_Handler,
        },
        {
            MethodName: "AddWorkflowExecutionStatistics",
            Handler:    _WorkflowService_AddWorkflowExecutionStatistics_Handler,

@@ -842,19 +842,20 @@ type WorkflowTemplate struct {
    sizeCache     protoimpl.SizeCache
    unknownFields protoimpl.UnknownFields

    CreatedAt  string                            `protobuf:"bytes,1,opt,name=createdAt,proto3" json:"createdAt,omitempty"`
    ModifiedAt string                            `protobuf:"bytes,2,opt,name=modifiedAt,proto3" json:"modifiedAt,omitempty"`
    Uid        string                            `protobuf:"bytes,3,opt,name=uid,proto3" json:"uid,omitempty"`
    Name       string                            `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
    Version    int64                             `protobuf:"varint,5,opt,name=version,proto3" json:"version,omitempty"`
    Versions   int64                             `protobuf:"varint,6,opt,name=versions,proto3" json:"versions,omitempty"`
    Manifest   string                            `protobuf:"bytes,7,opt,name=manifest,proto3" json:"manifest,omitempty"`
    IsLatest   bool                              `protobuf:"varint,8,opt,name=isLatest,proto3" json:"isLatest,omitempty"`
    IsArchived bool                              `protobuf:"varint,9,opt,name=isArchived,proto3" json:"isArchived,omitempty"`
    Labels     []*KeyValue                       `protobuf:"bytes,10,rep,name=labels,proto3" json:"labels,omitempty"`
    Stats      *WorkflowExecutionStatisticReport `protobuf:"bytes,11,opt,name=stats,proto3" json:"stats,omitempty"`
    CronStats  *CronWorkflowStatisticsReport     `protobuf:"bytes,12,opt,name=cronStats,proto3" json:"cronStats,omitempty"`
    Parameters []*Parameter                      `protobuf:"bytes,13,rep,name=parameters,proto3" json:"parameters,omitempty"`
    CreatedAt   string                            `protobuf:"bytes,1,opt,name=createdAt,proto3" json:"createdAt,omitempty"`
    ModifiedAt  string                            `protobuf:"bytes,2,opt,name=modifiedAt,proto3" json:"modifiedAt,omitempty"`
    Uid         string                            `protobuf:"bytes,3,opt,name=uid,proto3" json:"uid,omitempty"`
    Name        string                            `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
    Version     int64                             `protobuf:"varint,5,opt,name=version,proto3" json:"version,omitempty"`
    Versions    int64                             `protobuf:"varint,6,opt,name=versions,proto3" json:"versions,omitempty"`
    Manifest    string                            `protobuf:"bytes,7,opt,name=manifest,proto3" json:"manifest,omitempty"`
    IsLatest    bool                              `protobuf:"varint,8,opt,name=isLatest,proto3" json:"isLatest,omitempty"`
    IsArchived  bool                              `protobuf:"varint,9,opt,name=isArchived,proto3" json:"isArchived,omitempty"`
    Labels      []*KeyValue                       `protobuf:"bytes,10,rep,name=labels,proto3" json:"labels,omitempty"`
    Stats       *WorkflowExecutionStatisticReport `protobuf:"bytes,11,opt,name=stats,proto3" json:"stats,omitempty"`
    CronStats   *CronWorkflowStatisticsReport     `protobuf:"bytes,12,opt,name=cronStats,proto3" json:"cronStats,omitempty"`
    Parameters  []*Parameter                      `protobuf:"bytes,13,rep,name=parameters,proto3" json:"parameters,omitempty"`
    Description string                            `protobuf:"bytes,14,opt,name=description,proto3" json:"description,omitempty"`
}

func (x *WorkflowTemplate) Reset() {
@@ -980,6 +981,13 @@ func (x *WorkflowTemplate) GetParameters() []*Parameter {
    return nil
}

func (x *WorkflowTemplate) GetDescription() string {
    if x != nil {
        return x.Description
    }
    return ""
}
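`GetDescription` follows the usual nil-safe accessor convention, so the new field can be read without guarding against a nil template. A small illustrative sketch, with the import path assumed as elsewhere:

```go
package example

import api "github.com/onepanelio/core/api/gen" // import path is an assumption

// templateSummary is safe even when t is nil: generated getters return the
// field's zero value instead of panicking.
func templateSummary(t *api.WorkflowTemplate) string {
	if d := t.GetDescription(); d != "" {
		return t.GetName() + ": " + d
	}
	return t.GetName()
}
```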

type GetWorkflowTemplateLabelsRequest struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
@@ -1267,7 +1275,7 @@ var file_workflow_template_proto_rawDesc = []byte{
    [roughly 265 lines of regenerated file_workflow_template_proto_rawDesc bytes elided: the capture interleaved the old and new descriptor dumps without +/- markers; the regenerated bytes extend the WorkflowTemplate message length (0xd9, 0x03 -> 0xfb, 0x03) to encode the new description field, leave the WorkflowTemplateService routes unchanged, and end in the go_package option "github.com/onepanelio/core/api/gen"]
}

var (

@@ -297,6 +297,7 @@ type CreateWorkspaceBody struct {
    WorkspaceTemplateVersion int64        `protobuf:"varint,2,opt,name=workspaceTemplateVersion,proto3" json:"workspaceTemplateVersion,omitempty"`
    Parameters               []*Parameter `protobuf:"bytes,3,rep,name=parameters,proto3" json:"parameters,omitempty"`
    Labels                   []*KeyValue  `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty"`
    CaptureNode              bool         `protobuf:"varint,5,opt,name=captureNode,proto3" json:"captureNode,omitempty"`
}

func (x *CreateWorkspaceBody) Reset() {
@@ -359,6 +360,13 @@ func (x *CreateWorkspaceBody) GetLabels() []*KeyValue {
    return nil
}

func (x *CreateWorkspaceBody) GetCaptureNode() bool {
    if x != nil {
        return x.CaptureNode
    }
    return false
}
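`captureNode` is a proto3 bool (field 5), so `false` is the zero value and drops out of JSON via `captureNode,omitempty`; only an explicit `true` travels on the wire, which keeps older clients byte-compatible. A sketch of a create-workspace body that opts in, with placeholder values and an assumed import path:

```go
package example

import api "github.com/onepanelio/core/api/gen" // import path is an assumption

// newCaptureNodeBody opts a workspace into node capture; the uid and version
// are placeholders. Leaving CaptureNode at false serializes exactly like
// requests from clients built before this field existed.
func newCaptureNodeBody() *api.CreateWorkspaceBody {
	return &api.CreateWorkspaceBody{
		WorkspaceTemplateUid:     "jupyterlab", // placeholder uid
		WorkspaceTemplateVersion: 1,
		CaptureNode:              true,
	}
}
```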

type CreateWorkspaceRequest struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
@@ -1534,7 +1542,7 @@ var file_workspace_proto_rawDesc = []byte{
    [roughly 170 lines of regenerated file_workspace_proto_rawDesc bytes elided: the capture interleaved the old and new descriptor dumps without +/- markers and is cut off mid-array; the regenerated bytes extend the CreateWorkspaceBody message length (0xdc, 0x01 -> 0xfe, 0x01) to encode the new captureNode field]
|
||||
0x61, 0x73, 0x74, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x6c, 0x61,
|
||||
0x75, 0x6e, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x6c,
|
||||
0x61, 0x75, 0x6e, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x75, 0x6e, 0x6e,
|
||||
0x69, 0x6e, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x72, 0x75, 0x6e, 0x6e, 0x69,
|
||||
0x6e, 0x67, 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x18, 0x05,
|
||||
0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x12, 0x18,
|
||||
0x0a, 0x07, 0x70, 0x61, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52,
|
||||
0x07, 0x70, 0x61, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, 0x75, 0x73,
|
||||
0x65, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x70, 0x61, 0x75, 0x73, 0x65, 0x64,
|
||||
0x12, 0x20, 0x0a, 0x0b, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x18,
|
||||
0x08, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0b, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69,
|
||||
0x6e, 0x67, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64,
|
||||
0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74,
|
||||
0x65, 0x64, 0x12, 0x24, 0x0a, 0x0d, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x50, 0x61,
|
||||
0x75, 0x73, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x66, 0x61, 0x69, 0x6c, 0x65,
|
||||
0x64, 0x54, 0x6f, 0x50, 0x61, 0x75, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x66, 0x61, 0x69, 0x6c,
|
||||
0x65, 0x64, 0x54, 0x6f, 0x52, 0x65, 0x73, 0x75, 0x6d, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x05,
|
||||
0x52, 0x0e, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x52, 0x65, 0x73, 0x75, 0x6d, 0x65,
|
||||
0x12, 0x2c, 0x0a, 0x11, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x54, 0x65, 0x72, 0x6d,
|
||||
0x69, 0x6e, 0x61, 0x74, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x05, 0x52, 0x11, 0x66, 0x61, 0x69,
|
||||
0x6c, 0x65, 0x64, 0x54, 0x6f, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x12, 0x26,
|
||||
0x0a, 0x0e, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x4c, 0x61, 0x75, 0x6e, 0x63, 0x68,
|
||||
0x18, 0x0d, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f,
|
||||
0x4c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x12, 0x26, 0x0a, 0x0e, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64,
|
||||
0x54, 0x6f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e,
|
||||
0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x16,
|
||||
0x0a, 0x06, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06,
|
||||
0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x22, 0x49, 0x0a, 0x29, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73,
|
||||
0x46, 0x6f, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75,
|
||||
0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
|
||||
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03,
|
||||
0x75, 0x69, 0x64, 0x22, 0xfe, 0x03, 0x0a, 0x18, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74,
|
||||
0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52,
|
||||
0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x12, 0x20, 0x0a, 0x0b, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x72,
|
||||
0x65, 0x61, 0x74, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6c, 0x61, 0x73,
|
||||
0x74, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x6c, 0x61, 0x75, 0x6e,
|
||||
0x63, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x6c, 0x61, 0x75,
|
||||
0x6e, 0x63, 0x68, 0x69, 0x6e, 0x67, 0x12, 0x18, 0x0a, 0x07, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e,
|
||||
0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67,
|
||||
0x12, 0x1a, 0x0a, 0x08, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x18, 0x05, 0x20, 0x01,
|
||||
0x28, 0x05, 0x52, 0x08, 0x75, 0x70, 0x64, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x12, 0x18, 0x0a, 0x07,
|
||||
0x70, 0x61, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x70,
|
||||
0x61, 0x75, 0x73, 0x69, 0x6e, 0x67, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, 0x75, 0x73, 0x65, 0x64,
|
||||
0x18, 0x07, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x70, 0x61, 0x75, 0x73, 0x65, 0x64, 0x12, 0x20,
|
||||
0x0a, 0x0b, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x18, 0x08, 0x20,
|
||||
0x01, 0x28, 0x05, 0x52, 0x0b, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6e, 0x67,
|
||||
0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64, 0x18, 0x09,
|
||||
0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x64,
|
||||
0x12, 0x24, 0x0a, 0x0d, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x50, 0x61, 0x75, 0x73,
|
||||
0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54,
|
||||
0x6f, 0x50, 0x61, 0x75, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64,
|
||||
0x54, 0x6f, 0x52, 0x65, 0x73, 0x75, 0x6d, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e,
|
||||
0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x52, 0x65, 0x73, 0x75, 0x6d, 0x65, 0x12, 0x2c,
|
||||
0x0a, 0x11, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e,
|
||||
0x61, 0x74, 0x65, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x05, 0x52, 0x11, 0x66, 0x61, 0x69, 0x6c, 0x65,
|
||||
0x64, 0x54, 0x6f, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x74, 0x65, 0x12, 0x26, 0x0a, 0x0e,
|
||||
0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x4c, 0x61, 0x75, 0x6e, 0x63, 0x68, 0x18, 0x0d,
|
||||
0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x4c, 0x61,
|
||||
0x75, 0x6e, 0x63, 0x68, 0x12, 0x26, 0x0a, 0x0e, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f,
|
||||
0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x66, 0x61,
|
||||
0x69, 0x6c, 0x65, 0x64, 0x54, 0x6f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x16, 0x0a, 0x06,
|
||||
0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x66, 0x61,
|
||||
0x69, 0x6c, 0x65, 0x64, 0x22, 0x49, 0x0a, 0x29, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x46, 0x6f,
|
||||
0x72, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
|
||||
0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01,
|
||||
0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22,
|
||||
0x61, 0x0a, 0x2a, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53,
|
||||
0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x46, 0x6f, 0x72, 0x4e, 0x61, 0x6d, 0x65,
|
||||
0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a,
|
||||
0x05, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x61,
|
||||
0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74,
|
||||
0x69, 0x73, 0x74, 0x69, 0x63, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x05, 0x73, 0x74, 0x61,
|
||||
0x74, 0x73, 0x22, 0x96, 0x01, 0x0a, 0x20, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70,
|
||||
0x61, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x73,
|
||||
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65,
|
||||
0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01,
|
||||
0x28, 0x09, 0x52, 0x03, 0x75, 0x69, 0x64, 0x12, 0x24, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61,
|
||||
0x69, 0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d,
|
||||
0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1c, 0x0a,
|
||||
0x09, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03,
|
||||
0x52, 0x09, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x22, 0x58, 0x0a, 0x1a, 0x4c,
|
||||
0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x46, 0x69, 0x65,
|
||||
0x6c, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d,
|
||||
0x65, 0x22, 0x61, 0x0a, 0x2a, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x46, 0x6f, 0x72, 0x4e, 0x61,
|
||||
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
|
||||
0x33, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d,
|
||||
0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x74,
|
||||
0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x52, 0x65, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x05, 0x73,
|
||||
0x74, 0x61, 0x74, 0x73, 0x22, 0x96, 0x01, 0x0a, 0x20, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b,
|
||||
0x73, 0x70, 0x61, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x6f,
|
||||
0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d,
|
||||
0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61,
|
||||
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x66, 0x69, 0x65, 0x6c, 0x64,
|
||||
0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69, 0x65, 0x6c,
|
||||
0x64, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x35, 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x73, 0x70,
|
||||
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x01,
|
||||
0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x32, 0xaa, 0x0d, 0x0a,
|
||||
0x10, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63,
|
||||
0x65, 0x12, 0x72, 0x0a, 0x0f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74,
|
||||
0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
|
||||
0x74, 0x1a, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x22, 0x32, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2c, 0x22, 0x24, 0x2f, 0x61, 0x70, 0x69, 0x73,
|
||||
0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
|
||||
0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x3a,
|
||||
0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0xbd, 0x01, 0x0a, 0x22, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73,
|
||||
0x46, 0x6f, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x2e, 0x2e, 0x61,
|
||||
0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53,
|
||||
0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x46, 0x6f, 0x72, 0x4e, 0x61, 0x6d, 0x65,
|
||||
0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2f, 0x2e, 0x61,
|
||||
0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53,
|
||||
0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x46, 0x6f, 0x72, 0x4e, 0x61, 0x6d, 0x65,
|
||||
0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x36, 0x82,
|
||||
0xd3, 0xe4, 0x93, 0x02, 0x30, 0x12, 0x2e, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62,
|
||||
0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d,
|
||||
0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x69,
|
||||
0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x6c, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b,
|
||||
0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57,
|
||||
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
|
||||
0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22,
|
||||
0x32, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2c, 0x12, 0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76,
|
||||
0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75,
|
||||
0x69, 0x64, 0x7d, 0x12, 0x75, 0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74,
|
||||
0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
|
||||
0x1a, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2c, 0x82, 0xd3,
|
||||
0xe4, 0x93, 0x02, 0x26, 0x12, 0x24, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65,
|
||||
0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f,
|
||||
0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x95, 0x01, 0x0a, 0x15, 0x55,
|
||||
0x70, 0x64, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x74,
|
||||
0x61, 0x74, 0x75, 0x73, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74,
|
||||
0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73,
|
||||
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
|
||||
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22,
|
||||
0x41, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3b, 0x1a, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76,
|
||||
0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75,
|
||||
0x69, 0x64, 0x7d, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x3a, 0x06, 0x73, 0x74, 0x61, 0x74,
|
||||
0x75, 0x73, 0x12, 0x80, 0x01, 0x0a, 0x0f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x70, 0x64,
|
||||
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x69, 0x64, 0x18, 0x02,
|
||||
0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x69, 0x64, 0x12, 0x24, 0x0a, 0x0d, 0x63, 0x6f, 0x6e,
|
||||
0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09,
|
||||
0x52, 0x0d, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12,
|
||||
0x1c, 0x0a, 0x09, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01,
|
||||
0x28, 0x03, 0x52, 0x09, 0x73, 0x69, 0x6e, 0x63, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x22, 0x58, 0x0a,
|
||||
0x1a, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x46,
|
||||
0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e,
|
||||
0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
|
||||
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x66, 0x69, 0x65,
|
||||
0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69,
|
||||
0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x35, 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x57,
|
||||
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65,
|
||||
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73,
|
||||
0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x32, 0xaa,
|
||||
0x0d, 0x0a, 0x10, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76,
|
||||
0x69, 0x63, 0x65, 0x12, 0x72, 0x0a, 0x0f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65,
|
||||
0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75,
|
||||
0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f,
|
||||
0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x38, 0x82, 0xd3, 0xe4,
|
||||
0x93, 0x02, 0x32, 0x1a, 0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74,
|
||||
0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77,
|
||||
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x3a,
|
||||
0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0x7e, 0x0a, 0x0e, 0x50, 0x61, 0x75, 0x73, 0x65, 0x57, 0x6f,
|
||||
0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x50, 0x61,
|
||||
0x75, 0x73, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75,
|
||||
0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f,
|
||||
0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x38, 0x82, 0xd3, 0xe4,
|
||||
0x93, 0x02, 0x32, 0x1a, 0x30, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74,
|
||||
0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70,
|
||||
0x61, 0x63, 0x65, 0x22, 0x32, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2c, 0x22, 0x24, 0x2f, 0x61, 0x70,
|
||||
0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65,
|
||||
0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
|
||||
0x73, 0x3a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0xbd, 0x01, 0x0a, 0x22, 0x47, 0x65, 0x74, 0x57,
|
||||
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69,
|
||||
0x63, 0x73, 0x46, 0x6f, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x2e,
|
||||
0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x46, 0x6f, 0x72, 0x4e, 0x61,
|
||||
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2f,
|
||||
0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x46, 0x6f, 0x72, 0x4e, 0x61,
|
||||
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
|
||||
0x36, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, 0x12, 0x2e, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76,
|
||||
0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x2f, 0x73, 0x74, 0x61,
|
||||
0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x6c, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x57, 0x6f,
|
||||
0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65,
|
||||
0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
|
||||
0x74, 0x1a, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x22, 0x32, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2c, 0x12, 0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73,
|
||||
0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
|
||||
0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f,
|
||||
0x7b, 0x75, 0x69, 0x64, 0x7d, 0x12, 0x75, 0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69,
|
||||
0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65,
|
||||
0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2c,
|
||||
0x82, 0xd3, 0xe4, 0x93, 0x02, 0x26, 0x12, 0x24, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31,
|
||||
0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
|
||||
0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x95, 0x01, 0x0a,
|
||||
0x15, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
|
||||
0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x70, 0x64,
|
||||
0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74,
|
||||
0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
|
||||
0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74,
|
||||
0x79, 0x22, 0x41, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3b, 0x1a, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73,
|
||||
0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
|
||||
0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f,
|
||||
0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x3a, 0x06, 0x73, 0x74,
|
||||
0x61, 0x74, 0x75, 0x73, 0x12, 0x80, 0x01, 0x0a, 0x0f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x57,
|
||||
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55,
|
||||
0x70, 0x64, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65,
|
||||
0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70,
|
||||
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x38, 0x82,
|
||||
0xd3, 0xe4, 0x93, 0x02, 0x32, 0x1a, 0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62,
|
||||
0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d,
|
||||
0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64,
|
||||
0x7d, 0x3a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12, 0x7e, 0x0a, 0x0e, 0x50, 0x61, 0x75, 0x73, 0x65,
|
||||
0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e,
|
||||
0x50, 0x61, 0x75, 0x73, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65,
|
||||
0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70,
|
||||
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x38, 0x82,
|
||||
0xd3, 0xe4, 0x93, 0x02, 0x32, 0x1a, 0x30, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62,
|
||||
0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d,
|
||||
0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64,
|
||||
0x7d, 0x2f, 0x70, 0x61, 0x75, 0x73, 0x65, 0x12, 0x87, 0x01, 0x0a, 0x0f, 0x52, 0x65, 0x73, 0x75,
|
||||
0x6d, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1b, 0x2e, 0x61, 0x70,
|
||||
0x69, 0x2e, 0x52, 0x65, 0x73, 0x75, 0x6d, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
|
||||
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79,
|
||||
0x22, 0x3f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x39, 0x1a, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
|
||||
0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
|
||||
0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b,
|
||||
0x75, 0x69, 0x64, 0x7d, 0x2f, 0x72, 0x65, 0x73, 0x75, 0x6d, 0x65, 0x3a, 0x04, 0x62, 0x6f, 0x64,
|
||||
0x79, 0x12, 0x7a, 0x0a, 0x0f, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74,
|
||||
0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
|
||||
0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
|
||||
0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x32, 0x82, 0xd3, 0xe4, 0x93, 0x02,
|
||||
0x2c, 0x2a, 0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
|
||||
0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x12, 0x8e, 0x01,
|
||||
0x0a, 0x18, 0x52, 0x65, 0x74, 0x72, 0x79, 0x4c, 0x61, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69,
|
||||
0x2e, 0x52, 0x65, 0x74, 0x72, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x57, 0x6f, 0x72, 0x6b,
|
||||
0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67,
|
||||
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45,
|
||||
0x6d, 0x70, 0x74, 0x79, 0x22, 0x38, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x32, 0x1a, 0x30, 0x2f, 0x61,
|
||||
0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d,
|
||||
0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x72, 0x65, 0x74, 0x72, 0x79, 0x12, 0xb0,
|
||||
0x01, 0x0a, 0x19, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43,
|
||||
0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x73, 0x12, 0x25, 0x2e, 0x61,
|
||||
0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43,
|
||||
0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75,
|
||||
0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x74, 0x72,
|
||||
0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x52, 0x82, 0xd3, 0xe4,
|
||||
0x93, 0x02, 0x4c, 0x12, 0x4a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74,
|
||||
0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77,
|
||||
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f,
|
||||
0x70, 0x61, 0x75, 0x73, 0x65, 0x12, 0x87, 0x01, 0x0a, 0x0f, 0x52, 0x65, 0x73, 0x75, 0x6d, 0x65,
|
||||
0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e,
|
||||
0x52, 0x65, 0x73, 0x75, 0x6d, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52,
|
||||
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
|
||||
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x3f,
|
||||
0x82, 0xd3, 0xe4, 0x93, 0x02, 0x39, 0x1a, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31,
|
||||
0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
|
||||
0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69,
|
||||
0x64, 0x7d, 0x2f, 0x72, 0x65, 0x73, 0x75, 0x6d, 0x65, 0x3a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x12,
|
||||
0x7a, 0x0a, 0x0f, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
|
||||
0x63, 0x65, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x57,
|
||||
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
|
||||
0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75,
|
||||
0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x32, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2c, 0x2a,
|
||||
0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b,
|
||||
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x12, 0x8e, 0x01, 0x0a, 0x18,
|
||||
0x52, 0x65, 0x74, 0x72, 0x79, 0x4c, 0x61, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
|
||||
0x63, 0x65, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x52,
|
||||
0x65, 0x74, 0x72, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70,
|
||||
0x61, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f,
|
||||
0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70,
|
||||
0x74, 0x79, 0x22, 0x38, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x32, 0x1a, 0x30, 0x2f, 0x61, 0x70, 0x69,
|
||||
0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73,
|
||||
0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73,
|
||||
0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x72, 0x65, 0x74, 0x72, 0x79, 0x12, 0xb0, 0x01, 0x0a,
|
||||
0x19, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43, 0x6f, 0x6e,
|
||||
0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x73, 0x12, 0x25, 0x2e, 0x61, 0x70, 0x69,
|
||||
0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43, 0x6f, 0x6e,
|
||||
0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
|
||||
0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x74, 0x72, 0x65, 0x61,
|
||||
0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x52, 0x82, 0xd3, 0xe4, 0x93, 0x02,
|
||||
0x4c, 0x12, 0x4a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
|
||||
0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72,
|
||||
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x63, 0x6f,
|
||||
0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x2f, 0x7b, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69,
|
||||
0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x2f, 0x6c, 0x6f, 0x67, 0x73, 0x30, 0x01, 0x12,
|
||||
0x97, 0x01, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
|
||||
0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69,
|
||||
0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c,
|
||||
0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c,
|
||||
0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x46, 0x69, 0x65,
|
||||
0x6c, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x3d, 0x82, 0xd3, 0xe4, 0x93,
|
||||
0x02, 0x37, 0x12, 0x35, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
|
||||
0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x66, 0x69, 0x65,
|
||||
0x6c, 0x64, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x66,
|
||||
0x69, 0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69, 0x74,
|
||||
0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65, 0x6c,
|
||||
0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x62,
|
||||
0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x2f, 0x7b, 0x63, 0x6f, 0x6e, 0x74,
|
||||
0x61, 0x69, 0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x2f, 0x6c, 0x6f, 0x67, 0x73, 0x30,
|
||||
0x01, 0x12, 0x97, 0x01, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70,
|
||||
0x61, 0x63, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e,
|
||||
0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x46, 0x69,
|
||||
0x65, 0x6c, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61, 0x70, 0x69,
|
||||
0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x46,
|
||||
0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x3d, 0x82, 0xd3,
|
||||
0xe4, 0x93, 0x02, 0x37, 0x12, 0x35, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65,
|
||||
0x74, 0x61, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x66,
|
||||
0x69, 0x65, 0x6c, 0x64, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x2f,
|
||||
0x7b, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67,
|
||||
0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e,
|
||||
0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65,
|
||||
0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
}
|
||||
|
||||
var (
|
||||
|
||||
56
api/proto/files.proto
Normal file
56
api/proto/files.proto
Normal file
@@ -0,0 +1,56 @@
syntax = "proto3";

package api;
option go_package = "github.com/onepanelio/core/api/gen";

import "google/api/annotations.proto";

service FileService {
    rpc GetObjectDownloadPresignedURL (GetObjectPresignedUrlRequest) returns (GetPresignedUrlResponse) {
        option (google.api.http) = {
            get: "/apis/v1beta1/{namespace}/files/presigned-url/{key=**}"
        };
    }

    rpc ListFiles (ListFilesRequest) returns (ListFilesResponse) {
        option (google.api.http) = {
            get: "/apis/v1beta1/{namespace}/files/list/{path=**}"
        };
    }
}

message File {
    string path = 1;
    string name = 2;
    string extension = 3;
    int64 size = 4;
    string contentType = 5;
    string lastModified = 6;
    bool directory = 7;
}

message ListFilesRequest {
    string namespace = 1;
    string path = 2;
    int32 page = 3;
    int32 perPage = 4;
}

message ListFilesResponse {
    int32 count = 1;
    int32 totalCount = 2;
    int32 page = 3;
    int32 pages = 4;
    repeated File files = 5;
    string parentPath = 6;
}

message GetObjectPresignedUrlRequest {
    string namespace = 1;
    string key = 2;
}

message GetPresignedUrlResponse {
    string url = 1;
    int64 size = 2;
}
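The new FileService maps each RPC onto an HTTP route through the google.api.http annotations above. As a rough sketch of how a client might hit the ListFiles mapping over plain HTTP (the host, port, namespace, file path, and auth header below are assumptions for illustration, not values from this diff):

```go
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	// Hypothetical target: adjust host/port, namespace, and path to your deployment.
	url := "http://localhost:8888/apis/v1beta1/default/files/list/data/images"

	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		panic(err)
	}
	// The auth header name is an assumption; use whatever your deployment expects.
	req.Header.Set("onepanel-auth-token", "<token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(body)) // JSON-encoded ListFilesResponse
}
```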
92
api/proto/inference_service.proto
Normal file
92
api/proto/inference_service.proto
Normal file
@@ -0,0 +1,92 @@
syntax = "proto3";

package api;
option go_package = "github.com/onepanelio/core/api/gen";

import "google/api/annotations.proto";
import "google/protobuf/empty.proto";

service InferenceService {
    rpc CreateInferenceService (CreateInferenceServiceRequest) returns (GetInferenceServiceResponse) {
        option (google.api.http) = {
            post: "/apis/v1beta1/{namespace}/inferenceservice"
            body: "*"
        };
    }

    rpc GetInferenceService(InferenceServiceIdentifier) returns (GetInferenceServiceResponse) {
        option (google.api.http) = {
            get: "/apis/v1beta1/{namespace}/inferenceservice/{name}"
        };
    }

    rpc DeleteInferenceService (InferenceServiceIdentifier) returns (google.protobuf.Empty) {
        option (google.api.http) = {
            delete: "/apis/v1beta1/{namespace}/inferenceservice/{name}"
        };
    }
}

message InferenceServiceIdentifier {
    string namespace = 1;
    string name = 2;
}

message Env {
    string name = 1;
    string value = 2;
}

message Container {
    string image = 1;
    string name = 2;
    repeated Env env = 3;
}

message InferenceServiceTransformer {
    repeated Container containers = 1;
    string minCpu = 2;
    string minMemory = 3;
    string maxCpu = 4;
    string maxMemory = 5;
}

message InferenceServicePredictor {
    string name = 1;
    string runtimeVersion = 2;
    string storageUri = 3;
    string nodeSelector = 4;
    string minCpu = 5;
    string minMemory = 6;
    string maxCpu = 7;
    string maxMemory = 8;
}

message CreateInferenceServiceRequest {
    string namespace = 1;
    string name = 2;
    string defaultTransformerImage = 3;

    InferenceServicePredictor predictor = 4;
    InferenceServiceTransformer transformer = 5;
}

message DeployModelResponse {
    string status = 1;
}

message InferenceServiceCondition {
    string lastTransitionTime = 1;
    string status = 2;
    string type = 3;
}

message GetInferenceServiceResponse {
    bool ready = 1;
    repeated InferenceServiceCondition conditions = 2;
    string predictUrl = 3;
}

message InferenceServiceEndpoints {
    string predict = 1;
}
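For a sense of what CreateInferenceService consumes, here is a sketch that builds the request with the generated Go types (the namespace, model name, storage URI, and resource values are made up for illustration; only the field names come from the proto above):

```go
package main

import (
	"fmt"

	api "github.com/onepanelio/core/api/gen" // package generated from these protos
)

func main() {
	// Hypothetical values; swap in your own namespace, model, and resources.
	req := &api.CreateInferenceServiceRequest{
		Namespace: "default",
		Name:      "mnist-classifier",
		Predictor: &api.InferenceServicePredictor{
			Name:           "tensorflow",
			RuntimeVersion: "2.4.1",
			StorageUri:     "s3://my-bucket/models/mnist",
			MinCpu:         "1",
			MinMemory:      "2Gi",
		},
	}

	// Per the annotation above, this would be POSTed as JSON to
	// /apis/v1beta1/{namespace}/inferenceservice.
	fmt.Printf("would create inference service %q in %q\n", req.Name, req.Namespace)
}
```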
@@ -40,4 +40,5 @@ message CreateNamespaceRequest {

message Namespace {
    string name = 1;
    string sourceName = 2;
}
@@ -17,6 +17,12 @@ service ServiceService {
            get: "/apis/v1beta1/{namespace}/service"
        };
    }

    rpc HasService(HasServiceRequest) returns (HasServiceResponse) {
        option (google.api.http) = {
            get: "/apis/v1beta/service/{name}"
        };
    }
}

message Service {
@@ -29,6 +35,14 @@ message GetServiceRequest {
    string name = 2;
}

message HasServiceRequest {
    string name = 1;
}

message HasServiceResponse {
    bool hasService = 1;
}

message ListServicesRequest {
    string namespace = 1;
    int32 pageSize = 2;
@@ -74,18 +74,6 @@ service WorkflowService {
        };
    }

    rpc GetArtifact (GetArtifactRequest) returns (ArtifactResponse) {
        option (google.api.http) = {
            get: "/apis/v1beta1/{namespace}/workflow_executions/{uid}/artifacts/{key=**}"
        };
    }

    rpc ListFiles (ListFilesRequest) returns (ListFilesResponse) {
        option (google.api.http) = {
            get: "/apis/v1beta1/{namespace}/workflow_executions/{uid}/files/{path=**}"
        };
    }

    rpc AddWorkflowExecutionStatistics (AddWorkflowExecutionStatisticRequest) returns (google.protobuf.Empty) {
        option (google.api.http) = {
            post: "/apis/v1beta1/{namespace}/workflow_executions/{uid}/statistics"
@@ -235,31 +223,6 @@ message WorkflowExecution {
    repeated Metric metrics = 12;
}

message ArtifactResponse {
    bytes data = 1;
}

message File {
    string path = 1;
    string name = 2;
    string extension = 3;
    int64 size = 4;
    string contentType = 5;
    string lastModified = 6;
    bool directory = 7;
}

message ListFilesRequest {
    string namespace = 1;
    string uid = 2;
    string path = 3;
}

message ListFilesResponse {
    repeated File files = 1;
    string parentPath = 2;
}

message Statistics {
    string workflowStatus = 1;
    int64 workflowTemplateId = 2;
@@ -166,6 +166,7 @@ message WorkflowTemplate {
    WorkflowExecutionStatisticReport stats = 11;
    CronWorkflowStatisticsReport cronStats = 12;
    repeated Parameter parameters = 13;
    string description = 14;
}

message GetWorkflowTemplateLabelsRequest {
@@ -121,6 +121,7 @@ message CreateWorkspaceBody {

    repeated Parameter parameters = 3;
    repeated KeyValue labels = 4;
    bool captureNode = 5;
}

message CreateWorkspaceRequest {
@@ -52,7 +52,7 @@ See https://docs.onepanel.ai

` + "```" + `
# Download the binary
curl -sLO https://github.com/onepanelio/core/releases/download/v%s/opctl-linux-amd64
curl -sLO https://github.com/onepanelio/onepanel/releases/download/v%s/opctl-linux-amd64

# Make binary executable
chmod +x opctl-linux-amd64
@@ -68,7 +68,7 @@ opctl version

` + "```" + `
# Download the binary
curl -sLO https://github.com/onepanelio/core/releases/download/v%s/opctl-macos-amd64
curl -sLO https://github.com/onepanelio/onepanel/releases/download/v%s/opctl-macos-amd64

# Make binary executable
chmod +x opctl-macos-amd64
@@ -82,7 +82,7 @@ opctl version

## Windows

Download the [attached executable](https://github.com/onepanelio/core/releases/download/v%s/opctl-windows-amd64.exe), rename it to "opctl" and move it to a folder that is in your PATH environment variable.
Download the [attached executable](https://github.com/onepanelio/onepanel/releases/download/v%s/opctl-windows-amd64.exe), rename it to "opctl" and move it to a folder that is in your PATH environment variable.
`

var repositories = []string{
31
db/go/20210414165510_add_deep_learning_desktop_workspace.go
Normal file
31
db/go/20210414165510_add_deep_learning_desktop_workspace.go
Normal file
@@ -0,0 +1,31 @@
package migration

import (
	"database/sql"
	"github.com/pressly/goose"
	"path/filepath"
)

var deepLearningDesktopTemplateName = "Deep Learning Desktop"

func initialize20210414165510() {
	if _, ok := initializedMigrations[20210414165510]; !ok {
		goose.AddMigration(Up20210414165510, Down20210414165510)
		initializedMigrations[20210414165510] = true
	}
}

// Up20210414165510 creates the Deep Learning Desktop Workspace Template
func Up20210414165510(tx *sql.Tx) error {
	// This code is executed when the migration is applied.
	return createWorkspaceTemplate(
		filepath.Join("workspaces", "vnc", "20210414165510.yaml"),
		deepLearningDesktopTemplateName,
		"Deep learning desktop with VNC")
}

// Down20210414165510 removes the Deep Learning Desktop Workspace Template
func Down20210414165510(tx *sql.Tx) error {
	// This code is executed when the migration is rolled back.
	return archiveWorkspaceTemplate(deepLearningDesktopTemplateName)
}
66
db/go/20210719190719_update_filesyncer.go
Normal file
66
db/go/20210719190719_update_filesyncer.go
Normal file
@@ -0,0 +1,66 @@
package migration

import (
	"database/sql"
	"github.com/pressly/goose"
	"path/filepath"
)

func initialize20210719190719() {
	if _, ok := initializedMigrations[20210719190719]; !ok {
		goose.AddMigration(Up20210719190719, Down20210719190719)
		initializedMigrations[20210719190719] = true
	}
}

// Up20210719190719 updates the workspace templates to use new v1.0.0 of filesyncer
func Up20210719190719(tx *sql.Tx) error {
	// This code is executed when the migration is applied.
	if err := updateWorkspaceTemplateManifest(
		filepath.Join("workspaces", "cvat", "20210719190719.yaml"),
		cvatTemplateName); err != nil {
		return err
	}

	if err := updateWorkspaceTemplateManifest(
		filepath.Join("workspaces", "jupyterlab", "20210719190719.yaml"),
		jupyterLabTemplateName); err != nil {
		return err
	}

	if err := updateWorkspaceTemplateManifest(
		filepath.Join("workspaces", "vnc", "20210719190719.yaml"),
		deepLearningDesktopTemplateName); err != nil {
		return err
	}

	return updateWorkspaceTemplateManifest(
		filepath.Join("workspaces", "vscode", "20210719190719.yaml"),
		vscodeWorkspaceTemplateName)
}

// Down20210719190719 rolls back the change to update filesyncer
func Down20210719190719(tx *sql.Tx) error {
	// This code is executed when the migration is rolled back.
	if err := updateWorkspaceTemplateManifest(
		filepath.Join("workspaces", "cvat", "20210323175655.yaml"),
		cvatTemplateName); err != nil {
		return err
	}

	if err := updateWorkspaceTemplateManifest(
		filepath.Join("workspaces", "jupyterlab", "20210323175655.yaml"),
		jupyterLabTemplateName); err != nil {
		return err
	}

	if err := updateWorkspaceTemplateManifest(
		filepath.Join("workspaces", "vnc", "20210414165510.yaml"),
		deepLearningDesktopTemplateName); err != nil {
		return err
	}

	return updateWorkspaceTemplateManifest(
		filepath.Join("workspaces", "vscode", "20210323175655.yaml"),
		vscodeWorkspaceTemplateName)
}
28
db/go/20211028205201_cvat_1_6.go
Normal file
28
db/go/20211028205201_cvat_1_6.go
Normal file
@@ -0,0 +1,28 @@
package migration

import (
	"database/sql"
	"github.com/pressly/goose"
	"path/filepath"
)

func initialize20211028205201() {
	if _, ok := initializedMigrations[20211028205201]; !ok {
		goose.AddMigration(Up20211028205201, Down20211028205201)
		initializedMigrations[20211028205201] = true
	}
}

// Up20211028205201 creates the new cvat 1.6.0 workspace template
func Up20211028205201(tx *sql.Tx) error {
	// This code is executed when the migration is applied.
	return createWorkspaceTemplate(
		filepath.Join("workspaces", "cvat_1_6_0", "20211028205201.yaml"),
		"CVAT_1.6.0",
		"Powerful and efficient Computer Vision Annotation Tool (CVAT)")
}

// Down20211028205201 archives the new cvat 1.6.0 workspace template
func Down20211028205201(tx *sql.Tx) error {
	return archiveWorkspaceTemplate("CVAT_1.6.0")
}
@@ -94,6 +94,9 @@ func Initialize() {
	initialize20210323175655()
	initialize20210329171739()
	initialize20210329194731()
	initialize20210414165510()
	initialize20210719190719()
	initialize20211028205201()

	if err := client.DB.Close(); err != nil {
		log.Printf("[error] closing db %v", err)
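The initializedMigrations guard above keeps Initialize idempotent: each migration registers with goose at most once even if Initialize runs again. A new migration would follow the same shape; everything below is hypothetical (the 20990101000000 timestamp and both functions are made up purely to show the pattern):

```go
// Hypothetical registration following the pattern above; lives in the same
// migration package, so initializedMigrations is the package-level guard map.
func initialize20990101000000() {
	if _, ok := initializedMigrations[20990101000000]; !ok {
		goose.AddMigration(Up20990101000000, Down20990101000000)
		initializedMigrations[20990101000000] = true
	}
}

// Up20990101000000 would apply the change.
func Up20990101000000(tx *sql.Tx) error { return nil }

// Down20990101000000 would roll it back.
func Down20990101000000(tx *sql.Tx) error { return nil }
```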
121
db/go/util.go
121
db/go/util.go
@@ -1,14 +1,17 @@
package migration

import (
	"fmt"
	v1 "github.com/onepanelio/core/pkg"
	"github.com/onepanelio/core/pkg/util/data"
	uid2 "github.com/onepanelio/core/pkg/util/uid"
	"path/filepath"
)

// updateWorkspaceTemplateManifest will update the workspace template given by {{templateName}} with the contents
// createWorkspaceTemplate will create the workspace template given by {{templateName}} with the contents
// given by {{filename}}
// It will do so for all namespaces.
func updateWorkspaceTemplateManifest(filename, templateName string) error {
func createWorkspaceTemplate(filename, templateName, description string) error {
	client, err := getClient()
	if err != nil {
		return err
@@ -20,7 +23,101 @@ func updateWorkspaceTemplateManifest(filename, templateName string) error {
		return err
	}

	newManifest, err := readDataFile(filename)
	filename = filepath.Join("db", "yaml", filename)
	manifestFile, err := data.ManifestFileFromFile(filename)
	if err != nil {
		return err
	}

	newManifest, err := manifestFile.SpecString()
	if err != nil {
		return err
	}

	uid, err := uid2.GenerateUID(templateName, 30)
	if err != nil {
		return err
	}

	for _, namespace := range namespaces {
		workspaceTemplate := &v1.WorkspaceTemplate{
			UID:         uid,
			Name:        templateName,
			Manifest:    newManifest,
			Description: description,
		}

		err = ReplaceArtifactRepositoryType(client, namespace, nil, workspaceTemplate)
		if err != nil {
			return err
		}

		if _, err := client.CreateWorkspaceTemplate(namespace.Name, workspaceTemplate); err != nil {
			return err
		}
	}

	return nil
}

func archiveWorkspaceTemplate(templateName string) error {
	client, err := getClient()
	if err != nil {
		return err
	}
	defer client.DB.Close()

	namespaces, err := client.ListOnepanelEnabledNamespaces()
	if err != nil {
		return err
	}

	uid, err := uid2.GenerateUID(templateName, 30)
	if err != nil {
		return err
	}

	for _, namespace := range namespaces {
		hasRunning, err := client.WorkspaceTemplateHasRunningWorkspaces(namespace.Name, uid)
		if err != nil {
			return fmt.Errorf("unable to check running workspaces")
		}
		if hasRunning {
			return fmt.Errorf("unable to archive workspace template. There are running workspaces that use it")
		}

		_, err = client.ArchiveWorkspaceTemplate(namespace.Name, uid)
		if err != nil {
			return err
		}
	}

	return nil
}

// updateWorkspaceTemplateManifest will update the workspace template given by {{templateName}} with the contents
// given by {{filename}}
// It will do so for all namespaces.
func updateWorkspaceTemplateManifest(filename, templateName string) error {
	client, err := getClient()
	if err != nil {
		return err
	}
	defer client.DB.Close()

	filename = filepath.Join("db", "yaml", filename)

	namespaces, err := client.ListOnepanelEnabledNamespaces()
	if err != nil {
		return err
	}

	manifest, err := data.ManifestFileFromFile(filename)
	if err != nil {
		return err
	}

	newManifest, err := manifest.SpecString()
	if err != nil {
		return err
	}
@@ -63,7 +160,14 @@ func createWorkflowTemplate(filename, templateName string, labels map[string]str
		return err
	}

	manifest, err := readDataFile(filename)
	filename = filepath.Join("db", "yaml", filename)

	manifestFile, err := data.ManifestFileFromFile(filename)
	if err != nil {
		return err
	}

	manifest, err := manifestFile.SpecString()
	if err != nil {
		return err
	}
@@ -108,7 +212,14 @@ func updateWorkflowTemplateManifest(filename, templateName string, labels map[st
		return err
	}

	newManifest, err := readDataFile(filename)
	filename = filepath.Join("db", "yaml", filename)

	manifestFile, err := data.ManifestFileFromFile(filename)
	if err != nil {
		return err
	}

	newManifest, err := manifestFile.SpecString()
	if err != nil {
		return err
	}
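The switch from readDataFile to data.ManifestFileFromFile above means template YAML is now parsed as a manifest file rather than read raw, and SpecString() appears to return only the spec portion for storage (compare the NNI template at the end of this diff, whose new version wraps the old bare spec in metadata: and spec: sections). A minimal sketch of that flow, assuming a hypothetical template path:

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/onepanelio/core/pkg/util/data"
)

func main() {
	// Hypothetical path; the migrations above join "db", "yaml" with the
	// template's relative path in the same way.
	filename := filepath.Join("db", "yaml", "workspaces", "vnc", "20210719190719.yaml")

	manifestFile, err := data.ManifestFileFromFile(filename)
	if err != nil {
		panic(err)
	}

	spec, err := manifestFile.SpecString() // spec only; the metadata header is dropped
	if err != nil {
		panic(err)
	}
	fmt.Println(spec)
}
```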
@@ -0,0 +1,7 @@
-- +goose Up
-- SQL in this section is executed when the migration is applied.
ALTER TABLE workflow_template_versions ADD COLUMN description TEXT DEFAULT '';

-- +goose Down
-- SQL in this section is executed when the migration is rolled back.
ALTER TABLE workflow_template_versions DROP COLUMN description;
@@ -0,0 +1,7 @@
|
||||
-- +goose Up
|
||||
-- SQL in this section is executed when the migration is applied.
|
||||
ALTER TABLE workspaces ADD COLUMN capture_node boolean;
|
||||
UPDATE workspaces SET capture_node = false;
|
||||
|
||||
-- +goose Down
|
||||
ALTER TABLE workspaces DROP COLUMN capture_node;
|
||||
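These SQL files are applied by the goose binary that the Dockerfile copies into the image. A minimal programmatic equivalent, assuming the pressly/goose package, a Postgres driver, and an illustrative connection string (not the project's real configuration):

package main

import (
	"database/sql"
	"log"

	_ "github.com/lib/pq"
	"github.com/pressly/goose"
)

func main() {
	// Connection string is illustrative only.
	db, err := sql.Open("postgres", "user=onepanel dbname=onepanel sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Applies every pending migration under ./db in timestamp order,
	// including the two ALTER TABLE migrations above.
	if err := goose.Up(db, "db"); err != nil {
		log.Fatal(err)
	}
}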
@@ -1,183 +1,194 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/nni-hyperparameter-tuning/mnist/
metadata:
  name: "Hyperparameter Tuning Example"
  kind: Workflow
  version: 20201225172926
  action: create
  source: "https://github.com/onepanelio/templates/blob/master/workflows/nni-hyperparameter-tuning/mnist/"
  deprecated: true
  labels:
    framework: tensorflow
    tuner: TPE
    "created-by": system
spec:
  entrypoint: main
  arguments:
    parameters:
      - name: source
        value: https://github.com/onepanelio/templates
      - name: revision
        value: master
      - name: config
        displayName: Configuration
        required: true
        hint: NNI configuration
        type: textarea.textarea
        value: |-
          authorName: Onepanel, Inc.
          experimentName: MNIST TF v2.x
          trialConcurrency: 1
          maxExecDuration: 1h
          maxTrialNum: 10
          trainingServicePlatform: local
          searchSpacePath: search_space.json
          useAnnotation: false
          tuner:
            # gpuIndices: '0' # uncomment and update to the GPU indices to assign this tuner
            builtinTunerName: TPE # choices: TPE, Random, Anneal, Evolution, BatchTuner, MetisTuner, GPTuner
            classArgs:
              optimize_mode: maximize # choices: maximize, minimize
          trial:
            command: python main.py --output /mnt/output
            codeDir: .
            # gpuNum: 1 # uncomment and update to number of GPUs
      - name: search-space
        displayName: Search space configuration
        required: true
        type: textarea.textarea
        value: |-
          {
            "dropout_rate": { "_type": "uniform", "_value": [0.5, 0.9] },
            "conv_size": { "_type": "choice", "_value": [2, 3, 5, 7] },
            "hidden_size": { "_type": "choice", "_value": [124, 512, 1024] },
            "batch_size": { "_type": "choice", "_value": [16, 32] },
            "learning_rate": { "_type": "choice", "_value": [0.0001, 0.001, 0.01, 0.1] },
            "epochs": { "_type": "choice", "_value": [10] }
          }
      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.nodepool
        name: sys-node-pool
        value: "{{.DefaultNodePoolOption}}"
        required: true

  volumeClaimTemplates:
    - metadata:
        name: hyperparamtuning-data
      spec:
        accessModes: [ "ReadWriteOnce" ]
        resources:
          requests:
            storage: 20Gi
    - metadata:
        name: hyperparamtuning-output
      spec:
        accessModes: [ "ReadWriteOnce" ]
        resources:
          requests:
            storage: 20Gi

  templates:
    - name: main
      dag:
        tasks:
          - name: hyperparameter-tuning
            template: hyperparameter-tuning
          - name: workflow-metrics-writer
            template: workflow-metrics-writer
            dependencies: [hyperparameter-tuning]
            arguments:
              # Use sys-metrics artifact output from hyperparameter-tuning Task
              artifacts:
                - name: best-metrics
                  from: "{{tasks.hyperparameter-tuning.outputs.artifacts.sys-metrics}}"
    - name: hyperparameter-tuning
      inputs:
        artifacts:
          - name: src
            git:
              repo: '{{workflow.parameters.source}}'
              revision: '{{workflow.parameters.revision}}'
            path: /mnt/data/src
          - name: config
            path: /mnt/data/src/workflows/hyperparameter-tuning/mnist/config.yaml
            raw:
              data: '{{workflow.parameters.config}}'
          - name: search-space
            path: /mnt/data/src/workflows/hyperparameter-tuning/mnist/search_space.json
            raw:
              data: '{{workflow.parameters.search-space}}'
      outputs:
        artifacts:
          - name: output
            path: /mnt/output
            optional: true
      container:
        image: onepanel/dl:0.17.0
        args:
          - --config
          - /mnt/data/src/workflows/hyperparameter-tuning/mnist/config.yaml
        workingDir: /mnt
        volumeMounts:
          - name: hyperparamtuning-data
            mountPath: /mnt/data
          - name: hyperparamtuning-output
            mountPath: /mnt/output
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      sidecars:
        - name: nni-web-ui
          image: 'onepanel/nni-web-ui:0.17.0'
          env:
            - name: ONEPANEL_INTERACTIVE_SIDECAR
              value: 'true'
          ports:
            - containerPort: 9000
              name: nni
        - name: tensorboard
          image: 'tensorflow/tensorflow:2.3.0'
          command:
            - sh
            - '-c'
          env:
            - name: ONEPANEL_INTERACTIVE_SIDECAR
              value: 'true'
          args:
            # Read logs from /mnt/output/tensorboard - /mnt/output is auto-mounted from volumeMounts
            - tensorboard --logdir /mnt/output/tensorboard
          ports:
            - containerPort: 6006
              name: tensorboard
    - name: workflow-metrics-writer
      inputs:
        artifacts:
          - name: best-metrics
            path: /tmp/sys-metrics.json
      script:
        image: onepanel/python-sdk:v0.16.0
        command: [python, '-u']
        source: |
          import os
          import json

          import onepanel.core.api
          from onepanel.core.api.models.metric import Metric
          from onepanel.core.api.rest import ApiException
          from onepanel.core.api.models import Parameter

          # Load Task A metrics
          with open('/tmp/sys-metrics.json') as f:
              metrics = json.load(f)

          with open('/var/run/secrets/kubernetes.io/serviceaccount/token') as f:
              token = f.read()

          # Configure API authorization
          configuration = onepanel.core.api.Configuration(
              host = os.getenv('ONEPANEL_API_URL'),
              api_key = {
                  'authorization': token
              }
          )
          configuration.api_key_prefix['authorization'] = 'Bearer'

          # Call SDK method to save metrics
          with onepanel.core.api.ApiClient(configuration) as api_client:
              api_instance = onepanel.core.api.WorkflowServiceApi(api_client)
              namespace = '{{workflow.namespace}}'
              uid = '{{workflow.name}}'
              body = onepanel.core.api.AddWorkflowExecutionsMetricsRequest()
              body.metrics = metrics
              try:
                  api_response = api_instance.add_workflow_execution_metrics(namespace, uid, body)
                  print('Metrics added.')
              except ApiException as e:
                  print("Exception when calling WorkflowServiceApi->add_workflow_execution_metrics: %s\n" % e)
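The {{.DefaultNodePoolOption}} placeholder above is a Go template action that Onepanel renders before the manifest is parsed as YAML, distinct from Argo's {{workflow.parameters.*}} expressions, which survive into the stored template. Quoting the rendered value, as this change does, keeps the line valid YAML regardless of what the value contains. A stdlib-only sketch of that rendering step, with a made-up node pool value (real defaults come from the provider configuration):

package main

import (
	"os"
	"text/template"
)

func main() {
	// One line from the manifest above; the quotes ensure the rendered
	// value is always a YAML string, even if it contains ':' or '{'.
	const line = `value: "{{.DefaultNodePoolOption}}"` + "\n"

	tmpl := template.Must(template.New("param").Parse(line))

	// Hypothetical default node pool value.
	data := map[string]string{"DefaultNodePoolOption": "Standard_D4s_v3"}

	// Prints: value: "Standard_D4s_v3"
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}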
@@ -1,194 +1,205 @@
metadata:
  name: "Hyperparameter Tuning Example"
  kind: Workflow
  version: 20210118175809
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/nni-hyperparameter-tuning/mnist/"
  deprecated: true
  labels:
    framework: tensorflow
    tuner: TPE
    "created-by": system
spec:
  # Workflow Template example for hyperparameter tuning
  # Documentation: https://docs.onepanel.ai/docs/reference/workflows/hyperparameter-tuning
  #
  # Only change the fields marked with [CHANGE]
  entrypoint: main
  arguments:
    parameters:

      # [CHANGE] Path to your training/model architecture code repository
      # Change this value and revision value to your code repository and branch respectively
      - name: source
        value: https://github.com/onepanelio/templates

      # [CHANGE] Revision is the branch or tag that you want to use
      # You can change this to any tag or branch name in your repository
      - name: revision
        value: v0.18.0

      # [CHANGE] Default configuration for the NNI tuner
      # See https://docs.onepanel.ai/docs/reference/workflows/hyperparameter-tuning#understanding-the-configurations
      - name: config
        displayName: Configuration
        required: true
        hint: NNI configuration
        type: textarea.textarea
        value: |-
          authorName: Onepanel, Inc.
          experimentName: MNIST TF v2.x
          trialConcurrency: 1
          maxExecDuration: 1h
          maxTrialNum: 10
          trainingServicePlatform: local
          searchSpacePath: search_space.json
          useAnnotation: false
          tuner:
            # gpuIndices: '0' # uncomment and update to the GPU indices to assign this tuner
            builtinTunerName: TPE # choices: TPE, Random, Anneal, Evolution, BatchTuner, MetisTuner, GPTuner
            classArgs:
              optimize_mode: maximize # choices: maximize, minimize
          trial:
            command: python main.py --output /mnt/output
            codeDir: .
            # gpuNum: 1 # uncomment and update to number of GPUs

      # [CHANGE] Search space configuration
      # Change according to your hyperparameters and ranges
      - name: search-space
        displayName: Search space configuration
        required: true
        type: textarea.textarea
        value: |-
          {
            "dropout_rate": { "_type": "uniform", "_value": [0.5, 0.9] },
            "conv_size": { "_type": "choice", "_value": [2, 3, 5, 7] },
            "hidden_size": { "_type": "choice", "_value": [124, 512, 1024] },
            "batch_size": { "_type": "choice", "_value": [16, 32] },
            "learning_rate": { "_type": "choice", "_value": [0.0001, 0.001, 0.01, 0.1] },
            "epochs": { "_type": "choice", "_value": [10] }
          }

      # Node pool dropdown (Node group in EKS)
      # You can add more of these if you have additional tasks that can run on different node pools
      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.nodepool
        name: sys-node-pool
        value: "{{.DefaultNodePoolOption}}"
        required: true

  templates:
    - name: main
      dag:
        tasks:
          - name: hyperparameter-tuning
            template: hyperparameter-tuning
          - name: metrics-writer
            template: metrics-writer
            dependencies: [hyperparameter-tuning]
            arguments:
              # Use sys-metrics artifact output from hyperparameter-tuning Task
              # This writes the best metrics to the Workflow
              artifacts:
                - name: sys-metrics
                  from: "{{tasks.hyperparameter-tuning.outputs.artifacts.sys-metrics}}"
    - name: hyperparameter-tuning
      inputs:
        artifacts:
          - name: src
            # Clone the above repository into '/mnt/data/src'
            # See https://docs.onepanel.ai/docs/reference/workflows/artifacts#git for private repositories
            git:
              repo: '{{workflow.parameters.source}}'
              revision: '{{workflow.parameters.revision}}'
            path: /mnt/data/src
          # [CHANGE] Path where config.yaml will be generated or already exists
          # Update the path below so that config.yaml is written to the same directory as your main.py file
          # Note that your source code is cloned to /mnt/data/src
          - name: config
            path: /mnt/data/src/workflows/hyperparameter-tuning/mnist/config.yaml
            raw:
              data: '{{workflow.parameters.config}}'
          # [CHANGE] Path where search_space.json will be generated or already exists
          # Update the path below so that search_space.json is written to the same directory as your main.py file
          # Note that your source code is cloned to /mnt/data/src
          - name: search-space
            path: /mnt/data/src/workflows/hyperparameter-tuning/mnist/search_space.json
            raw:
              data: '{{workflow.parameters.search-space}}'
      outputs:
        artifacts:
          - name: output
            path: /mnt/output
            optional: true
      container:
        image: onepanel/dl:0.17.0
        command:
          - sh
          - -c
        args:
          # [CHANGE] Update the config path below to point to config.yaml path as described above
          # Note that you can `pip install` additional tools here if necessary
          - |
            python -u /opt/onepanel/nni/start.py \
              --config /mnt/data/src/workflows/hyperparameter-tuning/mnist/config.yaml
        workingDir: /mnt
        volumeMounts:
          - name: hyperparamtuning-data
            mountPath: /mnt/data
          - name: hyperparamtuning-output
            mountPath: /mnt/output
      nodeSelector:
        "{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
      sidecars:
        - name: nni-web-ui
          image: onepanel/nni-web-ui:0.17.0
          env:
            - name: ONEPANEL_INTERACTIVE_SIDECAR
              value: 'true'
          ports:
            - containerPort: 9000
              name: nni
        - name: tensorboard
          image: onepanel/dl:0.17.0
          command:
            - sh
            - '-c'
          env:
            - name: ONEPANEL_INTERACTIVE_SIDECAR
              value: 'true'
          args:
            # Read logs from /mnt/output/tensorboard - /mnt/output is auto-mounted from volumeMounts
            - tensorboard --logdir /mnt/output/tensorboard
          ports:
            - containerPort: 6006
              name: tensorboard
    # Use the metrics-writer tasks to write best metrics to Workflow
    - name: metrics-writer
      inputs:
        artifacts:
          - name: sys-metrics
            path: /tmp/sys-metrics.json
          - git:
              repo: https://github.com/onepanelio/templates.git
              revision: v0.18.0
            name: src
            path: /mnt/src
      container:
        image: onepanel/python-sdk:v0.16.0
        command:
          - python
          - -u
        args:
          - /mnt/src/tasks/metrics-writer/main.py
          - --from_file=/tmp/sys-metrics.json

  # [CHANGE] Volumes that will mount to /mnt/data (annotated data) and /mnt/output (models, checkpoints, logs)
  # Update this depending on your annotation data, model, checkpoint, logs, etc. sizes
  # Example values: 250Mi, 500Gi, 1Ti
  volumeClaimTemplates:
    - metadata:
        name: hyperparamtuning-data
      spec:
        accessModes: [ "ReadWriteOnce" ]
        resources:
          requests:
            storage: 20Gi
    - metadata:
        name: hyperparamtuning-output
      spec:
        accessModes: [ "ReadWriteOnce" ]
        resources:
          requests:
            storage: 20Gi
db/yaml/workflows/maskrcnn-training/20200812104328.yaml (new file, 197 lines)
@@ -0,0 +1,197 @@
metadata:
  name: "MaskRCNN Training"
  kind: Workflow
  version: 20200812104328
  action: create
  labels:
    "used-by": "cvat"
    "created-by": "system"
spec:
  arguments:
    parameters:
      - name: source
        value: https://github.com/onepanelio/Mask_RCNN.git
        displayName: Model source code
        type: hidden
        visibility: private

      - name: sys-annotation-path
        value: annotation-dump/sample_dataset
        hint: Path to annotated data in default object storage (i.e. S3). In CVAT, this parameter will be pre-populated.
        displayName: Dataset path
        visibility: private

      - name: sys-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e. S3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: private

      - name: sys-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint to show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

      - name: sys-num-classes
        displayName: Number of classes
        hint: Number of classes (i.e. in CVAT tasks) + 1 for background
        value: '81'
        visibility: private

      - name: extras
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        value: |-
          stage-1-epochs=1    # Epochs for network heads
          stage-2-epochs=2    # Epochs for finetune layers
          stage-3-epochs=3    # Epochs for all layers
        hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters."

      - name: dump-format
        type: select.select
        value: cvat_coco
        displayName: CVAT dump format
        visibility: public
        options:
          - name: 'MS COCO'
            value: 'cvat_coco'
          - name: 'TF Detection API'
            value: 'cvat_tfrecord'

      - name: tf-image
        visibility: public
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.select
        visibility: public
        name: sys-node-pool
        value: Standard_D4s_v3
        required: true
        options:
          - name: 'CPU: 2, RAM: 8GB'
            value: Standard_D2s_v3
          - name: 'CPU: 4, RAM: 16GB'
            value: Standard_D4s_v3
          - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
            value: Standard_NC6

  entrypoint: main
  templates:
    - dag:
        tasks:
          - name: train-model
            template: tensorflow
        # Uncomment the lines below if you want to send Slack notifications
        # - arguments:
        #     artifacts:
        #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
        #         name: metrics
        #     parameters:
        #       - name: status
        #         value: '{{tasks.train-model.status}}'
        #   dependencies:
        #     - train-model
        #   name: notify-in-slack
        #   template: slack-notify-success
      name: main
    - container:
        args:
          - |
            apt-get update \
            && apt-get install -y git wget libglib2.0-0 libsm6 libxext6 libxrender-dev \
            && pip install -r requirements.txt \
            && pip install boto3 pyyaml google-cloud-storage \
            && git clone https://github.com/waleedka/coco \
            && cd coco/PythonAPI \
            && python setup.py build_ext install \
            && rm -rf build \
            && cd ../../ \
            && wget https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5 \
            && python setup.py install && ls \
            && python samples/coco/cvat.py train --dataset=/mnt/data/datasets \
              --model=workflow_maskrcnn \
              --extras="{{workflow.parameters.extras}}" \
              --ref_model_path="{{workflow.parameters.sys-finetune-checkpoint}}" \
              --num_classes="{{workflow.parameters.sys-num-classes}}" \
            && cd /mnt/src/ \
            && python prepare_dataset.py /mnt/data/datasets/annotations/instances_default.json
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.sys-annotation-path}}'
          - git:
              repo: '{{workflow.parameters.source}}'
              revision: "no-boto"
            name: src
            path: /mnt/src
      name: tensorflow
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.sys-output-path}}'
    # Uncomment the lines below if you want to send Slack notifications
    #- container:
    #    args:
    #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
    #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
    #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
    #    command:
    #      - sh
    #      - -c
    #    image: technosophos/slack-notify
    #  inputs:
    #    artifacts:
    #      - name: metrics
    #        optional: true
    #        path: /tmp/metrics.json
    #    parameters:
    #      - name: status
    #  name: slack-notify-success
  volumeClaimTemplates:
    - metadata:
        creationTimestamp: null
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        creationTimestamp: null
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
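A manifest like the one above is registered through the workflow-template counterpart of the helpers shown earlier. The sketch below is illustrative only: it assumes the createWorkflowTemplate signature visible in the hunk above, and the archival helper name in Down is an assumption modeled on archiveWorkspaceTemplate.

package migration

import (
	"database/sql"

	"github.com/pressly/goose"
)

func init() {
	goose.AddMigration(Up20200812104328, Down20200812104328)
}

// Up20200812104328 loads db/yaml/workflows/maskrcnn-training/20200812104328.yaml
// and creates the "MaskRCNN Training" workflow template in every namespace.
// The labels mirror the manifest's metadata.labels.
func Up20200812104328(tx *sql.Tx) error {
	return createWorkflowTemplate(
		"workflows/maskrcnn-training/20200812104328.yaml",
		"MaskRCNN Training",
		map[string]string{"used-by": "cvat", "created-by": "system"},
	)
}

func Down20200812104328(tx *sql.Tx) error {
	// Archival helper analogous to archiveWorkspaceTemplate; name assumed.
	return archiveWorkflowTemplate("MaskRCNN Training")
}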
db/yaml/workflows/maskrcnn-training/20200824095513.yaml (new file, 191 lines)
@@ -0,0 +1,191 @@
metadata:
  name: "MaskRCNN Training"
  kind: Workflow
  version: 20200824095513
  action: update
  labels:
    "used-by": "cvat"
    "created-by": "system"
spec:
  arguments:
    parameters:
      - name: source
        value: https://github.com/onepanelio/Mask_RCNN.git
        displayName: Model source code
        type: hidden
        visibility: private

      - name: cvat-annotation-path
        value: annotation-dump/sample_dataset
        hint: Path to annotated data in default object storage (i.e. S3). In CVAT, this parameter will be pre-populated.
        displayName: Dataset path
        visibility: private

      - name: cvat-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e. S3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: private

      - name: cvat-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint to show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

      - name: cvat-num-classes
        displayName: Number of classes
        hint: Number of classes (i.e. in CVAT tasks) + 1 for background
        value: '81'
        visibility: private

      - name: hyperparameters
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        value: |-
          stage-1-epochs=1    # Epochs for network heads
          stage-2-epochs=2    # Epochs for finetune layers
          stage-3-epochs=3    # Epochs for all layers
        hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."

      - name: dump-format
        value: cvat_coco
        displayName: CVAT dump format
        visibility: public

      - name: tf-image
        visibility: public
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.select
        visibility: public
        name: sys-node-pool
        value: Standard_D4s_v3
        required: true
        options:
          - name: 'CPU: 2, RAM: 8GB'
            value: Standard_D2s_v3
          - name: 'CPU: 4, RAM: 16GB'
            value: Standard_D4s_v3
          - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
            value: Standard_NC6

  entrypoint: main
  templates:
    - dag:
        tasks:
          - name: train-model
            template: tensorflow
        # Uncomment the lines below if you want to send Slack notifications
        # - arguments:
        #     artifacts:
        #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
        #         name: metrics
        #     parameters:
        #       - name: status
        #         value: '{{tasks.train-model.status}}'
        #   dependencies:
        #     - train-model
        #   name: notify-in-slack
        #   template: slack-notify-success
      name: main
    - container:
        args:
          - |
            apt-get update \
            && apt-get install -y git wget libglib2.0-0 libsm6 libxext6 libxrender-dev \
            && pip install -r requirements.txt \
            && pip install boto3 pyyaml google-cloud-storage \
            && git clone https://github.com/waleedka/coco \
            && cd coco/PythonAPI \
            && python setup.py build_ext install \
            && rm -rf build \
            && cd ../../ \
            && wget https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5 \
            && python setup.py install && ls \
            && python samples/coco/cvat.py train --dataset=/mnt/data/datasets \
              --model=workflow_maskrcnn \
              --extras="{{workflow.parameters.hyperparameters}}" \
              --ref_model_path="{{workflow.parameters.cvat-finetune-checkpoint}}" \
              --num_classes="{{workflow.parameters.cvat-num-classes}}" \
            && cd /mnt/src/ \
            && python prepare_dataset.py /mnt/data/datasets/annotations/instances_default.json
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
          - git:
              repo: '{{workflow.parameters.source}}'
              revision: "no-boto"
            name: src
            path: /mnt/src
      name: tensorflow
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
    # Uncomment the lines below if you want to send Slack notifications
    #- container:
    #    args:
    #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
    #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
    #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
    #    command:
    #      - sh
    #      - -c
    #    image: technosophos/slack-notify
    #  inputs:
    #    artifacts:
    #      - name: metrics
    #        optional: true
    #        path: /tmp/metrics.json
    #    parameters:
    #      - name: status
    #  name: slack-notify-success
  volumeClaimTemplates:
    - metadata:
        creationTimestamp: null
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        creationTimestamp: null
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
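The quoted "{{.ArtifactRepositoryType}}" map key above is itself a Go template action: ReplaceArtifactRepositoryType (see the Go changes earlier in this compare) substitutes the namespace's configured repository type before the manifest is stored. A stdlib-only sketch of that substitution, where the "s3" value is illustrative rather than taken from any real configuration:

package main

import (
	"os"
	"text/template"
)

func main() {
	// An artifact block whose key is a template action. After rendering,
	// the key becomes a concrete repository type such as s3 or gcs.
	const snippet = `outputs:
  artifacts:
    - name: model
      path: /mnt/output
      {{.ArtifactRepositoryType}}:
        key: 'my-namespace/workflow-data/output/sample_output'
`
	tmpl := template.Must(template.New("artifact").Parse(snippet))

	// Illustrative value; the real one comes from the namespace config.
	data := map[string]string{"ArtifactRepositoryType": "s3"}

	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}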
@@ -1,190 +1,199 @@
metadata:
  name: "MaskRCNN Training"
  kind: Workflow
  version: 20201115145814
  action: update
  labels:
    "used-by": "cvat"
    "created-by": "system"
spec:
  entrypoint: main
  arguments:
    parameters:
      - name: source
        value: https://github.com/onepanelio/Mask_RCNN.git
        displayName: Model source code
        type: hidden
        visibility: private

      - name: cvat-annotation-path
        value: annotation-dump/sample_dataset
        hint: Path to annotated data in default object storage (i.e. S3). In CVAT, this parameter will be pre-populated.
        displayName: Dataset path
        visibility: private

      - name: cvat-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e. S3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: private

      - name: cvat-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint to show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

      - name: cvat-num-classes
        displayName: Number of classes
        hint: Number of classes (i.e. in CVAT tasks) + 1 for background
        value: '81'
        visibility: private

      - name: hyperparameters
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        value: |-
          stage-1-epochs=1    # Epochs for network heads
          stage-2-epochs=2    # Epochs for finetune layers
          stage-3-epochs=3    # Epochs for all layers
        hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."

      - name: dump-format
        value: cvat_coco
        displayName: CVAT dump format
        visibility: public

      - name: tf-image
        visibility: public
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.select
        visibility: public
        name: sys-node-pool
        value: Standard_D4s_v3
        required: true
        options:
          - name: 'CPU: 2, RAM: 8GB'
            value: Standard_D2s_v3
          - name: 'CPU: 4, RAM: 16GB'
            value: Standard_D4s_v3
          - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
            value: Standard_NC6

  templates:
    - name: main
      dag:
        tasks:
          - name: train-model
            template: tensorflow
        # Uncomment the lines below if you want to send Slack notifications
        # - arguments:
        #     artifacts:
        #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
        #         name: metrics
        #     parameters:
        #       - name: status
        #         value: '{{tasks.train-model.status}}'
        #   dependencies:
        #     - train-model
        #   name: notify-in-slack
        #   template: slack-notify-success
    - name: tensorflow
      container:
        args:
          - |
            apt-get update \
            && apt-get install -y git wget libglib2.0-0 libsm6 libxext6 libxrender-dev \
            && pip install -r requirements.txt \
            && pip install boto3 pyyaml google-cloud-storage \
            && git clone https://github.com/waleedka/coco \
            && cd coco/PythonAPI \
            && python setup.py build_ext install \
            && rm -rf build \
            && cd ../../ \
            && wget https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5 \
            && python setup.py install && ls \
            && python samples/coco/cvat.py train --dataset=/mnt/data/datasets \
              --model=workflow_maskrcnn \
              --extras="{{workflow.parameters.hyperparameters}}" \
              --ref_model_path="{{workflow.parameters.cvat-finetune-checkpoint}}" \
              --num_classes="{{workflow.parameters.cvat-num-classes}}" \
            && cd /mnt/src/ \
            && python prepare_dataset.py /mnt/data/datasets/annotations/instances_default.json
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      sidecars:
        - name: tensorboard
          image: tensorflow/tensorflow:2.3.0
          command: [sh, -c]
          tty: true
          args: ["tensorboard --logdir /mnt/output/"]
          ports:
            - containerPort: 6006
              name: tensorboard
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
          - git:
              repo: '{{workflow.parameters.source}}'
              revision: "no-boto"
            name: src
            path: /mnt/src
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
    # Uncomment the lines below if you want to send Slack notifications
    #- container:
    #    args:
    #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
    #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
    #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
    #    command:
    #      - sh
    #      - -c
    #    image: technosophos/slack-notify
    #  inputs:
    #    artifacts:
    #      - name: metrics
    #        optional: true
    #        path: /tmp/metrics.json
    #    parameters:
    #      - name: status
    #  name: slack-notify-success
  volumeClaimTemplates:
    - metadata:
        creationTimestamp: null
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        creationTimestamp: null
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
@@ -1,192 +1,201 @@
|
||||
entrypoint: main
arguments:
  parameters:
  - name: source
    value: https://github.com/onepanelio/Mask_RCNN.git
    displayName: Model source code
    type: hidden
    visibility: private
metadata:
  name: "MaskRCNN Training"
  kind: Workflow
  version: 20201208155115
  action: update
  labels:
    "used-by": "cvat"
    "created-by": "system"
spec:
  entrypoint: main
  arguments:
    parameters:
    - name: source
      value: https://github.com/onepanelio/Mask_RCNN.git
      displayName: Model source code
      type: hidden
      visibility: private

    - name: cvat-annotation-path
      value: annotation-dump/sample_dataset
      hint: Path to annotated data in default object storage (i.e. S3). In CVAT, this parameter will be pre-populated.
      displayName: Dataset path
      visibility: private

    - name: cvat-output-path
      value: workflow-data/output/sample_output
      hint: Path to store output artifacts in default object storage (i.e. S3). In CVAT, this parameter will be pre-populated.
      displayName: Workflow output path
      visibility: private

    - name: cvat-finetune-checkpoint
      value: ''
      hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint to show here. Leave empty if this is the first time you're training this model.
      displayName: Checkpoint path
      visibility: public

    - name: cvat-num-classes
      displayName: Number of classes
      hint: Number of classes (i.e. in CVAT tasks) + 1 for background
      value: '81'
      visibility: private

    - name: hyperparameters
      displayName: Hyperparameters
      visibility: public
      type: textarea.textarea
      value: |-
        stage-1-epochs=1 # Epochs for network heads
        stage-2-epochs=2 # Epochs for finetune layers
        stage-3-epochs=3 # Epochs for all layers
      hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."

    - name: dump-format
      value: cvat_coco
      displayName: CVAT dump format
      visibility: public

    - name: tf-image
      visibility: public
      value: tensorflow/tensorflow:1.13.1-py3
      type: select.select
      displayName: Select TensorFlow image
      hint: Select the GPU image if you are running on a GPU node pool
      options:
      - name: 'TensorFlow 1.13.1 CPU Image'
        value: 'tensorflow/tensorflow:1.13.1-py3'
      - name: 'TensorFlow 1.13.1 GPU Image'
        value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.select
      visibility: public
      name: sys-node-pool
      value: Standard_D4s_v3
      required: true
      options:
      - name: 'CPU: 2, RAM: 8GB'
        value: Standard_D2s_v3
      - name: 'CPU: 4, RAM: 16GB'
        value: Standard_D4s_v3
      - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
        value: Standard_NC6
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: tensorflow
      # Uncomment the lines below if you want to send Slack notifications
      # - arguments:
      #     artifacts:
      #     - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
      #       name: metrics
      #     parameters:
      #     - name: status
      #       value: '{{tasks.train-model.status}}'
      #   dependencies:
      #   - train-model
      #   name: notify-in-slack
      #   template: slack-notify-success
  - name: tensorflow
    container:
      args:
      - |
        apt-get update \
        && apt-get install -y git wget libglib2.0-0 libsm6 libxext6 libxrender-dev \
        && pip install -r requirements.txt \
        && pip install boto3 pyyaml google-cloud-storage \
        && git clone https://github.com/waleedka/coco \
        && cd coco/PythonAPI \
        && python setup.py build_ext install \
        && rm -rf build \
        && cd ../../ \
        && wget https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5 \
        && python setup.py install && ls \
        && python samples/coco/cvat.py train --dataset=/mnt/data/datasets \
          --model=workflow_maskrcnn \
          --extras="{{workflow.parameters.hyperparameters}}" \
          --ref_model_path="{{workflow.parameters.cvat-finetune-checkpoint}}" \
          --num_classes="{{workflow.parameters.cvat-num-classes}}" \
        && cd /mnt/src/ \
        && python prepare_dataset.py /mnt/data/datasets/annotations/instances_default.json
      command:
      - sh
      - -c
      image: '{{workflow.parameters.tf-image}}'
      volumeMounts:
      - mountPath: /mnt/data
        name: data
      - mountPath: /mnt/output
        name: output
      workingDir: /mnt/src
    nodeSelector:
      beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
    sidecars:
    - name: tensorboard
      image: tensorflow/tensorflow:2.3.0
      command: [sh, -c]
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args: ["tensorboard --logdir /mnt/output/"]
      ports:
      - containerPort: 6006
        name: tensorboard
    inputs:
      artifacts:
      - name: data
        path: /mnt/data/datasets/
        "{{.ArtifactRepositoryType}}":
          key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
      - git:
          repo: '{{workflow.parameters.source}}'
          revision: "no-boto"
        name: src
        path: /mnt/src
    outputs:
      artifacts:
      - name: model
        optional: true
        path: /mnt/output
        "{{.ArtifactRepositoryType}}":
          key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
  # Uncomment the lines below if you want to send Slack notifications
  #- container:
  #    args:
  #    - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
  #      SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
  #      SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
  #    command:
  #    - sh
  #    - -c
  #    image: technosophos/slack-notify
  #  inputs:
  #    artifacts:
  #    - name: metrics
  #      optional: true
  #      path: /tmp/metrics.json
  #    parameters:
  #    - name: status
  #  name: slack-notify-success
  volumeClaimTemplates:
  - metadata:
      creationTimestamp: null
      name: data
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
  - metadata:
      creationTimestamp: null
      name: output
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi

@@ -1,149 +1,158 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/maskrcnn-training/
arguments:
  parameters:
  - name: cvat-annotation-path
    value: annotation-dump/sample_dataset
    hint: Path to annotated data in default object storage. In CVAT, this parameter will be pre-populated.
    displayName: Dataset path
    visibility: internal
metadata:
  name: "MaskRCNN Training"
  kind: Workflow
  version: 20201221195937
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/maskrcnn-training/"
  labels:
    "used-by": "cvat"
    "created-by": "system"
spec:
  arguments:
    parameters:
    - name: cvat-annotation-path
      value: annotation-dump/sample_dataset
      hint: Path to annotated data in default object storage. In CVAT, this parameter will be pre-populated.
      displayName: Dataset path
      visibility: internal

    - name: cvat-output-path
      value: workflow-data/output/sample_output
      hint: Path to store output artifacts in default object storage. In CVAT, this parameter will be pre-populated.
      displayName: Workflow output path
      visibility: internal

    - name: cvat-finetune-checkpoint
      value: ''
      hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint to show here. Leave empty if this is the first time you're training this model.
      displayName: Checkpoint path
      visibility: public

    - name: cvat-num-classes
      displayName: Number of classes
      hint: Number of classes + 1 for background. In CVAT, this parameter will be pre-populated.
      value: '11'
      visibility: internal

    - name: hyperparameters
      displayName: Hyperparameters
      visibility: public
      type: textarea.textarea
      value: |-
        stage-1-epochs=1 # Epochs for network heads
        stage-2-epochs=2 # Epochs for finetune layers
        stage-3-epochs=3 # Epochs for all layers
      hint: "See <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#maskrcnn-hyperparameters' target='_blank'>documentation</a> for more information on parameters."

    - name: dump-format
      value: cvat_coco
      displayName: CVAT dump format
      visibility: public

    - name: tf-image
      visibility: public
      value: tensorflow/tensorflow:1.13.1-py3
      type: select.select
      displayName: Select TensorFlow image
      hint: Select the GPU image if you are running on a GPU node pool
      options:
      - name: 'TensorFlow 1.13.1 CPU Image'
        value: 'tensorflow/tensorflow:1.13.1-py3'
      - name: 'TensorFlow 1.13.1 GPU Image'
        value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      visibility: public
      name: sys-node-pool
      value: {{.DefaultNodePoolOption}}
      required: true
    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      visibility: public
      name: sys-node-pool
      value: "{{.DefaultNodePoolOption}}"
      required: true

  entrypoint: main
  templates:
  - dag:
      tasks:
      - name: train-model
        template: tensorflow
    name: main
  - container:
      args:
      - |
        apt-get update \
        && apt-get install -y git wget libglib2.0-0 libsm6 libxext6 libxrender-dev \
        && pip install -r requirements.txt \
        && pip install boto3 pyyaml google-cloud-storage \
        && git clone https://github.com/waleedka/coco \
        && cd coco/PythonAPI \
        && python setup.py build_ext install \
        && rm -rf build \
        && cd ../../ \
        && wget https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5 \
        && python setup.py install && ls \
        && python samples/coco/cvat.py train --dataset=/mnt/data/datasets \
          --model=workflow_maskrcnn \
          --extras="{{workflow.parameters.hyperparameters}}" \
          --ref_model_path="{{workflow.parameters.cvat-finetune-checkpoint}}" \
          --num_classes="{{workflow.parameters.cvat-num-classes}}" \
        && cd /mnt/src/ \
        && python prepare_dataset.py /mnt/data/datasets/annotations/instances_default.json
      command:
      - sh
      - -c
      image: '{{workflow.parameters.tf-image}}'
      volumeMounts:
      - mountPath: /mnt/data
        name: data
      - mountPath: /mnt/output
        name: output
      workingDir: /mnt/src
    sidecars:
    - name: tensorboard
      image: tensorflow/tensorflow:2.3.0
      command: [ sh, -c ]
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args: [ "tensorboard --logdir /mnt/output/" ]
      ports:
      - containerPort: 6006
        name: tensorboard
    nodeSelector:
      beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
    inputs:
      artifacts:
      - name: data
        path: /mnt/data/datasets/
        s3:
          key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
      - git:
          repo: 'https://github.com/onepanelio/Mask_RCNN.git'
          revision: 'no-boto'
        name: src
        path: /mnt/src
    name: tensorflow
    outputs:
      artifacts:
      - name: model
        optional: true
        path: /mnt/output
        s3:
          key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
  volumeClaimTemplates:
  - metadata:
      name: data
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
  - metadata:
      name: output
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi

@@ -1,208 +1,217 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/maskrcnn-training/
arguments:
  parameters:
  - name: cvat-annotation-path
    value: 'artifacts/{{workflow.namespace}}/annotations/'
    hint: Path to annotated data (COCO format) in default object storage. In CVAT, this parameter will be pre-populated.
    displayName: Dataset path
    visibility: internal
metadata:
  name: "MaskRCNN Training"
  kind: Workflow
  version: 20210118175809
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/maskrcnn-training/"
  labels:
    "used-by": "cvat"
    "created-by": "system"
spec:
  arguments:
    parameters:
    - name: cvat-annotation-path
      value: 'artifacts/{{workflow.namespace}}/annotations/'
      hint: Path to annotated data (COCO format) in default object storage. In CVAT, this parameter will be pre-populated.
      displayName: Dataset path
      visibility: internal

    - name: val-split
      value: 10
      displayName: Validation split size
      type: input.number
      visibility: public
      hint: Enter validation set size in percentage of full dataset. (0 - 100)

    - name: num-augmentation-cycles
      value: 1
      displayName: Number of augmentation cycles
      type: input.number
      visibility: public
      hint: Number of augmentation cycles, zero means no data augmentation

    - name: preprocessing-parameters
      value: |-
        RandomBrightnessContrast:
          p: 0.2
        GaussianBlur:
          p: 0.3
        GaussNoise:
          p: 0.4
        HorizontalFlip:
          p: 0.5
        VerticalFlip:
          p: 0.3
      displayName: Preprocessing parameters
      visibility: public
      type: textarea.textarea
      hint: 'See <a href="https://albumentations.ai/docs/api_reference/augmentations/transforms/" target="_blank">documentation</a> for more information on parameters.'

    - name: cvat-num-classes
      displayName: Number of classes
      hint: Number of classes. In CVAT, this parameter will be pre-populated.
      value: '10'
      visibility: internal

    - name: hyperparameters
      displayName: Hyperparameters
      visibility: public
      type: textarea.textarea
      value: |-
        stage_1_epochs: 1 # Epochs for network heads
        stage_2_epochs: 1 # Epochs for finetune layers
        stage_3_epochs: 1 # Epochs for all layers
        num_steps: 1000 # Num steps per epoch
      hint: 'See <a href="https://docs.onepanel.ai/docs/reference/workflows/training#maskrcnn-hyperparameters" target="_blank">documentation</a> for more information on parameters.'

    - name: dump-format
      value: cvat_coco
      displayName: CVAT dump format
      visibility: private

    - name: cvat-finetune-checkpoint
      value: ''
      hint: Path to the last fine-tune checkpoint for this model in default object storage. Leave empty if this is the first time you're training this model.
      displayName: Checkpoint path
      visibility: public

    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      visibility: public
      name: sys-node-pool
      value: {{.DefaultNodePoolOption}}
      required: true
    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      visibility: public
      name: sys-node-pool
      value: "{{.DefaultNodePoolOption}}"
      required: true

  entrypoint: main
  templates:
  - dag:
      tasks:
      - name: preprocessing
        template: preprocessing
      - name: train-model
        template: tensorflow
        dependencies: [preprocessing]
        arguments:
          artifacts:
          - name: data
            from: "{{tasks.preprocessing.outputs.artifacts.processed-data}}"
    name: main
  - container:
      args:
      - |
        pip install pycocotools scikit-image==0.16.2 && \
        cd /mnt/src/train/workflows/maskrcnn-training && \
        python -u main.py train --dataset=/mnt/data/datasets/train_set/ \
          --model=workflow_maskrcnn \
          --extras="{{workflow.parameters.hyperparameters}}" \
          --ref_model_path="{{workflow.parameters.cvat-finetune-checkpoint}}" \
          --num_classes="{{workflow.parameters.cvat-num-classes}}" \
          --val_dataset=/mnt/data/datasets/eval_set/ \
          --use_validation=True
      command:
      - sh
      - -c
      image: onepanel/dl:v0.20.0
      volumeMounts:
      - mountPath: /mnt/data
        name: processed-data
      - mountPath: /mnt/output
        name: output
      workingDir: /mnt/src
    sidecars:
    - name: tensorboard
      image: onepanel/dl:v0.20.0
      command: [ sh, -c ]
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args: [ "tensorboard --logdir /mnt/output/tensorboard" ]
      ports:
      - containerPort: 6006
        name: tensorboard
    nodeSelector:
      "{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
    inputs:
      artifacts:
      - name: data
        path: /mnt/data/datasets/
      - name: models
        path: /mnt/data/models/
        optional: true
        s3:
          key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
      - git:
          repo: https://github.com/onepanelio/templates.git
          revision: v0.18.0
        name: src
        path: /mnt/src/train
    name: tensorflow
    outputs:
      artifacts:
      - name: model
        optional: true
        path: /mnt/output
  - container:
      args:
      - |
        pip install pycocotools && \
        cd /mnt/src/preprocessing/workflows/albumentations-preprocessing && \
        python -u main.py \
          --data_aug_params="{{workflow.parameters.preprocessing-parameters}}" \
          --val_split={{workflow.parameters.val-split}} \
          --aug_steps={{workflow.parameters.num-augmentation-cycles}}
      command:
      - sh
      - -c
      image: onepanel/dl:v0.20.0
      volumeMounts:
      - mountPath: /mnt/data
        name: data
      - mountPath: /mnt/output
        name: processed-data
      workingDir: /mnt/src
    nodeSelector:
      "{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
    inputs:
      artifacts:
      - name: data
        path: /mnt/data/datasets/
        s3:
          key: '{{workflow.parameters.cvat-annotation-path}}'
      - git:
          repo: https://github.com/onepanelio/templates.git
          revision: v0.18.0
        name: src
        path: /mnt/src/preprocessing
    name: preprocessing
    outputs:
      artifacts:
      - name: processed-data
        optional: true
        path: /mnt/output
  volumeClaimTemplates:
  - metadata:
      name: data
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
  - metadata:
      name: processed-data
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
  - metadata:
      name: output
    spec:
      accessModes:
      - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi

@@ -1,75 +1,84 @@
entrypoint: main
arguments:
  parameters:
  - name: source
    value: https://github.com/onepanelio/pytorch-examples.git
  - name: command
    value: "python mnist/main.py --epochs=1"
volumeClaimTemplates:
- metadata:
    name: data
  spec:
    accessModes: [ "ReadWriteOnce" ]
    resources:
      requests:
        storage: 2Gi
- metadata:
    name: output
  spec:
    accessModes: [ "ReadWriteOnce" ]
    resources:
      requests:
        storage: 2Gi
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: pytorch
    # Uncomment section below to send metrics to Slack
    # - name: notify-in-slack
    #   dependencies: [train-model]
    #   template: slack-notify-success
    #   arguments:
    #     parameters:
    #     - name: status
    #       value: "{{tasks.train-model.status}}"
    #     artifacts:
    #     - name: metrics
    #       from: "{{tasks.train-model.outputs.artifacts.sys-metrics}}"
- name: pytorch
  inputs:
    artifacts:
    - name: src
      path: /mnt/src
      git:
        repo: "{{workflow.parameters.source}}"
  outputs:
    artifacts:
    - name: model
      path: /mnt/output
      optional: true
      archive:
        none: {}
  container:
    image: pytorch/pytorch:latest
    command: [sh,-c]
    args: ["{{workflow.parameters.command}}"]
    workingDir: /mnt/src
    volumeMounts:
    - name: data
      mountPath: /mnt/data
    - name: output
      mountPath: /mnt/output
- name: slack-notify-success
  container:
    image: technosophos/slack-notify
    command: [sh,-c]
    args: ['SLACK_USERNAME=Worker SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify']
  inputs:
    parameters:
    - name: status
    artifacts:
    - name: metrics
      path: /tmp/metrics.json
      optional: true
metadata:
  name: "PyTorch Training"
  kind: Workflow
  version: 20200605090509
  action: create
  labels:
    "created-by": "system"
    framework: pytorch
spec:
  entrypoint: main
  arguments:
    parameters:
    - name: source
      value: https://github.com/onepanelio/pytorch-examples.git
    - name: command
      value: "python mnist/main.py --epochs=1"
  volumeClaimTemplates:
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: pytorch
      # Uncomment section below to send metrics to Slack
      # - name: notify-in-slack
      #   dependencies: [train-model]
      #   template: slack-notify-success
      #   arguments:
      #     parameters:
      #     - name: status
      #       value: "{{tasks.train-model.status}}"
      #     artifacts:
      #     - name: metrics
      #       from: "{{tasks.train-model.outputs.artifacts.sys-metrics}}"
  - name: pytorch
    inputs:
      artifacts:
      - name: src
        path: /mnt/src
        git:
          repo: "{{workflow.parameters.source}}"
    outputs:
      artifacts:
      - name: model
        path: /mnt/output
        optional: true
        archive:
          none: {}
    container:
      image: pytorch/pytorch:latest
      command: [sh,-c]
      args: ["{{workflow.parameters.command}}"]
      workingDir: /mnt/src
      volumeMounts:
      - name: data
        mountPath: /mnt/data
      - name: output
        mountPath: /mnt/output
  - name: slack-notify-success
    container:
      image: technosophos/slack-notify
      command: [sh,-c]
      args: ['SLACK_USERNAME=Worker SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify']
    inputs:
      parameters:
      - name: status
      artifacts:
      - name: metrics
        path: /tmp/metrics.json
        optional: true

@@ -1,207 +1,216 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/pytorch-mnist-training/
arguments:
  parameters:
  - name: epochs
    value: '10'
  - displayName: Node pool
    hint: Name of node pool or group to run this workflow task
    type: select.nodepool
    name: sys-node-pool
    value: {{.DefaultNodePoolOption}}
    visibility: public
    required: true
entrypoint: main
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: train-model
- name: train-model
  # Indicates that we want to push files in /mnt/output to object storage
  outputs:
    artifacts:
    - name: output
      path: /mnt/output
      optional: true
  script:
    image: onepanel/dl:0.17.0
    command:
    - python
    - '-u'
    source: |
      import json
      import torch
      import torch.nn as nn
      import torch.nn.functional as F
      import torch.optim as optim
      from torchvision import datasets, transforms
      from torch.optim.lr_scheduler import StepLR
      from torch.utils.tensorboard import SummaryWriter


      class Net(nn.Module):
          def __init__(self):
              super(Net, self).__init__()
              self.conv1 = nn.Conv2d(1, 32, 3, 1)
              self.conv2 = nn.Conv2d(32, 64, 3, 1)
              self.dropout1 = nn.Dropout(0.25)
              self.dropout2 = nn.Dropout(0.5)
              self.fc1 = nn.Linear(9216, 128)
              self.fc2 = nn.Linear(128, 10)

          def forward(self, x):
              x = self.conv1(x)
              x = F.relu(x)
              x = self.conv2(x)
              x = F.relu(x)
              x = F.max_pool2d(x, 2)
              x = self.dropout1(x)
              x = torch.flatten(x, 1)
              x = self.fc1(x)
              x = F.relu(x)
              x = self.dropout2(x)
              x = self.fc2(x)
              output = F.log_softmax(x, dim=1)
              return output


      def train(model, device, train_loader, optimizer, epoch, batch_size, writer):
          model.train()
          for batch_idx, (data, target) in enumerate(train_loader):
              data, target = data.to(device), target.to(device)
              optimizer.zero_grad()
              output = model(data)
              loss = F.nll_loss(output, target)
              loss.backward()
              optimizer.step()
              if batch_idx % 10 == 0:
                  print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                      epoch, batch_idx * len(data), len(train_loader.dataset),
                      100. * batch_idx / len(train_loader), loss.item()))

          writer.add_scalar('training loss', loss.item(), epoch)


      def test(model, device, test_loader, epoch, writer):
          model.eval()
          test_loss = 0
          correct = 0
          with torch.no_grad():
              for data, target in test_loader:
                  data, target = data.to(device), target.to(device)
                  output = model(data)
                  test_loss += F.nll_loss(output, target, reduction='sum').item()  # sum up batch loss
                  pred = output.argmax(dim=1, keepdim=True)  # get the index of the max log-probability
                  correct += pred.eq(target.view_as(pred)).sum().item()

          loss = test_loss / len(test_loader.dataset)
          accuracy = correct / len(test_loader.dataset)

          print('\nTest set: Average loss: {}, Accuracy: {}\n'.format(
              loss, accuracy))

          # Store metrics for this task
          metrics = [
              {'name': 'accuracy', 'value': accuracy},
              {'name': 'loss', 'value': loss}
          ]
          with open('/tmp/sys-metrics.json', 'w') as f:
              json.dump(metrics, f)


      def main(params):
          writer = SummaryWriter(log_dir='/mnt/output/tensorboard')

          use_cuda = torch.cuda.is_available()

          torch.manual_seed(params['seed'])

          device = torch.device('cuda' if use_cuda else 'cpu')

          train_kwargs = {'batch_size': params['batch_size']}
          test_kwargs = {'batch_size': params['test_batch_size']}
          if use_cuda:
              cuda_kwargs = {'num_workers': 1,
                             'pin_memory': True,
                             'shuffle': True}
              train_kwargs.update(cuda_kwargs)
              test_kwargs.update(cuda_kwargs)

          transform=transforms.Compose([
              transforms.ToTensor(),
              transforms.Normalize((0.1307,), (0.3081,))
          ])
          dataset1 = datasets.MNIST('/mnt/data', train=True, download=True,
                                    transform=transform)
          dataset2 = datasets.MNIST('/mnt/data', train=False,
                                    transform=transform)
          train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
          test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)

          model = Net().to(device)
          optimizer = optim.Adadelta(model.parameters(), lr=params['lr'])

          scheduler = StepLR(optimizer, step_size=1, gamma=params['gamma'])
          for epoch in range(1, params['epochs'] + 1):
              train(model, device, train_loader, optimizer, epoch, params['batch_size'], writer)
              test(model, device, test_loader, epoch, writer)
              scheduler.step()

          # Save model
          torch.save(model.state_dict(), '/mnt/output/model.pt')

          writer.close()


      if __name__ == '__main__':
          params = {
              'seed': 1,
              'batch_size': 64,
              'test_batch_size': 1000,
              'epochs': {{workflow.parameters.epochs}},
              'lr': 0.001,
              'gamma': 0.7,
          }
          main(params)
    volumeMounts:
    # TensorBoard sidecar will automatically mount these volumes
    # The `data` volume is mounted for saving datasets
    # The `output` volume is mounted to save model output and share TensorBoard logs
    - name: data
      mountPath: /mnt/data
    - name: output
      mountPath: /mnt/output
  nodeSelector:
    beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
  sidecars:
  - name: tensorboard
    image: tensorflow/tensorflow:2.3.0
metadata:
  name: "PyTorch Training"
  kind: Workflow
  version: 20201221194344
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/pytorch-mnist-training/"
  labels:
    "created-by": "system"
    framework: pytorch
spec:
  arguments:
    parameters:
    - name: epochs
      value: '10'
    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      name: sys-node-pool
      value: "{{.DefaultNodePoolOption}}"
      visibility: public
      required: true
  entrypoint: main
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: train-model
  - name: train-model
    # Indicates that we want to push files in /mnt/output to object storage
    outputs:
      artifacts:
      - name: output
        path: /mnt/output
        optional: true
    script:
      image: onepanel/dl:0.17.0
    sidecars:
    - name: tensorboard
      image: tensorflow/tensorflow:2.3.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi

@@ -1,207 +1,216 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/pytorch-mnist-training/
arguments:
  parameters:
  - name: epochs
    value: '10'
  - displayName: Node pool
    hint: Name of node pool or group to run this workflow task
    type: select.nodepool
    name: sys-node-pool
    value: {{.DefaultNodePoolOption}}
    visibility: public
    required: true
entrypoint: main
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: train-model
- name: train-model
  # Indicates that we want to push files in /mnt/output to object storage
  outputs:
    artifacts:
    - name: output
      path: /mnt/output
      optional: true
  script:
    image: onepanel/dl:0.17.0
    command:
    - python
    - '-u'
    source: |
      import json
      import torch
      import torch.nn as nn
      import torch.nn.functional as F
      import torch.optim as optim
      from torchvision import datasets, transforms
      from torch.optim.lr_scheduler import StepLR
      from torch.utils.tensorboard import SummaryWriter


      class Net(nn.Module):
          def __init__(self):
              super(Net, self).__init__()
              self.conv1 = nn.Conv2d(1, 32, 3, 1)
              self.conv2 = nn.Conv2d(32, 64, 3, 1)
              self.dropout1 = nn.Dropout(0.25)
              self.dropout2 = nn.Dropout(0.5)
              self.fc1 = nn.Linear(9216, 128)
              self.fc2 = nn.Linear(128, 10)

          def forward(self, x):
              x = self.conv1(x)
              x = F.relu(x)
              x = self.conv2(x)
              x = F.relu(x)
              x = F.max_pool2d(x, 2)
              x = self.dropout1(x)
              x = torch.flatten(x, 1)
              x = self.fc1(x)
              x = F.relu(x)
              x = self.dropout2(x)
              x = self.fc2(x)
              output = F.log_softmax(x, dim=1)
              return output


      def train(model, device, train_loader, optimizer, epoch, batch_size, writer):
          model.train()
          for batch_idx, (data, target) in enumerate(train_loader):
              data, target = data.to(device), target.to(device)
              optimizer.zero_grad()
              output = model(data)
              loss = F.nll_loss(output, target)
              loss.backward()
              optimizer.step()
              if batch_idx % 10 == 0:
                  print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                      epoch, batch_idx * len(data), len(train_loader.dataset),
                      100. * batch_idx / len(train_loader), loss.item()))

          writer.add_scalar('training loss', loss.item(), epoch)


      def test(model, device, test_loader, epoch, writer):
          model.eval()
          test_loss = 0
          correct = 0
          with torch.no_grad():
              for data, target in test_loader:
                  data, target = data.to(device), target.to(device)
                  output = model(data)
                  test_loss += F.nll_loss(output, target, reduction='sum').item()  # sum up batch loss
                  pred = output.argmax(dim=1, keepdim=True)  # get the index of the max log-probability
                  correct += pred.eq(target.view_as(pred)).sum().item()

          loss = test_loss / len(test_loader.dataset)
          accuracy = correct / len(test_loader.dataset)

          print('\nTest set: Average loss: {}, Accuracy: {}\n'.format(
              loss, accuracy))

          # Store metrics for this task
          metrics = [
              {'name': 'accuracy', 'value': accuracy},
              {'name': 'loss', 'value': loss}
          ]
          with open('/tmp/sys-metrics.json', 'w') as f:
              json.dump(metrics, f)


      def main(params):
          writer = SummaryWriter(log_dir='/mnt/output/tensorboard')

          use_cuda = torch.cuda.is_available()

          torch.manual_seed(params['seed'])

          device = torch.device('cuda' if use_cuda else 'cpu')

          train_kwargs = {'batch_size': params['batch_size']}
          test_kwargs = {'batch_size': params['test_batch_size']}
          if use_cuda:
              cuda_kwargs = {'num_workers': 1,
                             'pin_memory': True,
                             'shuffle': True}
              train_kwargs.update(cuda_kwargs)
              test_kwargs.update(cuda_kwargs)

          transform=transforms.Compose([
              transforms.ToTensor(),
              transforms.Normalize((0.1307,), (0.3081,))
          ])
          dataset1 = datasets.MNIST('/mnt/data', train=True, download=True,
                                    transform=transform)
          dataset2 = datasets.MNIST('/mnt/data', train=False,
                                    transform=transform)
          train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
          test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)

          model = Net().to(device)
          optimizer = optim.Adadelta(model.parameters(), lr=params['lr'])

          scheduler = StepLR(optimizer, step_size=1, gamma=params['gamma'])
          for epoch in range(1, params['epochs'] + 1):
              train(model, device, train_loader, optimizer, epoch, params['batch_size'], writer)
              test(model, device, test_loader, epoch, writer)
              scheduler.step()

          # Save model
          torch.save(model.state_dict(), '/mnt/output/model.pt')

          writer.close()


      if __name__ == '__main__':
          params = {
              'seed': 1,
              'batch_size': 64,
              'test_batch_size': 1000,
              'epochs': {{workflow.parameters.epochs}},
              'lr': 0.001,
              'gamma': 0.7,
          }
          main(params)
    volumeMounts:
    # TensorBoard sidecar will automatically mount these volumes
    # The `data` volume is mounted for saving datasets
    # The `output` volume is mounted to save model output and share TensorBoard logs
    - name: data
      mountPath: /mnt/data
    - name: output
      mountPath: /mnt/output
  nodeSelector:
    {{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
  sidecars:
  - name: tensorboard
metadata:
  name: "PyTorch Training"
  kind: Workflow
  version: 20210118175809
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/pytorch-mnist-training/"
  labels:
    "created-by": "system"
    framework: pytorch
spec:
  arguments:
    parameters:
    - name: epochs
      value: '10'
    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      name: sys-node-pool
      value: "{{.DefaultNodePoolOption}}"
      visibility: public
      required: true
  entrypoint: main
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: train-model
  - name: train-model
    # Indicates that we want to push files in /mnt/output to object storage
    outputs:
      artifacts:
      - name: output
        path: /mnt/output
        optional: true
    script:
      image: onepanel/dl:0.17.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
|
||||
name: tensorboard
|
||||
volumeClaimTemplates:
|
||||
# Provision volumes for storing data and output
|
||||
- metadata:
|
||||
name: data
|
||||
spec:
|
||||
accessModes: [ "ReadWriteOnce" ]
|
||||
resources:
|
||||
requests:
|
||||
storage: 2Gi
|
||||
- metadata:
|
||||
name: output
|
||||
spec:
|
||||
accessModes: [ "ReadWriteOnce" ]
|
||||
resources:
|
||||
requests:
|
||||
storage: 2Gi
|
||||
- python
|
||||
- '-u'
|
||||
source: |
|
||||
import json
|
||||
import torch
|
||||
import torch.nn as nn
|
||||
import torch.nn.functional as F
|
||||
import torch.optim as optim
|
||||
from torchvision import datasets, transforms
|
||||
from torch.optim.lr_scheduler import StepLR
|
||||
from torch.utils.tensorboard import SummaryWriter
|
||||
|
||||
|
||||
class Net(nn.Module):
|
||||
def __init__(self):
|
||||
super(Net, self).__init__()
|
||||
self.conv1 = nn.Conv2d(1, 32, 3, 1)
|
||||
self.conv2 = nn.Conv2d(32, 64, 3, 1)
|
||||
self.dropout1 = nn.Dropout(0.25)
|
||||
self.dropout2 = nn.Dropout(0.5)
|
||||
self.fc1 = nn.Linear(9216, 128)
|
||||
self.fc2 = nn.Linear(128, 10)
|
||||
|
||||
def forward(self, x):
|
||||
x = self.conv1(x)
|
||||
x = F.relu(x)
|
||||
x = self.conv2(x)
|
||||
x = F.relu(x)
|
||||
x = F.max_pool2d(x, 2)
|
||||
x = self.dropout1(x)
|
||||
x = torch.flatten(x, 1)
|
||||
x = self.fc1(x)
|
||||
x = F.relu(x)
|
||||
x = self.dropout2(x)
|
||||
x = self.fc2(x)
|
||||
output = F.log_softmax(x, dim=1)
|
||||
return output
|
||||
|
||||
|
||||
def train(model, device, train_loader, optimizer, epoch, batch_size, writer):
|
||||
model.train()
|
||||
for batch_idx, (data, target) in enumerate(train_loader):
|
||||
data, target = data.to(device), target.to(device)
|
||||
optimizer.zero_grad()
|
||||
output = model(data)
|
||||
loss = F.nll_loss(output, target)
|
||||
loss.backward()
|
||||
optimizer.step()
|
||||
if batch_idx % 10 == 0:
|
||||
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
|
||||
epoch, batch_idx * len(data), len(train_loader.dataset),
|
||||
100. * batch_idx / len(train_loader), loss.item()))
|
||||
|
||||
writer.add_scalar('training loss', loss.item(), epoch)
|
||||
|
||||
|
||||
def test(model, device, test_loader, epoch, writer):
|
||||
model.eval()
|
||||
test_loss = 0
|
||||
correct = 0
|
||||
with torch.no_grad():
|
||||
for data, target in test_loader:
|
||||
data, target = data.to(device), target.to(device)
|
||||
output = model(data)
|
||||
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
|
||||
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
|
||||
correct += pred.eq(target.view_as(pred)).sum().item()
|
||||
|
||||
loss = test_loss / len(test_loader.dataset)
|
||||
accuracy = correct / len(test_loader.dataset)
|
||||
|
||||
print('\nTest set: Average loss: {}, Accuracy: {}\n'.format(
|
||||
loss, accuracy))
|
||||
|
||||
# Store metrics for this task
|
||||
metrics = [
|
||||
{'name': 'accuracy', 'value': accuracy},
|
||||
{'name': 'loss', 'value': loss}
|
||||
]
|
||||
with open('/tmp/sys-metrics.json', 'w') as f:
|
||||
json.dump(metrics, f)
|
||||
|
||||
|
||||
def main(params):
|
||||
writer = SummaryWriter(log_dir='/mnt/output/tensorboard')
|
||||
|
||||
use_cuda = torch.cuda.is_available()
|
||||
|
||||
torch.manual_seed(params['seed'])
|
||||
|
||||
device = torch.device('cuda' if use_cuda else 'cpu')
|
||||
|
||||
train_kwargs = {'batch_size': params['batch_size']}
|
||||
test_kwargs = {'batch_size': params['test_batch_size']}
|
||||
if use_cuda:
|
||||
cuda_kwargs = {'num_workers': 1,
|
||||
'pin_memory': True,
|
||||
'shuffle': True}
|
||||
train_kwargs.update(cuda_kwargs)
|
||||
test_kwargs.update(cuda_kwargs)
|
||||
|
||||
transform=transforms.Compose([
|
||||
transforms.ToTensor(),
|
||||
transforms.Normalize((0.1307,), (0.3081,))
|
||||
])
|
||||
dataset1 = datasets.MNIST('/mnt/data', train=True, download=True,
|
||||
transform=transform)
|
||||
dataset2 = datasets.MNIST('/mnt/data', train=False,
|
||||
transform=transform)
|
||||
train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
|
||||
test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)
|
||||
|
||||
model = Net().to(device)
|
||||
optimizer = optim.Adadelta(model.parameters(), lr=params['lr'])
|
||||
|
||||
scheduler = StepLR(optimizer, step_size=1, gamma=params['gamma'])
|
||||
for epoch in range(1, params['epochs'] + 1):
|
||||
train(model, device, train_loader, optimizer, epoch, params['batch_size'], writer)
|
||||
test(model, device, test_loader, epoch, writer)
|
||||
scheduler.step()
|
||||
|
||||
# Save model
|
||||
torch.save(model.state_dict(), '/mnt/output/model.pt')
|
||||
|
||||
writer.close()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
params = {
|
||||
'seed': 1,
|
||||
'batch_size': 64,
|
||||
'test_batch_size': 1000,
|
||||
'epochs': {{workflow.parameters.epochs}},
|
||||
'lr': 0.001,
|
||||
'gamma': 0.7,
|
||||
}
|
||||
main(params)
|
||||
volumeMounts:
|
||||
# TensorBoard sidecar will automatically mount these volumes
|
||||
# The `data` volume is mounted for saving datasets
|
||||
# The `output` volume is mounted to save model output and share TensorBoard logs
|
||||
- name: data
|
||||
mountPath: /mnt/data
|
||||
- name: output
|
||||
mountPath: /mnt/output
|
||||
nodeSelector:
|
||||
"{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
|
||||
sidecars:
|
||||
- name: tensorboard
|
||||
image: onepanel/dl:0.17.0
|
||||
command:
|
||||
- sh
|
||||
- '-c'
|
||||
env:
|
||||
- name: ONEPANEL_INTERACTIVE_SIDECAR
|
||||
value: 'true'
|
||||
args:
|
||||
# Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
|
||||
- tensorboard --logdir /mnt/output/tensorboard
|
||||
ports:
|
||||
- containerPort: 6006
|
||||
name: tensorboard
|
||||
volumeClaimTemplates:
|
||||
# Provision volumes for storing data and output
|
||||
- metadata:
|
||||
name: data
|
||||
spec:
|
||||
accessModes: [ "ReadWriteOnce" ]
|
||||
resources:
|
||||
requests:
|
||||
storage: 2Gi
|
||||
- metadata:
|
||||
name: output
|
||||
spec:
|
||||
accessModes: [ "ReadWriteOnce" ]
|
||||
resources:
|
||||
requests:
|
||||
storage: 2Gi
|
||||
|
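A minimal sketch of consuming this template's output: the training script above saves the model weights with torch.save(model.state_dict(), '/mnt/output/model.pt'), and the output artifact pushes them to object storage. Reloading them for inference assumes the same Net class definition as in the script source.

# Hedged sketch: reloading the checkpoint the template above writes.
import torch

model = Net()  # Net as defined in the template's training script
model.load_state_dict(torch.load('/mnt/output/model.pt', map_location='cpu'))
model.eval()  # disable the dropout layers before running inference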
@@ -1,207 +1,216 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/pytorch-mnist-training/
arguments:
  parameters:
  - name: epochs
    value: '10'
  - displayName: Node pool
    hint: Name of node pool or group to run this workflow task
    type: select.nodepool
    name: sys-node-pool
    value: {{.DefaultNodePoolOption}}
    visibility: public
    required: true
entrypoint: main
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: train-model
- name: train-model
  # Indicates that we want to push files in /mnt/output to object storage
  outputs:
    artifacts:
    - name: output
      path: /mnt/output
      optional: true
  script:
    image: onepanel/dl:v0.20.0
    command:
    - python
    - '-u'
    source: |
      import json
      import torch
      import torch.nn as nn
      import torch.nn.functional as F
      import torch.optim as optim
      from torchvision import datasets, transforms
      from torch.optim.lr_scheduler import StepLR
      from torch.utils.tensorboard import SummaryWriter


      class Net(nn.Module):
          def __init__(self):
              super(Net, self).__init__()
              self.conv1 = nn.Conv2d(1, 32, 3, 1)
              self.conv2 = nn.Conv2d(32, 64, 3, 1)
              self.dropout1 = nn.Dropout(0.25)
              self.dropout2 = nn.Dropout(0.5)
              self.fc1 = nn.Linear(9216, 128)
              self.fc2 = nn.Linear(128, 10)

          def forward(self, x):
              x = self.conv1(x)
              x = F.relu(x)
              x = self.conv2(x)
              x = F.relu(x)
              x = F.max_pool2d(x, 2)
              x = self.dropout1(x)
              x = torch.flatten(x, 1)
              x = self.fc1(x)
              x = F.relu(x)
              x = self.dropout2(x)
              x = self.fc2(x)
              output = F.log_softmax(x, dim=1)
              return output


      def train(model, device, train_loader, optimizer, epoch, batch_size, writer):
          model.train()
          for batch_idx, (data, target) in enumerate(train_loader):
              data, target = data.to(device), target.to(device)
              optimizer.zero_grad()
              output = model(data)
              loss = F.nll_loss(output, target)
              loss.backward()
              optimizer.step()
              if batch_idx % 10 == 0:
                  print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                      epoch, batch_idx * len(data), len(train_loader.dataset),
                      100. * batch_idx / len(train_loader), loss.item()))

          writer.add_scalar('training loss', loss.item(), epoch)


      def test(model, device, test_loader, epoch, writer):
          model.eval()
          test_loss = 0
          correct = 0
          with torch.no_grad():
              for data, target in test_loader:
                  data, target = data.to(device), target.to(device)
                  output = model(data)
                  test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
                  pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
                  correct += pred.eq(target.view_as(pred)).sum().item()

          loss = test_loss / len(test_loader.dataset)
          accuracy = correct / len(test_loader.dataset)

          print('\nTest set: Average loss: {}, Accuracy: {}\n'.format(
              loss, accuracy))

          # Store metrics for this task
          metrics = [
              {'name': 'accuracy', 'value': accuracy},
              {'name': 'loss', 'value': loss}
          ]
          with open('/mnt/tmp/sys-metrics.json', 'w') as f:
              json.dump(metrics, f)


      def main(params):
          writer = SummaryWriter(log_dir='/mnt/output/tensorboard')

          use_cuda = torch.cuda.is_available()

          torch.manual_seed(params['seed'])

          device = torch.device('cuda' if use_cuda else 'cpu')

          train_kwargs = {'batch_size': params['batch_size']}
          test_kwargs = {'batch_size': params['test_batch_size']}
          if use_cuda:
              cuda_kwargs = {'num_workers': 1,
                             'pin_memory': True,
                             'shuffle': True}
              train_kwargs.update(cuda_kwargs)
              test_kwargs.update(cuda_kwargs)

          transform=transforms.Compose([
              transforms.ToTensor(),
              transforms.Normalize((0.1307,), (0.3081,))
          ])
          dataset1 = datasets.MNIST('/mnt/data', train=True, download=True,
                                    transform=transform)
          dataset2 = datasets.MNIST('/mnt/data', train=False,
                                    transform=transform)
          train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
          test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)

          model = Net().to(device)
          optimizer = optim.Adadelta(model.parameters(), lr=params['lr'])

          scheduler = StepLR(optimizer, step_size=1, gamma=params['gamma'])
          for epoch in range(1, params['epochs'] + 1):
              train(model, device, train_loader, optimizer, epoch, params['batch_size'], writer)
              test(model, device, test_loader, epoch, writer)
              scheduler.step()

          # Save model
          torch.save(model.state_dict(), '/mnt/output/model.pt')

          writer.close()


      if __name__ == '__main__':
          params = {
              'seed': 1,
              'batch_size': 64,
              'test_batch_size': 1000,
              'epochs': {{workflow.parameters.epochs}},
              'lr': 0.001,
              'gamma': 0.7,
          }
          main(params)
    volumeMounts:
    # TensorBoard sidecar will automatically mount these volumes
    # The `data` volume is mounted for saving datasets
    # The `output` volume is mounted to save model output and share TensorBoard logs
    - name: data
      mountPath: /mnt/data
    - name: output
      mountPath: /mnt/output
  nodeSelector:
    {{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
  sidecars:
  - name: tensorboard
metadata:
  name: "PyTorch Training"
  kind: Workflow
  version: 20210323175655
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/pytorch-mnist-training/"
  labels:
    "created-by": "system"
    framework: pytorch
spec:
  arguments:
    parameters:
    - name: epochs
      value: '10'
    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      name: sys-node-pool
      value: "{{.DefaultNodePoolOption}}"
      visibility: public
      required: true
  entrypoint: main
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: train-model
  - name: train-model
    # Indicates that we want to push files in /mnt/output to object storage
    outputs:
      artifacts:
      - name: output
        path: /mnt/output
        optional: true
    script:
      image: onepanel/dl:v0.20.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
      - python
      - '-u'
      source: |
        import json
        import torch
        import torch.nn as nn
        import torch.nn.functional as F
        import torch.optim as optim
        from torchvision import datasets, transforms
        from torch.optim.lr_scheduler import StepLR
        from torch.utils.tensorboard import SummaryWriter


        class Net(nn.Module):
            def __init__(self):
                super(Net, self).__init__()
                self.conv1 = nn.Conv2d(1, 32, 3, 1)
                self.conv2 = nn.Conv2d(32, 64, 3, 1)
                self.dropout1 = nn.Dropout(0.25)
                self.dropout2 = nn.Dropout(0.5)
                self.fc1 = nn.Linear(9216, 128)
                self.fc2 = nn.Linear(128, 10)

            def forward(self, x):
                x = self.conv1(x)
                x = F.relu(x)
                x = self.conv2(x)
                x = F.relu(x)
                x = F.max_pool2d(x, 2)
                x = self.dropout1(x)
                x = torch.flatten(x, 1)
                x = self.fc1(x)
                x = F.relu(x)
                x = self.dropout2(x)
                x = self.fc2(x)
                output = F.log_softmax(x, dim=1)
                return output


        def train(model, device, train_loader, optimizer, epoch, batch_size, writer):
            model.train()
            for batch_idx, (data, target) in enumerate(train_loader):
                data, target = data.to(device), target.to(device)
                optimizer.zero_grad()
                output = model(data)
                loss = F.nll_loss(output, target)
                loss.backward()
                optimizer.step()
                if batch_idx % 10 == 0:
                    print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                        epoch, batch_idx * len(data), len(train_loader.dataset),
                        100. * batch_idx / len(train_loader), loss.item()))

            writer.add_scalar('training loss', loss.item(), epoch)


        def test(model, device, test_loader, epoch, writer):
            model.eval()
            test_loss = 0
            correct = 0
            with torch.no_grad():
                for data, target in test_loader:
                    data, target = data.to(device), target.to(device)
                    output = model(data)
                    test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
                    pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
                    correct += pred.eq(target.view_as(pred)).sum().item()

            loss = test_loss / len(test_loader.dataset)
            accuracy = correct / len(test_loader.dataset)

            print('\nTest set: Average loss: {}, Accuracy: {}\n'.format(
                loss, accuracy))

            # Store metrics for this task
            metrics = [
                {'name': 'accuracy', 'value': accuracy},
                {'name': 'loss', 'value': loss}
            ]
            with open('/mnt/tmp/sys-metrics.json', 'w') as f:
                json.dump(metrics, f)


        def main(params):
            writer = SummaryWriter(log_dir='/mnt/output/tensorboard')

            use_cuda = torch.cuda.is_available()

            torch.manual_seed(params['seed'])

            device = torch.device('cuda' if use_cuda else 'cpu')

            train_kwargs = {'batch_size': params['batch_size']}
            test_kwargs = {'batch_size': params['test_batch_size']}
            if use_cuda:
                cuda_kwargs = {'num_workers': 1,
                               'pin_memory': True,
                               'shuffle': True}
                train_kwargs.update(cuda_kwargs)
                test_kwargs.update(cuda_kwargs)

            transform=transforms.Compose([
                transforms.ToTensor(),
                transforms.Normalize((0.1307,), (0.3081,))
            ])
            dataset1 = datasets.MNIST('/mnt/data', train=True, download=True,
                                      transform=transform)
            dataset2 = datasets.MNIST('/mnt/data', train=False,
                                      transform=transform)
            train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
            test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)

            model = Net().to(device)
            optimizer = optim.Adadelta(model.parameters(), lr=params['lr'])

            scheduler = StepLR(optimizer, step_size=1, gamma=params['gamma'])
            for epoch in range(1, params['epochs'] + 1):
                train(model, device, train_loader, optimizer, epoch, params['batch_size'], writer)
                test(model, device, test_loader, epoch, writer)
                scheduler.step()

            # Save model
            torch.save(model.state_dict(), '/mnt/output/model.pt')

            writer.close()


        if __name__ == '__main__':
            params = {
                'seed': 1,
                'batch_size': 64,
                'test_batch_size': 1000,
                'epochs': {{workflow.parameters.epochs}},
                'lr': 0.001,
                'gamma': 0.7,
            }
            main(params)
      volumeMounts:
      # TensorBoard sidecar will automatically mount these volumes
      # The `data` volume is mounted for saving datasets
      # The `output` volume is mounted to save model output and share TensorBoard logs
      - name: data
        mountPath: /mnt/data
      - name: output
        mountPath: /mnt/output
    nodeSelector:
      "{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
    sidecars:
    - name: tensorboard
      image: onepanel/dl:v0.20.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
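The metrics contract used by all of these templates is simple: a JSON list of {name, value} pairs written to a well-known path (earlier template versions use /tmp/sys-metrics.json, the 20210323175655 revisions above use /mnt/tmp/sys-metrics.json). A minimal hedged sketch of that contract:

# Sketch of the sys-metrics contract; the helper name is illustrative only.
import json

def write_sys_metrics(metrics, path='/tmp/sys-metrics.json'):
    """metrics: list of dicts like {'name': 'accuracy', 'value': 0.98}."""
    with open(path, 'w') as f:
        json.dump(metrics, f)

write_sys_metrics([{'name': 'accuracy', 'value': 0.98},
                   {'name': 'loss', 'value': 0.05}])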
@@ -1,76 +1,85 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/template.yaml
entrypoint: main
arguments:
  parameters:
  - name: source
    value: https://github.com/onepanelio/tensorflow-examples.git
  - name: command
    value: "python mnist/main.py --epochs=5"
volumeClaimTemplates:
- metadata:
    name: data
  spec:
    accessModes: [ "ReadWriteOnce" ]
    resources:
      requests:
        storage: 2Gi
- metadata:
    name: output
  spec:
    accessModes: [ "ReadWriteOnce" ]
    resources:
      requests:
        storage: 2Gi
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: pytorch
    # Uncomment section below to send metrics to Slack
    # - name: notify-in-slack
    #   dependencies: [train-model]
    #   template: slack-notify-success
    #   arguments:
    #     parameters:
    #     - name: status
    #       value: "{{tasks.train-model.status}}"
    #     artifacts:
    #     - name: metrics
    #       from: "{{tasks.train-model.outputs.artifacts.sys-metrics}}"
- name: pytorch
  inputs:
    artifacts:
    - name: src
      path: /mnt/src
      git:
        repo: "{{workflow.parameters.source}}"
  outputs:
    artifacts:
    - name: model
      path: /mnt/output
      optional: true
      archive:
        none: {}
  container:
    image: tensorflow/tensorflow:latest
    command: [sh,-c]
    args: ["{{workflow.parameters.command}}"]
    workingDir: /mnt/src
    volumeMounts:
    - name: data
      mountPath: /mnt/data
    - name: output
      mountPath: /mnt/output
- name: slack-notify-success
  container:
    image: technosophos/slack-notify
    command: [sh,-c]
    args: ['SLACK_USERNAME=Worker SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE=$(cat /tmp/metrics.json) ./slack-notify']
  inputs:
    parameters:
    - name: status
    artifacts:
    - name: metrics
      path: /tmp/metrics.json
      optional: true
metadata:
  name: "TensorFlow Training"
  kind: Workflow
  version: 20200605090535
  action: create
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/template.yaml"
  labels:
    "created-by": "system"
    framework: tensorflow
spec:
  entrypoint: main
  arguments:
    parameters:
    - name: source
      value: https://github.com/onepanelio/tensorflow-examples.git
    - name: command
      value: "python mnist/main.py --epochs=5"
  volumeClaimTemplates:
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: pytorch
      # Uncomment section below to send metrics to Slack
      # - name: notify-in-slack
      #   dependencies: [train-model]
      #   template: slack-notify-success
      #   arguments:
      #     parameters:
      #     - name: status
      #       value: "{{tasks.train-model.status}}"
      #     artifacts:
      #     - name: metrics
      #       from: "{{tasks.train-model.outputs.artifacts.sys-metrics}}"
  - name: pytorch
    inputs:
      artifacts:
      - name: src
        path: /mnt/src
        git:
          repo: "{{workflow.parameters.source}}"
    outputs:
      artifacts:
      - name: model
        path: /mnt/output
        optional: true
        archive:
          none: {}
    container:
      image: tensorflow/tensorflow:latest
      command: [sh,-c]
      args: ["{{workflow.parameters.command}}"]
      workingDir: /mnt/src
      volumeMounts:
      - name: data
        mountPath: /mnt/data
      - name: output
        mountPath: /mnt/output
  - name: slack-notify-success
    container:
      image: technosophos/slack-notify
      command: [sh,-c]
      args: ['SLACK_USERNAME=Worker SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE=$(cat /tmp/metrics.json) ./slack-notify']
    inputs:
      parameters:
      - name: status
      artifacts:
      - name: metrics
        path: /tmp/metrics.json
        optional: true
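The slack-notify-success step passes the metrics artifact to the third-party technosophos/slack-notify image purely through environment variables. A hedged plain-Python equivalent, for readers who want the same notification without that image; SLACK_WEBHOOK_URL is a placeholder assumption, not something the template defines:

# Hedged sketch: post the metrics file to a Slack incoming webhook.
import json
import os
import urllib.request

def notify(status, metrics_path='/tmp/metrics.json'):
    with open(metrics_path) as f:
        message = f.read()
    payload = {'text': 'workflow %s\n%s' % (status, message)}
    req = urllib.request.Request(
        os.environ['SLACK_WEBHOOK_URL'],  # assumed to be set by the caller
        data=json.dumps(payload).encode(),
        headers={'Content-Type': 'application/json'})
    urllib.request.urlopen(req)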
@@ -1,71 +1,80 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/template.yaml
arguments:
  parameters:
  - name: epochs
    value: '10'
entrypoint: main
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: tf-dense
- name: tf-dense
  script:
    image: tensorflow/tensorflow:2.3.0
    command:
    - python
    - '-u'
    source: |
      import tensorflow as tf
      import datetime
      mnist = tf.keras.datasets.mnist
      (x_train, y_train),(x_test, y_test) = mnist.load_data()
      x_train, x_test = x_train / 255.0, x_test / 255.0
      def create_model():
          return tf.keras.models.Sequential([
              tf.keras.layers.Flatten(input_shape=(28, 28)),
              tf.keras.layers.Dense(512, activation='relu'),
              tf.keras.layers.Dropout(0.2),
              tf.keras.layers.Dense(10, activation='softmax')
          ])
      model = create_model()
      model.compile(optimizer='adam',
                    loss='sparse_categorical_crossentropy',
                    metrics=['accuracy'])
      # Write logs to /mnt/output
      log_dir = "/mnt/output/logs/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
      tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
      history = model.fit(x=x_train,
                          y=y_train,
                          epochs={{workflow.parameters.epochs}},
                          validation_data=(x_test, y_test),
                          callbacks=[tensorboard_callback])
    volumeMounts:
    # TensorBoard sidecar will automatically mount this volume
    - name: output
      mountPath: /mnt/output
  sidecars:
  - name: tensorboard
    image: 'tensorflow/tensorflow:2.3.0'
metadata:
  name: "TensorFlow Training"
  kind: Workflow
  version: 20201209124226
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/template.yaml"
  labels:
    "created-by": "system"
    framework: tensorflow
spec:
  arguments:
    parameters:
    - name: epochs
      value: '10'
  entrypoint: main
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: tf-dense
  - name: tf-dense
    script:
      image: tensorflow/tensorflow:2.3.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision a volume that can be shared between main container and TensorBoard side car
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
      - python
      - '-u'
      source: |
        import tensorflow as tf
        import datetime
        mnist = tf.keras.datasets.mnist
        (x_train, y_train),(x_test, y_test) = mnist.load_data()
        x_train, x_test = x_train / 255.0, x_test / 255.0
        def create_model():
            return tf.keras.models.Sequential([
                tf.keras.layers.Flatten(input_shape=(28, 28)),
                tf.keras.layers.Dense(512, activation='relu'),
                tf.keras.layers.Dropout(0.2),
                tf.keras.layers.Dense(10, activation='softmax')
            ])
        model = create_model()
        model.compile(optimizer='adam',
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])
        # Write logs to /mnt/output
        log_dir = "/mnt/output/logs/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
        history = model.fit(x=x_train,
                            y=y_train,
                            epochs={{workflow.parameters.epochs}},
                            validation_data=(x_test, y_test),
                            callbacks=[tensorboard_callback])
      volumeMounts:
      # TensorBoard sidecar will automatically mount this volume
      - name: output
        mountPath: /mnt/output
    sidecars:
    - name: tensorboard
      image: 'tensorflow/tensorflow:2.3.0'
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision a volume that can be shared between main container and TensorBoard side car
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
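The TensorBoard sidecar above only serves what the Keras TensorBoard callback writes under /mnt/output/logs/<timestamp>. Those event files can also be inspected programmatically; a hedged sketch using EventAccumulator, which ships with the tensorboard package:

# Hedged sketch: read back the event files the callback writes.
import glob
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

run_dir = sorted(glob.glob('/mnt/output/logs/*'))[-1]  # newest run directory
acc = EventAccumulator(run_dir)
acc.Reload()
print(acc.Tags())  # scalar/histogram tags available for this run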
@@ -1,118 +1,127 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/
arguments:
  parameters:
  - name: epochs
    value: '10'
  - displayName: Node pool
    hint: Name of node pool or group to run this workflow task
    type: select.nodepool
    name: sys-node-pool
    value: {{.DefaultNodePoolOption}}
    visibility: public
    required: true
entrypoint: main
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: train-model
- name: train-model
  # Indicates that we want to push files in /mnt/output to object storage
  outputs:
    artifacts:
    - name: output
      path: /mnt/output
      optional: true
  script:
    image: onepanel/dl:0.17.0
    command:
    - python
    - '-u'
    source: |
      import json
      import tensorflow as tf

      mnist = tf.keras.datasets.mnist

      (x_train, y_train),(x_test, y_test) = mnist.load_data()
      x_train, x_test = x_train / 255.0, x_test / 255.0
      x_train = x_train[..., tf.newaxis]
      x_test = x_test[..., tf.newaxis]

      model = tf.keras.Sequential([
          tf.keras.layers.Conv2D(filters=32, kernel_size=5, activation='relu'),
          tf.keras.layers.MaxPool2D(pool_size=2),
          tf.keras.layers.Conv2D(filters=64, kernel_size=5, activation='relu'),
          tf.keras.layers.MaxPool2D(pool_size=2),
          tf.keras.layers.Flatten(),
          tf.keras.layers.Dense(units=124, activation='relu'),
          tf.keras.layers.Dropout(rate=0.75),
          tf.keras.layers.Dense(units=10, activation='softmax')
      ])
      model.compile(optimizer=tf.keras.optimizers.Adam(lr=0.001),
                    loss='sparse_categorical_crossentropy',
                    metrics=['accuracy'])

      # Write TensorBoard logs to /mnt/output
      log_dir = '/mnt/output/tensorboard/'
      tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)

      model.fit(x=x_train,
                y=y_train,
                epochs={{workflow.parameters.epochs}},
                validation_data=(x_test, y_test),
                callbacks=[tensorboard_callback])

      # Store metrics for this task
      loss, accuracy = model.evaluate(x_test, y_test)
      metrics = [
          {'name': 'accuracy', 'value': accuracy},
          {'name': 'loss', 'value': loss}
      ]
      with open('/tmp/sys-metrics.json', 'w') as f:
          json.dump(metrics, f)

      # Save model
      model.save('/mnt/output/model.h5')
    volumeMounts:
    # TensorBoard sidecar will automatically mount these volumes
    # The `data` volume is mounted to support Keras datasets
    # The `output` volume is mounted to save model output and share TensorBoard logs
    - name: data
      mountPath: /home/root/.keras/datasets
    - name: output
      mountPath: /mnt/output
  nodeSelector:
    beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
  sidecars:
  - name: tensorboard
    image: tensorflow/tensorflow:2.3.0
metadata:
  name: "TensorFlow Training"
  kind: Workflow
  version: 20201223062947
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/"
  labels:
    "created-by": "system"
    framework: tensorflow
spec:
  arguments:
    parameters:
    - name: epochs
      value: '10'
    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      name: sys-node-pool
      value: "{{.DefaultNodePoolOption}}"
      visibility: public
      required: true
  entrypoint: main
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: train-model
  - name: train-model
    # Indicates that we want to push files in /mnt/output to object storage
    outputs:
      artifacts:
      - name: output
        path: /mnt/output
        optional: true
    script:
      image: onepanel/dl:0.17.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
      - python
      - '-u'
      source: |
        import json
        import tensorflow as tf

        mnist = tf.keras.datasets.mnist

        (x_train, y_train),(x_test, y_test) = mnist.load_data()
        x_train, x_test = x_train / 255.0, x_test / 255.0
        x_train = x_train[..., tf.newaxis]
        x_test = x_test[..., tf.newaxis]

        model = tf.keras.Sequential([
            tf.keras.layers.Conv2D(filters=32, kernel_size=5, activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=2),
            tf.keras.layers.Conv2D(filters=64, kernel_size=5, activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=2),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(units=124, activation='relu'),
            tf.keras.layers.Dropout(rate=0.75),
            tf.keras.layers.Dense(units=10, activation='softmax')
        ])
        model.compile(optimizer=tf.keras.optimizers.Adam(lr=0.001),
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])

        # Write TensorBoard logs to /mnt/output
        log_dir = '/mnt/output/tensorboard/'
        tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)

        model.fit(x=x_train,
                  y=y_train,
                  epochs={{workflow.parameters.epochs}},
                  validation_data=(x_test, y_test),
                  callbacks=[tensorboard_callback])

        # Store metrics for this task
        loss, accuracy = model.evaluate(x_test, y_test)
        metrics = [
            {'name': 'accuracy', 'value': accuracy},
            {'name': 'loss', 'value': loss}
        ]
        with open('/tmp/sys-metrics.json', 'w') as f:
            json.dump(metrics, f)

        # Save model
        model.save('/mnt/output/model.h5')
      volumeMounts:
      # TensorBoard sidecar will automatically mount these volumes
      # The `data` volume is mounted to support Keras datasets
      # The `output` volume is mounted to save model output and share TensorBoard logs
      - name: data
        mountPath: /home/root/.keras/datasets
      - name: output
        mountPath: /mnt/output
    nodeSelector:
      beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
    sidecars:
    - name: tensorboard
      image: tensorflow/tensorflow:2.3.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
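Why the template mounts the `data` volume at /home/root/.keras/datasets: Keras caches downloaded datasets under $HOME/.keras/datasets, so persisting that directory avoids re-downloading MNIST on every workflow run (that HOME resolves to /home/root in the onepanel/dl image is an assumption inferred from the mount path). A hedged sketch of the caching behavior:

# Sketch: mnist.load_data() downloads once, then reuses the cache.
import os
import tensorflow as tf

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
cache = os.path.join(os.path.expanduser('~'), '.keras', 'datasets')
print('cached under:', cache, os.listdir(cache))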
@@ -1,118 +1,127 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/
arguments:
  parameters:
  - name: epochs
    value: '10'
  - displayName: Node pool
    hint: Name of node pool or group to run this workflow task
    type: select.nodepool
    name: sys-node-pool
    value: {{.DefaultNodePoolOption}}
    visibility: public
    required: true
entrypoint: main
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: train-model
- name: train-model
  # Indicates that we want to push files in /mnt/output to object storage
  outputs:
    artifacts:
    - name: output
      path: /mnt/output
      optional: true
  script:
    image: onepanel/dl:0.17.0
    command:
    - python
    - '-u'
    source: |
      import json
      import tensorflow as tf

      mnist = tf.keras.datasets.mnist

      (x_train, y_train),(x_test, y_test) = mnist.load_data()
      x_train, x_test = x_train / 255.0, x_test / 255.0
      x_train = x_train[..., tf.newaxis]
      x_test = x_test[..., tf.newaxis]

      model = tf.keras.Sequential([
          tf.keras.layers.Conv2D(filters=32, kernel_size=5, activation='relu'),
          tf.keras.layers.MaxPool2D(pool_size=2),
          tf.keras.layers.Conv2D(filters=64, kernel_size=5, activation='relu'),
          tf.keras.layers.MaxPool2D(pool_size=2),
          tf.keras.layers.Flatten(),
          tf.keras.layers.Dense(units=124, activation='relu'),
          tf.keras.layers.Dropout(rate=0.75),
          tf.keras.layers.Dense(units=10, activation='softmax')
      ])
      model.compile(optimizer=tf.keras.optimizers.Adam(lr=0.001),
                    loss='sparse_categorical_crossentropy',
                    metrics=['accuracy'])

      # Write TensorBoard logs to /mnt/output
      log_dir = '/mnt/output/tensorboard/'
      tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)

      model.fit(x=x_train,
                y=y_train,
                epochs={{workflow.parameters.epochs}},
                validation_data=(x_test, y_test),
                callbacks=[tensorboard_callback])

      # Store metrics for this task
      loss, accuracy = model.evaluate(x_test, y_test)
      metrics = [
          {'name': 'accuracy', 'value': accuracy},
          {'name': 'loss', 'value': loss}
      ]
      with open('/tmp/sys-metrics.json', 'w') as f:
          json.dump(metrics, f)

      # Save model
      model.save('/mnt/output/model.h5')
    volumeMounts:
    # TensorBoard sidecar will automatically mount these volumes
    # The `data` volume is mounted to support Keras datasets
    # The `output` volume is mounted to save model output and share TensorBoard logs
    - name: data
      mountPath: /home/root/.keras/datasets
    - name: output
      mountPath: /mnt/output
  nodeSelector:
    {{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
  sidecars:
  - name: tensorboard
metadata:
  name: "TensorFlow Training"
  kind: Workflow
  version: 20210118175809
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/"
  labels:
    "created-by": "system"
    framework: tensorflow
spec:
  arguments:
    parameters:
    - name: epochs
      value: '10'
    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      name: sys-node-pool
      value: "{{.DefaultNodePoolOption}}"
      visibility: public
      required: true
  entrypoint: main
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: train-model
  - name: train-model
    # Indicates that we want to push files in /mnt/output to object storage
    outputs:
      artifacts:
      - name: output
        path: /mnt/output
        optional: true
    script:
      image: onepanel/dl:0.17.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
      - python
      - '-u'
      source: |
        import json
        import tensorflow as tf

        mnist = tf.keras.datasets.mnist

        (x_train, y_train),(x_test, y_test) = mnist.load_data()
        x_train, x_test = x_train / 255.0, x_test / 255.0
        x_train = x_train[..., tf.newaxis]
        x_test = x_test[..., tf.newaxis]

        model = tf.keras.Sequential([
            tf.keras.layers.Conv2D(filters=32, kernel_size=5, activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=2),
            tf.keras.layers.Conv2D(filters=64, kernel_size=5, activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=2),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(units=124, activation='relu'),
            tf.keras.layers.Dropout(rate=0.75),
            tf.keras.layers.Dense(units=10, activation='softmax')
        ])
        model.compile(optimizer=tf.keras.optimizers.Adam(lr=0.001),
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])

        # Write TensorBoard logs to /mnt/output
        log_dir = '/mnt/output/tensorboard/'
        tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)

        model.fit(x=x_train,
                  y=y_train,
                  epochs={{workflow.parameters.epochs}},
                  validation_data=(x_test, y_test),
                  callbacks=[tensorboard_callback])

        # Store metrics for this task
        loss, accuracy = model.evaluate(x_test, y_test)
        metrics = [
            {'name': 'accuracy', 'value': accuracy},
            {'name': 'loss', 'value': loss}
        ]
        with open('/tmp/sys-metrics.json', 'w') as f:
            json.dump(metrics, f)

        # Save model
        model.save('/mnt/output/model.h5')
      volumeMounts:
      # TensorBoard sidecar will automatically mount these volumes
      # The `data` volume is mounted to support Keras datasets
      # The `output` volume is mounted to save model output and share TensorBoard logs
      - name: data
        mountPath: /home/root/.keras/datasets
      - name: output
        mountPath: /mnt/output
    nodeSelector:
      "{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
    sidecars:
    - name: tensorboard
      image: onepanel/dl:0.17.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
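The nodeSelector line mixes two template systems, which is why the newer revisions quote "{{.NodePoolLabel}}": Onepanel renders Go-template placeholders like {{.NodePoolLabel}} when the template is saved, and Argo substitutes {{workflow.parameters.*}} when the workflow runs; quoting keeps the file parseable as YAML before either pass. An illustration only, not Onepanel's actual renderer; the label key and pool name below are example assumptions:

# Two-phase substitution, illustrated with plain string replacement.
line = "{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'"
line = line.replace('{{.NodePoolLabel}}', 'node.kubernetes.io/instance-type')  # phase 1: Onepanel
line = line.replace('{{workflow.parameters.sys-node-pool}}', 'Standard_D4s_v3')  # phase 2: Argo
print(line)  # node.kubernetes.io/instance-type: 'Standard_D4s_v3'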
@@ -1,118 +1,127 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/
arguments:
  parameters:
  - name: epochs
    value: '10'
  - displayName: Node pool
    hint: Name of node pool or group to run this workflow task
    type: select.nodepool
    name: sys-node-pool
    value: {{.DefaultNodePoolOption}}
    visibility: public
    required: true
entrypoint: main
templates:
- name: main
  dag:
    tasks:
    - name: train-model
      template: train-model
- name: train-model
  # Indicates that we want to push files in /mnt/output to object storage
  outputs:
    artifacts:
    - name: output
      path: /mnt/output
      optional: true
  script:
    image: onepanel/dl:v0.20.0
    command:
    - python
    - '-u'
    source: |
      import json
      import tensorflow as tf

      mnist = tf.keras.datasets.mnist

      (x_train, y_train),(x_test, y_test) = mnist.load_data()
      x_train, x_test = x_train / 255.0, x_test / 255.0
      x_train = x_train[..., tf.newaxis]
      x_test = x_test[..., tf.newaxis]

      model = tf.keras.Sequential([
          tf.keras.layers.Conv2D(filters=32, kernel_size=5, activation='relu'),
          tf.keras.layers.MaxPool2D(pool_size=2),
          tf.keras.layers.Conv2D(filters=64, kernel_size=5, activation='relu'),
          tf.keras.layers.MaxPool2D(pool_size=2),
          tf.keras.layers.Flatten(),
          tf.keras.layers.Dense(units=124, activation='relu'),
          tf.keras.layers.Dropout(rate=0.75),
          tf.keras.layers.Dense(units=10, activation='softmax')
      ])
      model.compile(optimizer=tf.keras.optimizers.Adam(lr=0.001),
                    loss='sparse_categorical_crossentropy',
                    metrics=['accuracy'])

      # Write TensorBoard logs to /mnt/output
      log_dir = '/mnt/output/tensorboard/'
      tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)

      model.fit(x=x_train,
                y=y_train,
                epochs={{workflow.parameters.epochs}},
                validation_data=(x_test, y_test),
                callbacks=[tensorboard_callback])

      # Store metrics for this task
      loss, accuracy = model.evaluate(x_test, y_test)
      metrics = [
          {'name': 'accuracy', 'value': accuracy},
          {'name': 'loss', 'value': loss}
      ]
      with open('/mnt/tmp/sys-metrics.json', 'w') as f:
          json.dump(metrics, f)

      # Save model
      model.save('/mnt/output/model.h5')
    volumeMounts:
    # TensorBoard sidecar will automatically mount these volumes
    # The `data` volume is mounted to support Keras datasets
    # The `output` volume is mounted to save model output and share TensorBoard logs
    - name: data
      mountPath: /home/root/.keras/datasets
    - name: output
      mountPath: /mnt/output
  nodeSelector:
    {{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
  sidecars:
  - name: tensorboard
metadata:
  name: "TensorFlow Training"
  kind: Workflow
  version: 20210323175655
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/"
  labels:
    "created-by": "system"
    framework: tensorflow
spec:
  arguments:
    parameters:
    - name: epochs
      value: '10'
    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.nodepool
      name: sys-node-pool
      value: "{{.DefaultNodePoolOption}}"
      visibility: public
      required: true
  entrypoint: main
  templates:
  - name: main
    dag:
      tasks:
      - name: train-model
        template: train-model
  - name: train-model
    # Indicates that we want to push files in /mnt/output to object storage
    outputs:
      artifacts:
      - name: output
        path: /mnt/output
        optional: true
    script:
      image: onepanel/dl:v0.20.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
      - python
      - '-u'
      source: |
        import json
        import tensorflow as tf

        mnist = tf.keras.datasets.mnist

        (x_train, y_train),(x_test, y_test) = mnist.load_data()
        x_train, x_test = x_train / 255.0, x_test / 255.0
        x_train = x_train[..., tf.newaxis]
        x_test = x_test[..., tf.newaxis]

        model = tf.keras.Sequential([
            tf.keras.layers.Conv2D(filters=32, kernel_size=5, activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=2),
            tf.keras.layers.Conv2D(filters=64, kernel_size=5, activation='relu'),
            tf.keras.layers.MaxPool2D(pool_size=2),
            tf.keras.layers.Flatten(),
            tf.keras.layers.Dense(units=124, activation='relu'),
            tf.keras.layers.Dropout(rate=0.75),
            tf.keras.layers.Dense(units=10, activation='softmax')
        ])
        model.compile(optimizer=tf.keras.optimizers.Adam(lr=0.001),
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])

        # Write TensorBoard logs to /mnt/output
        log_dir = '/mnt/output/tensorboard/'
        tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)

        model.fit(x=x_train,
                  y=y_train,
                  epochs={{workflow.parameters.epochs}},
                  validation_data=(x_test, y_test),
                  callbacks=[tensorboard_callback])

        # Store metrics for this task
        loss, accuracy = model.evaluate(x_test, y_test)
        metrics = [
            {'name': 'accuracy', 'value': accuracy},
            {'name': 'loss', 'value': loss}
        ]
        with open('/mnt/tmp/sys-metrics.json', 'w') as f:
            json.dump(metrics, f)

        # Save model
        model.save('/mnt/output/model.h5')
      volumeMounts:
      # TensorBoard sidecar will automatically mount these volumes
      # The `data` volume is mounted to support Keras datasets
      # The `output` volume is mounted to save model output and share TensorBoard logs
      - name: data
        mountPath: /home/root/.keras/datasets
      - name: output
        mountPath: /mnt/output
    nodeSelector:
      "{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
    sidecars:
    - name: tensorboard
      image: onepanel/dl:v0.20.0
      command:
      - sh
      - '-c'
      env:
      - name: ONEPANEL_INTERACTIVE_SIDECAR
        value: 'true'
      args:
      # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
      - tensorboard --logdir /mnt/output/tensorboard
      ports:
      - containerPort: 6006
        name: tensorboard
  volumeClaimTemplates:
  # Provision volumes for storing data and output
  - metadata:
      name: data
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
  - metadata:
      name: output
    spec:
      accessModes: [ "ReadWriteOnce" ]
      resources:
        requests:
          storage: 2Gi
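This revision saves the trained network as an HDF5 artifact with model.save('/mnt/output/model.h5'). A minimal hedged sketch of loading it back for inference or further training, assuming a TensorFlow 2.x environment:

# Hedged sketch: reload the model.h5 artifact this template pushes to /mnt/output.
import tensorflow as tf

model = tf.keras.models.load_model('/mnt/output/model.h5')
model.summary()  # verify the restored architecture before predicting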
@@ -0,0 +1,221 @@
metadata:
  name: "TF Object Detection Training"
  kind: Workflow
  version: 20200812104328
  action: create
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/"
  labels:
    "created-by": "system"
    "used-by": "cvat"
spec:
  arguments:
    parameters:
      - name: source
        value: https://github.com/tensorflow/models.git
        displayName: Model source code
        type: hidden
        visibility: private

      - name: trainingsource
        value: https://github.com/onepanelio/cvat-training.git
        type: hidden
        visibility: private

      - name: revision
        value: v1.13.0
        type: hidden
        visibility: private

      - name: sys-annotation-path
        value: annotation-dump/sample_dataset
        displayName: Dataset path
        hint: Path to annotated data in default object storage (i.e S3). In CVAT, this parameter will be pre-populated.

      - name: sys-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: private

      - name: ref-model
        value: frcnn-res50-coco
        displayName: Model
        hint: TF Detection API's model to use for training.
        type: select.select
        visibility: public
        options:
          - name: 'Faster RCNN-ResNet 101-COCO'
            value: frcnn-res101-coco
          - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
            value: frcnn-res101-low
          - name: 'Faster RCNN-ResNet 50-COCO'
            value: frcnn-res50-coco
          - name: 'Faster RCNN-NAS-COCO'
            value: frcnn-nas-coco
          - name: 'SSD MobileNet V1-COCO'
            value: ssd-mobilenet-v1-coco2
          - name: 'SSD MobileNet V2-COCO'
            value: ssd-mobilenet-v2-coco
          - name: 'SSDLite MobileNet-COCO'
            value: ssdlite-mobilenet-coco

      - name: extras
        value: |-
          epochs=1000
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."

      - name: sys-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

      - name: sys-num-classes
        value: '81'
        hint: Number of classes
        displayName: Number of classes
        visibility: private

      - name: tf-image
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        visibility: public
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.select
        name: sys-node-pool
        value: Standard_D4s_v3
        visibility: public
        required: true
        options:
          - name: 'CPU: 2, RAM: 8GB'
            value: Standard_D2s_v3
          - name: 'CPU: 4, RAM: 16GB'
            value: Standard_D4s_v3
          - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
            value: Standard_NC6
      - name: dump-format
        value: cvat_tfrecord
        visibility: public
  entrypoint: main
  templates:
    - dag:
        tasks:
          - name: train-model
            template: tensorflow
          # Uncomment the lines below if you want to send Slack notifications
          # - arguments:
          #     artifacts:
          #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
          #         name: metrics
          #     parameters:
          #       - name: status
          #         value: '{{tasks.train-model.status}}'
          #   dependencies:
          #     - train-model
          #   name: notify-in-slack
          #   template: slack-notify-success
      name: main
    - container:
        args:
          - |
            apt-get update && \
            apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
            pip install pillow lxml Cython contextlib2 jupyter matplotlib numpy scipy boto3 pycocotools pyyaml google-cloud-storage && \
            cd /mnt/src/tf/research && \
            export PYTHONPATH=$PYTHONPATH:` + "`pwd`:`pwd`/slim" + ` && \
            cd /mnt/src/train && \
            python convert_workflow.py \
              --extras="{{workflow.parameters.extras}}" \
              --model="{{workflow.parameters.ref-model}}" \
              --num_classes="{{workflow.parameters.sys-num-classes}}" \
              --sys_finetune_checkpoint={{workflow.parameters.sys-finetune-checkpoint}}
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.sys-annotation-path}}'
          - name: models
            path: /mnt/data/models/
            optional: true
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.sys-finetune-checkpoint}}'
          - git:
              repo: '{{workflow.parameters.source}}'
              revision: '{{workflow.parameters.revision}}'
            name: src
            path: /mnt/src/tf
          - git:
              repo: '{{workflow.parameters.trainingsource}}'
              revision: 'optional-artifacts'
            name: tsrc
            path: /mnt/src/train
      name: tensorflow
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.sys-output-path}}'
    # Uncomment the lines below if you want to send Slack notifications
    #- container:
    #    args:
    #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
    #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
    #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
    #    command:
    #      - sh
    #      - -c
    #    image: technosophos/slack-notify
    #    inputs:
    #      artifacts:
    #        - name: metrics
    #          optional: true
    #          path: /tmp/metrics.json
    #      parameters:
    #        - name: status
    #  name: slack-notify-success
  volumeClaimTemplates:
    - metadata:
        creationTimestamp: null
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        creationTimestamp: null
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
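The `extras` parameter above is a newline-separated block of `key=value` pairs (for example `epochs=1000`) passed straight through to `convert_workflow.py` in the cvat-training repository. The real parser lives in that script; a hedged sketch of how such a block could be interpreted:

```python
# Hypothetical parser for the newline-separated key=value "extras" block;
# the actual convert_workflow.py implementation may differ.
def parse_extras(extras: str) -> dict:
    params = {}
    for line in extras.splitlines():
        line = line.strip()
        if line and '=' in line:
            key, _, value = line.partition('=')
            params[key.strip()] = value.strip()
    return params

assert parse_extras('epochs=1000') == {'epochs': '1000'}
```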
@@ -0,0 +1,222 @@
metadata:
  name: "TF Object Detection Training"
  kind: Workflow
  version: 20200824101019
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/"
  labels:
    "created-by": "system"
    "used-by": "cvat"
spec:
  arguments:
    parameters:
      - name: source
        value: https://github.com/tensorflow/models.git
        displayName: Model source code
        type: hidden
        visibility: private

      - name: trainingsource
        value: https://github.com/onepanelio/cvat-training.git
        type: hidden
        visibility: private

      - name: revision
        value: v1.13.0
        type: hidden
        visibility: private

      - name: cvat-annotation-path
        value: annotation-dump/sample_dataset
        displayName: Dataset path
        hint: Path to annotated data in default object storage (i.e S3). In CVAT, this parameter will be pre-populated.
        visibility: private

      - name: cvat-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: private

      - name: cvat-model
        value: frcnn-res50-coco
        displayName: Model
        hint: TF Detection API's model to use for training.
        type: select.select
        visibility: public
        options:
          - name: 'Faster RCNN-ResNet 101-COCO'
            value: frcnn-res101-coco
          - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
            value: frcnn-res101-low
          - name: 'Faster RCNN-ResNet 50-COCO'
            value: frcnn-res50-coco
          - name: 'Faster RCNN-NAS-COCO'
            value: frcnn-nas-coco
          - name: 'SSD MobileNet V1-COCO'
            value: ssd-mobilenet-v1-coco2
          - name: 'SSD MobileNet V2-COCO'
            value: ssd-mobilenet-v2-coco
          - name: 'SSDLite MobileNet-COCO'
            value: ssdlite-mobilenet-coco

      - name: hyperparameters
        value: |-
          num-steps=10000
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."

      - name: cvat-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

      - name: cvat-num-classes
        value: '81'
        hint: Number of classes
        displayName: Number of classes
        visibility: private

      - name: tf-image
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        visibility: public
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.select
        name: sys-node-pool
        value: Standard_D4s_v3
        visibility: public
        required: true
        options:
          - name: 'CPU: 2, RAM: 8GB'
            value: Standard_D2s_v3
          - name: 'CPU: 4, RAM: 16GB'
            value: Standard_D4s_v3
          - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
            value: Standard_NC6
      - name: dump-format
        value: cvat_tfrecord
        visibility: public
  entrypoint: main
  templates:
    - dag:
        tasks:
          - name: train-model
            template: tensorflow
          # Uncomment the lines below if you want to send Slack notifications
          # - arguments:
          #     artifacts:
          #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
          #         name: metrics
          #     parameters:
          #       - name: status
          #         value: '{{tasks.train-model.status}}'
          #   dependencies:
          #     - train-model
          #   name: notify-in-slack
          #   template: slack-notify-success
      name: main
    - container:
        args:
          - |
            apt-get update && \
            apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
            pip install pillow lxml Cython contextlib2 jupyter matplotlib numpy scipy boto3 pycocotools pyyaml google-cloud-storage && \
            cd /mnt/src/tf/research && \
            export PYTHONPATH=$PYTHONPATH:` + "`pwd`:`pwd`" + `/slim && \
            cd /mnt/src/train && \
            python convert_workflow.py \
              --extras="{{workflow.parameters.hyperparameters}}" \
              --model="{{workflow.parameters.cvat-model}}" \
              --num_classes="{{workflow.parameters.cvat-num-classes}}" \
              --sys_finetune_checkpoint={{workflow.parameters.cvat-finetune-checkpoint}}
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
          - name: models
            path: /mnt/data/models/
            optional: true
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-finetune-checkpoint}}'
          - git:
              repo: '{{workflow.parameters.source}}'
              revision: '{{workflow.parameters.revision}}'
            name: src
            path: /mnt/src/tf
          - git:
              repo: '{{workflow.parameters.trainingsource}}'
              revision: 'optional-artifacts'
            name: tsrc
            path: /mnt/src/train
      name: tensorflow
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
    # Uncomment the lines below if you want to send Slack notifications
    #- container:
    #    args:
    #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
    #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
    #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
    #    command:
    #      - sh
    #      - -c
    #    image: technosophos/slack-notify
    #    inputs:
    #      artifacts:
    #        - name: metrics
    #          optional: true
    #          path: /tmp/metrics.json
    #      parameters:
    #        - name: status
    #  name: slack-notify-success
  volumeClaimTemplates:
    - metadata:
        creationTimestamp: null
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        creationTimestamp: null
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
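In the 20200824101019 revision above, output artifacts are keyed by the workflow namespace, the `cvat-output-path` parameter, and the generated workflow name, which is what lets a later run point `cvat-finetune-checkpoint` at a previous run's output. An illustration of how that key is assembled (all three values below are hypothetical examples):

```python
# Example composition of the output artifact key from the template above.
# All values are placeholders for the templated fields.
namespace = 'default'                                  # {{workflow.namespace}}
output_path = 'workflow-data/output/sample_output'     # cvat-output-path
workflow_name = 'tf-object-detection-training-x7k2p'   # {{workflow.name}}

key = f'{namespace}/{output_path}/{workflow_name}'
print(key)
# default/workflow-data/output/sample_output/tf-object-detection-training-x7k2p
```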
@@ -1,221 +1,231 @@
entrypoint: main
arguments:
  parameters:
    - name: source
      value: https://github.com/tensorflow/models.git
      displayName: Model source code
      type: hidden
      visibility: private
metadata:
  name: "TF Object Detection Training"
  kind: Workflow
  version: 20201115134934
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/"
  labels:
    "created-by": "system"
    "used-by": "cvat"
spec:
  entrypoint: main
  arguments:
    parameters:
      - name: source
        value: https://github.com/tensorflow/models.git
        displayName: Model source code
        type: hidden
        visibility: private

    - name: trainingsource
      value: https://github.com/onepanelio/cvat-training.git
      type: hidden
      visibility: private
      - name: trainingsource
        value: https://github.com/onepanelio/cvat-training.git
        type: hidden
        visibility: private

    - name: revision
      value: v1.13.0
      type: hidden
      visibility: private
      - name: revision
        value: v1.13.0
        type: hidden
        visibility: private

    - name: cvat-annotation-path
      value: annotation-dump/sample_dataset
      displayName: Dataset path
      hint: Path to annotated data in default object storage (i.e S3). In CVAT, this parameter will be pre-populated.
      visibility: private
      - name: cvat-annotation-path
        value: annotation-dump/sample_dataset
        displayName: Dataset path
        hint: Path to annotated data in default object storage (i.e S3). In CVAT, this parameter will be pre-populated.
        visibility: private

    - name: cvat-output-path
      value: workflow-data/output/sample_output
      hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
      displayName: Workflow output path
      visibility: private
      - name: cvat-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: private

    - name: cvat-model
      value: frcnn-res50-coco
      displayName: Model
      hint: TF Detection API's model to use for training.
      type: select.select
      visibility: public
      options:
        - name: 'Faster RCNN-ResNet 101-COCO'
          value: frcnn-res101-coco
        - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
          value: frcnn-res101-low
        - name: 'Faster RCNN-ResNet 50-COCO'
          value: frcnn-res50-coco
        - name: 'Faster RCNN-NAS-COCO'
          value: frcnn-nas-coco
        - name: 'SSD MobileNet V1-COCO'
          value: ssd-mobilenet-v1-coco2
        - name: 'SSD MobileNet V2-COCO'
          value: ssd-mobilenet-v2-coco
        - name: 'SSDLite MobileNet-COCO'
          value: ssdlite-mobilenet-coco
      - name: cvat-model
        value: frcnn-res50-coco
        displayName: Model
        hint: TF Detection API's model to use for training.
        type: select.select
        visibility: public
        options:
          - name: 'Faster RCNN-ResNet 101-COCO'
            value: frcnn-res101-coco
          - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
            value: frcnn-res101-low
          - name: 'Faster RCNN-ResNet 50-COCO'
            value: frcnn-res50-coco
          - name: 'Faster RCNN-NAS-COCO'
            value: frcnn-nas-coco
          - name: 'SSD MobileNet V1-COCO'
            value: ssd-mobilenet-v1-coco2
          - name: 'SSD MobileNet V2-COCO'
            value: ssd-mobilenet-v2-coco
          - name: 'SSDLite MobileNet-COCO'
            value: ssdlite-mobilenet-coco

    - name: hyperparameters
      value: |-
        num-steps=10000
      displayName: Hyperparameters
      visibility: public
      type: textarea.textarea
      hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."
      - name: hyperparameters
        value: |-
          num-steps=10000
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."

    - name: cvat-finetune-checkpoint
      value: ''
      hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
      displayName: Checkpoint path
      visibility: public
      - name: cvat-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

    - name: cvat-num-classes
      value: '81'
      hint: Number of classes
      displayName: Number of classes
      visibility: private
      - name: cvat-num-classes
        value: '81'
        hint: Number of classes
        displayName: Number of classes
        visibility: private

    - name: tf-image
      value: tensorflow/tensorflow:1.13.1-py3
      type: select.select
      displayName: Select TensorFlow image
      visibility: public
      hint: Select the GPU image if you are running on a GPU node pool
      options:
        - name: 'TensorFlow 1.13.1 CPU Image'
          value: 'tensorflow/tensorflow:1.13.1-py3'
        - name: 'TensorFlow 1.13.1 GPU Image'
          value: 'tensorflow/tensorflow:1.13.1-gpu-py3'
      - name: tf-image
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        visibility: public
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.select
      name: sys-node-pool
      value: Standard_D4s_v3
      visibility: public
      required: true
      options:
        - name: 'CPU: 2, RAM: 8GB'
          value: Standard_D2s_v3
        - name: 'CPU: 4, RAM: 16GB'
          value: Standard_D4s_v3
        - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
          value: Standard_NC6
    - name: dump-format
      value: cvat_tfrecord
      visibility: public
templates:
  - name: main
    dag:
      tasks:
        - name: train-model
          template: tensorflow
        # Uncomment the lines below if you want to send Slack notifications
        # - arguments:
        #     artifacts:
        #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
        #         name: metrics
        #     parameters:
        #       - name: status
        #         value: '{{tasks.train-model.status}}'
        #   dependencies:
        #     - train-model
        #   name: notify-in-slack
        #   template: slack-notify-success
  - name: tensorflow
    container:
      args:
        - |
          apt-get update && \
          apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
          pip install pillow lxml Cython contextlib2 jupyter matplotlib numpy scipy boto3 pycocotools pyyaml google-cloud-storage && \
          cd /mnt/src/tf/research && \
          export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
          cd /mnt/src/train && \
          python convert_workflow.py \
            --extras="{{workflow.parameters.hyperparameters}}" \
            --model="{{workflow.parameters.cvat-model}}" \
            --num_classes="{{workflow.parameters.cvat-num-classes}}" \
            --sys_finetune_checkpoint={{workflow.parameters.cvat-finetune-checkpoint}}
      command:
        - sh
        - -c
      image: '{{workflow.parameters.tf-image}}'
      volumeMounts:
        - mountPath: /mnt/data
          name: data
        - mountPath: /mnt/output
          name: output
      workingDir: /mnt/src
    nodeSelector:
      beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
    sidecars:
      - name: tensorboard
        image: tensorflow/tensorflow:2.3.0
        command: [sh, -c]
        tty: true
        args: ["tensorboard --logdir /mnt/output/"]
        ports:
          - containerPort: 6006
            name: tensorboard
    inputs:
      artifacts:
        - name: data
          path: /mnt/data/datasets/
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
        - name: models
          path: /mnt/data/models/
          optional: true
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.namespace}}/{{workflow.parameters.cvat-finetune-checkpoint}}'
        - git:
            repo: '{{workflow.parameters.source}}'
            revision: '{{workflow.parameters.revision}}'
          name: src
          path: /mnt/src/tf
        - git:
            repo: '{{workflow.parameters.trainingsource}}'
            revision: 'optional-artifacts'
          name: tsrc
          path: /mnt/src/train
    outputs:
      artifacts:
        - name: model
          optional: true
          path: /mnt/output
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
  # Uncomment the lines below if you want to send Slack notifications
  #- container:
  #    args:
  #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
  #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
  #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
  #    command:
  #      - sh
  #      - -c
  #    image: technosophos/slack-notify
  #    inputs:
  #      artifacts:
  #        - name: metrics
  #          optional: true
  #          path: /tmp/metrics.json
  #      parameters:
  #        - name: status
  #  name: slack-notify-success
volumeClaimTemplates:
  - metadata:
      creationTimestamp: null
      name: data
    spec:
      accessModes:
        - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
  - metadata:
      creationTimestamp: null
      name: output
    spec:
      accessModes:
        - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.select
        name: sys-node-pool
        value: Standard_D4s_v3
        visibility: public
        required: true
        options:
          - name: 'CPU: 2, RAM: 8GB'
            value: Standard_D2s_v3
          - name: 'CPU: 4, RAM: 16GB'
            value: Standard_D4s_v3
          - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
            value: Standard_NC6
      - name: dump-format
        value: cvat_tfrecord
        visibility: public
  templates:
    - name: main
      dag:
        tasks:
          - name: train-model
            template: tensorflow
          # Uncomment the lines below if you want to send Slack notifications
          # - arguments:
          #     artifacts:
          #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
          #         name: metrics
          #     parameters:
          #       - name: status
          #         value: '{{tasks.train-model.status}}'
          #   dependencies:
          #     - train-model
          #   name: notify-in-slack
          #   template: slack-notify-success
    - name: tensorflow
      container:
        args:
          - |
            apt-get update && \
            apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
            pip install pillow lxml Cython contextlib2 jupyter matplotlib numpy scipy boto3 pycocotools pyyaml google-cloud-storage && \
            cd /mnt/src/tf/research && \
            export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
            cd /mnt/src/train && \
            python convert_workflow.py \
              --extras="{{workflow.parameters.hyperparameters}}" \
              --model="{{workflow.parameters.cvat-model}}" \
              --num_classes="{{workflow.parameters.cvat-num-classes}}" \
              --sys_finetune_checkpoint={{workflow.parameters.cvat-finetune-checkpoint}}
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      sidecars:
        - name: tensorboard
          image: tensorflow/tensorflow:2.3.0
          command: [sh, -c]
          tty: true
          args: ["tensorboard --logdir /mnt/output/"]
          ports:
            - containerPort: 6006
              name: tensorboard
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
          - name: models
            path: /mnt/data/models/
            optional: true
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-finetune-checkpoint}}'
          - git:
              repo: '{{workflow.parameters.source}}'
              revision: '{{workflow.parameters.revision}}'
            name: src
            path: /mnt/src/tf
          - git:
              repo: '{{workflow.parameters.trainingsource}}'
              revision: 'optional-artifacts'
            name: tsrc
            path: /mnt/src/train
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
    # Uncomment the lines below if you want to send Slack notifications
    #- container:
    #    args:
    #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
    #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
    #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
    #    command:
    #      - sh
    #      - -c
    #    image: technosophos/slack-notify
    #    inputs:
    #      artifacts:
    #        - name: metrics
    #          optional: true
    #          path: /tmp/metrics.json
    #      parameters:
    #        - name: status
    #  name: slack-notify-success
  volumeClaimTemplates:
    - metadata:
        creationTimestamp: null
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        creationTimestamp: null
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
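The tensorboard sidecar in the revisions above simply runs TensorBoard against the shared `output` volume while the training container writes logs into it; port 6006 matches the declared containerPort. A local stand-in for the sidecar's command, for illustration only:

```python
# Local equivalent of the sidecar command above: serve TensorBoard
# from the directory the training container writes its logs to.
import subprocess

subprocess.run(
    ['tensorboard', '--logdir', '/mnt/output/', '--port', '6006'],
    check=True,
)
```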
@@ -1,221 +1,231 @@
entrypoint: main
arguments:
  parameters:
    - name: source
      value: https://github.com/tensorflow/models.git
      displayName: Model source code
      type: hidden
      visibility: private
metadata:
  name: "TF Object Detection Training"
  kind: Workflow
  version: 20201130130433
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/"
  labels:
    "created-by": "system"
    "used-by": "cvat"
spec:
  entrypoint: main
  arguments:
    parameters:
      - name: source
        value: https://github.com/tensorflow/models.git
        displayName: Model source code
        type: hidden
        visibility: private

    - name: trainingsource
      value: https://github.com/onepanelio/cvat-training.git
      type: hidden
      visibility: private
      - name: trainingsource
        value: https://github.com/onepanelio/cvat-training.git
        type: hidden
        visibility: private

    - name: revision
      value: v1.13.0
      type: hidden
      visibility: private
      - name: revision
        value: v1.13.0
        type: hidden
        visibility: private

    - name: cvat-annotation-path
      value: annotation-dump/sample_dataset
      displayName: Dataset path
      hint: Path to annotated data in default object storage (i.e S3). In CVAT, this parameter will be pre-populated.
      visibility: private
      - name: cvat-annotation-path
        value: annotation-dump/sample_dataset
        displayName: Dataset path
        hint: Path to annotated data in default object storage (i.e S3). In CVAT, this parameter will be pre-populated.
        visibility: private

    - name: cvat-output-path
      value: workflow-data/output/sample_output
      hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
      displayName: Workflow output path
      visibility: private
      - name: cvat-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: private

    - name: cvat-model
      value: frcnn-res50-coco
      displayName: Model
      hint: TF Detection API's model to use for training.
      type: select.select
      visibility: public
      options:
        - name: 'Faster RCNN-ResNet 101-COCO'
          value: frcnn-res101-coco
        - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
          value: frcnn-res101-low
        - name: 'Faster RCNN-ResNet 50-COCO'
          value: frcnn-res50-coco
        - name: 'Faster RCNN-NAS-COCO'
          value: frcnn-nas-coco
        - name: 'SSD MobileNet V1-COCO'
          value: ssd-mobilenet-v1-coco2
        - name: 'SSD MobileNet V2-COCO'
          value: ssd-mobilenet-v2-coco
        - name: 'SSDLite MobileNet-COCO'
          value: ssdlite-mobilenet-coco
      - name: cvat-model
        value: frcnn-res50-coco
        displayName: Model
        hint: TF Detection API's model to use for training.
        type: select.select
        visibility: public
        options:
          - name: 'Faster RCNN-ResNet 101-COCO'
            value: frcnn-res101-coco
          - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
            value: frcnn-res101-low
          - name: 'Faster RCNN-ResNet 50-COCO'
            value: frcnn-res50-coco
          - name: 'Faster RCNN-NAS-COCO'
            value: frcnn-nas-coco
          - name: 'SSD MobileNet V1-COCO'
            value: ssd-mobilenet-v1-coco2
          - name: 'SSD MobileNet V2-COCO'
            value: ssd-mobilenet-v2-coco
          - name: 'SSDLite MobileNet-COCO'
            value: ssdlite-mobilenet-coco

    - name: hyperparameters
      value: |-
        num-steps=10000
      displayName: Hyperparameters
      visibility: public
      type: textarea.textarea
      hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."
      - name: hyperparameters
        value: |-
          num-steps=10000
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."

    - name: cvat-finetune-checkpoint
      value: ''
      hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
      displayName: Checkpoint path
      visibility: public
      - name: cvat-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

    - name: cvat-num-classes
      value: '81'
      hint: Number of classes
      displayName: Number of classes
      visibility: private
      - name: cvat-num-classes
        value: '81'
        hint: Number of classes
        displayName: Number of classes
        visibility: private

    - name: tf-image
      value: tensorflow/tensorflow:1.13.1-py3
      type: select.select
      displayName: Select TensorFlow image
      visibility: public
      hint: Select the GPU image if you are running on a GPU node pool
      options:
        - name: 'TensorFlow 1.13.1 CPU Image'
          value: 'tensorflow/tensorflow:1.13.1-py3'
        - name: 'TensorFlow 1.13.1 GPU Image'
          value: 'tensorflow/tensorflow:1.13.1-gpu-py3'
      - name: tf-image
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        visibility: public
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.select
      name: sys-node-pool
      value: Standard_D4s_v3
      visibility: public
      required: true
      options:
        - name: 'CPU: 2, RAM: 8GB'
          value: Standard_D2s_v3
        - name: 'CPU: 4, RAM: 16GB'
          value: Standard_D4s_v3
        - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
          value: Standard_NC6
    - name: dump-format
      value: cvat_tfrecord
      visibility: public
templates:
  - name: main
    dag:
      tasks:
        - name: train-model
          template: tensorflow
        # Uncomment the lines below if you want to send Slack notifications
        # - arguments:
        #     artifacts:
        #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
        #         name: metrics
        #     parameters:
        #       - name: status
        #         value: '{{tasks.train-model.status}}'
        #   dependencies:
        #     - train-model
        #   name: notify-in-slack
        #   template: slack-notify-success
  - name: tensorflow
    container:
      args:
        - |
          apt-get update && \
          apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
          pip install pillow lxml Cython contextlib2 jupyter matplotlib numpy scipy boto3 pycocotools pyyaml google-cloud-storage && \
          cd /mnt/src/tf/research && \
          export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
          cd /mnt/src/train && \
          python convert_workflow.py \
            --extras="{{workflow.parameters.hyperparameters}}" \
            --model="{{workflow.parameters.cvat-model}}" \
            --num_classes="{{workflow.parameters.cvat-num-classes}}" \
            --sys_finetune_checkpoint={{workflow.parameters.cvat-finetune-checkpoint}}
      command:
        - sh
        - -c
      image: '{{workflow.parameters.tf-image}}'
      volumeMounts:
        - mountPath: /mnt/data
          name: data
        - mountPath: /mnt/output
          name: output
      workingDir: /mnt/src
    nodeSelector:
      beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
    sidecars:
      - name: tensorboard
        image: tensorflow/tensorflow:2.3.0
        command: [sh, -c]
        tty: true
        args: ["tensorboard --logdir /mnt/output/"]
        ports:
          - containerPort: 6006
            name: tensorboard
    inputs:
      artifacts:
        - name: data
          path: /mnt/data/datasets/
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
        - name: models
          path: /mnt/data/models/
          optional: true
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
        - git:
            repo: '{{workflow.parameters.source}}'
            revision: '{{workflow.parameters.revision}}'
          name: src
          path: /mnt/src/tf
        - git:
            repo: '{{workflow.parameters.trainingsource}}'
            revision: 'optional-artifacts'
          name: tsrc
          path: /mnt/src/train
    outputs:
      artifacts:
        - name: model
          optional: true
          path: /mnt/output
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
  # Uncomment the lines below if you want to send Slack notifications
  #- container:
  #    args:
  #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
  #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
  #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
  #    command:
  #      - sh
  #      - -c
  #    image: technosophos/slack-notify
  #    inputs:
  #      artifacts:
  #        - name: metrics
  #          optional: true
  #          path: /tmp/metrics.json
  #      parameters:
  #        - name: status
  #  name: slack-notify-success
volumeClaimTemplates:
  - metadata:
      creationTimestamp: null
      name: data
    spec:
      accessModes:
        - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
  - metadata:
      creationTimestamp: null
      name: output
    spec:
      accessModes:
        - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.select
        name: sys-node-pool
        value: Standard_D4s_v3
        visibility: public
        required: true
        options:
          - name: 'CPU: 2, RAM: 8GB'
            value: Standard_D2s_v3
          - name: 'CPU: 4, RAM: 16GB'
            value: Standard_D4s_v3
          - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
            value: Standard_NC6
      - name: dump-format
        value: cvat_tfrecord
        visibility: public
  templates:
    - name: main
      dag:
        tasks:
          - name: train-model
            template: tensorflow
          # Uncomment the lines below if you want to send Slack notifications
          # - arguments:
          #     artifacts:
          #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
          #         name: metrics
          #     parameters:
          #       - name: status
          #         value: '{{tasks.train-model.status}}'
          #   dependencies:
          #     - train-model
          #   name: notify-in-slack
          #   template: slack-notify-success
    - name: tensorflow
      container:
        args:
          - |
            apt-get update && \
            apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
            pip install pillow lxml Cython contextlib2 jupyter matplotlib numpy scipy boto3 pycocotools pyyaml google-cloud-storage && \
            cd /mnt/src/tf/research && \
            export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
            cd /mnt/src/train && \
            python convert_workflow.py \
              --extras="{{workflow.parameters.hyperparameters}}" \
              --model="{{workflow.parameters.cvat-model}}" \
              --num_classes="{{workflow.parameters.cvat-num-classes}}" \
              --sys_finetune_checkpoint={{workflow.parameters.cvat-finetune-checkpoint}}
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      sidecars:
        - name: tensorboard
          image: tensorflow/tensorflow:2.3.0
          command: [sh, -c]
          tty: true
          args: ["tensorboard --logdir /mnt/output/"]
          ports:
            - containerPort: 6006
              name: tensorboard
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
          - name: models
            path: /mnt/data/models/
            optional: true
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
          - git:
              repo: '{{workflow.parameters.source}}'
              revision: '{{workflow.parameters.revision}}'
            name: src
            path: /mnt/src/tf
          - git:
              repo: '{{workflow.parameters.trainingsource}}'
              revision: 'optional-artifacts'
            name: tsrc
            path: /mnt/src/train
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
    # Uncomment the lines below if you want to send Slack notifications
    #- container:
    #    args:
    #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
    #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
    #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
    #    command:
    #      - sh
    #      - -c
    #    image: technosophos/slack-notify
    #    inputs:
    #      artifacts:
    #        - name: metrics
    #          optional: true
    #          path: /tmp/metrics.json
    #      parameters:
    #        - name: status
    #  name: slack-notify-success
  volumeClaimTemplates:
    - metadata:
        creationTimestamp: null
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        creationTimestamp: null
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
@@ -1,223 +1,233 @@
entrypoint: main
arguments:
  parameters:
    - name: source
      value: https://github.com/tensorflow/models.git
      displayName: Model source code
      type: hidden
      visibility: private
metadata:
  name: "TF Object Detection Training"
  kind: Workflow
  version: 20201208155115
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/"
  labels:
    "created-by": "system"
    "used-by": "cvat"
spec:
  entrypoint: main
  arguments:
    parameters:
      - name: source
        value: https://github.com/tensorflow/models.git
        displayName: Model source code
        type: hidden
        visibility: private

    - name: trainingsource
      value: https://github.com/onepanelio/cvat-training.git
      type: hidden
      visibility: private
      - name: trainingsource
        value: https://github.com/onepanelio/cvat-training.git
        type: hidden
        visibility: private

    - name: revision
      value: v1.13.0
      type: hidden
      visibility: private
      - name: revision
        value: v1.13.0
        type: hidden
        visibility: private

    - name: cvat-annotation-path
      value: annotation-dump/sample_dataset
      displayName: Dataset path
      hint: Path to annotated data in default object storage (i.e S3). In CVAT, this parameter will be pre-populated.
      visibility: private
      - name: cvat-annotation-path
        value: annotation-dump/sample_dataset
        displayName: Dataset path
        hint: Path to annotated data in default object storage (i.e S3). In CVAT, this parameter will be pre-populated.
        visibility: private

    - name: cvat-output-path
      value: workflow-data/output/sample_output
      hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
      displayName: Workflow output path
      visibility: private
      - name: cvat-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: private

    - name: cvat-model
      value: frcnn-res50-coco
      displayName: Model
      hint: TF Detection API's model to use for training.
      type: select.select
      visibility: public
      options:
        - name: 'Faster RCNN-ResNet 101-COCO'
          value: frcnn-res101-coco
        - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
          value: frcnn-res101-low
        - name: 'Faster RCNN-ResNet 50-COCO'
          value: frcnn-res50-coco
        - name: 'Faster RCNN-NAS-COCO'
          value: frcnn-nas-coco
        - name: 'SSD MobileNet V1-COCO'
          value: ssd-mobilenet-v1-coco2
        - name: 'SSD MobileNet V2-COCO'
          value: ssd-mobilenet-v2-coco
        - name: 'SSDLite MobileNet-COCO'
          value: ssdlite-mobilenet-coco
      - name: cvat-model
        value: frcnn-res50-coco
        displayName: Model
        hint: TF Detection API's model to use for training.
        type: select.select
        visibility: public
        options:
          - name: 'Faster RCNN-ResNet 101-COCO'
            value: frcnn-res101-coco
          - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
            value: frcnn-res101-low
          - name: 'Faster RCNN-ResNet 50-COCO'
            value: frcnn-res50-coco
          - name: 'Faster RCNN-NAS-COCO'
            value: frcnn-nas-coco
          - name: 'SSD MobileNet V1-COCO'
            value: ssd-mobilenet-v1-coco2
          - name: 'SSD MobileNet V2-COCO'
            value: ssd-mobilenet-v2-coco
          - name: 'SSDLite MobileNet-COCO'
            value: ssdlite-mobilenet-coco

    - name: hyperparameters
      value: |-
        num-steps=10000
      displayName: Hyperparameters
      visibility: public
      type: textarea.textarea
      hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."
      - name: hyperparameters
        value: |-
          num-steps=10000
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        hint: "Please refer to our <a href='https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model#arguments-optional' target='_blank'>documentation</a> for more information on parameters. Number of classes will be automatically populated if you had 'sys-num-classes' parameter in a workflow."

    - name: cvat-finetune-checkpoint
      value: ''
      hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
      displayName: Checkpoint path
      visibility: public
      - name: cvat-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

    - name: cvat-num-classes
      value: '81'
      hint: Number of classes
      displayName: Number of classes
      visibility: private
      - name: cvat-num-classes
        value: '81'
        hint: Number of classes
        displayName: Number of classes
        visibility: private

    - name: tf-image
      value: tensorflow/tensorflow:1.13.1-py3
      type: select.select
      displayName: Select TensorFlow image
      visibility: public
      hint: Select the GPU image if you are running on a GPU node pool
      options:
        - name: 'TensorFlow 1.13.1 CPU Image'
          value: 'tensorflow/tensorflow:1.13.1-py3'
        - name: 'TensorFlow 1.13.1 GPU Image'
          value: 'tensorflow/tensorflow:1.13.1-gpu-py3'
      - name: tf-image
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        visibility: public
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

    - displayName: Node pool
      hint: Name of node pool or group to run this workflow task
      type: select.select
      name: sys-node-pool
      value: Standard_D4s_v3
      visibility: public
      required: true
      options:
        - name: 'CPU: 2, RAM: 8GB'
          value: Standard_D2s_v3
        - name: 'CPU: 4, RAM: 16GB'
          value: Standard_D4s_v3
        - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
          value: Standard_NC6
    - name: dump-format
      value: cvat_tfrecord
      visibility: public
templates:
  - name: main
    dag:
      tasks:
        - name: train-model
          template: tensorflow
        # Uncomment the lines below if you want to send Slack notifications
        # - arguments:
        #     artifacts:
        #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
        #         name: metrics
        #     parameters:
        #       - name: status
        #         value: '{{tasks.train-model.status}}'
        #   dependencies:
        #     - train-model
        #   name: notify-in-slack
        #   template: slack-notify-success
  - name: tensorflow
    container:
      args:
        - |
          apt-get update && \
          apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
          pip install pillow lxml Cython contextlib2 jupyter matplotlib numpy scipy boto3 pycocotools pyyaml google-cloud-storage && \
          cd /mnt/src/tf/research && \
          export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
          cd /mnt/src/train && \
          python convert_workflow.py \
            --extras="{{workflow.parameters.hyperparameters}}" \
            --model="{{workflow.parameters.cvat-model}}" \
            --num_classes="{{workflow.parameters.cvat-num-classes}}" \
            --sys_finetune_checkpoint={{workflow.parameters.cvat-finetune-checkpoint}}
      command:
        - sh
        - -c
      image: '{{workflow.parameters.tf-image}}'
      volumeMounts:
        - mountPath: /mnt/data
          name: data
        - mountPath: /mnt/output
          name: output
      workingDir: /mnt/src
    nodeSelector:
      beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
    sidecars:
      - name: tensorboard
        image: tensorflow/tensorflow:2.3.0
        command: [sh, -c]
        env:
          - name: ONEPANEL_INTERACTIVE_SIDECAR
            value: 'true'
        args: ["tensorboard --logdir /mnt/output/"]
        ports:
          - containerPort: 6006
            name: tensorboard
    inputs:
      artifacts:
        - name: data
          path: /mnt/data/datasets/
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
        - name: models
          path: /mnt/data/models/
          optional: true
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
        - git:
            repo: '{{workflow.parameters.source}}'
            revision: '{{workflow.parameters.revision}}'
          name: src
          path: /mnt/src/tf
        - git:
            repo: '{{workflow.parameters.trainingsource}}'
            revision: 'optional-artifacts'
          name: tsrc
          path: /mnt/src/train
    outputs:
      artifacts:
        - name: model
          optional: true
          path: /mnt/output
          {{.ArtifactRepositoryType}}:
            key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
  # Uncomment the lines below if you want to send Slack notifications
  #- container:
  #    args:
  #      - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
  #        SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
  #        SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
  #    command:
  #      - sh
  #      - -c
  #    image: technosophos/slack-notify
  #    inputs:
  #      artifacts:
  #        - name: metrics
  #          optional: true
  #          path: /tmp/metrics.json
  #      parameters:
  #        - name: status
  #  name: slack-notify-success
volumeClaimTemplates:
  - metadata:
      creationTimestamp: null
      name: data
    spec:
      accessModes:
        - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
  - metadata:
      creationTimestamp: null
      name: output
    spec:
      accessModes:
        - ReadWriteOnce
      resources:
        requests:
          storage: 200Gi
      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.select
        name: sys-node-pool
        value: Standard_D4s_v3
        visibility: public
        required: true
        options:
          - name: 'CPU: 2, RAM: 8GB'
            value: Standard_D2s_v3
          - name: 'CPU: 4, RAM: 16GB'
            value: Standard_D4s_v3
          - name: 'GPU: 1xK80, CPU: 6, RAM: 56GB'
            value: Standard_NC6
      - name: dump-format
        value: cvat_tfrecord
        visibility: public
  templates:
    - name: main
      dag:
        tasks:
          - name: train-model
            template: tensorflow
          # Uncomment the lines below if you want to send Slack notifications
          # - arguments:
          #     artifacts:
          #       - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
          #         name: metrics
          #     parameters:
          #       - name: status
          #         value: '{{tasks.train-model.status}}'
          #   dependencies:
          #     - train-model
          #   name: notify-in-slack
          #   template: slack-notify-success
    - name: tensorflow
      container:
        args:
          - |
            apt-get update && \
            apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
            pip install pillow lxml Cython contextlib2 jupyter matplotlib numpy scipy boto3 pycocotools pyyaml google-cloud-storage && \
            cd /mnt/src/tf/research && \
            export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
            cd /mnt/src/train && \
            python convert_workflow.py \
              --extras="{{workflow.parameters.hyperparameters}}" \
              --model="{{workflow.parameters.cvat-model}}" \
              --num_classes="{{workflow.parameters.cvat-num-classes}}" \
              --sys_finetune_checkpoint={{workflow.parameters.cvat-finetune-checkpoint}}
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      sidecars:
        - name: tensorboard
          image: tensorflow/tensorflow:2.3.0
          command: [sh, -c]
          env:
            - name: ONEPANEL_INTERACTIVE_SIDECAR
              value: 'true'
          args: ["tensorboard --logdir /mnt/output/"]
          ports:
            - containerPort: 6006
              name: tensorboard
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
          - name: models
            path: /mnt/data/models/
            optional: true
            "{{.ArtifactRepositoryType}}":
              key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
          - git:
              repo: '{{workflow.parameters.source}}'
              revision: '{{workflow.parameters.revision}}'
            name: src
            path: /mnt/src/tf
          - git:
              repo: '{{workflow.parameters.trainingsource}}'
              revision: 'optional-artifacts'
            name: tsrc
            path: /mnt/src/train
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
||||
"{{.ArtifactRepositoryType}}":
|
||||
key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
|
||||
# Uncomment the lines below if you want to send Slack notifications
|
||||
#- container:
|
||||
# args:
|
||||
# - SLACK_USERNAME=Onepanel SLACK_TITLE="{{workflow.name}} {{inputs.parameters.status}}"
|
||||
# SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd
|
||||
# SLACK_MESSAGE=$(cat /tmp/metrics.json)} ./slack-notify
|
||||
# command:
|
||||
# - sh
|
||||
# - -c
|
||||
# image: technosophos/slack-notify
|
||||
# inputs:
|
||||
# artifacts:
|
||||
# - name: metrics
|
||||
# optional: true
|
||||
# path: /tmp/metrics.json
|
||||
# parameters:
|
||||
# - name: status
|
||||
# name: slack-notify-success
|
||||
volumeClaimTemplates:
|
||||
- metadata:
|
||||
creationTimestamp: null
|
||||
name: data
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 200Gi
|
||||
- metadata:
|
||||
creationTimestamp: null
|
||||
name: output
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 200Gi
|
||||
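For readers who want the Slack notifications, here is a minimal sketch of the DAG task re-assembled from the commented lines above (nothing new is introduced; the technosophos/slack-notify image, the sys-metrics artifact, and the slack-notify-success template are exactly the ones the comments name). It would sit under dag.tasks, with the slack-notify-success template uncommented alongside it:

          - name: notify-in-slack
            template: slack-notify-success
            dependencies:
              - train-model
            arguments:
              artifacts:
                - from: '{{tasks.train-model.outputs.artifacts.sys-metrics}}'
                  name: metrics
              parameters:
                - name: status
                  value: '{{tasks.train-model.status}}'

The `dependencies` entry makes the notification run only after train-model finishes, and the `status` parameter carries the task's terminal state into the Slack message.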
@@ -1,165 +1,174 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/
metadata:
  name: "TF Object Detection Training"
  kind: Workflow
  version: 20201223202929
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/"
  labels:
    "created-by": "system"
    "used-by": "cvat"
spec:
  arguments:
    parameters:
      - name: cvat-annotation-path
        value: annotation-dump/sample_dataset
        displayName: Dataset path
        hint: Path to annotated data (TFRecord format) in default object storage. In CVAT, this parameter will be pre-populated.
        visibility: internal

      - name: cvat-output-path
        value: workflow-data/output/sample_output
        hint: Path to store output artifacts in default object storage (i.e s3). In CVAT, this parameter will be pre-populated.
        displayName: Workflow output path
        visibility: internal

      - name: cvat-model
        value: frcnn-res50-coco
        displayName: Model
        hint: TF Detection API's model to use for training.
        type: select.select
        visibility: public
        options:
          - name: 'Faster RCNN-ResNet 101-COCO'
            value: frcnn-res101-coco
          - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
            value: frcnn-res101-low
          - name: 'Faster RCNN-ResNet 50-COCO'
            value: frcnn-res50-coco
          - name: 'Faster RCNN-NAS-COCO'
            value: frcnn-nas-coco
          - name: 'SSD MobileNet V1-COCO'
            value: ssd-mobilenet-v1-coco2
          - name: 'SSD MobileNet V2-COCO'
            value: ssd-mobilenet-v2-coco
          - name: 'SSDLite MobileNet-COCO'
            value: ssdlite-mobilenet-coco

      - name: hyperparameters
        value: |-
          num-steps=10000
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        hint: 'See <a href="https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_annotation_model/#tfod-hyperparameters" target="_blank">documentation</a> for more information on parameters.'

      - name: cvat-finetune-checkpoint
        value: ''
        hint: Select the last fine-tune checkpoint for this model. It may take up to 5 minutes for a recent checkpoint show here. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

      - name: cvat-num-classes
        value: '10'
        hint: Number of classes. In CVAT, this parameter will be pre-populated.
        displayName: Number of classes
        visibility: internal

      - name: tf-image
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        visibility: public
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

      - name: dump-format
        value: cvat_tfrecord
        visibility: public

      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.nodepool
        name: sys-node-pool
        value: "{{.DefaultNodePoolOption}}"
        visibility: public
        required: true

  entrypoint: main
  templates:
    - dag:
        tasks:
          - name: train-model
            template: tensorflow
      name: main
    - container:
        args:
          - |
            apt-get update && \
            apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
            pip install pillow lxml Cython contextlib2 matplotlib numpy scipy pycocotools pyyaml test-generator && \
            cd /mnt/src/tf/research && \
            export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
            mkdir -p /mnt/src/protoc && \
            wget -P /mnt/src/protoc https://github.com/protocolbuffers/protobuf/releases/download/v3.10.1/protoc-3.10.1-linux-x86_64.zip && \
            cd /mnt/src/protoc/ && \
            unzip protoc-3.10.1-linux-x86_64.zip && \
            cd /mnt/src/tf/research/ && \
            /mnt/src/protoc/bin/protoc object_detection/protos/*.proto --python_out=. && \
            cd /mnt/src/train/workflows/tf-object-detection-training && \
            python main.py \
              --extras="{{workflow.parameters.hyperparameters}}" \
              --model="{{workflow.parameters.cvat-model}}" \
              --num_classes="{{workflow.parameters.cvat-num-classes}}" \
              --sys_finetune_checkpoint={{workflow.parameters.cvat-finetune-checkpoint}}
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        beta.kubernetes.io/instance-type: '{{workflow.parameters.sys-node-pool}}'
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            s3:
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-annotation-path}}'
          - name: models
            path: /mnt/data/models/
            optional: true
            s3:
              key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
          - git:
              repo: https://github.com/tensorflow/models.git
              revision: v1.13.0
            name: src
            path: /mnt/src/tf
          - git:
              repo: https://github.com/onepanelio/templates.git
            name: tsrc
            path: /mnt/src/train
      name: tensorflow
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
            s3:
              key: '{{workflow.namespace}}/{{workflow.parameters.cvat-output-path}}/{{workflow.name}}'
  volumeClaimTemplates:
    - metadata:
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
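Note the quoting this revision adds around Go-template values such as "{{.DefaultNodePoolOption}}". In YAML a bare `{` opens a flow mapping, so any tool that parses the template file as YAML before the substitution runs would not read the unquoted form as the intended string. A minimal illustration of the two forms:

  value: {{.DefaultNodePoolOption}}     # bare braces: parsed as a YAML flow mapping, not a string
  value: "{{.DefaultNodePoolOption}}"   # quoted: a plain string, substituted later by Onepanel

The same reasoning applies to "{{.ArtifactRepositoryType}}" and "{{.NodePoolLabel}}" where they appear in key position elsewhere in these templates.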
@@ -1,260 +1,269 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/
metadata:
  name: "TF Object Detection Training"
  kind: Workflow
  version: 20210118175809
  action: update
  source: "https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/"
  labels:
    "created-by": "system"
    "used-by": "cvat"
spec:
  arguments:
    parameters:
      - name: cvat-annotation-path
        value: 'artifacts/{{workflow.namespace}}/annotations/'
        hint: Path to annotated data (COCO format) in default object storage. In CVAT, this parameter will be pre-populated.
        displayName: Dataset path
        visibility: internal

      - name: val-split
        value: 10
        displayName: Validation split size
        type: input.number
        visibility: public
        hint: Enter validation set size in percentage of full dataset. (0 - 100)

      - name: num-augmentation-cycles
        value: 1
        displayName: Number of augmentation cycles
        type: input.number
        visibility: public
        hint: Number of augmentation cycles, zero means no data augmentation

      - name: preprocessing-parameters
        value: |-
          RandomBrightnessContrast:
            p: 0.2
          GaussianBlur:
            p: 0.3
          GaussNoise:
            p: 0.4
          HorizontalFlip:
            p: 0.5
          VerticalFlip:
            p: 0.3
        displayName: Preprocessing parameters
        visibility: public
        type: textarea.textarea
        hint: 'See <a href="https://albumentations.ai/docs/api_reference/augmentations/transforms/" target="_blank">documentation</a> for more information on parameters.'

      - name: cvat-model
        value: frcnn-res50-coco
        displayName: Model
        hint: TF Detection API's model to use for training.
        type: select.select
        visibility: public
        options:
          - name: 'Faster RCNN-ResNet 101-COCO'
            value: frcnn-res101-coco
          - name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
            value: frcnn-res101-low
          - name: 'Faster RCNN-ResNet 50-COCO'
            value: frcnn-res50-coco
          - name: 'Faster RCNN-NAS-COCO'
            value: frcnn-nas-coco
          - name: 'SSD MobileNet V1-COCO'
            value: ssd-mobilenet-v1-coco2
          - name: 'SSD MobileNet V2-COCO'
            value: ssd-mobilenet-v2-coco
          - name: 'SSDLite MobileNet-COCO'
            value: ssdlite-mobilenet-coco

      - name: cvat-num-classes
        value: '10'
        hint: Number of classes. In CVAT, this parameter will be pre-populated.
        displayName: Number of classes
        visibility: internal

      - name: hyperparameters
        value: |-
          num_steps: 10000
        displayName: Hyperparameters
        visibility: public
        type: textarea.textarea
        hint: 'See <a href="https://docs.onepanel.ai/docs/reference/workflows/training#tfod-hyperparameters" target="_blank">documentation</a> for more information on parameters.'

      - name: dump-format
        value: cvat_coco
        displayName: CVAT dump format
        visibility: private

      - name: cvat-finetune-checkpoint
        value: ''
        hint: Path to the last fine-tune checkpoint for this model in default object storage. Leave empty if this is the first time you're training this model.
        displayName: Checkpoint path
        visibility: public

      - name: tf-image
        value: tensorflow/tensorflow:1.13.1-py3
        type: select.select
        displayName: Select TensorFlow image
        visibility: public
        hint: Select the GPU image if you are running on a GPU node pool
        options:
          - name: 'TensorFlow 1.13.1 CPU Image'
            value: 'tensorflow/tensorflow:1.13.1-py3'
          - name: 'TensorFlow 1.13.1 GPU Image'
            value: 'tensorflow/tensorflow:1.13.1-gpu-py3'

      - displayName: Node pool
        hint: Name of node pool or group to run this workflow task
        type: select.nodepool
        name: sys-node-pool
        value: "{{.DefaultNodePoolOption}}"
        visibility: public
        required: true

  entrypoint: main
  templates:
    - dag:
        tasks:
          - name: preprocessing
            template: preprocessing
          - name: train-model
            template: tensorflow
            dependencies: [preprocessing]
            arguments:
              artifacts:
                - name: data
                  from: "{{tasks.preprocessing.outputs.artifacts.processed-data}}"
      name: main
    - container:
        args:
          - |
            apt-get update && \
            apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
            pip install --upgrade pip && \
            pip install pillow lxml Cython contextlib2 matplotlib numpy scipy pycocotools pyyaml test-generator && \
            cd /mnt/src/tf/research && \
            export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
            mkdir -p /mnt/src/protoc && \
            wget -P /mnt/src/protoc https://github.com/protocolbuffers/protobuf/releases/download/v3.10.1/protoc-3.10.1-linux-x86_64.zip && \
            cd /mnt/src/protoc/ && \
            unzip protoc-3.10.1-linux-x86_64.zip && \
            cd /mnt/src/tf/research/ && \
            /mnt/src/protoc/bin/protoc object_detection/protos/*.proto --python_out=. && \
            cd /mnt/src/train/workflows/tf-object-detection-training && \
            python main.py \
              --extras="{{workflow.parameters.hyperparameters}}" \
              --model="{{workflow.parameters.cvat-model}}" \
              --num_classes="{{workflow.parameters.cvat-num-classes}}" \
              --sys_finetune_checkpoint="{{workflow.parameters.cvat-finetune-checkpoint}}" \
              --from_preprocessing=True
        command:
          - sh
          - '-c'
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: processed-data
          - mountPath: /mnt/output
            name: output
        workingDir: /mnt/src
      nodeSelector:
        "{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
          - name: models
            path: /mnt/data/models/
            optional: true
            s3:
              key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
          - git:
              repo: https://github.com/tensorflow/models.git
              revision: v1.13.0
            name: src
            path: /mnt/src/tf
          - git:
              repo: https://github.com/onepanelio/templates.git
              revision: v0.18.0
            name: tsrc
            path: /mnt/src/train
      name: tensorflow
      outputs:
        artifacts:
          - name: model
            optional: true
            path: /mnt/output
      sidecars:
        - name: tensorboard
          image: '{{workflow.parameters.tf-image}}'
          command:
            - sh
            - '-c'
          env:
            - name: ONEPANEL_INTERACTIVE_SIDECAR
              value: 'true'
          args:
            # Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
            - tensorboard --logdir /mnt/output/checkpoints/
          ports:
            - containerPort: 6006
              name: tensorboard
    - container:
        args:
          - |
            pip install --upgrade pip &&\
            pip install opencv-python albumentations tqdm pyyaml pycocotools && \
            cd /mnt/src/preprocessing/workflows/albumentations-preprocessing && \
            python -u main.py \
              --data_aug_params="{{workflow.parameters.preprocessing-parameters}}" \
              --format="tfrecord" \
              --val_split={{workflow.parameters.val-split}} \
              --aug_steps={{workflow.parameters.num-augmentation-cycles}}
        command:
          - sh
          - -c
        image: '{{workflow.parameters.tf-image}}'
        volumeMounts:
          - mountPath: /mnt/data
            name: data
          - mountPath: /mnt/output
            name: processed-data
        workingDir: /mnt/src
      nodeSelector:
        "{{.NodePoolLabel}}": '{{workflow.parameters.sys-node-pool}}'
      inputs:
        artifacts:
          - name: data
            path: /mnt/data/datasets/
            s3:
              key: '{{workflow.parameters.cvat-annotation-path}}'
          - git:
              repo: https://github.com/onepanelio/templates.git
              revision: v0.18.0
            name: src
            path: /mnt/src/preprocessing
      name: preprocessing
      outputs:
        artifacts:
          - name: processed-data
            optional: true
            path: /mnt/output
  volumeClaimTemplates:
    - metadata:
        name: data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        name: processed-data
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
    - metadata:
        name: output
      spec:
        accessModes:
          - ReadWriteOnce
        resources:
          requests:
            storage: 200Gi
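The preprocessing-to-training handoff in the manifest above is the standard Argo pattern for passing one task's output artifact into another task's input. Distilled to its skeleton (all names mirror the template above, nothing new is introduced):

  dag:
    tasks:
      - name: preprocessing
        template: preprocessing          # writes augmented data to /mnt/output
      - name: train-model
        template: tensorflow
        dependencies: [preprocessing]    # run only after preprocessing succeeds
        arguments:
          artifacts:
            - name: data                 # fills the 'data' input of the tensorflow template
              from: "{{tasks.preprocessing.outputs.artifacts.processed-data}}"

Because the training side's `data` input artifact carries no `s3:` key in this revision, it is populated entirely from the upstream `processed-data` artifact rather than fetched from object storage.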
db/yaml/workspaces/cvat/20200528140124.yaml (new file, 105 lines)
@@ -0,0 +1,105 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20200528140124
  action: create
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Docker containers that are part of the Workspace
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v0.7.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /home/django/data
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: data
          mountPath: /home/django/data
        - name: keys
          mountPath: /home/django/keys
        - name: logs
          mountPath: /home/django/logs
        - name: models
          mountPath: /home/django/models
    - name: cvat-ui
      image: onepanel/cvat-ui:v0.7.0
      ports:
        - containerPort: 80
          name: http
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
  routes:
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  # DAG Workflow to be executed once a Workspace action completes
  # postExecutionWorkflow:
  #   entrypoint: main
  #   templates:
  #     - name: main
  #       dag:
  #         tasks:
  #           - name: slack-notify
  #             template: slack-notify
  #     - name: slack-notify
  #       container:
  #         image: technosophos/slack-notify
  #         args:
  #           - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #         command:
  #           - sh
  #           - -c
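If the post-execution hook is wanted, the commented block above can simply be restored; a minimal sketch of the enabled result, re-assembled from those comments (the image, title, and message strings are exactly the ones shown there):

  postExecutionWorkflow:
    entrypoint: main
    templates:
      - name: main
        dag:
          tasks:
            - name: slack-notify
              template: slack-notify
      - name: slack-notify
        container:
          image: technosophos/slack-notify
          command: [sh, -c]
          args:
            - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify

Note the comments stop short of a working setup: the slack-notify image also expects a webhook URL in its environment (SLACK_WEBHOOK in that image's documentation), which has to be supplied separately.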
db/yaml/workspaces/cvat/20200626113635.yaml (new file, 116 lines)
@@ -0,0 +1,116 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20200626113635
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Docker containers that are part of the Workspace
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v0.7.6
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /home/django/data
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: data
          mountPath: /home/django/data
        - name: keys
          mountPath: /home/django/keys
        - name: logs
          mountPath: /home/django/logs
        - name: models
          mountPath: /home/django/models
        - name: share
          mountPath: /home/django/share
    - name: cvat-ui
      image: onepanel/cvat-ui:v0.7.5
      ports:
        - containerPort: 80
          name: http
    - name: filesyncer
      image: onepanel/filesyncer:v0.0.4
      command: ['python3', 'main.py']
      volumeMounts:
        - name: share
          mountPath: /mnt/share
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
  routes:
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
      timeout: 600s
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
      timeout: 600s
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
db/yaml/workspaces/cvat/20200704151301.yaml (new file, 118 lines)
@@ -0,0 +1,118 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20200704151301
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Docker containers that are part of the Workspace
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v0.7.10-stable
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /home/django/data
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: data
          mountPath: /home/django/data
        - name: keys
          mountPath: /home/django/keys
        - name: logs
          mountPath: /home/django/logs
        - name: models
          mountPath: /home/django/models
        - name: share
          mountPath: /home/django/share
    - name: cvat-ui
      image: onepanel/cvat-ui:v0.7.10-stable
      ports:
        - containerPort: 80
          name: http
    # Uncomment following lines to enable S3 FileSyncer
    # Refer to https://docs.onepanel.ai/docs/getting-started/use-cases/computervision/annotation/cvat/cvat_quick_guide#setting-up-environment-variables
    #- name: filesyncer
    #  image: onepanel/filesyncer:v0.0.4
    #  command: ['python3', 'main.py']
    #  volumeMounts:
    #    - name: share
    #      mountPath: /mnt/share
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
  routes:
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
      timeout: 600s
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
      timeout: 600s
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
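Enabling the S3 FileSyncer in this revision is just a matter of restoring the commented block above (a sketch of the comments re-assembled; the linked docs cover the credentials and environment it needs):

    - name: filesyncer
      image: onepanel/filesyncer:v0.0.4
      command: ['python3', 'main.py']
      volumeMounts:
        - name: share
          mountPath: /mnt/share

The later workspace revisions below replace this container with the argument-driven "onepanel/filesyncer:{{.ArtifactRepositoryType}}" image, which is the form to prefer going forward.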
db/yaml/workspaces/cvat/20200724220450.yaml (new file, 135 lines)
@@ -0,0 +1,135 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20200724220450
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
      - name: storage-prefix
        displayName: Directory in default object storage
        value: data
        hint: Location of data and models in default object storage, will continuously sync to '/mnt/share'
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v0.7.10-stable
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /home/django/data
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: data
          mountPath: /home/django/data
        - name: keys
          mountPath: /home/django/keys
        - name: logs
          mountPath: /home/django/logs
        - name: models
          mountPath: /home/django/models
        - name: share
          mountPath: /home/django/share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:v0.7.10-stable
      ports:
        - containerPort: 80
          name: http
    # You can add multiple FileSyncer sidecar containers if needed
    - name: filesyncer
      image: "onepanel/filesyncer:{{.ArtifactRepositoryType}}"
      args:
        - download
      env:
        - name: FS_PATH
          value: /mnt/share
        - name: FS_PREFIX
          value: '{{workspace.parameters.storage-prefix}}'
      volumeMounts:
        - name: share
          mountPath: /mnt/share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
  routes:
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
      timeout: 600s
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
      timeout: 600s
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
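The `storage-prefix` workspace parameter above is what FS_PREFIX resolves against, so pointing the sync at a different directory of the default object storage is just a matter of overriding that parameter when launching the workspace. A sketch (the prefix value here is hypothetical; 'data' is the default shown above):

  arguments:
    parameters:
      - name: storage-prefix
        value: my-team/cvat-data

Per the parameter's own hint, FileSyncer then continuously syncs that object-storage location to /mnt/share inside the workspace.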
db/yaml/workspaces/cvat/20200812113316.yaml (new file, 144 lines)
@@ -0,0 +1,144 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20200812113316
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
      - name: sync-directory
        displayName: Directory to sync raw input and training output
        value: workflow-data
        hint: Location to sync raw input, models and checkpoints from default object storage. Note that this will be relative to the current namespace.
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:0.12.0_cvat.1.0.0-beta.2-cuda
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /home/django/data
        - name: ONEPANEL_SYNC_DIRECTORY
          value: '{{workspace.parameters.sync-directory}}'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: data
          mountPath: /home/django/data
        - name: keys
          mountPath: /home/django/keys
        - name: logs
          mountPath: /home/django/logs
        - name: models
          mountPath: /home/django/models
        - name: share
          mountPath: /home/django/share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:0.12.0_cvat.1.0.0-beta.2
      ports:
        - containerPort: 80
          name: http
    # You can add multiple FileSyncer sidecar containers if needed
    - name: filesyncer
      image: "onepanel/filesyncer:{{.ArtifactRepositoryType}}"
      imagePullPolicy: Always
      args:
        - download
      env:
        - name: FS_PATH
          value: /mnt/share
        - name: FS_PREFIX
          value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
      volumeMounts:
        - name: share
          mountPath: /mnt/share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
  routes:
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
      timeout: 600s
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
      timeout: 600s
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
db/yaml/workspaces/cvat/20200824101905.yaml (new file, 144 lines)
@@ -0,0 +1,144 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20200824101905
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
      - name: sync-directory
        displayName: Directory to sync raw input and training output
        value: workflow-data
        hint: Location to sync raw input, models and checkpoints from default object storage. Note that this will be relative to the current namespace.
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:0.12.0-rc.6_cvat.1.0.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /home/django/data
        - name: ONEPANEL_SYNC_DIRECTORY
          value: '{{workspace.parameters.sync-directory}}'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: data
          mountPath: /home/django/data
        - name: keys
          mountPath: /home/django/keys
        - name: logs
          mountPath: /home/django/logs
        - name: models
          mountPath: /home/django/models
        - name: share
          mountPath: /home/django/share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:0.12.0-rc.1_cvat.1.0.0
      ports:
        - containerPort: 80
          name: http
    # You can add multiple FileSyncer sidecar containers if needed
    - name: filesyncer
      image: "onepanel/filesyncer:{{.ArtifactRepositoryType}}"
      imagePullPolicy: Always
      args:
        - download
      env:
        - name: FS_PATH
          value: /mnt/share
        - name: FS_PREFIX
          value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
      volumeMounts:
        - name: share
          mountPath: /mnt/share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
  routes:
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
      timeout: 600s
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
      timeout: 600s
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
144 db/yaml/workspaces/cvat/20200825154403.yaml Normal file
@@ -0,0 +1,144 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20200825154403
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
    - name: sync-directory
      displayName: Directory to sync raw input and training output
      value: workflow-data
      hint: Location to sync raw input, models and checkpoints from default object storage. Note that this will be relative to the current namespace.
  containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
    - name: POSTGRES_USER
      value: root
    - name: POSTGRES_DB
      value: cvat
    - name: POSTGRES_HOST_AUTH_METHOD
      value: trust
    - name: PGDATA
      value: /var/lib/psql/data
    ports:
    - containerPort: 5432
      name: tcp
    volumeMounts:
    - name: db
      mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
    - containerPort: 6379
      name: tcp
  - name: cvat
    image: onepanel/cvat:0.12.0_cvat.1.0.0
    env:
    - name: DJANGO_MODWSGI_EXTRA_ARGS
      value: ""
    - name: ALLOWED_HOSTS
      value: '*'
    - name: CVAT_REDIS_HOST
      value: localhost
    - name: CVAT_POSTGRES_HOST
      value: localhost
    - name: CVAT_SHARE_URL
      value: /home/django/data
    - name: ONEPANEL_SYNC_DIRECTORY
      value: '{{workspace.parameters.sync-directory}}'
    - name: NVIDIA_VISIBLE_DEVICES
      value: all
    - name: NVIDIA_DRIVER_CAPABILITIES
      value: compute,utility
    - name: NVIDIA_REQUIRE_CUDA
      value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
    ports:
    - containerPort: 8080
      name: http
    volumeMounts:
    - name: data
      mountPath: /home/django/data
    - name: keys
      mountPath: /home/django/keys
    - name: logs
      mountPath: /home/django/logs
    - name: models
      mountPath: /home/django/models
    - name: share
      mountPath: /home/django/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:0.12.0_cvat.1.0.0
    ports:
    - containerPort: 80
      name: http
  # You can add multiple FileSyncer sidecar containers if needed
  - name: filesyncer
    image: "onepanel/filesyncer:{{.ArtifactRepositoryType}}"
    imagePullPolicy: Always
    args:
    - download
    env:
    - name: FS_PATH
      value: /mnt/share
    - name: FS_PREFIX
      value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
    volumeMounts:
    - name: share
      mountPath: /mnt/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  routes:
  - match:
    - uri:
        regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
    - queryParams:
        id:
          regex: \d+.*
    route:
    - destination:
        port:
          number: 8080
    timeout: 600s
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
    timeout: 600s
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
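
For reference, a minimal sketch of the optional post-execution workflow above with the Slack notification enabled; this only uncomments and indents the template shipped in the file.

```yaml
# Sketch: the optional post-execution DAG from the template, uncommented.
postExecutionWorkflow:
  entrypoint: main
  templates:
  - name: main
    dag:
      tasks:
      - name: slack-notify
        template: slack-notify
  - name: slack-notify
    container:
      image: technosophos/slack-notify
      args:
      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
      command:
      - sh
      - -c
```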
156 db/yaml/workspaces/cvat/20200826185926.yaml Normal file
@@ -0,0 +1,156 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20200826185926
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
    - name: sync-directory
      displayName: Directory to sync raw input and training output
      value: workflow-data
      hint: Location to sync raw input, models and checkpoints from default object storage. Note that this will be relative to the current namespace.
  containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
    - name: POSTGRES_USER
      value: root
    - name: POSTGRES_DB
      value: cvat
    - name: POSTGRES_HOST_AUTH_METHOD
      value: trust
    - name: PGDATA
      value: /var/lib/psql/data
    ports:
    - containerPort: 5432
      name: tcp
    volumeMounts:
    - name: db
      mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
    - containerPort: 6379
      name: tcp
  - name: cvat
    image: onepanel/cvat:0.12.0_cvat.1.0.0
    env:
    - name: DJANGO_MODWSGI_EXTRA_ARGS
      value: ""
    - name: ALLOWED_HOSTS
      value: '*'
    - name: CVAT_REDIS_HOST
      value: localhost
    - name: CVAT_POSTGRES_HOST
      value: localhost
    - name: CVAT_SHARE_URL
      value: /home/django/data
    - name: ONEPANEL_SYNC_DIRECTORY
      value: '{{workspace.parameters.sync-directory}}'
    - name: NVIDIA_VISIBLE_DEVICES
      value: all
    - name: NVIDIA_DRIVER_CAPABILITIES
      value: compute,utility
    - name: NVIDIA_REQUIRE_CUDA
      value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
    ports:
    - containerPort: 8080
      name: http
    volumeMounts:
    - name: data
      mountPath: /home/django/data
    - name: keys
      mountPath: /home/django/keys
    - name: logs
      mountPath: /home/django/logs
    - name: models
      mountPath: /home/django/models
    - name: share
      mountPath: /home/django/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:0.12.0_cvat.1.0.0
    ports:
    - containerPort: 80
      name: http
  # You can add multiple FileSyncer sidecar containers if needed
  - name: filesyncer
    image: "onepanel/filesyncer:{{.ArtifactRepositoryType}}"
    imagePullPolicy: Always
    args:
    - download
    - -server-prefix=/sys/filesyncer
    env:
    - name: FS_PATH
      value: /mnt/share
    - name: FS_PREFIX
      value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
    volumeMounts:
    - name: share
      mountPath: /mnt/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
    - queryParams:
        id:
          regex: \d+.*
    route:
    - destination:
        port:
          number: 8080
    timeout: 600s
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
    timeout: 600s
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
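
This revision exposes the FileSyncer's own HTTP server behind `/sys/filesyncer` (port 8888) and the template notes that multiple FileSyncer sidecars can be added. A sketch of a second sidecar is below; the `upload` direction, the `/mnt/output` mount, and the `/sys/filesyncer2` prefix are assumptions for illustration, not part of the shipped template.

```yaml
# Sketch: a second FileSyncer sidecar. The "upload" direction and the
# names below are illustrative assumptions, not part of this template.
- name: filesyncer-output
  image: "onepanel/filesyncer:{{.ArtifactRepositoryType}}"
  imagePullPolicy: Always
  args:
  - upload                           # assumed direction; the template ships "download"
  - -server-prefix=/sys/filesyncer2  # hypothetical prefix, to avoid clashing with /sys/filesyncer
  env:
  - name: FS_PATH
    value: /mnt/output               # hypothetical second mount point
  - name: FS_PREFIX
    value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}/output'
  volumeMounts:
  - name: share
    mountPath: /mnt/output
```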
154 db/yaml/workspaces/cvat/20201001070806.yaml Normal file
@@ -0,0 +1,154 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20201001070806
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
    - name: sync-directory
      displayName: Directory to sync raw input and training output
      value: workflow-data
      hint: Location to sync raw input, models and checkpoints from default object storage. Note that this will be relative to the current namespace.
  containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
    - name: POSTGRES_USER
      value: root
    - name: POSTGRES_DB
      value: cvat
    - name: POSTGRES_HOST_AUTH_METHOD
      value: trust
    - name: PGDATA
      value: /var/lib/psql/data
    ports:
    - containerPort: 5432
      name: tcp
    volumeMounts:
    - name: db
      mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
    - containerPort: 6379
      name: tcp
  - name: cvat
    image: onepanel/cvat:0.12.1_cvat.1.0.0
    env:
    - name: DJANGO_MODWSGI_EXTRA_ARGS
      value: ""
    - name: ALLOWED_HOSTS
      value: '*'
    - name: CVAT_REDIS_HOST
      value: localhost
    - name: CVAT_POSTGRES_HOST
      value: localhost
    - name: CVAT_SHARE_URL
      value: /home/django/data
    - name: ONEPANEL_SYNC_DIRECTORY
      value: '{{workspace.parameters.sync-directory}}'
    - name: NVIDIA_VISIBLE_DEVICES
      value: all
    - name: NVIDIA_DRIVER_CAPABILITIES
      value: compute,utility
    - name: NVIDIA_REQUIRE_CUDA
      value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
    ports:
    - containerPort: 8080
      name: http
    volumeMounts:
    - name: data
      mountPath: /home/django/data
    - name: keys
      mountPath: /home/django/keys
    - name: logs
      mountPath: /home/django/logs
    - name: models
      mountPath: /home/django/models
    - name: share
      mountPath: /home/django/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:0.12.1_cvat.1.0.0
    ports:
    - containerPort: 80
      name: http
  # You can add multiple FileSyncer sidecar containers if needed
  - name: filesyncer
    image: "onepanel/filesyncer:{{.ArtifactRepositoryType}}"
    imagePullPolicy: Always
    args:
    - download
    - -server-prefix=/sys/filesyncer
    env:
    - name: FS_PATH
      value: /mnt/share
    - name: FS_PREFIX
      value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
    volumeMounts:
    - name: share
      mountPath: /mnt/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
    - queryParams:
        id:
          regex: \d+.*
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
@@ -1,147 +1,154 @@
# Workspace arguments
arguments:
  parameters:
  - name: sync-directory
    displayName: Directory to sync raw input and training output
    value: workflow-data
    hint: Location to sync raw input, models and checkpoints from default object storage. Note that this will be relative to the current namespace.
containers:
- name: cvat-db
  image: postgres:10-alpine
  env:
  - name: POSTGRES_USER
    value: root
  - name: POSTGRES_DB
    value: cvat
  - name: POSTGRES_HOST_AUTH_METHOD
    value: trust
  - name: PGDATA
    value: /var/lib/psql/data
  ports:
  - containerPort: 5432
    name: tcp
  volumeMounts:
  - name: db
    mountPath: /var/lib/psql
- name: cvat-redis
  image: redis:4.0-alpine
  ports:
  - containerPort: 6379
    name: tcp
- name: cvat
  image: onepanel/cvat:0.14.0_cvat.1.0.0
  env:
  - name: DJANGO_MODWSGI_EXTRA_ARGS
    value: ""
  - name: ALLOWED_HOSTS
    value: '*'
  - name: CVAT_REDIS_HOST
    value: localhost
  - name: CVAT_POSTGRES_HOST
    value: localhost
  - name: CVAT_SHARE_URL
    value: /home/django/data
  - name: ONEPANEL_SYNC_DIRECTORY
    value: '{{workspace.parameters.sync-directory}}'
  - name: NVIDIA_VISIBLE_DEVICES
    value: all
  - name: NVIDIA_DRIVER_CAPABILITIES
    value: compute,utility
  - name: NVIDIA_REQUIRE_CUDA
    value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
  ports:
  - containerPort: 8080
    name: http
  volumeMounts:
  - name: data
    mountPath: /home/django/data
  - name: keys
    mountPath: /home/django/keys
  - name: logs
    mountPath: /home/django/logs
  - name: models
    mountPath: /home/django/models
  - name: share
    mountPath: /home/django/share
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
- name: cvat-ui
  image: onepanel/cvat-ui:0.14.0_cvat.1.0.0
  ports:
  - containerPort: 80
    name: http
# You can add multiple FileSyncer sidecar containers if needed
- name: filesyncer
  image: onepanel/filesyncer:{{.ArtifactRepositoryType}}
  imagePullPolicy: Always
  args:
  - download
  - -server-prefix=/sys/filesyncer
  env:
  - name: FS_PATH
    value: /mnt/share
  - name: FS_PREFIX
    value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
  volumeMounts:
  - name: share
    mountPath: /mnt/share
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: cvat-ui
  port: 80
  protocol: TCP
  targetPort: 80
- name: cvat
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8888
  protocol: TCP
  targetPort: 8888
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8888
- match:
  - uri:
      regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
  - queryParams:
      id:
        regex: \d+.*
  route:
  - destination:
      port:
        number: 8080
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 80
# DAG Workflow to be executed once a Workspace action completes (optional)
# Uncomment the lines below if you want to send Slack notifications
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#  - name: main
#    dag:
#      tasks:
#      - name: slack-notify
#        template: slack-notify
#  - name: slack-notify
#    container:
#      image: technosophos/slack-notify
#      args:
#      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#      command:
#      - sh
#      - -c
metadata:
  name: CVAT
  kind: Workspace
  version: 20201016170415
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
    - name: sync-directory
      displayName: Directory to sync raw input and training output
      value: workflow-data
      hint: Location to sync raw input, models and checkpoints from default object storage. Note that this will be relative to the current namespace.
  containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
    - name: POSTGRES_USER
      value: root
    - name: POSTGRES_DB
      value: cvat
    - name: POSTGRES_HOST_AUTH_METHOD
      value: trust
    - name: PGDATA
      value: /var/lib/psql/data
    ports:
    - containerPort: 5432
      name: tcp
    volumeMounts:
    - name: db
      mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
    - containerPort: 6379
      name: tcp
  - name: cvat
    image: onepanel/cvat:0.14.0_cvat.1.0.0
    env:
    - name: DJANGO_MODWSGI_EXTRA_ARGS
      value: ""
    - name: ALLOWED_HOSTS
      value: '*'
    - name: CVAT_REDIS_HOST
      value: localhost
    - name: CVAT_POSTGRES_HOST
      value: localhost
    - name: CVAT_SHARE_URL
      value: /home/django/data
    - name: ONEPANEL_SYNC_DIRECTORY
      value: '{{workspace.parameters.sync-directory}}'
    - name: NVIDIA_VISIBLE_DEVICES
      value: all
    - name: NVIDIA_DRIVER_CAPABILITIES
      value: compute,utility
    - name: NVIDIA_REQUIRE_CUDA
      value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
    ports:
    - containerPort: 8080
      name: http
    volumeMounts:
    - name: data
      mountPath: /home/django/data
    - name: keys
      mountPath: /home/django/keys
    - name: logs
      mountPath: /home/django/logs
    - name: models
      mountPath: /home/django/models
    - name: share
      mountPath: /home/django/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:0.14.0_cvat.1.0.0
    ports:
    - containerPort: 80
      name: http
  # You can add multiple FileSyncer sidecar containers if needed
  - name: filesyncer
    image: "onepanel/filesyncer:{{.ArtifactRepositoryType}}"
    imagePullPolicy: Always
    args:
    - download
    - -server-prefix=/sys/filesyncer
    env:
    - name: FS_PATH
      value: /mnt/share
    - name: FS_PREFIX
      value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
    volumeMounts:
    - name: share
      mountPath: /mnt/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
    - queryParams:
        id:
          regex: \d+.*
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
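
The FileSyncer's `FS_PREFIX` is assembled from Go-template variables: the workspace's namespace plus the `sync-directory` parameter. For a hypothetical namespace `alpha` with the default parameter value, it would render roughly as follows.

```yaml
# Hypothetical rendering of the templated value for a namespace named "alpha":
env:
- name: FS_PREFIX
  # template: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
  value: alpha/workflow-data
```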
@@ -1,159 +1,166 @@
# Workspace arguments
arguments:
  parameters:
  - name: sync-directory
    displayName: Directory to sync raw input and training output
    value: workflow-data
    hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
containers:
- name: cvat-db
  image: postgres:10-alpine
  env:
  - name: POSTGRES_USER
    value: root
  - name: POSTGRES_DB
    value: cvat
  - name: POSTGRES_HOST_AUTH_METHOD
    value: trust
  - name: PGDATA
    value: /var/lib/psql/data
  ports:
  - containerPort: 5432
    name: tcp
  volumeMounts:
  - name: db
    mountPath: /var/lib/psql
- name: cvat-redis
  image: redis:4.0-alpine
  ports:
  - containerPort: 6379
    name: tcp
- name: cvat
  image: onepanel/cvat:0.15.0_cvat.1.0.0
  env:
  - name: DJANGO_MODWSGI_EXTRA_ARGS
    value: ""
  - name: ALLOWED_HOSTS
    value: '*'
  - name: CVAT_REDIS_HOST
    value: localhost
  - name: CVAT_POSTGRES_HOST
    value: localhost
  - name: CVAT_SHARE_URL
    value: /cvat/data
  - name: CVAT_SHARE_DIR
    value: /share
  - name: CVAT_KEYS_DIR
    value: /cvat/keys
  - name: CVAT_DATA_DIR
    value: /cvat/data
  - name: CVAT_MODELS_DIR
    value: /cvat/models
  - name: CVAT_LOGS_DIR
    value: /cvat/logs
  - name: ONEPANEL_SYNC_DIRECTORY
    value: '{{workspace.parameters.sync-directory}}'
  - name: NVIDIA_VISIBLE_DEVICES
    value: all
  - name: NVIDIA_DRIVER_CAPABILITIES
    value: compute,utility
  - name: NVIDIA_REQUIRE_CUDA
    value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
  ports:
  - containerPort: 8080
    name: http
  volumeMounts:
  - name: cvat-data
    mountPath: /cvat
  - name: share
    mountPath: /share
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
- name: cvat-ui
  image: onepanel/cvat-ui:0.15.0_cvat.1.0.0
  ports:
  - containerPort: 80
    name: http
# You can add multiple FileSyncer sidecar containers if needed
- name: filesyncer
  image: onepanel/filesyncer:s3
  imagePullPolicy: Always
  args:
  - download
  - -server-prefix=/sys/filesyncer
  env:
  - name: FS_PATH
    value: /mnt/share
  - name: FS_PREFIX
    value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
  volumeMounts:
  - name: share
    mountPath: /mnt/share
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: cvat-ui
  port: 80
  protocol: TCP
  targetPort: 80
- name: cvat
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8888
  protocol: TCP
  targetPort: 8888
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8888
- match:
  - uri:
      regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
  - queryParams:
      id:
        regex: \d+.*
  route:
  - destination:
      port:
        number: 8080
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 80
volumeClaimTemplates:
- metadata:
    name: db
  spec:
    accessModes: ["ReadWriteOnce"]
    resources:
      requests:
        storage: 20Gi
# DAG Workflow to be executed once a Workspace action completes (optional)
# Uncomment the lines below if you want to send Slack notifications
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#  - name: main
#    dag:
#      tasks:
#      - name: slack-notify
#        template: slack-notify
#  - name: slack-notify
#    container:
#      image: technosophos/slack-notify
#      args:
#      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#      command:
#      - sh
#      - -c
metadata:
  name: CVAT
  kind: Workspace
  version: 20201102104048
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
    - name: sync-directory
      displayName: Directory to sync raw input and training output
      value: workflow-data
      hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
  containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
    - name: POSTGRES_USER
      value: root
    - name: POSTGRES_DB
      value: cvat
    - name: POSTGRES_HOST_AUTH_METHOD
      value: trust
    - name: PGDATA
      value: /var/lib/psql/data
    ports:
    - containerPort: 5432
      name: tcp
    volumeMounts:
    - name: db
      mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
    - containerPort: 6379
      name: tcp
  - name: cvat
    image: onepanel/cvat:0.15.0_cvat.1.0.0
    env:
    - name: DJANGO_MODWSGI_EXTRA_ARGS
      value: ""
    - name: ALLOWED_HOSTS
      value: '*'
    - name: CVAT_REDIS_HOST
      value: localhost
    - name: CVAT_POSTGRES_HOST
      value: localhost
    - name: CVAT_SHARE_URL
      value: /cvat/data
    - name: CVAT_SHARE_DIR
      value: /share
    - name: CVAT_KEYS_DIR
      value: /cvat/keys
    - name: CVAT_DATA_DIR
      value: /cvat/data
    - name: CVAT_MODELS_DIR
      value: /cvat/models
    - name: CVAT_LOGS_DIR
      value: /cvat/logs
    - name: ONEPANEL_SYNC_DIRECTORY
      value: '{{workspace.parameters.sync-directory}}'
    - name: NVIDIA_VISIBLE_DEVICES
      value: all
    - name: NVIDIA_DRIVER_CAPABILITIES
      value: compute,utility
    - name: NVIDIA_REQUIRE_CUDA
      value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
    ports:
    - containerPort: 8080
      name: http
    volumeMounts:
    - name: cvat-data
      mountPath: /cvat
    - name: share
      mountPath: /share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:0.15.0_cvat.1.0.0
    ports:
    - containerPort: 80
      name: http
  # You can add multiple FileSyncer sidecar containers if needed
  - name: filesyncer
    image: onepanel/filesyncer:s3
    imagePullPolicy: Always
    args:
    - download
    - -server-prefix=/sys/filesyncer
    env:
    - name: FS_PATH
      value: /mnt/share
    - name: FS_PREFIX
      value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
    volumeMounts:
    - name: share
      mountPath: /mnt/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
    - queryParams:
        id:
          regex: \d+.*
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
  volumeClaimTemplates:
  - metadata:
      name: db
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 20Gi
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
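
From this revision on, the workspace declares a persistent claim for the database through `volumeClaimTemplates`. A sketch of sizing it up or adding a second claim follows; the `share` claim and both sizes below are assumptions for illustration, not what the template ships (it declares only `db` at 20Gi).

```yaml
# Sketch: enlarging the db claim and adding a hypothetical "share" claim.
volumeClaimTemplates:
- metadata:
    name: db
  spec:
    accessModes: ["ReadWriteOnce"]
    resources:
      requests:
        storage: 50Gi        # assumed larger size; the template ships 20Gi
- metadata:
    name: share              # hypothetical extra claim backing the /share mount
  spec:
    accessModes: ["ReadWriteOnce"]
    resources:
      requests:
        storage: 100Gi
```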
@@ -1,159 +1,166 @@
# Workspace arguments
arguments:
  parameters:
  - name: sync-directory
    displayName: Directory to sync raw input and training output
    value: workflow-data
    hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
containers:
- name: cvat-db
  image: postgres:10-alpine
  env:
  - name: POSTGRES_USER
    value: root
  - name: POSTGRES_DB
    value: cvat
  - name: POSTGRES_HOST_AUTH_METHOD
    value: trust
  - name: PGDATA
    value: /var/lib/psql/data
  ports:
  - containerPort: 5432
    name: tcp
  volumeMounts:
  - name: db
    mountPath: /var/lib/psql
- name: cvat-redis
  image: redis:4.0-alpine
  ports:
  - containerPort: 6379
    name: tcp
- name: cvat
  image: onepanel/cvat:0.16.0_cvat.1.0.0
  env:
  - name: DJANGO_MODWSGI_EXTRA_ARGS
    value: ""
  - name: ALLOWED_HOSTS
    value: '*'
  - name: CVAT_REDIS_HOST
    value: localhost
  - name: CVAT_POSTGRES_HOST
    value: localhost
  - name: CVAT_SHARE_URL
    value: /cvat/data
  - name: CVAT_SHARE_DIR
    value: /share
  - name: CVAT_KEYS_DIR
    value: /cvat/keys
  - name: CVAT_DATA_DIR
    value: /cvat/data
  - name: CVAT_MODELS_DIR
    value: /cvat/models
  - name: CVAT_LOGS_DIR
    value: /cvat/logs
  - name: ONEPANEL_SYNC_DIRECTORY
    value: '{{workspace.parameters.sync-directory}}'
  - name: NVIDIA_VISIBLE_DEVICES
    value: all
  - name: NVIDIA_DRIVER_CAPABILITIES
    value: compute,utility
  - name: NVIDIA_REQUIRE_CUDA
    value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
  ports:
  - containerPort: 8080
    name: http
  volumeMounts:
  - name: cvat-data
    mountPath: /cvat
  - name: share
    mountPath: /share
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
- name: cvat-ui
  image: onepanel/cvat-ui:0.16.0_cvat.1.0.0
  ports:
  - containerPort: 80
    name: http
# You can add multiple FileSyncer sidecar containers if needed
- name: filesyncer
  image: onepanel/filesyncer:s3
  imagePullPolicy: Always
  args:
  - download
  - -server-prefix=/sys/filesyncer
  env:
  - name: FS_PATH
    value: /mnt/share
  - name: FS_PREFIX
    value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
  volumeMounts:
  - name: share
    mountPath: /mnt/share
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: cvat-ui
  port: 80
  protocol: TCP
  targetPort: 80
- name: cvat
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8888
  protocol: TCP
  targetPort: 8888
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8888
- match:
  - uri:
      regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
  - queryParams:
      id:
        regex: \d+.*
  route:
  - destination:
      port:
        number: 8080
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 80
volumeClaimTemplates:
- metadata:
    name: db
  spec:
    accessModes: ["ReadWriteOnce"]
    resources:
      requests:
        storage: 20Gi
# DAG Workflow to be executed once a Workspace action completes (optional)
# Uncomment the lines below if you want to send Slack notifications
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#  - name: main
#    dag:
#      tasks:
#      - name: slack-notify
#        template: slack-notify
#  - name: slack-notify
#    container:
#      image: technosophos/slack-notify
#      args:
#      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#      command:
#      - sh
#      - -c
metadata:
  name: CVAT
  kind: Workspace
  version: 20201113094916
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
    - name: sync-directory
      displayName: Directory to sync raw input and training output
      value: workflow-data
      hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
  containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
    - name: POSTGRES_USER
      value: root
    - name: POSTGRES_DB
      value: cvat
    - name: POSTGRES_HOST_AUTH_METHOD
      value: trust
    - name: PGDATA
      value: /var/lib/psql/data
    ports:
    - containerPort: 5432
      name: tcp
    volumeMounts:
    - name: db
      mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
    - containerPort: 6379
      name: tcp
  - name: cvat
    image: onepanel/cvat:0.16.0_cvat.1.0.0
    env:
    - name: DJANGO_MODWSGI_EXTRA_ARGS
      value: ""
    - name: ALLOWED_HOSTS
      value: '*'
    - name: CVAT_REDIS_HOST
      value: localhost
    - name: CVAT_POSTGRES_HOST
      value: localhost
    - name: CVAT_SHARE_URL
      value: /cvat/data
    - name: CVAT_SHARE_DIR
      value: /share
    - name: CVAT_KEYS_DIR
      value: /cvat/keys
    - name: CVAT_DATA_DIR
      value: /cvat/data
    - name: CVAT_MODELS_DIR
      value: /cvat/models
    - name: CVAT_LOGS_DIR
      value: /cvat/logs
    - name: ONEPANEL_SYNC_DIRECTORY
      value: '{{workspace.parameters.sync-directory}}'
    - name: NVIDIA_VISIBLE_DEVICES
      value: all
    - name: NVIDIA_DRIVER_CAPABILITIES
      value: compute,utility
    - name: NVIDIA_REQUIRE_CUDA
      value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
    ports:
    - containerPort: 8080
      name: http
    volumeMounts:
    - name: cvat-data
      mountPath: /cvat
    - name: share
      mountPath: /share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:0.16.0_cvat.1.0.0
    ports:
    - containerPort: 80
      name: http
  # You can add multiple FileSyncer sidecar containers if needed
  - name: filesyncer
    image: onepanel/filesyncer:s3
    imagePullPolicy: Always
    args:
    - download
    - -server-prefix=/sys/filesyncer
    env:
    - name: FS_PATH
      value: /mnt/share
    - name: FS_PREFIX
      value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
    volumeMounts:
    - name: share
      mountPath: /mnt/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
    - queryParams:
        id:
          regex: \d+.*
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
  volumeClaimTemplates:
  - metadata:
      name: db
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 20Gi
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
@@ -1,161 +1,168 @@
# Workspace arguments
arguments:
  parameters:
  - name: sync-directory
    displayName: Directory to sync raw input and training output
    value: workflow-data
    hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
containers:
- name: cvat-db
  image: postgres:10-alpine
  env:
  - name: POSTGRES_USER
    value: root
  - name: POSTGRES_DB
    value: cvat
  - name: POSTGRES_HOST_AUTH_METHOD
    value: trust
  - name: PGDATA
    value: /var/lib/psql/data
  ports:
  - containerPort: 5432
    name: tcp
  volumeMounts:
  - name: db
    mountPath: /var/lib/psql
- name: cvat-redis
  image: redis:4.0-alpine
  ports:
  - containerPort: 6379
    name: tcp
- name: cvat
  image: onepanel/cvat:0.16.0_cvat.1.0.0
  env:
  - name: DJANGO_MODWSGI_EXTRA_ARGS
    value: ""
  - name: ALLOWED_HOSTS
    value: '*'
  - name: CVAT_REDIS_HOST
    value: localhost
  - name: CVAT_POSTGRES_HOST
    value: localhost
  - name: CVAT_SHARE_URL
    value: /cvat/data
  - name: CVAT_SHARE_DIR
    value: /share
  - name: CVAT_DATA_DIR
    value: /cvat/data
  - name: CVAT_MEDIA_DATA_DIR
    value: /cvat/data/data
  - name: CVAT_KEYS_DIR
    value: /cvat/data/keys
  - name: CVAT_MODELS_DIR
    value: /cvat/data/models
  - name: CVAT_LOGS_DIR
    value: /cvat/logs
  - name: ONEPANEL_SYNC_DIRECTORY
    value: '{{workspace.parameters.sync-directory}}'
  - name: NVIDIA_VISIBLE_DEVICES
    value: all
  - name: NVIDIA_DRIVER_CAPABILITIES
    value: compute,utility
  - name: NVIDIA_REQUIRE_CUDA
    value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
  ports:
  - containerPort: 8080
    name: http
  volumeMounts:
  - name: cvat-data
    mountPath: /cvat
  - name: share
    mountPath: /share
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
- name: cvat-ui
  image: onepanel/cvat-ui:0.16.0_cvat.1.0.0
  ports:
  - containerPort: 80
    name: http
# You can add multiple FileSyncer sidecar containers if needed
- name: filesyncer
  image: onepanel/filesyncer:s3
  imagePullPolicy: Always
  args:
  - download
  - -server-prefix=/sys/filesyncer
  env:
  - name: FS_PATH
    value: /mnt/share
  - name: FS_PREFIX
    value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
  volumeMounts:
  - name: share
    mountPath: /mnt/share
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: cvat-ui
  port: 80
  protocol: TCP
  targetPort: 80
- name: cvat
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8888
  protocol: TCP
  targetPort: 8888
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8888
- match:
  - uri:
      regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
  - queryParams:
      id:
        regex: \d+.*
  route:
  - destination:
      port:
        number: 8080
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 80
volumeClaimTemplates:
- metadata:
    name: db
  spec:
    accessModes: ["ReadWriteOnce"]
    resources:
      requests:
        storage: 20Gi
# DAG Workflow to be executed once a Workspace action completes (optional)
# Uncomment the lines below if you want to send Slack notifications
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#  - name: main
#    dag:
#      tasks:
#      - name: slack-notify
#        template: slack-notify
#  - name: slack-notify
#    container:
#      image: technosophos/slack-notify
#      args:
#      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#      command:
#      - sh
#      - -c
metadata:
  name: CVAT
  kind: Workspace
  version: 20201115133046
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
    - name: sync-directory
      displayName: Directory to sync raw input and training output
      value: workflow-data
      hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
  containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
    - name: POSTGRES_USER
      value: root
    - name: POSTGRES_DB
      value: cvat
    - name: POSTGRES_HOST_AUTH_METHOD
      value: trust
    - name: PGDATA
      value: /var/lib/psql/data
    ports:
    - containerPort: 5432
      name: tcp
    volumeMounts:
    - name: db
      mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
    - containerPort: 6379
      name: tcp
  - name: cvat
    image: onepanel/cvat:0.16.0_cvat.1.0.0
    env:
    - name: DJANGO_MODWSGI_EXTRA_ARGS
      value: ""
    - name: ALLOWED_HOSTS
      value: '*'
    - name: CVAT_REDIS_HOST
      value: localhost
    - name: CVAT_POSTGRES_HOST
      value: localhost
    - name: CVAT_SHARE_URL
      value: /cvat/data
    - name: CVAT_SHARE_DIR
      value: /share
    - name: CVAT_DATA_DIR
      value: /cvat/data
    - name: CVAT_MEDIA_DATA_DIR
      value: /cvat/data/data
    - name: CVAT_KEYS_DIR
      value: /cvat/data/keys
    - name: CVAT_MODELS_DIR
      value: /cvat/data/models
    - name: CVAT_LOGS_DIR
      value: /cvat/logs
    - name: ONEPANEL_SYNC_DIRECTORY
      value: '{{workspace.parameters.sync-directory}}'
    - name: NVIDIA_VISIBLE_DEVICES
      value: all
    - name: NVIDIA_DRIVER_CAPABILITIES
      value: compute,utility
    - name: NVIDIA_REQUIRE_CUDA
      value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
    ports:
    - containerPort: 8080
      name: http
    volumeMounts:
    - name: cvat-data
      mountPath: /cvat
    - name: share
      mountPath: /share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:0.16.0_cvat.1.0.0
    ports:
    - containerPort: 80
      name: http
  # You can add multiple FileSyncer sidecar containers if needed
  - name: filesyncer
    image: onepanel/filesyncer:s3
    imagePullPolicy: Always
    args:
    - download
    - -server-prefix=/sys/filesyncer
    env:
    - name: FS_PATH
      value: /mnt/share
    - name: FS_PREFIX
      value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
    volumeMounts:
    - name: share
      mountPath: /mnt/share
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
    - queryParams:
        id:
          regex: \d+.*
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
  volumeClaimTemplates:
  - metadata:
      name: db
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 20Gi
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
@@ -1,163 +1,170 @@
|
||||
# Workspace arguments
|
||||
arguments:
|
||||
parameters:
|
||||
- name: sync-directory
|
||||
displayName: Directory to sync raw input and training output
|
||||
value: workflow-data
|
||||
hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
|
||||
containers:
|
||||
- name: cvat-db
|
||||
image: postgres:10-alpine
|
||||
env:
|
||||
- name: POSTGRES_USER
|
||||
value: root
|
||||
- name: POSTGRES_DB
|
||||
value: cvat
|
||||
- name: POSTGRES_HOST_AUTH_METHOD
|
||||
value: trust
|
||||
- name: PGDATA
|
||||
value: /var/lib/psql/data
|
||||
ports:
|
||||
- containerPort: 5432
|
||||
name: tcp
|
||||
volumeMounts:
|
||||
- name: db
|
||||
mountPath: /var/lib/psql
|
||||
- name: cvat-redis
|
||||
image: redis:4.0-alpine
|
||||
ports:
|
||||
- containerPort: 6379
|
||||
name: tcp
|
||||
- name: cvat
|
||||
image: onepanel/cvat:0.16.0_cvat.1.0.0
|
||||
env:
|
||||
- name: DJANGO_MODWSGI_EXTRA_ARGS
|
||||
value: ""
|
||||
- name: ALLOWED_HOSTS
|
||||
value: '*'
|
||||
- name: CVAT_REDIS_HOST
|
||||
value: localhost
|
||||
- name: CVAT_POSTGRES_HOST
|
||||
value: localhost
|
||||
- name: CVAT_SHARE_URL
|
||||
value: /cvat/data
|
||||
- name: CVAT_SHARE_DIR
|
||||
value: /share
|
||||
- name: CVAT_DATA_DIR
|
||||
value: /cvat/data
|
||||
- name: CVAT_MEDIA_DATA_DIR
|
||||
value: /cvat/data/data
|
||||
- name: CVAT_KEYS_DIR
|
||||
value: /cvat/data/keys
|
||||
- name: CVAT_MODELS_DIR
|
||||
value: /cvat/data/models
|
||||
- name: CVAT_LOGS_DIR
|
||||
value: /cvat/logs
|
||||
- name: ONEPANEL_SYNC_DIRECTORY
|
||||
value: '{{workspace.parameters.sync-directory}}'
|
||||
- name: NVIDIA_VISIBLE_DEVICES
|
||||
value: all
|
||||
- name: NVIDIA_DRIVER_CAPABILITIES
|
||||
value: compute,utility
|
||||
- name: NVIDIA_REQUIRE_CUDA
|
||||
value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
|
||||
- name: ONEPANEL_MAIN_CONTAINER
|
||||
value: 'true'
|
||||
ports:
|
||||
- containerPort: 8080
|
||||
name: http
|
||||
volumeMounts:
|
||||
- name: cvat-data
|
||||
mountPath: /cvat
|
||||
- name: share
|
||||
mountPath: /share
|
||||
- name: sys-namespace-config
|
||||
mountPath: /etc/onepanel
|
||||
readOnly: true
|
||||
- name: cvat-ui
|
||||
image: onepanel/cvat-ui:0.16.0_cvat.1.0.0
|
||||
ports:
|
||||
- containerPort: 80
|
||||
name: http
|
||||
# You can add multiple FileSyncer sidecar containers if needed
|
||||
- name: filesyncer
|
||||
image: onepanel/filesyncer:s3
|
||||
imagePullPolicy: Always
|
||||
args:
|
||||
- download
|
||||
- -server-prefix=/sys/filesyncer
|
||||
env:
|
||||
- name: FS_PATH
|
||||
value: /mnt/share
|
||||
- name: FS_PREFIX
|
||||
value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
|
||||
volumeMounts:
|
||||
- name: share
|
||||
mountPath: /mnt/share
|
||||
- name: sys-namespace-config
|
||||
mountPath: /etc/onepanel
|
||||
readOnly: true
|
||||
ports:
|
||||
- name: cvat-ui
|
||||
port: 80
|
||||
protocol: TCP
|
||||
targetPort: 80
|
||||
- name: cvat
|
||||
port: 8080
|
||||
protocol: TCP
|
||||
targetPort: 8080
|
||||
- name: fs
|
||||
port: 8888
|
||||
protocol: TCP
|
||||
targetPort: 8888
|
||||
routes:
|
||||
- match:
|
||||
- uri:
|
||||
prefix: /sys/filesyncer
|
||||
route:
|
||||
- destination:
|
||||
port:
|
||||
number: 8888
|
||||
- match:
|
||||
- uri:
|
||||
regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
|
||||
- queryParams:
|
||||
id:
|
||||
regex: \d+.*
|
||||
route:
|
||||
- destination:
|
||||
port:
|
||||
number: 8080
|
||||
- match:
|
||||
- uri:
|
||||
prefix: /
|
||||
route:
|
||||
- destination:
|
||||
port:
|
||||
number: 80
|
||||
volumeClaimTemplates:
|
||||
- metadata:
|
||||
name: db
|
||||
spec:
|
||||
accessModes: ["ReadWriteOnce"]
|
||||
resources:
|
||||
requests:
|
||||
storage: 20Gi
|
||||
# DAG Workflow to be executed once a Workspace action completes (optional)
|
||||
# Uncomment the lines below if you want to send Slack notifications
|
||||
#postExecutionWorkflow:
|
||||
# entrypoint: main
|
||||
# templates:
|
||||
# - name: main
|
||||
# dag:
|
||||
# tasks:
|
||||
# - name: slack-notify
|
||||
# template: slack-notify
|
||||
# - name: slack-notify
|
||||
# container:
|
||||
# image: technosophos/slack-notify
|
||||
# args:
|
||||
# - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
|
||||
# command:
|
||||
# - sh
|
||||
# - -c
|
||||
metadata:
  name: CVAT
  kind: Workspace
  version: 20201211161117
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
      - name: sync-directory
        displayName: Directory to sync raw input and training output
        value: workflow-data
        hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:0.16.0_cvat.1.0.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /cvat/data
        - name: CVAT_SHARE_DIR
          value: /share
        - name: CVAT_DATA_DIR
          value: /cvat/data
        - name: CVAT_MEDIA_DATA_DIR
          value: /cvat/data/data
        - name: CVAT_KEYS_DIR
          value: /cvat/data/keys
        - name: CVAT_MODELS_DIR
          value: /cvat/data/models
        - name: CVAT_LOGS_DIR
          value: /cvat/logs
        - name: ONEPANEL_SYNC_DIRECTORY
          value: '{{workspace.parameters.sync-directory}}'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
        - name: ONEPANEL_MAIN_CONTAINER
          value: 'true'
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: cvat-data
          mountPath: /cvat
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:0.16.0_cvat.1.0.0
      ports:
        - containerPort: 80
          name: http
    # You can add multiple FileSyncer sidecar containers if needed (see the sketch after this manifest)
    - name: filesyncer
      image: onepanel/filesyncer:s3
      imagePullPolicy: Always
      args:
        - download
        - -server-prefix=/sys/filesyncer
      env:
        - name: FS_PATH
          value: /mnt/share
        - name: FS_PREFIX
          value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
      volumeMounts:
        - name: share
          mountPath: /mnt/share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
    - name: fs
      port: 8888
      protocol: TCP
      targetPort: 8888
  routes:
    - match:
        - uri:
            prefix: /sys/filesyncer
      route:
        - destination:
            port:
              number: 8888
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  volumeClaimTemplates:
    - metadata:
        name: db
      spec:
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 20Gi
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
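# The comment in the manifest above notes that multiple FileSyncer sidecars are allowed.
# A minimal sketch of a second download syncer, not part of the shipped template: the
# container name, prefix, and mount path below are hypothetical, and only arguments
# already used above (download, -server-prefix) are assumed to exist. A second syncer
# would also need its own service port and route alongside the /sys/filesyncer pair.
#    - name: filesyncer-models
#      image: onepanel/filesyncer:s3
#      imagePullPolicy: Always
#      args:
#        - download
#        - -server-prefix=/sys/filesyncer-models
#      env:
#        - name: FS_PATH
#          value: /mnt/models
#        - name: FS_PREFIX
#          value: '{{workflow.namespace}}/pretrained-models'
#      volumeMounts:
#        - name: share
#          mountPath: /mnt/models
#        - name: sys-namespace-config
#          mountPath: /etc/onepanel
#          readOnly: true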
@@ -1,163 +1,170 @@
# Workspace arguments
arguments:
  parameters:
    - name: sync-directory
      displayName: Directory to sync raw input and training output
      value: workflow-data
      hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
      - name: POSTGRES_USER
        value: root
      - name: POSTGRES_DB
        value: cvat
      - name: POSTGRES_HOST_AUTH_METHOD
        value: trust
      - name: PGDATA
        value: /var/lib/psql/data
    ports:
      - containerPort: 5432
        name: tcp
    volumeMounts:
      - name: db
        mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
      - containerPort: 6379
        name: tcp
  - name: cvat
    image: onepanel/cvat:0.17.0_cvat.1.0.0
    env:
      - name: DJANGO_MODWSGI_EXTRA_ARGS
        value: ""
      - name: ALLOWED_HOSTS
        value: '*'
      - name: CVAT_REDIS_HOST
        value: localhost
      - name: CVAT_POSTGRES_HOST
        value: localhost
      - name: CVAT_SHARE_URL
        value: /cvat/data
      - name: CVAT_SHARE_DIR
        value: /share
      - name: CVAT_DATA_DIR
        value: /cvat/data
      - name: CVAT_MEDIA_DATA_DIR
        value: /cvat/data/data
      - name: CVAT_KEYS_DIR
        value: /cvat/data/keys
      - name: CVAT_MODELS_DIR
        value: /cvat/data/models
      - name: CVAT_LOGS_DIR
        value: /cvat/logs
      - name: ONEPANEL_SYNC_DIRECTORY
        value: '{{workspace.parameters.sync-directory}}'
      - name: NVIDIA_VISIBLE_DEVICES
        value: all
      - name: NVIDIA_DRIVER_CAPABILITIES
        value: compute,utility
      - name: NVIDIA_REQUIRE_CUDA
        value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
      - name: ONEPANEL_MAIN_CONTAINER
        value: 'true'
    ports:
      - containerPort: 8080
        name: http
    volumeMounts:
      - name: cvat-data
        mountPath: /cvat
      - name: share
        mountPath: /share
      - name: sys-namespace-config
        mountPath: /etc/onepanel
        readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:0.17.0_cvat.1.0.0
    ports:
      - containerPort: 80
        name: http
  # You can add multiple FileSyncer sidecar containers if needed
  - name: filesyncer
    image: onepanel/filesyncer:0.17.0
    imagePullPolicy: Always
    args:
      - download
      - -server-prefix=/sys/filesyncer
    env:
      - name: FS_PATH
        value: /mnt/share
      - name: FS_PREFIX
        value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
    volumeMounts:
      - name: share
        mountPath: /mnt/share
      - name: sys-namespace-config
        mountPath: /etc/onepanel
        readOnly: true
ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
routes:
  - match:
      - uri:
          prefix: /sys/filesyncer
    route:
      - destination:
          port:
            number: 8888
  - match:
      - uri:
          regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
      - queryParams:
          id:
            regex: \d+.*
    route:
      - destination:
          port:
            number: 8080
  - match:
      - uri:
          prefix: /
    route:
      - destination:
          port:
            number: 80
volumeClaimTemplates:
  - metadata:
      name: db
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 20Gi
# DAG Workflow to be executed once a Workspace action completes (optional)
# Uncomment the lines below if you want to send Slack notifications
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#    - name: main
#      dag:
#        tasks:
#          - name: slack-notify
#            template: slack-notify
#    - name: slack-notify
#      container:
#        image: technosophos/slack-notify
#        args:
#          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#        command:
#          - sh
#          - -c
metadata:
  name: CVAT
  kind: Workspace
  version: 20210107094725
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  # Workspace arguments
  arguments:
    parameters:
      - name: sync-directory
        displayName: Directory to sync raw input and training output
        value: workflow-data
        hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:0.17.0_cvat.1.0.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /cvat/data
        - name: CVAT_SHARE_DIR
          value: /share
        - name: CVAT_DATA_DIR
          value: /cvat/data
        - name: CVAT_MEDIA_DATA_DIR
          value: /cvat/data/data
        - name: CVAT_KEYS_DIR
          value: /cvat/data/keys
        - name: CVAT_MODELS_DIR
          value: /cvat/data/models
        - name: CVAT_LOGS_DIR
          value: /cvat/logs
        - name: ONEPANEL_SYNC_DIRECTORY
          value: '{{workspace.parameters.sync-directory}}'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
        - name: ONEPANEL_MAIN_CONTAINER
          value: 'true'
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: cvat-data
          mountPath: /cvat
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:0.17.0_cvat.1.0.0
      ports:
        - containerPort: 80
          name: http
    # You can add multiple FileSyncer sidecar containers if needed
    - name: filesyncer
      image: onepanel/filesyncer:0.17.0
      imagePullPolicy: Always
      args:
        - download
        - -server-prefix=/sys/filesyncer
      env:
        - name: FS_PATH
          value: /mnt/share
        - name: FS_PREFIX
          value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
      volumeMounts:
        - name: share
          mountPath: /mnt/share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
    - name: fs
      port: 8888
      protocol: TCP
      targetPort: 8888
  routes:
    - match:
        - uri:
            prefix: /sys/filesyncer
      route:
        - destination:
            port:
              number: 8888
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  volumeClaimTemplates:
    - metadata:
        name: db
      spec:
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 20Gi
  # DAG Workflow to be executed once a Workspace action completes (optional)
  # Uncomment the lines below if you want to send Slack notifications
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
@@ -1,134 +1,141 @@
containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
      - name: POSTGRES_USER
        value: root
      - name: POSTGRES_DB
        value: cvat
      - name: POSTGRES_HOST_AUTH_METHOD
        value: trust
      - name: PGDATA
        value: /var/lib/psql/data
    ports:
      - containerPort: 5432
        name: tcp
    volumeMounts:
      - name: db
        mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
      - containerPort: 6379
        name: tcp
  - name: cvat
    image: onepanel/cvat:v0.18.0_cvat.1.0.0
    env:
      - name: DJANGO_MODWSGI_EXTRA_ARGS
        value: ""
      - name: ALLOWED_HOSTS
        value: '*'
      - name: CVAT_REDIS_HOST
        value: localhost
      - name: CVAT_POSTGRES_HOST
        value: localhost
      - name: CVAT_SHARE_URL
        value: /cvat/data
      - name: CVAT_SHARE_DIR
        value: /share
      - name: CVAT_DATA_DIR
        value: /cvat/data
      - name: CVAT_MEDIA_DATA_DIR
        value: /cvat/data/data
      - name: CVAT_KEYS_DIR
        value: /cvat/data/keys
      - name: CVAT_MODELS_DIR
        value: /cvat/data/models
      - name: CVAT_LOGS_DIR
        value: /cvat/logs
      - name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
        value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
      - name: CVAT_ONEPANEL_WORKFLOWS_LABEL
        value: 'key=used-by,value=cvat'
      - name: NVIDIA_VISIBLE_DEVICES
        value: all
      - name: NVIDIA_DRIVER_CAPABILITIES
        value: compute,utility
      - name: NVIDIA_REQUIRE_CUDA
        value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
      - name: ONEPANEL_MAIN_CONTAINER
        value: 'true'
    ports:
      - containerPort: 8080
        name: http
    volumeMounts:
      - name: cvat-data
        mountPath: /cvat
      - name: share
        mountPath: /share
      - name: sys-namespace-config
        mountPath: /etc/onepanel
        readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:v0.18.0_cvat.1.0.0
    ports:
      - containerPort: 80
        name: http
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.18.0
    imagePullPolicy: Always
    args:
      - server
      - -server-prefix=/sys/filesyncer
    volumeMounts:
      - name: share
        mountPath: /share
      - name: sys-namespace-config
        mountPath: /etc/onepanel
        readOnly: true
ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
routes:
  - match:
      - uri:
          prefix: /sys/filesyncer
    route:
      - destination:
          port:
            number: 8888
  - match:
      - uri:
          regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
      - queryParams:
          id:
            regex: \d+.*
    route:
      - destination:
          port:
            number: 8080
  - match:
      - uri:
          prefix: /
    route:
      - destination:
          port:
            number: 80
volumeClaimTemplates:
  - metadata:
      name: db
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 20Gi
metadata:
  name: CVAT
  kind: Workspace
  version: 20210129134326
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v0.18.0_cvat.1.0.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /cvat/data
        - name: CVAT_SHARE_DIR
          value: /share
        - name: CVAT_DATA_DIR
          value: /cvat/data
        - name: CVAT_MEDIA_DATA_DIR
          value: /cvat/data/data
        - name: CVAT_KEYS_DIR
          value: /cvat/data/keys
        - name: CVAT_MODELS_DIR
          value: /cvat/data/models
        - name: CVAT_LOGS_DIR
          value: /cvat/logs
        - name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
          value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
        - name: CVAT_ONEPANEL_WORKFLOWS_LABEL
          value: 'key=used-by,value=cvat'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
        - name: ONEPANEL_MAIN_CONTAINER
          value: 'true'
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: cvat-data
          mountPath: /cvat
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:v0.18.0_cvat.1.0.0
      ports:
        - containerPort: 80
          name: http
    - name: sys-filesyncer
      image: onepanel/filesyncer:v0.18.0
      imagePullPolicy: Always
      args:
        - server
        - -server-prefix=/sys/filesyncer
      volumeMounts:
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
    - name: fs
      port: 8888
      protocol: TCP
      targetPort: 8888
  routes:
    - match:
        - uri:
            prefix: /sys/filesyncer
      route:
        - destination:
            port:
              number: 8888
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  volumeClaimTemplates:
    - metadata:
        name: db
      spec:
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 20Gi
@@ -1,134 +1,141 @@
containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
      - name: POSTGRES_USER
        value: root
      - name: POSTGRES_DB
        value: cvat
      - name: POSTGRES_HOST_AUTH_METHOD
        value: trust
      - name: PGDATA
        value: /var/lib/psql/data
    ports:
      - containerPort: 5432
        name: tcp
    volumeMounts:
      - name: db
        mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
      - containerPort: 6379
        name: tcp
  - name: cvat
    image: onepanel/cvat:v0.19.0_cvat.1.0.0
    env:
      - name: DJANGO_MODWSGI_EXTRA_ARGS
        value: ""
      - name: ALLOWED_HOSTS
        value: '*'
      - name: CVAT_REDIS_HOST
        value: localhost
      - name: CVAT_POSTGRES_HOST
        value: localhost
      - name: CVAT_SHARE_URL
        value: /cvat/data
      - name: CVAT_SHARE_DIR
        value: /share
      - name: CVAT_DATA_DIR
        value: /cvat/data
      - name: CVAT_MEDIA_DATA_DIR
        value: /cvat/data/data
      - name: CVAT_KEYS_DIR
        value: /cvat/data/keys
      - name: CVAT_MODELS_DIR
        value: /cvat/data/models
      - name: CVAT_LOGS_DIR
        value: /cvat/logs
      - name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
        value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
      - name: CVAT_ONEPANEL_WORKFLOWS_LABEL
        value: 'key=used-by,value=cvat'
      - name: NVIDIA_VISIBLE_DEVICES
        value: all
      - name: NVIDIA_DRIVER_CAPABILITIES
        value: compute,utility
      - name: NVIDIA_REQUIRE_CUDA
        value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
      - name: ONEPANEL_MAIN_CONTAINER
        value: 'true'
    ports:
      - containerPort: 8080
        name: http
    volumeMounts:
      - name: cvat-data
        mountPath: /cvat
      - name: share
        mountPath: /share
      - name: sys-namespace-config
        mountPath: /etc/onepanel
        readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:v0.19.0_cvat.1.0.0
    ports:
      - containerPort: 80
        name: http
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.19.0
    imagePullPolicy: Always
    args:
      - server
      - -server-prefix=/sys/filesyncer
    volumeMounts:
      - name: share
        mountPath: /share
      - name: sys-namespace-config
        mountPath: /etc/onepanel
        readOnly: true
ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
routes:
  - match:
      - uri:
          prefix: /sys/filesyncer
    route:
      - destination:
          port:
            number: 8888
  - match:
      - uri:
          regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
      - queryParams:
          id:
            regex: \d+.*
    route:
      - destination:
          port:
            number: 8080
  - match:
      - uri:
          prefix: /
    route:
      - destination:
          port:
            number: 80
volumeClaimTemplates:
  - metadata:
      name: db
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 20Gi
metadata:
  name: CVAT
  kind: Workspace
  version: 20210224180017
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v0.19.0_cvat.1.0.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /cvat/data
        - name: CVAT_SHARE_DIR
          value: /share
        - name: CVAT_DATA_DIR
          value: /cvat/data
        - name: CVAT_MEDIA_DATA_DIR
          value: /cvat/data/data
        - name: CVAT_KEYS_DIR
          value: /cvat/data/keys
        - name: CVAT_MODELS_DIR
          value: /cvat/data/models
        - name: CVAT_LOGS_DIR
          value: /cvat/logs
        - name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
          value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
        - name: CVAT_ONEPANEL_WORKFLOWS_LABEL
          value: 'key=used-by,value=cvat'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
        - name: ONEPANEL_MAIN_CONTAINER
          value: 'true'
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: cvat-data
          mountPath: /cvat
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:v0.19.0_cvat.1.0.0
      ports:
        - containerPort: 80
          name: http
    - name: sys-filesyncer
      image: onepanel/filesyncer:v0.19.0
      imagePullPolicy: Always
      args:
        - server
        - -server-prefix=/sys/filesyncer
      volumeMounts:
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
    - name: fs
      port: 8888
      protocol: TCP
      targetPort: 8888
  routes:
    - match:
        - uri:
            prefix: /sys/filesyncer
      route:
        - destination:
            port:
              number: 8888
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  volumeClaimTemplates:
    - metadata:
        name: db
      spec:
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 20Gi
@@ -1,134 +1,141 @@
containers:
  - name: cvat-db
    image: postgres:10-alpine
    env:
      - name: POSTGRES_USER
        value: root
      - name: POSTGRES_DB
        value: cvat
      - name: POSTGRES_HOST_AUTH_METHOD
        value: trust
      - name: PGDATA
        value: /var/lib/psql/data
    ports:
      - containerPort: 5432
        name: tcp
    volumeMounts:
      - name: db
        mountPath: /var/lib/psql
  - name: cvat-redis
    image: redis:4.0-alpine
    ports:
      - containerPort: 6379
        name: tcp
  - name: cvat
    image: onepanel/cvat:v0.19.0_cvat.1.0.0
    env:
      - name: DJANGO_MODWSGI_EXTRA_ARGS
        value: ""
      - name: ALLOWED_HOSTS
        value: '*'
      - name: CVAT_REDIS_HOST
        value: localhost
      - name: CVAT_POSTGRES_HOST
        value: localhost
      - name: CVAT_SHARE_URL
        value: /cvat/data
      - name: CVAT_SHARE_DIR
        value: /share
      - name: CVAT_DATA_DIR
        value: /cvat/data
      - name: CVAT_MEDIA_DATA_DIR
        value: /cvat/data/data
      - name: CVAT_KEYS_DIR
        value: /cvat/data/keys
      - name: CVAT_MODELS_DIR
        value: /cvat/data/models
      - name: CVAT_LOGS_DIR
        value: /cvat/logs
      - name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
        value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
      - name: CVAT_ONEPANEL_WORKFLOWS_LABEL
        value: 'key=used-by,value=cvat'
      - name: NVIDIA_VISIBLE_DEVICES
        value: all
      - name: NVIDIA_DRIVER_CAPABILITIES
        value: compute,utility
      - name: NVIDIA_REQUIRE_CUDA
        value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
      - name: ONEPANEL_MAIN_CONTAINER
        value: 'true'
    ports:
      - containerPort: 8080
        name: http
    volumeMounts:
      - name: cvat-data
        mountPath: /cvat
      - name: share
        mountPath: /share
      - name: sys-namespace-config
        mountPath: /etc/onepanel
        readOnly: true
  - name: cvat-ui
    image: onepanel/cvat-ui:v0.19.0_cvat.1.0.0
    ports:
      - containerPort: 80
        name: http
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.20.0
    imagePullPolicy: Always
    args:
      - server
      - -server-prefix=/sys/filesyncer
    volumeMounts:
      - name: share
        mountPath: /share
      - name: sys-namespace-config
        mountPath: /etc/onepanel
        readOnly: true
ports:
  - name: cvat-ui
    port: 80
    protocol: TCP
    targetPort: 80
  - name: cvat
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
routes:
  - match:
      - uri:
          prefix: /sys/filesyncer
    route:
      - destination:
          port:
            number: 8888
  - match:
      - uri:
          regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
      - queryParams:
          id:
            regex: \d+.*
    route:
      - destination:
          port:
            number: 8080
  - match:
      - uri:
          prefix: /
    route:
      - destination:
          port:
            number: 80
volumeClaimTemplates:
  - metadata:
      name: db
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 20Gi
metadata:
  name: CVAT
  kind: Workspace
  version: 20210323175655
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v0.19.0_cvat.1.0.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /cvat/data
        - name: CVAT_SHARE_DIR
          value: /share
        - name: CVAT_DATA_DIR
          value: /cvat/data
        - name: CVAT_MEDIA_DATA_DIR
          value: /cvat/data/data
        - name: CVAT_KEYS_DIR
          value: /cvat/data/keys
        - name: CVAT_MODELS_DIR
          value: /cvat/data/models
        - name: CVAT_LOGS_DIR
          value: /cvat/logs
        - name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
          value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
        - name: CVAT_ONEPANEL_WORKFLOWS_LABEL
          value: 'key=used-by,value=cvat'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
        - name: ONEPANEL_MAIN_CONTAINER
          value: 'true'
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: cvat-data
          mountPath: /cvat
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:v0.19.0_cvat.1.0.0
      ports:
        - containerPort: 80
          name: http
    - name: sys-filesyncer
      image: onepanel/filesyncer:v0.20.0
      imagePullPolicy: Always
      args:
        - server
        - -server-prefix=/sys/filesyncer
      volumeMounts:
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
    - name: fs
      port: 8888
      protocol: TCP
      targetPort: 8888
  routes:
    - match:
        - uri:
            prefix: /sys/filesyncer
      route:
        - destination:
            port:
              number: 8888
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  volumeClaimTemplates:
    - metadata:
        name: db
      spec:
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 20Gi
db/yaml/workspaces/cvat/20210719190719.yaml (new file, 141 lines)
@@ -0,0 +1,141 @@
metadata:
  name: CVAT
  kind: Workspace
  version: 20210719190719
  action: update
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT)"
spec:
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v0.19.0_cvat.1.0.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /cvat/data
        - name: CVAT_SHARE_DIR
          value: /share
        - name: CVAT_DATA_DIR
          value: /cvat/data
        - name: CVAT_MEDIA_DATA_DIR
          value: /cvat/data/data
        - name: CVAT_KEYS_DIR
          value: /cvat/data/keys
        - name: CVAT_MODELS_DIR
          value: /cvat/data/models
        - name: CVAT_LOGS_DIR
          value: /cvat/logs
        - name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
          value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
        - name: CVAT_ONEPANEL_WORKFLOWS_LABEL
          value: 'key=used-by,value=cvat'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
        - name: ONEPANEL_MAIN_CONTAINER
          value: 'true'
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: cvat-data
          mountPath: /cvat
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:v0.19.0_cvat.1.0.0
      ports:
        - containerPort: 80
          name: http
    - name: sys-filesyncer
      image: onepanel/filesyncer:v1.0.0
      imagePullPolicy: Always
      args:
        - server
        - -server-prefix=/sys/filesyncer
      volumeMounts:
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
    - name: fs
      port: 8888
      protocol: TCP
      targetPort: 8888
  routes:
    - match:
        - uri:
            prefix: /sys/filesyncer
      route:
        - destination:
            port:
              number: 8888
    - match:
        - uri:
            regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  volumeClaimTemplates:
    - metadata:
        name: db
      spec:
        accessModes: ["ReadWriteOnce"]
        resources:
          requests:
            storage: 20Gi
db/yaml/workspaces/cvat_1_6_0/20211028205201.yaml (new file, 143 lines)
@@ -0,0 +1,143 @@
metadata:
  name: CVAT_1.6.0
  kind: Workspace
  version: 20211028205201
  action: create
  description: "Powerful and efficient Computer Vision Annotation Tool (CVAT 1.6.0)"
spec:
  containers:
    - name: cvat-db
      image: postgres:10-alpine
      env:
        - name: POSTGRES_USER
          value: root
        - name: POSTGRES_DB
          value: cvat
        - name: POSTGRES_HOST_AUTH_METHOD
          value: trust
        - name: PGDATA
          value: /var/lib/psql/data
      ports:
        - containerPort: 5432
          name: tcp
      volumeMounts:
        - name: db
          mountPath: /var/lib/psql
    - name: cvat-redis
      image: redis:4.0-alpine
      ports:
        - containerPort: 6379
          name: tcp
    - name: cvat
      image: onepanel/cvat:v1.0.2_cvat.1.6.0
      env:
        - name: DJANGO_MODWSGI_EXTRA_ARGS
          value: ""
        - name: ALLOWED_HOSTS
          value: '*'
        - name: CVAT_REDIS_HOST
          value: localhost
        - name: CVAT_POSTGRES_HOST
          value: localhost
        - name: CVAT_SHARE_URL
          value: /cvat/data
        - name: CVAT_SHARE_DIR
          value: /share
        - name: CVAT_DATA_DIR
          value: /cvat/data
        - name: CVAT_MEDIA_DATA_DIR
          value: /cvat/data/data
        - name: CVAT_KEYS_DIR
          value: /cvat/data/keys
        - name: CVAT_MODELS_DIR
          value: /cvat/data/models
        - name: CVAT_LOGS_DIR
          value: /cvat/logs
        - name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
          value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
        - name: CVAT_ONEPANEL_WORKFLOWS_LABEL
          value: 'key=used-by,value=cvat'
        - name: NVIDIA_VISIBLE_DEVICES
          value: all
        - name: NVIDIA_DRIVER_CAPABILITIES
          value: compute,utility
        - name: NVIDIA_REQUIRE_CUDA
          value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
        - name: ONEPANEL_MAIN_CONTAINER
          value: 'true'
        - name: CVAT_SERVERLESS
          value: 'true'   # quoted: Kubernetes env values must be strings, not YAML booleans
      ports:
        - containerPort: 8080
          name: http
      volumeMounts:
        - name: cvat-data
          mountPath: /cvat
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
    - name: cvat-ui
      image: onepanel/cvat-ui:v1.0.2_cvat.1.6.0
      ports:
        - containerPort: 80
          name: http
    - name: sys-filesyncer
      image: onepanel/filesyncer:v1.0.0
      imagePullPolicy: Always
      args:
        - server
        - -server-prefix=/sys/filesyncer
      volumeMounts:
        - name: share
          mountPath: /share
        - name: sys-namespace-config
          mountPath: /etc/onepanel
          readOnly: true
  ports:
    - name: cvat-ui
      port: 80
      protocol: TCP
      targetPort: 80
    - name: cvat
      port: 8080
      protocol: TCP
      targetPort: 8080
    - name: fs
      port: 8888
      protocol: TCP
      targetPort: 8888
  routes:
    - match:
        - uri:
            regex: \/?api.*|\/?git.*|\/?tensorflow.*|\/?onepanelio.*|\/?tracking.*|\/?auto_annotation.*|\/?analytics.*|\/?static.*|\/?admin.*|\/?documentation.*|\/?dextr.*|\/?reid.*|\/?django-rq.*
        - queryParams:
            id:
              regex: \d+.*
      route:
        - destination:
            port:
              number: 8080
    - match:
        - uri:
            prefix: /sys/filesyncer
      route:
        - destination:
            port:
              number: 8888
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  volumeClaimTemplates:
    - metadata:
        name: db
      spec:
        accessModes: [ "ReadWriteOnce" ]
        resources:
          requests:
            storage: 20Gi
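# Each CVAT revision above requests a 20Gi ReadWriteOnce volume for the Postgres data.
# If the cluster default is not appropriate, a storageClassName can be added to the
# claim template. A minimal sketch, assuming a cluster that exposes a class named "ssd"
# (hypothetical); omit storageClassName to keep the cluster default:
#  volumeClaimTemplates:
#    - metadata:
#        name: db
#      spec:
#        accessModes: ["ReadWriteOnce"]
#        storageClassName: ssd
#        resources:
#          requests:
#            storage: 100Gi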
db/yaml/workspaces/jupyterlab/20200525160514.yaml (new file, 64 lines)
@@ -0,0 +1,64 @@
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20200525160514
  action: create
  description: "Interactive development environment for notebooks"
spec:
  # Docker containers that are part of the Workspace
  containers:
    - name: jupyterlab-tensorflow
      image: jupyter/tensorflow-notebook
      command: [start.sh, jupyter]
      env:
        - name: tornado
          value: "{ 'headers': { 'Content-Security-Policy': \"frame-ancestors * 'self'\" } }"
      args:
        - lab
        - --LabApp.token=''
        - --LabApp.allow_remote_access=True
        - --LabApp.allow_origin="*"
        - --LabApp.disable_check_xsrf=True
        - --LabApp.trust_xheaders=True
        - --LabApp.tornado_settings=$(tornado)
        - --notebook-dir='/data'
      ports:
        - containerPort: 8888
          name: jupyterlab
      # Volumes to be mounted in this container
      # Onepanel will automatically create these volumes and mount them to the container
      volumeMounts:
        - name: data
          mountPath: /data
  # Ports that need to be exposed (a sketch of adding another port/route pair follows this manifest)
  ports:
    - name: jupyterlab
      port: 80
      protocol: TCP
      targetPort: 8888
  # Routes that will map to ports
  routes:
    - match:
        - uri:
            prefix: /
      route:
        - destination:
            port:
              number: 80
  # DAG Workflow to be executed once a Workspace action completes
  # postExecutionWorkflow:
  #   entrypoint: main
  #   templates:
  #     - name: main
  #       dag:
  #         tasks:
  #           - name: slack-notify
  #             template: slack-notify
  #     - name: slack-notify
  #       container:
  #         image: technosophos/slack-notify
  #         args:
  #           - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #         command:
  #           - sh
  #           - -c
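# In the manifest above, each entry under spec.ports maps a service port to a container
# port, and each routes entry forwards matching URIs to one of those service ports. A
# minimal sketch of exposing one more tool, assuming a hypothetical server listening on
# container port 8050; the name and prefix below are illustrative only:
#  ports:
#    - name: dash
#      port: 8050
#      protocol: TCP
#      targetPort: 8050
#  routes:
#    - match:
#        - uri:
#            prefix: /dash
#      route:
#        - destination:
#            port:
#              number: 8050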
db/yaml/workspaces/jupyterlab/20200821162630.yaml (new file, 65 lines)
@@ -0,0 +1,65 @@
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20200821162630
  action: update
  description: "Interactive development environment for notebooks"
spec:
  # Docker containers that are part of the Workspace
  containers:
    - name: jupyterlab-tensorflow
      image: onepanel/jupyterlab:1.0.1
      command: ["/bin/bash", "-c", "start.sh jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
      env:
        - name: tornado
          value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
      args:
      ports:
        - containerPort: 8888
          name: jupyterlab
        - containerPort: 6006
          name: tensorboard
      volumeMounts:
        - name: data
          mountPath: /data
  ports:
    - name: jupyterlab
      port: 80
      protocol: TCP
      targetPort: 8888
    - name: tensorboard
      port: 6006
      protocol: TCP
      targetPort: 6006
  routes:
    - match:
        - uri:
            prefix: /tensorboard
      route:
        - destination:
            port:
              number: 6006
    - match:
        - uri:
            prefix: / #jupyter runs at the default route
      route:
        - destination:
            port:
              number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
@@ -1,58 +1,65 @@
# Docker containers that are part of the Workspace
containers:
  - name: jupyterlab-tensorflow
    image: onepanel/jupyterlab:1.0.1
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
    env:
      - name: tornado
        value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
    args:
    ports:
      - containerPort: 8888
        name: jupyterlab
      - containerPort: 6006
        name: tensorboard
    volumeMounts:
      - name: data
        mountPath: /data
ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
routes:
  - match:
      - uri:
          prefix: /tensorboard
    route:
      - destination:
          port:
            number: 6006
  - match:
      - uri:
          prefix: / #jupyter runs at the default route
    route:
      - destination:
          port:
            number: 80
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#    - name: main
#      dag:
#        tasks:
#          - name: slack-notify
#            template: slack-notify
#    - name: slack-notify
#      container:
#        image: technosophos/slack-notify
#        args:
#          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#        command:
#          - sh
#          - -c
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20200929153931
  action: update
  description: "Interactive development environment for notebooks"
spec:
  # Docker containers that are part of the Workspace
  containers:
    - name: jupyterlab-tensorflow
      image: onepanel/jupyterlab:1.0.1
      command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
      env:
        - name: tornado
          value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
      args:
      ports:
        - containerPort: 8888
          name: jupyterlab
        - containerPort: 6006
          name: tensorboard
      volumeMounts:
        - name: data
          mountPath: /data
  ports:
    - name: jupyterlab
      port: 80
      protocol: TCP
      targetPort: 8888
    - name: tensorboard
      port: 6006
      protocol: TCP
      targetPort: 6006
  routes:
    - match:
        - uri:
            prefix: /tensorboard
      route:
        - destination:
            port:
              number: 6006
    - match:
        - uri:
            prefix: / #jupyter runs at the default route
      route:
        - destination:
            port:
              number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
@@ -1,77 +1,84 @@
# Docker containers that are part of the Workspace
containers:
  - name: jupyterlab
    image: onepanel/jupyterlab:1.0.1
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
    env:
      - name: tornado
        value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
    ports:
      - containerPort: 8888
        name: jupyterlab
      - containerPort: 6006
        name: tensorboard
    volumeMounts:
      - name: data
        mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
            - /bin/sh
            - -c
            - >
              condayml="/data/.environment.yml";
              jupytertxt="/data/.jupexported.txt";
              if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
              if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
            - /bin/sh
            - -c
            - >
              conda env export > /data/.environment.yml -n base;
              jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
              cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
routes:
  - match:
      - uri:
          prefix: /tensorboard
    route:
      - destination:
          port:
            number: 6006
  - match:
      - uri:
          prefix: / #jupyter runs at the default route
    route:
      - destination:
          port:
            number: 80
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#    - name: main
#      dag:
#        tasks:
#          - name: slack-notify
#            template: slack-notify
#    - name: slack-notify
#      container:
#        image: technosophos/slack-notify
#        args:
#          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#        command:
#          - sh
#          - -c
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20201028145442
  action: update
  description: "Interactive development environment for notebooks"
spec:
  # Docker containers that are part of the Workspace
  containers:
    - name: jupyterlab
      image: onepanel/jupyterlab:1.0.1
      command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
      env:
        - name: tornado
          value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
      ports:
        - containerPort: 8888
          name: jupyterlab
        - containerPort: 6006
          name: tensorboard
      volumeMounts:
        - name: data
          mountPath: /data
      lifecycle:
        postStart:
          exec:
            command:
              - /bin/sh
              - -c
              - >
                condayml="/data/.environment.yml";
                jupytertxt="/data/.jupexported.txt";
                if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
                if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
        preStop:
          exec:
            command:
              - /bin/sh
              - -c
              - >
                conda env export > /data/.environment.yml -n base;
                jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
                cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
  ports:
    - name: jupyterlab
      port: 80
      protocol: TCP
      targetPort: 8888
    - name: tensorboard
      port: 6006
      protocol: TCP
      targetPort: 6006
  routes:
    - match:
        - uri:
            prefix: /tensorboard
      route:
        - destination:
            port:
              number: 6006
    - match:
        - uri:
            prefix: / #jupyter runs at the default route
      route:
        - destination:
            port:
              number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #    - name: main
  #      dag:
  #        tasks:
  #          - name: slack-notify
  #            template: slack-notify
  #    - name: slack-notify
  #      container:
  #        image: technosophos/slack-notify
  #        args:
  #          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #        command:
  #          - sh
  #          - -c
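# The lifecycle hooks in the manifest above persist the conda environment and JupyterLab
# extensions across workspace pause/resume: preStop exports state to /data, postStart
# restores it. The same pattern could cover plain pip packages; a sketch under the
# assumption that /data survives restarts (as it does for the hooks above), with the
# file name .requirements.txt chosen arbitrarily. In practice these commands would be
# appended to the existing hook scripts rather than defined as a separate lifecycle block:
#    lifecycle:
#      preStop:
#        exec:
#          command:
#            - /bin/sh
#            - -c
#            - pip freeze > /data/.requirements.txt
#      postStart:
#        exec:
#          command:
#            - /bin/sh
#            - -c
#            - if [ -f /data/.requirements.txt ]; then pip install -r /data/.requirements.txt; fi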
@@ -1,79 +1,86 @@
# Docker containers that are part of the Workspace
containers:
  - name: jupyterlab
    image: onepanel/jupyterlab:1.0.1
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
    env:
      - name: tornado
        value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
      - name: TENSORBOARD_PROXY_URL
        value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
    ports:
      - containerPort: 8888
        name: jupyterlab
      - containerPort: 6006
        name: tensorboard
    volumeMounts:
      - name: data
        mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
            - /bin/sh
            - -c
            - >
              condayml="/data/.environment.yml";
              jupytertxt="/data/.jupexported.txt";
              if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
              if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
            - /bin/sh
            - -c
            - >
              conda env export > /data/.environment.yml -n base;
              jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
              cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
routes:
  - match:
      - uri:
          prefix: /tensorboard
    route:
      - destination:
          port:
            number: 6006
  - match:
      - uri:
          prefix: / #jupyter runs at the default route
    route:
      - destination:
          port:
            number: 80
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#    - name: main
#      dag:
#        tasks:
#          - name: slack-notify
#            template: slack-notify
#    - name: slack-notify
#      container:
#        image: technosophos/slack-notify
#        args:
#          - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#        command:
#          - sh
#          - -c
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20201031165106
  action: update
  description: "Interactive development environment for notebooks"
spec:
  # Docker containers that are part of the Workspace
  containers:
  - name: jupyterlab
    image: onepanel/jupyterlab:1.0.1
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
    env:
    - name: tornado
      value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
    - name: TENSORBOARD_PROXY_URL
      value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
    ports:
    - containerPort: 8888
      name: jupyterlab
    - containerPort: 6006
      name: tensorboard
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            jupytertxt="/data/.jupexported.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
            cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
  ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
  routes:
  - match:
    - uri:
        prefix: /tensorboard
    route:
    - destination:
        port:
          number: 6006
  - match:
    - uri:
        prefix: / #jupyter runs at the default route
    route:
    - destination:
        port:
          number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
@@ -1,80 +1,87 @@
# Docker containers that are part of the Workspace
containers:
- name: jupyterlab
  image: onepanel/jupyterlab:1.0.1
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
  workingDir: /data
  env:
  - name: tornado
    value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
  - name: TENSORBOARD_PROXY_URL
    value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
  ports:
  - containerPort: 8888
    name: jupyterlab
  - containerPort: 6006
    name: tensorboard
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          jupytertxt="/data/.jupexported.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
          cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
ports:
- name: jupyterlab
  port: 80
  protocol: TCP
  targetPort: 8888
- name: tensorboard
  port: 6006
  protocol: TCP
  targetPort: 6006
routes:
- match:
  - uri:
      prefix: /tensorboard
  route:
  - destination:
      port:
        number: 6006
- match:
  - uri:
      prefix: / #jupyter runs at the default route
  route:
  - destination:
      port:
        number: 80
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#  - name: main
#    dag:
#      tasks:
#      - name: slack-notify
#        template: slack-notify
#  - name: slack-notify
#    container:
#      image: technosophos/slack-notify
#      args:
#      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#      command:
#      - sh
#      - -c
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20201214133458
  action: update
  description: "Interactive development environment for notebooks"
spec:
  # Docker containers that are part of the Workspace
  containers:
  - name: jupyterlab
    image: onepanel/jupyterlab:1.0.1
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * \'self\'\"}}' --notebook-dir='/data' --allow-root"]
    workingDir: /data
    env:
    - name: tornado
      value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ \'self'\"}}'"
    - name: TENSORBOARD_PROXY_URL
      value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
    ports:
    - containerPort: 8888
      name: jupyterlab
    - containerPort: 6006
      name: tensorboard
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            jupytertxt="/data/.jupexported.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
            cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
  ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
  routes:
  - match:
    - uri:
        prefix: /tensorboard
    route:
    - destination:
        port:
          number: 6006
  - match:
    - uri:
        prefix: / #jupyter runs at the default route
    route:
    - destination:
        port:
          number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
@@ -1,93 +1,100 @@
# Docker containers that are part of the Workspace
containers:
- name: jupyterlab
  image: onepanel/dl:0.17.0
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
  workingDir: /data
  env:
  - name: tornado
    value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
  - name: TENSORBOARD_PROXY_URL
    value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
  ports:
  - containerPort: 8888
    name: jupyterlab
  - containerPort: 6006
    name: tensorboard
  - containerPort: 8080
    name: nni
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          jupytertxt="/data/.jupexported.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
          cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
ports:
- name: jupyterlab
  port: 80
  protocol: TCP
  targetPort: 8888
- name: tensorboard
  port: 6006
  protocol: TCP
  targetPort: 6006
- name: nni
  port: 8080
  protocol: TCP
  targetPort: 8080
routes:
- match:
  - uri:
      prefix: /tensorboard
  route:
  - destination:
      port:
        number: 6006
- match:
  - uri:
      prefix: /nni
  route:
  - destination:
      port:
        number: 8080
- match:
  - uri:
      prefix: / #jupyter runs at the default route
  route:
  - destination:
      port:
        number: 80
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#  - name: main
#    dag:
#      tasks:
#      - name: slack-notify
#        template: slack-notify
#  - name: slack-notify
#    container:
#      image: technosophos/slack-notify
#      args:
#      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#      command:
#      - sh
#      - -c
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20201229205644
  action: update
  description: "Interactive development environment for notebooks"
spec:
  # Docker containers that are part of the Workspace
  containers:
  - name: jupyterlab
    image: onepanel/dl:0.17.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
    workingDir: /data
    env:
    - name: tornado
      value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
    - name: TENSORBOARD_PROXY_URL
      value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
    ports:
    - containerPort: 8888
      name: jupyterlab
    - containerPort: 6006
      name: tensorboard
    - containerPort: 8080
      name: nni
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            jupytertxt="/data/.jupexported.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
            cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
  ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
  - name: nni
    port: 8080
    protocol: TCP
    targetPort: 8080
  routes:
  - match:
    - uri:
        prefix: /tensorboard
    route:
    - destination:
        port:
          number: 6006
  - match:
    - uri:
        prefix: /nni
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: / #jupyter runs at the default route
    route:
    - destination:
        port:
          number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
@@ -1,101 +1,108 @@
containers:
- name: jupyterlab
  image: onepanel/dl:0.17.0
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
  workingDir: /data
  env:
  - name: tornado
    value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
  - name: TENSORBOARD_PROXY_URL
    value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
  - name: ONEPANEL_MAIN_CONTAINER
    value: 'true'
  ports:
  - containerPort: 8888
    name: jupyterlab
  - containerPort: 6006
    name: tensorboard
  - containerPort: 8080
    name: nni
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          jupytertxt="/data/.jupexported.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
          cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
- name: sys-filesyncer
  image: onepanel/filesyncer:v0.18.0
  imagePullPolicy: Always
  args:
  - server
  - -host=localhost:8889
  - -server-prefix=/sys/filesyncer
  volumeMounts:
  - name: data
    mountPath: /data
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: jupyterlab
  port: 80
  protocol: TCP
  targetPort: 8888
- name: tensorboard
  port: 6006
  protocol: TCP
  targetPort: 6006
- name: nni
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8889
  protocol: TCP
  targetPort: 8889
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8889
- match:
  - uri:
      prefix: /tensorboard
  route:
  - destination:
      port:
        number: 6006
- match:
  - uri:
      prefix: /nni
  route:
  - destination:
      port:
        number: 8080
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 80
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20210129142057
  action: update
  description: "Interactive development environment for notebooks"
spec:
  containers:
  - name: jupyterlab
    image: onepanel/dl:0.17.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
    workingDir: /data
    env:
    - name: tornado
      value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
    - name: TENSORBOARD_PROXY_URL
      value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
    - name: ONEPANEL_MAIN_CONTAINER
      value: 'true'
    ports:
    - containerPort: 8888
      name: jupyterlab
    - containerPort: 6006
      name: tensorboard
    - containerPort: 8080
      name: nni
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            jupytertxt="/data/.jupexported.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
            cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.18.0
    imagePullPolicy: Always
    args:
    - server
    - -host=localhost:8889
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
  - name: nni
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8889
    protocol: TCP
    targetPort: 8889
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8889
  - match:
    - uri:
        prefix: /tensorboard
    route:
    - destination:
        port:
          number: 6006
  - match:
    - uri:
        prefix: /nni
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
@@ -1,101 +1,108 @@
containers:
- name: jupyterlab
  image: onepanel/dl:0.17.0
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
  workingDir: /data
  env:
  - name: tornado
    value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
  - name: TENSORBOARD_PROXY_URL
    value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
  - name: ONEPANEL_MAIN_CONTAINER
    value: 'true'
  ports:
  - containerPort: 8888
    name: jupyterlab
  - containerPort: 6006
    name: tensorboard
  - containerPort: 8080
    name: nni
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          jupytertxt="/data/.jupexported.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
          cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
- name: sys-filesyncer
  image: onepanel/filesyncer:v0.19.0
  imagePullPolicy: Always
  args:
  - server
  - -host=localhost:8889
  - -server-prefix=/sys/filesyncer
  volumeMounts:
  - name: data
    mountPath: /data
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: jupyterlab
  port: 80
  protocol: TCP
  targetPort: 8888
- name: tensorboard
  port: 6006
  protocol: TCP
  targetPort: 6006
- name: nni
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8889
  protocol: TCP
  targetPort: 8889
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8889
- match:
  - uri:
      prefix: /tensorboard
  route:
  - destination:
      port:
        number: 6006
- match:
  - uri:
      prefix: /nni
  route:
  - destination:
      port:
        number: 8080
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 80
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20210224180017
  action: update
  description: "Interactive development environment for notebooks"
spec:
  containers:
  - name: jupyterlab
    image: onepanel/dl:0.17.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
    workingDir: /data
    env:
    - name: tornado
      value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
    - name: TENSORBOARD_PROXY_URL
      value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
    - name: ONEPANEL_MAIN_CONTAINER
      value: 'true'
    ports:
    - containerPort: 8888
      name: jupyterlab
    - containerPort: 6006
      name: tensorboard
    - containerPort: 8080
      name: nni
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            jupytertxt="/data/.jupexported.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
            cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.19.0
    imagePullPolicy: Always
    args:
    - server
    - -host=localhost:8889
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
  - name: nni
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8889
    protocol: TCP
    targetPort: 8889
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8889
  - match:
    - uri:
        prefix: /tensorboard
    route:
    - destination:
        port:
          number: 6006
  - match:
    - uri:
        prefix: /nni
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
@@ -1,101 +1,108 @@
containers:
- name: jupyterlab
  image: onepanel/dl:v0.20.0
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
  workingDir: /data
  env:
  - name: tornado
    value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
  - name: TENSORBOARD_PROXY_URL
    value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
  - name: ONEPANEL_MAIN_CONTAINER
    value: 'true'
  ports:
  - containerPort: 8888
    name: jupyterlab
  - containerPort: 6006
    name: tensorboard
  - containerPort: 8080
    name: nni
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          jupytertxt="/data/.jupexported.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
          cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
- name: sys-filesyncer
  image: onepanel/filesyncer:v0.20.0
  imagePullPolicy: Always
  args:
  - server
  - -host=localhost:8889
  - -server-prefix=/sys/filesyncer
  volumeMounts:
  - name: data
    mountPath: /data
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: jupyterlab
  port: 80
  protocol: TCP
  targetPort: 8888
- name: tensorboard
  port: 6006
  protocol: TCP
  targetPort: 6006
- name: nni
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8889
  protocol: TCP
  targetPort: 8889
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8889
- match:
  - uri:
      prefix: /tensorboard
  route:
  - destination:
      port:
        number: 6006
- match:
  - uri:
      prefix: /nni
  route:
  - destination:
      port:
        number: 8080
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 80
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20210323175655
  action: update
  description: "Interactive development environment for notebooks"
spec:
  containers:
  - name: jupyterlab
    image: onepanel/dl:v0.20.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
    workingDir: /data
    env:
    - name: tornado
      value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
    - name: TENSORBOARD_PROXY_URL
      value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
    - name: ONEPANEL_MAIN_CONTAINER
      value: 'true'
    ports:
    - containerPort: 8888
      name: jupyterlab
    - containerPort: 6006
      name: tensorboard
    - containerPort: 8080
      name: nni
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            jupytertxt="/data/.jupexported.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
            cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.20.0
    imagePullPolicy: Always
    args:
    - server
    - -host=localhost:8889
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
  - name: nni
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8889
    protocol: TCP
    targetPort: 8889
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8889
  - match:
    - uri:
        prefix: /tensorboard
    route:
    - destination:
        port:
          number: 6006
  - match:
    - uri:
        prefix: /nni
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
108 db/yaml/workspaces/jupyterlab/20210719190719.yaml Normal file
@@ -0,0 +1,108 @@
metadata:
  name: JupyterLab
  kind: Workspace
  version: 20210719190719
  action: update
  description: "Interactive development environment for notebooks"
spec:
  containers:
  - name: jupyterlab
    image: onepanel/dl:v0.20.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
    workingDir: /data
    env:
    - name: tornado
      value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
    - name: TENSORBOARD_PROXY_URL
      value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
    - name: ONEPANEL_MAIN_CONTAINER
      value: 'true'
    ports:
    - containerPort: 8888
      name: jupyterlab
    - containerPort: 6006
      name: tensorboard
    - containerPort: 8080
      name: nni
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            jupytertxt="/data/.jupexported.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
            cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
  - name: sys-filesyncer
    image: onepanel/filesyncer:v1.0.0
    imagePullPolicy: Always
    args:
    - server
    - -host=localhost:8889
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: jupyterlab
    port: 80
    protocol: TCP
    targetPort: 8888
  - name: tensorboard
    port: 6006
    protocol: TCP
    targetPort: 6006
  - name: nni
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8889
    protocol: TCP
    targetPort: 8889
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8889
  - match:
    - uri:
        prefix: /tensorboard
    route:
    - destination:
        port:
          number: 6006
  - match:
    - uri:
        prefix: /nni
    route:
    - destination:
        port:
          number: 8080
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
64 db/yaml/workspaces/vnc/20210414165510.yaml Normal file
@@ -0,0 +1,64 @@
metadata:
  name: "Deep Learning Desktop"
  kind: Workspace
  version: 20210414165510
  action: create
  description: "Deep learning desktop with VNC"
spec:
  arguments:
    parameters:
    # parameter screen-resolution allows users to select screen resolution
    - name: screen-resolution
      value: 1680x1050
      type: select.select
      displayName: Screen Resolution
      options:
      - name: 1280x1024
        value: 1280x1024
      - name: 1680x1050
        value: 1680x1050
      - name: 2880x1800
        value: 2880x1800
  containers:
  - name: ubuntu
    image: onepanel/vnc:dl-vnc
    env:
    - name: VNC_PASSWORDLESS
      value: true
    - name: VNC_RESOLUTION
      value: '{{workflow.parameters.screen-resolution}}'
    ports:
    - containerPort: 6901
      name: vnc
    volumeMounts:
    - name: data
      mountPath: /data
  ports:
  - name: vnc
    port: 80
    protocol: TCP
    targetPort: 6901
  routes:
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
88 db/yaml/workspaces/vnc/20210719190719.yaml Normal file
@@ -0,0 +1,88 @@
metadata:
  name: "Deep Learning Desktop"
  kind: Workspace
  version: 20210719190719
  action: update
  description: "Deep learning desktop with VNC"
spec:
  arguments:
    parameters:
    # parameter screen-resolution allows users to select screen resolution
    - name: screen-resolution
      value: 1680x1050
      type: select.select
      displayName: Screen Resolution
      options:
      - name: 1280x1024
        value: 1280x1024
      - name: 1680x1050
        value: 1680x1050
      - name: 2880x1800
        value: 2880x1800
  containers:
  - name: ubuntu
    image: onepanel/vnc:dl-vnc
    env:
    - name: VNC_PASSWORDLESS
      value: true
    - name: VNC_RESOLUTION
      value: '{{workflow.parameters.screen-resolution}}'
    ports:
    - containerPort: 6901
      name: vnc
    volumeMounts:
    - name: data
      mountPath: /data
  - name: sys-filesyncer
    image: onepanel/filesyncer:v1.0.0
    imagePullPolicy: Always
    args:
    - server
    - -host=localhost:8889
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: vnc
    port: 80
    protocol: TCP
    targetPort: 6901
  - name: fs
    port: 8889
    protocol: TCP
    targetPort: 8889
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8889
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 80
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
@@ -1,41 +1,48 @@
# Docker containers that are part of the Workspace
containers:
- name: vscode
  image: onepanel/vscode:1.0.0
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
  ports:
  - containerPort: 8080
    name: vscode
  volumeMounts:
  - name: data
    mountPath: /data
ports:
- name: vscode
  port: 8080
  protocol: TCP
  targetPort: 8080
routes:
- match:
  - uri:
      prefix: / #vscode runs at the default route
  route:
  - destination:
      port:
        number: 8080
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#  - name: main
#    dag:
#      tasks:
#      - name: slack-notify
#        template: slack-notify
#  - name: slack-notify
#    container:
#      image: technosophos/slack-notify
#      args:
#      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#      command:
#      - sh
#      - -c
metadata:
  name: "Visual Studio Code"
  kind: Workspace
  version: 20200929144301
  action: create
  description: "Open source code editor"
spec:
  # Docker containers that are part of the Workspace
  containers:
  - name: vscode
    image: onepanel/vscode:1.0.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
    ports:
    - containerPort: 8080
      name: vscode
    volumeMounts:
    - name: data
      mountPath: /data
  ports:
  - name: vscode
    port: 8080
    protocol: TCP
    targetPort: 8080
  routes:
  - match:
    - uri:
        prefix: / #vscode runs at the default route
    route:
    - destination:
        port:
          number: 8080
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
@@ -1,60 +1,66 @@
# Docker containers that are part of the Workspace
containers:
- name: vscode
  image: onepanel/vscode:1.0.0
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
  ports:
  - containerPort: 8080
    name: vscode
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          vscodetxt="/data/.vscode-extensions.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
ports:
- name: vscode
  port: 8080
  protocol: TCP
  targetPort: 8080
routes:
- match:
  - uri:
      prefix: / #vscode runs at the default route
  route:
  - destination:
      port:
        number: 8080
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
#  entrypoint: main
#  templates:
#  - name: main
#    dag:
#      tasks:
#      - name: slack-notify
#        template: slack-notify
#  - name: slack-notify
#    container:
#      image: technosophos/slack-notify
#      args:
#      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
#      command:
#      - sh
#      - -c
metadata:
  name: "Visual Studio Code"
  kind: Workspace
  version: 20201028145443
  action: update
spec:
  # Docker containers that are part of the Workspace
  containers:
  - name: vscode
    image: onepanel/vscode:1.0.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
    ports:
    - containerPort: 8080
      name: vscode
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            vscodetxt="/data/.vscode-extensions.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
  ports:
  - name: vscode
    port: 8080
    protocol: TCP
    targetPort: 8080
  routes:
  - match:
    - uri:
        prefix: / #vscode runs at the default route
    route:
    - destination:
        port:
          number: 8080
  # DAG Workflow to be executed once a Workspace action completes (optional)
  #postExecutionWorkflow:
  #  entrypoint: main
  #  templates:
  #  - name: main
  #    dag:
  #      tasks:
  #      - name: slack-notify
  #        template: slack-notify
  #  - name: slack-notify
  #    container:
  #      image: technosophos/slack-notify
  #      args:
  #      - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
  #      command:
  #      - sh
  #      - -c
@@ -1,68 +1,74 @@
containers:
- name: vscode
  image: onepanel/vscode:1.0.0
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
  env:
  - name: ONEPANEL_MAIN_CONTAINER
    value: 'true'
  ports:
  - containerPort: 8080
    name: vscode
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          vscodetxt="/data/.vscode-extensions.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
- name: sys-filesyncer
  image: onepanel/filesyncer:v0.18.0
  imagePullPolicy: Always
  args:
  - server
  - -server-prefix=/sys/filesyncer
  volumeMounts:
  - name: data
    mountPath: /data
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: vscode
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8888
  protocol: TCP
  targetPort: 8888
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8888
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 8080
metadata:
  name: "Visual Studio Code"
  kind: Workspace
  version: 20210129152427
  action: update
spec:
  containers:
  - name: vscode
    image: onepanel/vscode:1.0.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
    env:
    - name: ONEPANEL_MAIN_CONTAINER
      value: 'true'
    ports:
    - containerPort: 8080
      name: vscode
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            vscodetxt="/data/.vscode-extensions.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.18.0
    imagePullPolicy: Always
    args:
    - server
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: vscode
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 8080
@@ -1,68 +1,74 @@
containers:
- name: vscode
  image: onepanel/vscode:1.0.0
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
  env:
  - name: ONEPANEL_MAIN_CONTAINER
    value: 'true'
  ports:
  - containerPort: 8080
    name: vscode
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          vscodetxt="/data/.vscode-extensions.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
- name: sys-filesyncer
  image: onepanel/filesyncer:v0.19.0
  imagePullPolicy: Always
  args:
  - server
  - -server-prefix=/sys/filesyncer
  volumeMounts:
  - name: data
    mountPath: /data
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: vscode
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8888
  protocol: TCP
  targetPort: 8888
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8888
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 8080
metadata:
  name: "Visual Studio Code"
  kind: Workspace
  version: 20210224180017
  action: update
spec:
  containers:
  - name: vscode
    image: onepanel/vscode:1.0.0
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
    env:
    - name: ONEPANEL_MAIN_CONTAINER
      value: 'true'
    ports:
    - containerPort: 8080
      name: vscode
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            vscodetxt="/data/.vscode-extensions.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.19.0
    imagePullPolicy: Always
    args:
    - server
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: vscode
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 8080
@@ -1,68 +1,74 @@
containers:
- name: vscode
  image: onepanel/vscode:v0.20.0_code-server.3.9.1
  command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
  env:
  - name: ONEPANEL_MAIN_CONTAINER
    value: 'true'
  ports:
  - containerPort: 8080
    name: vscode
  volumeMounts:
  - name: data
    mountPath: /data
  lifecycle:
    postStart:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          condayml="/data/.environment.yml";
          vscodetxt="/data/.vscode-extensions.txt";
          if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
          if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
    preStop:
      exec:
        command:
        - /bin/sh
        - -c
        - >
          conda env export > /data/.environment.yml -n base;
          code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
- name: sys-filesyncer
  image: onepanel/filesyncer:v0.20.0
  imagePullPolicy: Always
  args:
  - server
  - -server-prefix=/sys/filesyncer
  volumeMounts:
  - name: data
    mountPath: /data
  - name: sys-namespace-config
    mountPath: /etc/onepanel
    readOnly: true
ports:
- name: vscode
  port: 8080
  protocol: TCP
  targetPort: 8080
- name: fs
  port: 8888
  protocol: TCP
  targetPort: 8888
routes:
- match:
  - uri:
      prefix: /sys/filesyncer
  route:
  - destination:
      port:
        number: 8888
- match:
  - uri:
      prefix: /
  route:
  - destination:
      port:
        number: 8080
metadata:
  name: "Visual Studio Code"
  kind: Workspace
  version: 20210323175655
  action: update
spec:
  containers:
  - name: vscode
    image: onepanel/vscode:v0.20.0_code-server.3.9.1
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
    env:
    - name: ONEPANEL_MAIN_CONTAINER
      value: 'true'
    ports:
    - containerPort: 8080
      name: vscode
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            vscodetxt="/data/.vscode-extensions.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
  - name: sys-filesyncer
    image: onepanel/filesyncer:v0.20.0
    imagePullPolicy: Always
    args:
    - server
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: vscode
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 8080
74 db/yaml/workspaces/vscode/20210719190719.yaml Normal file
@@ -0,0 +1,74 @@
metadata:
  name: "Visual Studio Code"
  kind: Workspace
  version: 20210719190719
  action: update
spec:
  containers:
  - name: vscode
    image: onepanel/vscode:v0.20.0_code-server.3.9.1
    command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
    env:
    - name: ONEPANEL_MAIN_CONTAINER
      value: 'true'
    ports:
    - containerPort: 8080
      name: vscode
    volumeMounts:
    - name: data
      mountPath: /data
    lifecycle:
      postStart:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            condayml="/data/.environment.yml";
            vscodetxt="/data/.vscode-extensions.txt";
            if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
            if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
      preStop:
        exec:
          command:
          - /bin/sh
          - -c
          - >
            conda env export > /data/.environment.yml -n base;
            code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
  - name: sys-filesyncer
    image: onepanel/filesyncer:v1.0.0
    imagePullPolicy: Always
    args:
    - server
    - -server-prefix=/sys/filesyncer
    volumeMounts:
    - name: data
      mountPath: /data
    - name: sys-namespace-config
      mountPath: /etc/onepanel
      readOnly: true
  ports:
  - name: vscode
    port: 8080
    protocol: TCP
    targetPort: 8080
  - name: fs
    port: 8888
    protocol: TCP
    targetPort: 8888
  routes:
  - match:
    - uri:
        prefix: /sys/filesyncer
    route:
    - destination:
        port:
          number: 8888
  - match:
    - uri:
        prefix: /
    route:
    - destination:
        port:
          number: 8080
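Note how the version in each file's metadata matches the timestamp in its filename. A minimal sketch (not the repository's actual loader) of how such a versioned template file can be read and its metadata inspected, assuming only gopkg.in/yaml.v2, which is pinned in the go.sum changes below; the struct fields simply mirror the file's metadata block:

package main

import (
	"fmt"
	"io/ioutil"

	yaml "gopkg.in/yaml.v2"
)

type metadata struct {
	Name    string `yaml:"name"`
	Kind    string `yaml:"kind"`
	Version uint64 `yaml:"version"`
	Action  string `yaml:"action"`
}

type template struct {
	Metadata metadata               `yaml:"metadata"`
	Spec     map[string]interface{} `yaml:"spec"`
}

func main() {
	data, err := ioutil.ReadFile("db/yaml/workspaces/vscode/20210719190719.yaml")
	if err != nil {
		panic(err)
	}
	var t template
	if err := yaml.Unmarshal(data, &t); err != nil {
		panic(err)
	}
	// Metadata.Version equals the filename timestamp, which is how the
	// migrations line up with their YAML payloads.
	fmt.Printf("%s (%s): version %d, action %q\n",
		t.Metadata.Name, t.Metadata.Kind, t.Metadata.Version, t.Metadata.Action)
}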
77 go.sum
@@ -37,30 +37,40 @@ cloud.google.com/go/storage v1.10.0 h1:STgFzyU5/8miMl0//zKh2aQeTyeaUH3WN9bSUiJ09
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
github.com/Azure/go-autorest v14.0.0+incompatible h1:r/ug62X9o8vikt53/nkAPmFmzfSrCCAplPH7wa+mK0U=
github.com/Azure/go-autorest v14.0.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest/autorest v0.9.0 h1:MRvx8gncNaXJqOoLmhNjUAKh33JJF8LyxPhomEtOsjs=
github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI=
github.com/Azure/go-autorest/autorest v0.9.6 h1:5YWtOnckcudzIw8lPPBcWOnmIFWMtHci1ZWAZulMSx0=
github.com/Azure/go-autorest/autorest v0.9.6/go.mod h1:/FALq9T/kS7b5J5qsQ+RSTUdAmGFqi0vUdVNNx8q630=
github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
github.com/Azure/go-autorest/autorest/adal v0.8.2 h1:O1X4oexUxnZCaEUGsvMnr8ZGj8HI37tNezwY4npRqA0=
github.com/Azure/go-autorest/autorest/adal v0.8.2/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q=
github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA=
github.com/Azure/go-autorest/autorest/date v0.2.0 h1:yW+Zlqf26583pE43KhfnhFcdmSWlm5Ew6bxipnr/tbM=
github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g=
github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.3.0 h1:qJumjCaCudz+OcqE9/XtEPfvtOjOmKaui4EOpFI6zZc=
github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM=
github.com/Azure/go-autorest/logger v0.1.0 h1:ruG4BSDXONFRrZZJ2GUXDiUyVpayPmb1GnWeHDdaNKY=
github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc=
github.com/Azure/go-autorest/tracing v0.5.0 h1:TRn4WjSnkcSy5AEG3pnbtFSwNtwzjr4VYyQflFE619k=
github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0=
github.com/Masterminds/squirrel v1.1.0 h1:baP1qLdoQCeTw3ifCdOq2dkYc6vGcmRdaociKLbEJXs=
github.com/Masterminds/squirrel v1.1.0/go.mod h1:yaPeOnPG5ZRwL9oKdTsO/prlkPbXWZlRVMQ/gGlzIuA=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/UnnoTed/fileb0x v1.1.4/go.mod h1:X59xXT18tdNk/D6j+KZySratBsuKJauMtVuJ9cgOiZs=
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
@@ -84,6 +94,7 @@ github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkY
github.com/asaskevich/govalidator v0.0.0-20180720115003-f9ffefc3facf/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg=
github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535 h1:4daAzAu0S6Vi7/lbWECcX0j45yZReDZ56BQsrVBOEEY=
github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg=
github.com/awalterschulze/gographviz v0.0.0-20200901124122-0eecad45bd71/go.mod h1:/ynarkO/43wP/JM2Okn61e8WFMtdbtA8he7GJxW+SFM=
github.com/aws/aws-sdk-go v1.33.16/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
@@ -104,6 +115,7 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8=
github.com/colinmarc/hdfs v1.1.4-0.20180805212432-9746310a4d31 h1:ow7T77012NSZVW0uOWoQxz3yj9fHKYeZ4QmNrMtWMbM=
github.com/colinmarc/hdfs v1.1.4-0.20180805212432-9746310a4d31/go.mod h1:vSBumefK4HA5uiRSwNP+3ofgrEoScpCS2MMWcWXEuQ4=
github.com/corbym/gocrest v1.0.3/go.mod h1:maVFL5lbdS2PgfOQgGRWDYTeunSWQeiEgoNdTABShCs=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
@@ -125,22 +137,28 @@ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ3
github.com/dave/jennifer v1.4.1/go.mod h1:7jEdnm+qBcxl8PC0zyp7vxcpSRnzXSt9r39tpTVGlwA=
github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/docker/docker v0.7.3-0.20190327010347-be7ac8be2ae0/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=
github.com/docker/spdystream v0.0.0-20181023171402-6480d4af844c h1:ZfSZ3P3BedhKGUhzj7BQlPSU4OvT6tfOKe3DVHzOA7s=
github.com/docker/spdystream v0.0.0-20181023171402-6480d4af844c/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/elazarl/goproxy v0.0.0-20170405201442-c4fc26588b6e/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v0.0.0-20191011121108-aa519ddbe484 h1:pEtiCjIXx3RvGjlUJuCNxNOw0MNblyR9Wi+vJGBFh+8=
github.com/elazarl/goproxy v0.0.0-20191011121108-aa519ddbe484/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2 h1:dWB6v3RcOy03t/bUadywsbyrQwCqZeNIEX6M1OtSZOM=
github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emicklei/go-restful v2.9.5+incompatible h1:spTtZBk5DYEvbxMVutUuTyh1Ao2r4iyvLdACqsl/Ljk=
github.com/emicklei/go-restful v2.9.5+incompatible/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -148,6 +166,7 @@ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.m
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch v4.5.0+incompatible h1:ouOWdg56aJriqS0huScTkVXPC5IcNrDCXZ6OoTAWu7M=
github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8=
@@ -161,6 +180,7 @@ github.com/gavv/httpexpect/v2 v2.0.3/go.mod h1:LAoDcy8I/EXEtKJV6wMEJvOMAZVo0MfEk
github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo5dl+VrEg=
github.com/gdamore/tcell v1.3.0/go.mod h1:Hjvr+Ofd+gLglo7RYKxxnzCBmev3BzsS67MebKS4zMM=
github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gizak/termui/v3 v3.1.0/go.mod h1:bXQEBkJpzxUAKf0+xq9MSWAvWZlE7c+aidmyFlkYTrY=
github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
@@ -193,11 +213,13 @@ github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+
github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
github.com/go-openapi/jsonpointer v0.18.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
github.com/go-openapi/jsonpointer v0.19.3 h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg=
github.com/go-openapi/jsonreference v0.17.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I=
github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I=
github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
github.com/go-openapi/jsonreference v0.19.3 h1:5cxNfTy0UVC3X8JL5ymxzyoUZmo8iZb+jeTWn7tUa8o=
github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
github.com/go-openapi/loads v0.17.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
github.com/go-openapi/loads v0.18.0/go.mod h1:72tmFy5wsWx89uEVddd0RjRWPZm92WRLhf7AC+0+OOU=
@@ -274,12 +296,15 @@ github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7a
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/gogo/protobuf v1.2.2-0.20190723190241-65acae22fc9d/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls=
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
@@ -319,8 +344,11 @@ github.com/google/go-cmp v0.5.2 h1:X2ev0eStA3AbceY54o37/0PQ/UWqKEiiO2dKL5OPaFM=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g=
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.0.0 h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs=
github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
@@ -335,6 +363,7 @@ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
github.com/google/uuid v1.1.2 h1:EVhdT+1Kseyi1/pUmXKaFxYsDNy9RQYkMWRH68J/W7Y=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.1.0 h1:rVsPeBmXbYv4If/cumu1AzZPwV58q433hvONV1UEZoI=
@@ -342,6 +371,7 @@ github.com/googleapis/gnostic v0.1.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTV
github.com/googleapis/gnostic v0.4.1 h1:DLJCy1n/vrD4HPjOvYcT8aYQXpPIzoRZONaYwyycI+I=
github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg=
github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8=
github.com/gophercloud/gophercloud v0.7.0 h1:vhmQQEM2SbnGCg2/3EzQnQZ3V7+UCGy9s8exQCprNYg=
github.com/gophercloud/gophercloud v0.7.0/go.mod h1:gmC5oQqMDOMO1t1gq5DquX/yAU808e/4mzjjDA76+Ss=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/handlers v1.4.2 h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg=
@@ -365,25 +395,31 @@ github.com/grpc-ecosystem/grpc-gateway v1.14.6/go.mod h1:zdiPV4Yse/1gnckTHtghG4G
github.com/grpc-ecosystem/grpc-gateway/v2 v2.0.1 h1:X2vfSnm1WC8HEo0MBHZg2TcuDUHJj6kd1TmEAQncnSA=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.0.1/go.mod h1:oVMjMN64nzEcepv1kdZKgx1qNYt4Ro0Gqefiq2JWdis=
github.com/hashicorp/go-uuid v0.0.0-20180228145832-27454136f036/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE=
github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.3/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/iancoleman/strcase v0.1.1/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.8 h1:CGgOkSJeqMRmt0D9XLWExdT4m4F1vd3FV3VPt+0VxkQ=
github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imkira/go-interpol v1.0.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA=
github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
github.com/jcmturner/gofork v0.0.0-20180107083740-2aebee971930/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=
github.com/jcmturner/gofork v1.0.0 h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8=
github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA=
github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
@@ -394,6 +430,7 @@ github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
@@ -411,20 +448,26 @@ github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM52
github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs=
github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/labstack/echo v3.2.1+incompatible/go.mod h1:0INS7j/VjnFxD4E2wkz67b8cVwCLbBmJyDaka6Cmk1s=
github.com/labstack/gommon v0.2.7/go.mod h1:/tj9csK2iPSBvn+3NLM9e52usepMtrd5ilFYA+wQNJ4=
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq6+3iTQz8KNCLtVX6idSoTLdUw=
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o=
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.3.0 h1:/qkRGz8zljWiDcFvgpwUpwIAPu3r07TDvs3Rws+o/pU=
github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lucasb-eyer/go-colorful v1.0.2/go.mod h1:0MS4r+7BZKSJ5mw4/S5MPN+qHFF1fYclkSPilDOKW0s=
github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
@@ -451,22 +494,28 @@ github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzp
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U=
github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/goreman v0.3.5/go.mod h1:ahZuLhEo4pfYmf56GLNu/pjTxfeE389h43IHKMXz2Ys=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/minio/md5-simd v1.1.0/go.mod h1:XpBqgZULrMYD3R+M28PcmP0CkI7PEMzB3U77ZrKZ0Gw=
github.com/minio/minio-go/v6 v6.0.45 h1:aY4NI/DOgSbZiwGN3fEF4NAkC9An4bhaIWuJrQrRYew=
github.com/minio/minio-go/v6 v6.0.45/go.mod h1:qD0lajrGW49lKZLtXKtCB4X/qkMf0a5tBvN2PaZg7Gg=
github.com/minio/minio-go/v7 v7.0.2/go.mod h1:dJ80Mv2HeGkYLH1sqS/ksz07ON6csH3S6JUMSQ2zAns=
github.com/minio/sha256-simd v0.1.1 h1:5QHSlgo3nt5yKOJrC7W8w7X+NFl8cMPZm96iu8kKUJU=
github.com/minio/sha256-simd v0.1.1/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk=
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
@@ -479,9 +528,11 @@ github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:v
github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.11.0 h1:JAKSXpt1YjtLA7YpPiqO9ss6sNXEsPfSGdwN0UHqzrw=
github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.8.1 h1:C5Dqfs/LeauYDX0jJXIe2SWmwCbGzx9yF8C8xy3Lh34=
github.com/onsi/gomega v1.8.1/go.mod h1:Ho0h+IUsWyvy1OpqCwxlQ/21gkhVunqlU8fDGcoTdcA=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/pborman/getopt v0.0.0-20180729010549-6fdd0a2c7117/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o=
@@ -493,8 +544,10 @@ github.com/pelletier/go-toml v1.6.0/go.mod h1:5N711Q9dKgbdkxHL+MEfF31hpT7l0S0s/t
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
@@ -534,6 +587,7 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sanity-io/litter v1.2.0/go.mod h1:JF6pZUFgu2Q0sBZ+HSV35P8TVPI1TTzEwyu9FXAw2W4=
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
@@ -545,8 +599,10 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd
github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
@@ -563,6 +619,7 @@ github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0
github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
@@ -570,12 +627,14 @@ github.com/spf13/viper v1.6.2/go.mod h1:t3iDnF5Jlj76alVNuyFBk5oUMCvsrkbvZK0WQdfD
github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/testify v0.0.0-20161117074351-18a02ba4a312/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/tidwall/gjson v1.3.5/go.mod h1:P256ACg0Mn+j1RXIDXoss50DeIABTYK1PULOJHhxOls=
@@ -589,9 +648,11 @@ github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJ
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.0.0/go.mod h1:4vX61m6KN+xDduDNwXrhIAVZaZaZiQ1luJk8LWSxF3s=
github.com/valyala/fasttemplate v0.0.0-20170224212429-dcecefd839c4/go.mod h1:50wTf68f99/Zt14pr046Tgt3Lp2vLyFZKzbFXTOabXw=
github.com/valyala/fasttemplate v1.1.0 h1:RZqt0yGBsps8NGvLSGW804QQqCUYYLsaOjTVHy1Ocw4=
github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio=
github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw=
@@ -810,6 +871,7 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -872,6 +934,7 @@ golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.0.0-20190331200053-3d26580ed485/go.mod h1:2ltnJ7xHfj0zHS40VVPYEAAMTa3ZGguvHGBSJeRWqE0=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
@@ -963,22 +1026,30 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/go-playground/webhooks.v5 v5.15.0/go.mod h1:LZbya/qLVdbqDR1aKrGuWV6qbia2zCYSR5dpom2SInQ=
gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.54.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.57.0 h1:9unxIsFcTt4I55uWluz+UmL95q4kdJ0buvQ1ZIqVQww=
gopkg.in/ini.v1 v1.57.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/jcmturner/aescts.v1 v1.0.1 h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw=
gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo=
gopkg.in/jcmturner/dnsutils.v1 v1.0.1 h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM=
gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q=
gopkg.in/jcmturner/goidentity.v2 v2.0.0 h1:6Bmcdaxb0dD3HyHbo/MtJ2Q1wXLDuZJFwXZmuZvM+zw=
gopkg.in/jcmturner/goidentity.v2 v2.0.0/go.mod h1:vCwK9HeXksMeUmQ4SxDd1tRz4LejrKh3KRVjQWhjvZI=
gopkg.in/jcmturner/gokrb5.v5 v5.3.0 h1:RS1MYApX27Hx1Xw7NECs7XxGxxrm69/4OmaRuX9kwec=
gopkg.in/jcmturner/gokrb5.v5 v5.3.0/go.mod h1:oQz8Wc5GsctOTgCVyKad1Vw4TCWz5G6gfIQr88RPv4k=
gopkg.in/jcmturner/rpc.v0 v0.0.2 h1:wBTgrbL1qmLBUPsYVCqdJiI5aJgQhexmK+JkTHPUNJI=
gopkg.in/jcmturner/rpc.v0 v0.0.2/go.mod h1:NzMq6cRzR9lipgw7WxRBHNx5N8SifBuaCQsOT1kWY/E=
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22 h1:VpOs+IwYnYBaFnrNAeB8UUWtL3vEUnzSCL1nVjPhqrw=
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
@@ -995,9 +1066,11 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20190905181640-827449938966/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
@@ -1007,7 +1080,9 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
istio.io/api v0.0.0-20200107183329-ed4b507c54e1 h1:q4xggEkhMn4RMRo8AJVNmMtNzy514DGiAUxRDDKPhZU=
istio.io/api v0.0.0-20200107183329-ed4b507c54e1/go.mod h1:+cyHH83OwC0rFpwk8eXctzPNpiCAbB+r6kmMiAxxBHw=
istio.io/gogo-genproto v0.0.0-20190930162913-45029607206a h1:w7zILua2dnYo9CxImhpNW4NE/8ZxEoc/wfBfHrhUhrE=
istio.io/gogo-genproto v0.0.0-20190930162913-45029607206a/go.mod h1:OzpAts7jljZceG4Vqi5/zXy/pOg1b209T3jb7Nv5wIs=
k8s.io/api v0.17.8 h1:8JHlbqJ3A6sGhoacXfu/sASSD+HWWqVq67qt9lyB0kU=
k8s.io/api v0.17.8/go.mod h1:N++Llhs8kCixMUoCaXXAyMMPbo8dDVnh+IQ36xZV2/0=
@@ -1038,6 +1113,7 @@ k8s.io/gengo v0.0.0-20200114144118-36b2048a9120/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8
k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0=
k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8=
k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE=
k8s.io/klog/v2 v2.2.0 h1:XRvcwJozkgZ1UQJmfMGpvRthQHOvihEhYtDfAaxMz/A=
@@ -1074,6 +1150,7 @@ sigs.k8s.io/structured-merge-diff/v3 v3.0.0/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnM
sigs.k8s.io/structured-merge-diff/v4 v4.0.1 h1:YXTMot5Qz/X1iBRJhAt+vI+HVttY0WkSqqhKxQ0xVbA=
sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q=
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=
upper.io/db.v3 v3.6.3+incompatible h1:SJLWd7H56Vwm4rYa+cHAQDYWcvvOt1C/5PD/IIBZPW8=
upper.io/db.v3 v3.6.3+incompatible/go.mod h1:FgTdD24eBjJAbPKsQSiHUNgXjOR4Lub3u1UMHSIh82Y=
BIN img/features.png
Binary file not shown. (Before: 129 KiB, After: 302 KiB)
6 main.go
@@ -157,6 +157,8 @@ func startRPCServer(db *v1.DB, kubeConfig *v1.Config, sysConfig v1.SystemConfig,
    api.RegisterWorkspaceServiceServer(s, server.NewWorkspaceServer())
    api.RegisterConfigServiceServer(s, server.NewConfigServer())
    api.RegisterServiceServiceServer(s, server.NewServiceServer())
    api.RegisterFileServiceServer(s, server.NewFileServer())
    api.RegisterInferenceServiceServer(s, server.NewInferenceService())

    go func() {
        if err := s.Serve(lis); err != nil {
@@ -192,6 +194,8 @@ func startHTTPProxy() {
    registerHandler(api.RegisterWorkspaceServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
    registerHandler(api.RegisterConfigServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
    registerHandler(api.RegisterServiceServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
    registerHandler(api.RegisterFileServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
    registerHandler(api.RegisterInferenceServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)

    log.Printf("Starting HTTP proxy on port %v", *httpPort)
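A plausible shape for the registerHandler helper called in the hunk above (hypothetical — the real helper is defined elsewhere in main.go and not shown in this diff). Every generated RegisterXServiceHandlerFromEndpoint function from grpc-gateway shares the same signature, so a single adapter covers all of them:

package main

import (
	"context"
	"log"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
	"google.golang.org/grpc"
)

// registrar matches the generated grpc-gateway endpoint-registration functions,
// e.g. api.RegisterInferenceServiceHandlerFromEndpoint.
type registrar func(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) error

// registerHandler wires one service's REST handlers into the shared mux and
// fails fast if registration is impossible. This is a sketch of the pattern,
// not necessarily the exact implementation.
func registerHandler(register registrar, ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) {
	if err := register(ctx, mux, endpoint, opts); err != nil {
		log.Fatalf("Failed to register handler: %v", err)
	}
}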
@@ -284,6 +288,8 @@ func customHeaderMatcher(key string) (string, bool) {
    switch lowerCaseKey {
    case "onepanel-auth-token":
        return lowerCaseKey, true
    case "onepanel-access-token":
        return lowerCaseKey, true
    case "cookie":
        return lowerCaseKey, true
    default:
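The hunk above is truncated at the default branch. A typical completion — an assumption for illustration, not necessarily the repository's exact code — falls back to grpc-gateway's DefaultHeaderMatcher so standard headers keep their usual HTTP-to-gRPC mapping:

package main

import (
	"strings"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
)

// customHeaderMatcher forwards Onepanel's auth headers and cookies to the
// gRPC backend verbatim; all other headers use the default mapping rules.
// The default branch here is assumed, since the diff cuts off before it.
func customHeaderMatcher(key string) (string, bool) {
	lowerCaseKey := strings.ToLower(key)
	switch lowerCaseKey {
	case "onepanel-auth-token", "onepanel-access-token", "cookie":
		return lowerCaseKey, true
	default:
		return runtime.DefaultHeaderMatcher(key)
	}
}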
Some files were not shown because too many files have changed in this diff.