Compare commits

..

252 Commits

Author SHA1 Message Date
Andrey Melnikov
700b3bd512 Merge pull request #955 from Vafilor/feat/add.serving.variables
feat: add onepanel serving url to workspaces
2021-10-07 12:29:05 -07:00
Andrey Melnikov
3abdc54d3c feat: add onepanel serving url to workspaces 2021-10-07 12:24:01 -07:00
Rush Tehrani
f570a710ba Update README.md 2021-09-17 09:40:54 -07:00
Rush Tehrani
c922b708fc Merge pull request #953 from Vafilor/fix/workflow.volumes
fix: workflow volumes don't delete on failed workflow
2021-09-08 12:57:56 -07:00
Andrey Melnikov
fc9669d757 fix: add PodCompletion volume claim gc by default to workflows. This will clean up the volumes if the workflow fails 2021-09-08 12:47:23 -07:00
rushtehrani
8eeb90d3f1 update features image in README 2021-08-19 12:22:57 -07:00
rushtehrani
c25dfce84f Update features image 2021-08-18 11:09:48 -07:00
Andrey Melnikov
5705bfa47f Merge pull request #943 from Vafilor/feat/model.deployment
feat: Inference Service API
2021-08-12 10:58:15 -07:00
Andrey Melnikov
44a78effed feat: Create response for inference service now returns information about the status 2021-08-12 10:05:04 -07:00
Andrey Melnikov
a8985a7878 chore: codacy docs 2021-08-11 17:08:24 -07:00
Andrey Melnikov
69006309b4 feat: allow onepanel-access-token in addition to the onepanel-auth-token 2021-08-11 16:29:37 -07:00
Andrey Melnikov
22b3d984ec clean: simplify marshaling of InferenceService 2021-08-11 16:29:10 -07:00
Andrey Melnikov
4528927463 feat: updated endpoint to get the status to include predictor url 2021-08-11 11:40:18 -07:00
Andrey Melnikov
edf7a30f64 feat: support both resource requests and limits for InferenceService 2021-08-10 14:47:32 -07:00
Andrey Melnikov
51fb86e3fe feat: simplify API for model services and rename it to inferenceservice 2021-08-10 13:36:44 -07:00
Andrey Melnikov
d8e0e5c968 feat: add defaults for model deployment and add get status and delete endpoints 2021-08-10 09:54:17 -07:00
Andrey Melnikov
75719caec9 feat: model deployment API 2021-08-06 16:07:27 -07:00
Andrey Melnikov
147c937252 Merge pull request #940 from Vafilor/feat/full.node.resources
feat: add optional logic to capture the entire node
2021-08-03 09:01:52 -07:00
Andrey Melnikov
e0f3f81563 feat: add optional logic to capture the entire node 2021-08-02 16:43:12 -07:00
Andrey Melnikov
0021249464 Merge pull request #938 from Vafilor/feat/expose.minio
feat: separate files api and support presigned urls
2021-08-02 15:11:34 -07:00
Andrey Melnikov
7235951ec2 chore: codacy docs 2021-08-02 15:08:09 -07:00
Andrey Melnikov
f843074a3f chore: codacy - Url -> URL 2021-07-30 14:21:15 -07:00
Andrey Melnikov
8e6ef8d3eb feat: updated presigned url response to include the file size so client can decide if it can be displayed or not 2021-07-30 14:01:38 -07:00
Andrey Melnikov
d226028b33 feat: separate files from workflow and add endpoint to get pre-signed url 2021-07-30 12:52:50 -07:00
Andrey Melnikov
82585d1011 Merge pull request #936 from Vafilor/fix/wrong.namespace
fix: issue where wrong namespace was parsed in certain cases
2021-07-27 11:10:34 -07:00
Andrey Melnikov
193dbe156e fix: issue where wrong namespace was obtained from certain urls for workspaces/models 2021-07-27 11:05:26 -07:00
Rush Tehrani
5ebccbd811 Update summary and Argo link 2021-07-22 11:05:43 -07:00
Andrey Melnikov
023fb50046 Merge pull request #934 from Vafilor/fix/s3.files
fix: s3 not listing files in file browser
2021-07-20 13:18:13 -07:00
Andrey Melnikov
abd8d3cde0 Merge pull request #933 from Vafilor/feat/filesyncer.update.migrations
feat: added migrations to update filesyncer to version 1.0.0
2021-07-20 13:13:58 -07:00
Andrey Melnikov
64d6dde1aa Merge branch 'fix/s3.files' of github.com:Vafilor/core into fix/s3.files 2021-07-20 13:08:21 -07:00
Andrey Melnikov
d0d68470dd fix: issue where s3 storage would not list files (but would list folders) 2021-07-20 13:07:38 -07:00
Andrey Melnikov
6f8e3f56e7 chore: documenting migrations 2021-07-20 13:07:17 -07:00
Andrey Melnikov
2b47ad7092 fix: issue where s3 storage would not list files (but would list folders) 2021-07-20 13:01:14 -07:00
Andrey Melnikov
66e2418424 feat: added migrations to update filesyncer to version 1.0.0 2021-07-20 13:00:41 -07:00
Andrey Melnikov
5b6979302e Merge pull request #930 from Vafilor/feat/storage.pagination
feat: added pagination to listing files
2021-07-16 16:02:22 -07:00
Andrey Melnikov
afb98c295b feat: added pagination to listing files 2021-07-16 15:33:09 -07:00
Andrey Melnikov
1fb0d10b7c Merge pull request #925 from Vafilor/feat/service.availability
feat: check if kfserving is set up
2021-07-12 15:27:16 -07:00
Andrey Melnikov
c4438bfe0d feat: check if kfserving is set up 2021-07-12 12:46:11 -07:00
Rush Tehrani
8329706f22 Update README.md 2021-07-08 14:01:22 -07:00
Andrey Melnikov
09be35b2d6 fix: don't publish image on any md file changes or github workflow changes 2021-07-08 12:23:21 -07:00
Andrey Melnikov
e4d83903c7 fix: don't publish image on README changes 2021-07-08 12:15:19 -07:00
Rush Tehrani
69bc6e3df1 Update README.md 2021-07-08 12:04:47 -07:00
Andrey Melnikov
bfee6c2e34 Merge pull request #921 from Vafilor/feat/auth.updates
feat: auth updates
2021-05-25 11:53:37 -07:00
Andrey Melnikov
47e03d7e7c Merge pull request #922 from Vafilor/fix/in.cluster.api.url
fix: use in-cluster url for ONEPANEL_API_URL
2021-05-25 11:51:08 -07:00
Andrey Melnikov
bcf78b54a0 fix: no 400 response returned from bad token 2021-05-25 11:49:35 -07:00
Andrey Melnikov
96b8f522b3 fix: use in-cluster url for ONEPANEL_API_URL so workspaces can access it in-cluster 2021-05-21 15:41:26 -07:00
Andrey Melnikov
98766cdc41 feat: auth updates 2021-05-19 13:40:43 -07:00
Rush Tehrani
378850f591 Merge pull request #919 from rushtehrani/master
chore: Add feature highlights video
2021-04-28 17:52:52 -07:00
Rush Tehrani
e27361466f Update feature highlights video 2021-04-28 17:47:49 -07:00
Rush Tehrani
daabf17078 Update features overview video 2021-04-27 11:11:15 -07:00
Rush Tehrani
73385ad779 Update README.md 2021-04-27 11:05:55 -07:00
Andrey Melnikov
c92c848134 Merge pull request #912 from inohmonton99/feat/description
feat: add description for workflow templates
2021-04-19 12:44:12 -07:00
Andrey Melnikov
82424605f6 chore: version bump to 0.22.0 2021-04-19 12:41:22 -07:00
Andrey Melnikov
9f05ab150a Merge pull request #913 from Vafilor/feat/add.deep.learning.desktop
feat: added deep learning desktop workspace
2021-04-14 11:01:56 -07:00
Andrey Melnikov
81de77d88b feat: added description to deep learning workspace 2021-04-14 10:58:22 -07:00
Andrey Melnikov
ea47eaf49d feat: added deep learning desktop workspace 2021-04-14 10:22:24 -07:00
inohmonton99
42e99f0ac4 updated api to match correct settings with new feature 2021-04-10 00:44:53 +08:00
inohmonton99
ae702c474c removed unnecessary commits and description migration for workflow_templates db 2021-04-08 00:28:20 +08:00
inohmonton99
cfd63a3ef9 description feature wip 2021-04-06 04:34:47 +08:00
Andrey Melnikov
1b2d5623b4 Merge pull request #908 from Vafilor/fix/release.command.repo
fix: updated repo in generate release notes as the name has changed
2021-04-01 16:31:42 -07:00
Andrey Melnikov
86895a9dfe Update README.md 2021-04-01 14:44:37 -07:00
Andrey Melnikov
ec94a13cd9 fix: updated repo in generate release notes as the name has changed 2021-04-01 10:53:26 -07:00
Andrey Melnikov
22836c85e1 Merge pull request #907 from Vafilor/feat/pns.updates
fix: don't use port 80 for host port
2021-04-01 10:29:23 -07:00
Andrey Melnikov
a3ab4a86b0 fix: don't use port 80 for host port so it doesn't take it 2021-04-01 09:08:36 -07:00
Andrey Melnikov
b6ef84a0aa Merge pull request #906 from Vafilor/feat/pns.updates
fix: wrong onepanel/dl version
2021-03-31 19:21:58 -07:00
Andrey Melnikov
9f513dda9b fix: wrong onepanel/dl version 2021-03-31 18:59:21 -07:00
Andrey Melnikov
1bb3e7506d Merge pull request #905 from Vafilor/feat/pns.updates
fix: bug with remove hyperparam tuning migration
2021-03-30 15:13:02 -07:00
Andrey Melnikov
0f19e4d618 fix: bug with remove hyperparam tuning migration 2021-03-30 14:02:08 -07:00
Rush Tehrani
6c251761f5 Merge pull request #904 from Vafilor/feat/pns.updates
feat: Update code to work better with PNS executor
2021-03-30 12:47:54 -07:00
Andrey Melnikov
2cad065778 chore: codacy fixes 2021-03-29 16:23:31 -07:00
Andrey Melnikov
2fe0a239c5 feat: remove hyperparameter tuning workflow if there are no workflow executions ran by it 2021-03-29 16:18:41 -07:00
Andrey Melnikov
8287e178b5 fix: wrong onepanel/dl version for updated jupyterlab template 2021-03-29 12:35:11 -07:00
Andrey Melnikov
b869f2eb22 Merge branch 'feat/pns.updates' of github.com:Vafilor/core into feat/pns.updates 2021-03-29 12:09:54 -07:00
Andrey Melnikov
c4893ed0d7 feat: updated migrations and updated code to better work with pns executor based on Long Nguyen's suggestions. 2021-03-29 12:08:38 -07:00
Andrey Melnikov
4882671b52 feat: updated migrations and updated code to better work with pns executor based on Long Nguyen's suggestions. 2021-03-29 12:03:12 -07:00
Andrey Melnikov
a2009de7b1 Merge pull request #903 from Vafilor/fix/contributing.commands
fix: bash missing character in contributing guide
2021-03-26 13:06:22 -07:00
Andrey Melnikov
bd5641bedc fix: bash missing character 2021-03-26 13:00:59 -07:00
Andrey Melnikov
75680ee621 Merge pull request #902 from Vafilor/fix/contributing.commands
fix: windows sections of contribution guide
2021-03-26 12:59:57 -07:00
Andrey Melnikov
aeba4ee8a2 fix: separate windows sections as windows doesn't use $PWD.
remove: minikube section as that is no longer recommended.
2021-03-26 12:53:34 -07:00
Andrey Melnikov
948f61da13 Merge pull request #892 from lnguyen/master
fix: fixes stability issue with pns executor
2021-03-26 12:09:17 -07:00
Andrey Melnikov
d69f6a4947 Merge pull request #897 from Vafilor/fix/secret.updates
fix:  add error detection when environment variable is too long
2021-03-12 11:38:35 -08:00
Andrey Melnikov
b2119d86d0 fix: detect case where secret is too long. Also only log the secret name. In case of long secret values this would flood the console. 2021-03-12 10:42:52 -08:00
Rush Tehrani
9938b60118 Update README.md 2021-03-08 10:01:47 -08:00
Long Nguyen
50dd0b9264 fixes stability issue with pns executor 2021-03-05 10:20:50 -05:00
Rush Tehrani
1424d24772 Merge pull request #889 from Vafilor/fix/workspaces
fix: incorrectly formatted label for workspaces
2021-03-03 12:21:56 -08:00
Andrey Melnikov
52e4117ab4 chore: codacy fixes 2021-03-03 12:18:09 -08:00
Andrey Melnikov
d6a2e98fc8 fix: incorrectly formatted label for workspaces 2021-03-03 12:14:51 -08:00
Andrey Melnikov
a4dcbdf6dc Merge pull request #888 from Vafilor/feat/workspace.machine.type
feat: add machine type to workspace listing
2021-03-03 10:07:13 -08:00
Andrey Melnikov
e114701958 Merge branch 'feat/workspace.machine.type' of github.com:Vafilor/core into feat/workspace.machine.type 2021-03-03 10:03:05 -08:00
Andrey Melnikov
f0588b7dc4 fix: revert debugging code 2021-03-03 10:02:18 -08:00
Andrey Melnikov
3a17632575 chore: added logging for unexpected error. 2021-03-03 10:02:18 -08:00
Andrey Melnikov
9ed7fc7cc3 feat: add machine type to workspace response 2021-03-03 10:01:37 -08:00
Rush Tehrani
07b525d4d3 Merge pull request #887 from rushtehrani/chore/readme
chore: Update README.md
2021-03-02 18:16:27 -08:00
Rush Tehrani
3d02e8de18 Update README.md 2021-03-02 18:13:48 -08:00
Rush Tehrani
a88066cd22 fix typo in README 2021-03-02 18:12:16 -08:00
Rush Tehrani
eb5a248244 upload new logo 2021-03-02 18:10:00 -08:00
Rush Tehrani
90787ba733 Update README.md 2021-03-02 18:06:03 -08:00
rushtehrani
723d8854d7 update features image 2021-03-02 17:46:36 -08:00
rushtehrani
da804eeee0 fix logo 2021-03-02 15:13:48 -08:00
Rush Tehrani
87f13f7c28 Merge pull request #886 from rushtehrani/chore/readme
chore: Update README
2021-03-02 13:47:33 -08:00
Andrey Melnikov
78c576b674 Merge pull request #881 from Vafilor/feat/upgrade.and.resume
feat: allow changing machine type when resuming
2021-03-02 13:04:57 -08:00
Andrey Melnikov
c52f7290ea chore: remove checking for hidden inputs in resume workspace 2021-03-02 13:00:51 -08:00
Andrey Melnikov
ada5f5f588 Merge branch 'feat/upgrade.and.resume' of github.com:Vafilor/core into feat/upgrade.and.resume 2021-03-02 12:59:42 -08:00
Andrey Melnikov
e9c804d718 chore: update auto-generated files 2021-03-02 12:56:58 -08:00
Andrey Melnikov
638647f478 feat: resume workspace now allows changing machine type 2021-03-02 12:55:58 -08:00
rushtehrani
5b52991f18 additional README updates 2021-03-02 12:15:05 -08:00
rushtehrani
ee74238c8d update README 2021-03-02 12:03:11 -08:00
Rush Tehrani
35bc8b9bc6 Merge pull request #884 from Vafilor/feat/filtering.updates
feat: updated workflow/workspace fields to allow some related field queries
2021-02-26 12:16:33 -08:00
Andrey Melnikov
8e36a07ede feat: updated workflow/workspace fields to allow some related field queries. 2021-02-26 10:19:22 -08:00
Andrey Melnikov
ebd304b7e1 Merge pull request #883 from Vafilor/feat/filtering.updates
feat: added methods to get just the field name for resources
2021-02-26 09:49:10 -08:00
Andrey Melnikov
15de7c7e1f chore: documented new methods 2021-02-26 09:36:44 -08:00
Andrey Melnikov
d597ccc13f feat: added methods to get just the field name for workflows, templates, workspaces, and workspace templates 2021-02-25 17:14:28 -08:00
Rush Tehrani
4ce7175424 Merge pull request #882 from rushtehrani/feat/policy-labels
feat: Add labels for better policy management
2021-02-25 12:03:41 -08:00
rushtehrani
9713d4928a add labels to Workflow pods for policy management 2021-02-24 19:26:42 -08:00
rushtehrani
af5bc31410 add labels to Workspace pod for policy management 2021-02-24 19:07:15 -08:00
Rush Tehrani
cce7d4a2ee Merge pull request #880 from Vafilor/feat/update.workspace.templates
feat: Update workspace templates
2021-02-24 12:32:51 -08:00
Andrey Melnikov
098efadc7f feat: resume workspace now allows changing machine type 2021-02-24 11:42:22 -08:00
Andrey Melnikov
ac9753941e feat: update workspace templates 2021-02-24 10:57:36 -08:00
Andrey Melnikov
34d3efd67d chore: added logging for unexpected error. 2021-02-24 10:15:53 -08:00
Andrey Melnikov
547dcec0c6 fix: removed code from resume workspace changes 2021-02-24 10:15:40 -08:00
Andrey Melnikov
058e31d0de feat: add machine type to workspace response 2021-02-24 09:57:39 -08:00
Rush Tehrani
8418962b9e Merge pull request #837 from Vafilor/feat/upgrade.argo.2.12.4
feat: upgrade argo to 2.12.9
2021-02-23 09:17:59 -08:00
Rush Tehrani
410a8741ae Merge pull request #879 from Vafilor/feat/filesyncer.sidecar
feat: filesyncer sidecar for workflows
2021-02-19 16:24:04 -08:00
Andrey Melnikov
4e6e0fb801 feat: update filesyncer 2021-02-19 16:21:32 -08:00
Andrey Melnikov
88a06a5461 feat: remove workflow manifest parts that are not used. 2021-02-19 15:10:33 -08:00
Andrey Melnikov
0343c6fb52 fix: only add filesyncer sidecar if there are volume mounts 2021-02-17 13:06:31 -08:00
Andrey Melnikov
5b0bc517d3 feat: auto-inject filesyncer sidecar 2021-02-16 12:29:34 -08:00
Rush Tehrani
ace9e6e7f0 Merge pull request #876 from onepanelio/update-README
docs: Update README.md
2021-02-15 17:48:23 -08:00
Rush Tehrani
a7b6a1fc77 Update README.md 2021-02-13 10:15:28 -08:00
Andrey Melnikov
5619122e35 Merge pull request #875 from rushtehrani/fix/main-container
fix: Add ONEPANEL_MAIN_CONTAINER env var to all Workspaces
2021-02-09 14:38:04 -08:00
rushtehrani
bf9c20de64 add ONEPANEL_MAIN_CONTAINER env var to all Workspaces 2021-02-09 14:09:21 -08:00
Andrey Melnikov
4275c3c740 Merge pull request #873 from Vafilor/fix/migrations.configmap
fix: db not reconnecting
2021-02-05 11:19:00 -08:00
Andrey Melnikov
1da29e550f fix: reconnect db 2021-02-05 11:17:41 -08:00
Rush Tehrani
3b9c84544c Merge pull request #872 from Vafilor/fix/migrations.configmap 2021-02-04 18:31:37 -08:00
Andrey Melnikov
f88a98c774 fix: move db migrations outside of configmap changes. This was causing an issue where go and sql migrations clashed. 2021-02-04 18:25:30 -08:00
Rush Tehrani
63afd315ff Merge pull request #871 from rushtehrani/chore/update-nni-template
chore: Update hyperparameter tuning template
2021-02-04 10:52:12 -08:00
rushtehrani
8a6a79a4bc update hyperparam tuning template 2021-02-04 10:30:15 -08:00
Rush Tehrani
3b9875ab2d Merge pull request #869 from Vafilor/feat/add.bucket
feat: Add bucket to config response
2021-02-03 12:02:43 -08:00
Andrey Melnikov
5c09e07ab8 feat: separated out config from namespace config 2021-02-03 11:55:36 -08:00
Andrey Melnikov
de9d38a656 chore: cleaned up repository logic since we use minio for non-s3 providers 2021-02-03 11:32:53 -08:00
Andrey Melnikov
e906edc1ec feat: add bucket to config response 2021-02-03 11:25:26 -08:00
rushtehrani
a079c17693 update docs url for training templates 2021-02-02 09:55:59 -08:00
rushtehrani
1413d98ef0 update mrcnn params 2021-02-01 16:46:35 -08:00
rushtehrani
5e8b85c9e9 use correct versioning convention 2021-02-01 14:04:53 -08:00
Rush Tehrani
e5a1276e1d Merge pull request #867 from rushtehrani/chore/filesyncer
chore: Use v0.18.0 for FileSyncer
2021-02-01 13:16:02 -08:00
rushtehrani
ffcef0c0dc use v0.18.0 for filesyncer 2021-02-01 13:10:50 -08:00
Rush Tehrani
257d838092 Merge pull request #866 from rushtehrani/fix/templates
fix: Fix Workflow Template comment
2021-02-01 11:40:16 -08:00
Rush Tehrani
fa6fa6c10e Merge pull request #865 from Vafilor/feat/workspace.updates
feat: workspace updates
2021-02-01 11:36:03 -08:00
rushtehrani
daf0b4622d minor comment fix 2021-01-31 19:17:34 -08:00
Rush Tehrani
a4d7f69118 Merge pull request #860 from onepanelio/feat/update-templates
feat: Update all templates for dynamic node labels and new file syncing
2021-01-31 17:39:38 -08:00
rushtehrani
a8cf80396f update labels 2021-01-30 17:13:33 -08:00
rushtehrani
c5fc80d979 template cleanup 2021-01-29 18:24:15 -08:00
rushtehrani
f55b82f4b1 use correct template name vars 2021-01-29 18:12:12 -08:00
rushtehrani
bd052005f1 update vscode template 2021-01-29 15:37:59 -08:00
rushtehrani
dcab30f839 use correct image 2021-01-29 15:22:57 -08:00
rushtehrani
c91048aae7 update JupyterLab workspace template 2021-01-29 14:24:26 -08:00
Andrey Melnikov
a9ae982e61 Merge branch 'feat/workspace.updates' of github.com:Vafilor/core into feat/workspace.updates 2021-01-29 13:54:42 -08:00
Andrey Melnikov
559d937e79 feat: support sinceTime for workspace container logs 2021-01-29 13:54:08 -08:00
rushtehrani
0283aecfad update CVAT template 2021-01-29 13:47:12 -08:00
rushtehrani
53612cc145 update TFOD template 2021-01-29 12:46:25 -08:00
rushtehrani
f44e1d31c0 update MaskRCNN template 2021-01-29 12:20:28 -08:00
rushtehrani
35ea2164eb update MaskRCNN template 2021-01-29 12:07:25 -08:00
Andrey Melnikov
986b0ec7ee chore: missing comment 2021-01-29 09:57:00 -08:00
Andrey Melnikov
e1e382e223 fix: added missing proto data for workspace component service 2021-01-29 09:57:00 -08:00
Andrey Melnikov
3ae767d8f9 fix: onepanel helper image reference in contributing.md 2021-01-29 09:57:00 -08:00
Andrey Melnikov
dac793601d feat: updated workspace API request to return file syncer url if available 2021-01-29 09:57:00 -08:00
Andrey Melnikov
8f1d1ee7e0 chore: missing comment 2021-01-29 09:56:23 -08:00
Andrey Melnikov
4e2154b3e2 fix: added missing proto data for workspace component service 2021-01-29 09:55:12 -08:00
Andrey Melnikov
0664eb37c2 fix: onepanel helper image reference in contributing.md 2021-01-29 09:54:05 -08:00
Andrey Melnikov
2c86994919 feat: updated workspace API request to return file syncer url if available 2021-01-29 09:53:40 -08:00
Andrey Melnikov
4bc9cc1204 Merge pull request #851 from rushtehrani/feat/workspace-logs
feat: Add GetWorkspaceContainerLogs
2021-01-29 09:48:29 -08:00
Rush Tehrani
e240532756 Merge pull request #858 from onepanelio/chore/fix-badge
chore: Fix LFAI badge
2021-01-27 11:41:04 -08:00
Rush Tehrani
a0d9285646 chore: Fix LFAI badge 2021-01-27 09:41:53 -08:00
Andrey Melnikov
bb4cd8b283 Merge pull request #855 from rushtehrani/feat/nodepool-default
feat: Allow default for nodepool option
2021-01-25 16:35:12 -08:00
rushtehrani
33a25397c3 use an env var for more flexibility 2021-01-25 16:22:37 -08:00
rushtehrani
67180fdba3 allow default for nodepool option 2021-01-25 16:14:06 -08:00
Andrey Melnikov
527bcd835e Merge branch 'feat/upgrade.argo.2.12.4' of github.com:Vafilor/core into feat/upgrade.argo.2.12.4 2021-01-25 14:36:42 -08:00
Andrey Melnikov
488a32d5d5 fix: issue where filterOutCustomTypesFromManifest was crashing 2021-01-25 14:31:47 -08:00
Andrey Melnikov
989ec1f02d fix: wrong format for wfv1.Item 2021-01-25 14:31:47 -08:00
Andrey Melnikov
5a66c278e4 feat: update argo to 2.12.4 (WIP) 2021-01-25 14:31:47 -08:00
Andrey Melnikov
8799a79643 Merge pull request #853 from Vafilor/fix/workflow.template.version.archive
fix: issue where workflow template version did not ignore archived template
2021-01-23 19:51:19 -08:00
Andrey Melnikov
3c8d0ad5db fix: issue where workflow template version did not ignore archived templates 2021-01-23 19:44:56 -08:00
rushtehrani
0a690ad77a add comments to exported funcs 2021-01-20 16:52:40 -08:00
rushtehrani
932b343738 use correct http endpoint for workspace logs 2021-01-20 16:48:16 -08:00
rushtehrani
4a22327fec add GetWorkspaceContainerLogs 2021-01-20 16:32:32 -08:00
Rush Tehrani
1d20348a50 Merge pull request #850 from Vafilor/feat/workspace.uid.param
fix: workspace uid was name and not uid and namespace was not replaced
2021-01-19 15:57:06 -08:00
Andrey Melnikov
c7021b735e chore: codacy method comment 2021-01-19 15:54:40 -08:00
Andrey Melnikov
a132ce4e34 chore: codacy 2021-01-19 15:50:34 -08:00
Andrey Melnikov
26218bb426 fix: workspace uid was name and not uid. Also namespace was not replaced. 2021-01-19 15:33:43 -08:00
Rush Tehrani
744b79ee73 Merge pull request #849 from Vafilor/feat/workspace.uid.param
feat: add {{workspace.uid}} to parameters as a replaceable value
2021-01-19 14:42:16 -08:00
Andrey Melnikov
734d793d83 chore: codacy formatting 2021-01-19 13:19:10 -08:00
Andrey Melnikov
23f8200042 feat: add {{workspace.uid}} to parameters as a replaceable value 2021-01-19 13:03:07 -08:00
Andrey Melnikov
f1bdbbad28 chore: code cleanup 2021-01-19 10:55:55 -08:00
Andrey Melnikov
a6cfbe24df chore: codacy method comments 2021-01-18 10:41:17 -08:00
Andrey Melnikov
d630508c0f feat: update workflow templates to have dynamic node pool label 2021-01-18 10:37:08 -08:00
Andrey Melnikov
e02db631b7 Merge pull request #844 from rushtehrani/feat/nodepoollabel
feat: Evaluate node pool label in Workflow Templates
2021-01-18 09:48:39 -08:00
rushtehrani
3c8556e17b evaluate nodepoollabel in workflow templates 2021-01-17 13:15:58 -08:00
Andrey Melnikov
e66255b4ca fix: issue where filterOutCustomTypesFromManifest was crashing 2021-01-15 13:28:50 -08:00
Rush Tehrani
2cb43850be Merge pull request #843 from Vafilor/fix/migration.rollback
fix: wrong file for migration rollback
2021-01-15 12:40:11 -08:00
Andrey Melnikov
08b5edde72 fix: wrong file name for migration rollback 20201113094916 2021-01-15 12:37:48 -08:00
Andrey Melnikov
82a411ebda fix: wrong file for migration rollback 2021-01-15 12:29:00 -08:00
Rush Tehrani
b30be5277a Merge pull request #839 from rushtehrani/docs/readme
docs: Update images, text and links
2021-01-14 10:00:12 -08:00
rushtehrani
4ca5ee3ded update lfai badge and text 2021-01-14 09:49:23 -08:00
rushtehrani
b09cf245d6 update README layout 2021-01-14 09:37:03 -08:00
rushtehrani
c3082cb3cc update onepanel.gif 2021-01-13 15:48:06 -08:00
Rush Tehrani
ab057f25d8 Merge pull request #838 from onepanelio/docs/readme
docs: Update README.md
2021-01-13 13:26:19 -08:00
Rush Tehrani
8081a62771 shorten hyperparameter sentence 2021-01-13 12:57:38 -08:00
Rush Tehrani
83642dd5b0 shorten feature sentences 2021-01-13 12:46:33 -08:00
Rush Tehrani
456d0f9f3c clean up and shorten some sentences 2021-01-13 12:34:01 -08:00
Rush Tehrani
13274ec703 shorten sentences 2021-01-13 12:00:06 -08:00
Rush Tehrani
45454cb783 add hyperparameter tuning bullet 2021-01-13 11:56:56 -08:00
Rush Tehrani
8a68e0e07b update note about python sdk 2021-01-13 11:52:21 -08:00
Rush Tehrani
91b9e86792 shorten sentences 2021-01-13 11:35:17 -08:00
Rush Tehrani
b5fdfa4783 clean up and wording changes 2021-01-13 11:33:04 -08:00
Rush Tehrani
99ef4ef2ac Update README.md 2021-01-13 11:05:38 -08:00
Andrey Melnikov
6e0ca2e1e4 fix: wrong format for wfv1.Item 2021-01-12 16:04:40 -08:00
Andrey Melnikov
edcc42ba4b feat: update argo to 2.12.4 (WIP) 2021-01-12 15:58:49 -08:00
Andrey Melnikov
cebbc069fc Merge pull request #832 from Vafilor/feat/rename.dev
feat: change "dev" tag to be "latest" for images
2021-01-12 11:33:38 -08:00
Rush Tehrani
709871dd3b Merge pull request #834 from rushtehrani/fix/params
fix: Only remove whitespaces in var references
2021-01-11 20:44:37 -08:00
rushtehrani
1d0c898fa4 code cleanup + no param mutation 2021-01-11 20:37:23 -08:00
rushtehrani
1355f8039e only remove whitespaces in var references 2021-01-11 20:05:58 -08:00
Andrey Melnikov
132e4f1d9c feat: change "dev" tag to be "latest" for images 2021-01-11 11:53:32 -08:00
Rush Tehrani
2c187ad784 Merge pull request #830 from Vafilor/feat/workflow.template.version.parameters
feat: add parameters to workflow template versions response
2021-01-09 20:36:51 -08:00
Andrey Melnikov
c2a80121b7 fix: issue where opts.params were overwritten because of pointer reference - this caused the params stored in database to be altered 2021-01-09 19:26:33 -08:00
Andrey Melnikov
5275aa2ea7 Merge branch 'feat/workflow.template.version.parameters' of github.com:Vafilor/core into feat/workflow.template.version.parameters 2021-01-09 19:25:50 -08:00
Andrey Melnikov
682994c4c2 feat: add parameters to workflow template versions response 2021-01-09 19:20:25 -08:00
Andrey Melnikov
302731e23a feat: replace parameter namespace values for workflow/workspace with the runtime value 2021-01-09 15:14:38 -08:00
Andrey Melnikov
f9338bd51e feat: add parameters to workflow template versions response 2021-01-08 10:47:06 -08:00
Andrey Melnikov
f424e5e661 Merge pull request #826 from rushtehrani/fix/tfod
fix: Rename train.py => main.py in TFOD workflow template
2021-01-07 12:54:07 -08:00
rushtehrani
d9cc564596 rename train.py => main.py in tfod workflow template 2021-01-07 12:50:37 -08:00
Andrey Melnikov
fbc780ab4d Merge pull request #824 from rushtehrani/feat/update-cvat
feat: Update CVAT and FileSyncer images
2021-01-07 10:05:28 -08:00
rushtehrani
5f531f2ac0 update cvat and filesyncer images 2021-01-07 09:59:26 -08:00
Rush Tehrani
208828fcaf Merge pull request #822 from aleksandrmelnikov/feat/core.804-upg.k8s.arg.support.secrets
feat: Adding support for ImagePullSecrets in workspaces and workflows
2021-01-06 12:14:31 -08:00
Rush Tehrani
855eaf2a40 Merge pull request #823 from rushtehrani/fix/tf-training
fix: Remove used-by: cvat from tf training workflow
2021-01-06 10:00:41 -08:00
rushtehrani
d230357a28 remove used-by: cvat from tf training workflow 2021-01-05 16:48:50 -08:00
Aleksandr Melnikov
6e0131636e go fmt. 2021-01-05 12:28:12 -08:00
Aleksandr Melnikov
70e3d36416 Adding support for ImagePullSecrets in workspaces. 2021-01-05 11:44:10 -08:00
Rush Tehrani
313e6841b1 Merge pull request #821 from rushtehrani/feat/cleanup
chore: Remove archive from templates
2021-01-04 13:51:39 -08:00
rushtehrani
22abceb9df remove archive from templates 2021-01-04 13:47:32 -08:00
Rush Tehrani
e6889ac470 Merge pull request #820 from Vafilor/fix/finished.logs
fix: issue where logs would sometimes crash
2020-12-31 23:36:16 -08:00
Andrey Melnikov
5669d03c5c chore: codacy 2020-12-31 23:33:42 -08:00
Andrey Melnikov
a8f5cde75e fix: issue where logs would sometimes accumulate because there was no newline in the content. 2020-12-31 23:23:13 -08:00
Rush Tehrani
67af745dab Merge pull request #818 from Vafilor/fix/finished.logs
fix: issues with finished logs
2020-12-31 11:12:09 -08:00
Andrey Melnikov
080624d9e2 fix: issues with finished logs. Content was repeating, resulting in a large data load - making the stream fail.
Missing timestamps were being returned as time 0.
Occasionally log lines were split across multiple entries.
2020-12-31 11:07:39 -08:00
Rush Tehrani
69c523ee23 Merge pull request #815 from Vafilor/feat/default.make.api
feat: make api command use docker by default.
2020-12-30 15:18:46 -08:00
Andrey Melnikov
303cee3e9f feat: make api command use docker by default. 2020-12-30 15:05:16 -08:00
Rush Tehrani
3cb799f6fe Merge pull request #814 from Vafilor/fix/timestamp.logs
fix: workflow execution logs showing timestamps incorrectly
2020-12-30 13:25:35 -08:00
Andrey Melnikov
8d896c03c0 Merge branch 'fix/timestamp.logs' of github.com:Vafilor/core into fix/timestamp.logs 2020-12-30 12:05:37 -08:00
Andrey Melnikov
543367c36e feat: updated workflow logs to return a response that may contain several log entries. 2020-12-30 12:04:21 -08:00
Andrey Melnikov
ba776cddbd fix: fixed issue with logs where the timestamp parsing ignored newlines 2020-12-30 12:04:21 -08:00
Andrey Melnikov
ea89ddf289 feat: updated workflow logs to return a response that may contain several log entries. 2020-12-30 12:02:25 -08:00
Andrey Melnikov
48d66004ec fix: fixed issue with logs where the timestamp parsing ignored newlines 2020-12-30 11:49:37 -08:00
127 changed files with 13119 additions and 2661 deletions


@@ -1,6 +1,10 @@
name: Publish dev docker image
on:
push:
paths-ignore:
- LICENSE
- ".github/**"
- "*.md"
branches:
- master
jobs:
@@ -33,6 +37,6 @@ jobs:
uses: elgohr/Publish-Docker-Github-Action@master
with:
name: onepanel/core
tags: "dev"
tags: "latest"
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_TOKEN }}


@@ -4,11 +4,22 @@
Note: Up migrations are automatically executed when the application is run.
#### Linux / Mac
```bash
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel-helper:v1.0.0 goose -dir db/sql create <name> sql # Create migration in db/sql folder
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel-helper:v1.0.0 goose -dir db postgres "${DB_DATASOURCE_NAME}" up # Migrate the DB to the most recent version available
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel-helper:v1.0.0 goose -dir db postgres "${DB_DATASOURCE_NAME}" down # Roll back the version by 1
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel-helper:v1.0.0 goose help # See all available commands
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel/helper:v1.0.0 goose -dir db/sql create <name> sql # Create migration in db/sql folder
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel/helper:v1.0.0 goose -dir db postgres "${DB_DATASOURCE_NAME}" up # Migrate the DB to the most recent version available
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel/helper:v1.0.0 goose -dir db postgres "${DB_DATASOURCE_NAME}" down # Roll back the version by 1
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel/helper:v1.0.0 goose help # See all available commands
```
#### Windows
```bash
docker run --rm --mount type=bind,source="%CD%",target=/root onepanel/helper:v1.0.0 goose -dir db/sql create wow sql # Create migration in db/sql folder
docker run --rm --mount type=bind,source="%CD%",target=/root onepanel/helper:v1.0.0 goose -dir db postgres "${DB_DATASOURCE_NAME}" up # Migrate the DB to the most recent version available
docker run --rm --mount type=bind,source="%CD%",target=/root onepanel/helper:v1.0.0 goose -dir db postgres "${DB_DATASOURCE_NAME}" down # Roll back the version by 1
docker run --rm --mount type=bind,source="%CD%",target=/root onepanel/helper:v1.0.0 goose help # See all available commands
```
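These commands assume `DB_DATASOURCE_NAME` is already set. A minimal sketch with placeholder values (shown in Linux/Mac shell syntax), assuming the same key/value Postgres connection-string format used by `main.go`:
```bash
# Placeholder credentials; substitute your own host, user, password and database name.
export DB_DATASOURCE_NAME="host=localhost port=5432 user=onepanel password=onepanel dbname=onepanel sslmode=disable"
# Then run the migrations as shown above, e.g.:
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel/helper:v1.0.0 goose -dir db postgres "${DB_DATASOURCE_NAME}" up
```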
### Local
@@ -64,86 +75,27 @@ Make sure that your `$GOBIN` is in your `$PATH`.
### Docker
Generate Go and Swagger APIs:
Generate Go and Swagger APIs
#### Linux / Mac
```bash
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel-helper:v1.0.0 make api version=1.0.0
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel/helper:v1.0.0 make api-internal version=1.0.0
```
#### Windows
```bash
docker run --rm --mount type=bind,source="%CD%",target=/root onepanel/helper:v1.0.0 make api-internal version=1.0.0
```
### Local Installation
Generate Go and Swagger APIs:
```bash
make api version=1.0.0
make api-internal version=1.0.0
```
## Minikube Debugging and Development
It is possible to access host resources with minikube.
- This means you can run core and core-ui on your machine, and have minikube
execute API calls to your machine.
NOTE:
- Do not use host access with Minikube and VMWare. This has been shown not to work
in our testing.
If you have a work-around, feel free to let us know.
To make this work, some setup is needed.
- Minikube started with driver=virtualbox
Get your Minikube ssh IP
https://minikube.sigs.k8s.io/docs/handbook/host-access/
```shell script
minikube ssh "route -n | grep ^0.0.0.0 | awk '{ print \$2 }'"
```
Example output:
```shell script
10.0.2.2
```
When running core api, add these ENV variables.
```shell script
ONEPANEL_CORE_SERVICE_HOST=10.0.2.2 # IP you just got
ONEPANEL_CORE_SERVICE_PORT=8888 # HTTP Port set in main.go
```
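A minimal sketch of running the API with these variables set, assuming you start it from the repository root with `go run`:
```bash
# Use the host-access IP and port from the steps above; adjust to your setup.
ONEPANEL_CORE_SERVICE_HOST=10.0.2.2 ONEPANEL_CORE_SERVICE_PORT=8888 go run main.go
```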
DB Access
- You will need to change the Postgres service from ClusterIP to NodePort
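One way to do this (a sketch, assuming the service is named `postgres` in the `onepanel` namespace, as it appears in the service list below):
```bash
# Switch the assumed postgres service to NodePort so it is reachable from the host.
kubectl patch service postgres -n onepanel -p '{"spec": {"type": "NodePort"}}'
```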
Run
```shell script
minikube service list
```
Look at Postgres, you'll see something like this:
```shell script
$ minikube service list
|----------------------|----------------------------------------|--------------------|--------------------------------|
| NAMESPACE | NAME | TARGET PORT | URL |
|----------------------|----------------------------------------|--------------------|--------------------------------|
| application-system | application-controller-manager-service | No node port |
| default | kubernetes | No node port |
| kube-system | kube-dns | No node port |
| kubernetes-dashboard | dashboard-metrics-scraper | No node port |
| kubernetes-dashboard | kubernetes-dashboard | No node port |
| onepanel | onepanel-core | http/8888 | http://192.168.99.101:32000 |
| | | grpc/8887 | http://192.168.99.101:32001 |
| onepanel | onepanel-core-ui | http/80 | http://192.168.99.101:32002 |
| onepanel | postgres | 5432 | http://192.168.99.101:31975 |
|----------------------|----------------------------------------|--------------------|--------------------------------|
```
Grab `http://192.168.99.101:31975`
Use this in main.go for the following lines:
```shell script
databaseDataSourceName := fmt.Sprintf("port=31975 host=%v user=%v password=%v dbname=%v sslmode=disable",
"192.168.99.101", config["databaseUsername"], config["databasePassword"], config["databaseName"])
```
This should connect your developing core to the minikube db.
After this, build main.go and run the executable.
- Or use your IDE equivalent
## Code Structure & Organization
### `utils` dir


@@ -35,10 +35,10 @@ protoc:
--openapiv2_opt simple_operation_ids=true \
api/proto/*.proto
api: init protoc jq
api-internal: init protoc jq
api-docker: init
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel/helper:v1.0.0 make api version=$(version)
api: init
docker run --rm --mount type=bind,source="${PWD}",target=/root onepanel/helper:v1.0.0 make api-internal version=$(version)
docker-build:
docker build -t onepanel-core .
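With this change `make api` delegates generation to the `onepanel/helper` container, while `make api-internal` runs the generators on the host. Illustrative invocations (version number as used elsewhere in the contributing guide):
```bash
# Generate Go and Swagger APIs inside the helper container (the new default).
make api version=1.0.0
# Or run the generators directly on the host.
make api-internal version=1.0.0
```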


@@ -1,82 +1,45 @@
<img width="240px" src="img/logo.png">
<img width="200px" src="img/logo.png">
![build](https://img.shields.io/github/workflow/status/onepanelio/core/Publish%20dev%20docker%20image/master?color=01579b)
![build](https://img.shields.io/github/workflow/status/onepanelio/onepanel/Publish%20dev%20docker%20image/master?color=01579b)
![code](https://img.shields.io/codacy/grade/d060fc4d1ac64b85b78f85c691ead86a?color=01579b)
[![release](https://img.shields.io/github/v/release/onepanelio/core?color=01579b)](https://github.com/onepanelio/core/releases)
[![sdk](https://img.shields.io/pypi/v/onepanel-sdk?color=01579b&label=sdk)](https://pypi.org/project/onepanel-sdk/)
[![docs](https://img.shields.io/github/v/release/onepanelio/core?color=01579b&label=docs)](https://docs.onepanel.io)
[![issues](https://img.shields.io/github/issues-raw/onepanelio/core?color=01579b&label=issues)](https://github.com/onepanelio/core/issues)
[![chat](https://img.shields.io/badge/support-slack-01579b)](https://join.slack.com/t/onepanel-ce/shared_invite/zt-eyjnwec0-nLaHhjif9Y~gA05KuX6AUg)
[![lfai](https://img.shields.io/badge/link-LFAI-01579b)](https://landscape.lfai.foundation/?selected=onepanel)
[![license](https://img.shields.io/github/license/onepanelio/core?color=01579b)](https://opensource.org/licenses/Apache-2.0)
Production scale vision AI platform with fully integrated components for model building, automated labeling, data processing and model training pipelines.
## End-to-end computer vision platform
Label, build, train, tune, deploy and automate in a unified platform that runs on any cloud and on-premises.
<img width="100%" src="img/onepanel.gif">
https://user-images.githubusercontent.com/1211823/116489376-afc60000-a849-11eb-8e8b-b0c64c07c144.mp4
## Why Onepanel?
- End-to-end automation for production scale vision AI pipelines
- Best of breed, open source deep learning tools seamlessly integrated in one unified platform
- Infrastructure automation so you can easily scale your data processing and training pipelines to multiple nodes
- Customizable, reproducible and version controlled tooling and pipeline templates
- Scalability, flexibility and resiliency of Kubernetes without the deployment and configuration complexities
Onepanel can be found on [LF AI Landscape](http://landscape.lfai.foundation/selected=onepanel)
## Features
- Annotate images and video with automatic annotation of bounding boxes and polygon masks, fully integrated with data processing and training pipelines.
- JupyterLab configured with extensions for TensorBoard, Git/GitHub, debugging, notebook diffing and support for Conda, OpenCV, Tensorflow and PyTorch with GPU.
- Build fully reproducible, distributed and parallel data processing and training pipelines with real-time logs and output snapshots.
- Bring your own IDEs, annotation tools and pipelines with a version controlled YAML and Docker based template engine.
- Track and visualize model metrics and experiments with TensorBoard or bring your own experiment tracking tools.
- Extend Onepanel with powerful REST APIs and SDKs to further automate your workflows.
## Online demo
We have created an [online demo environment](https://onepanel.typeform.com/to/kQfDX5Vf?product=github) so that you can quickly try Onepanel.
**Note:** This is a shared, read only demo environment that is reset every few hours.
<img width="100%" src="img/features.png">
## Quick start
See [quick start guide](https://docs.onepanel.ai/docs/getting-started/quickstart) to get started with the platform of your choice.
### Quick start videos
[Getting started with Microsoft Azure](https://youtu.be/CQBIYfBk3Zk)\
[Getting started with Amazon EKS](https://youtu.be/Ipdd8f6D6IM)\
[Getting started with Google GKE](https://youtu.be/pZRO63SnQ8A)
See [quick start guide](https://docs.onepanel.ai/docs/getting-started/quickstart) to get started.
## Community
See [documentation](https://docs.onepanel.ai) to get started or for more detailed operational and user guides.
To submit a feature request, report a bug or documentation issue, please open a GitHub [pull request](https://github.com/onepanelio/core/pulls) or [issue](https://github.com/onepanelio/core/issues).
For help, questions, release announcements and contribution discussions, join us on [Slack](https://join.slack.com/t/onepanel-ce/shared_invite/zt-eyjnwec0-nLaHhjif9Y~gA05KuX6AUg) or [GitHub discussions](https://github.com/onepanelio/core/discussions).
For help, questions, release announcements and contribution discussions, join us on [Slack](https://join.slack.com/t/onepanel-ce/shared_invite/zt-eyjnwec0-nLaHhjif9Y~gA05KuX6AUg).
## Contributing
Onepanel is modular and consists of [multiple repositories](https://docs.onepanel.ai/docs/getting-started/contributing/#project-repositories).
Onepanel is modular and consists of the following repositories:
[Backend](https://github.com/onepanelio/core/) (this repository) - Code base for backend (Go)\
[Frontend](https://github.com/onepanelio/core-ui/) - Code base for frontend (Angular + TypeScript)\
[CLI](https://github.com/onepanelio/cli/) - Code base for installation and management CLI (Go)\
[Manifests](https://github.com/onepanelio/manifests/) - Kustomize manifests used by installation and management CLI (YAML)\
[Python SDK](https://github.com/onepanelio/python-sdk/) - Python SDK code and documentation (Python)\
[Templates](https://github.com/onepanelio/templates) - Various Workspace, Workflow, Task and Sidecar Templates\
[Documentation](https://github.com/onepanelio/core-docs/) - The repository for documentation site\
[API Documentation](https://github.com/onepanelio/core-api-docs/) - API documentation if you choose to use the API directly
See `CONTRIBUTING.md` in each repository for development guidelines. Also, see [contribution guide](https://docs.onepanel.ai/docs/getting-started/contributing) for additional guidelines.
See [contribution guide](https://docs.onepanel.ai/docs/getting-started/contributing) and `CONTRIBUTING.md` in each repository for additional contribution guidelines.
## Acknowledgments
Onepanel seamlessly integrates the following excellent open source projects. We are grateful for the support these communities provide and do our best to contribute back as much as possible.
Onepanel seamlessly integrates the following open source projects under the hood:
[Argo](https://github.com/argoproj/argo)\
[CVAT](https://github.com/opencv/cvat)\
[JupyterLab](https://github.com/jupyterlab/jupyterlab)\
[NNI](https://github.com/microsoft/nni)
[Argo](https://github.com/argoproj/argo-workflows) | [Couler](https://github.com/couler-proj/couler) | [CVAT](https://github.com/opencv/cvat) | [JupyterLab](https://github.com/jupyterlab/jupyterlab) | [NNI](https://github.com/microsoft/nni)
We are grateful for the support these communities provide and do our best to contribute back as much as possible.
## License
Onepanel is licensed under [Apache 2.0](https://github.com/onepanelio/core/blob/master/LICENSE).
## Need a managed solution?
Visit our [website](https://www.onepanel.io/) for more information about our managed offerings.
## Enterprise support
Need enterprise features and support? Visit our [website](https://www.onepanel.ai/) for more information.


@@ -3,7 +3,7 @@
"info": {
"title": "Onepanel",
"description": "Onepanel API",
"version": "0.17.0",
"version": "1.0.0",
"contact": {
"name": "Onepanel project",
"url": "https://github.com/onepanelio/core"
@@ -22,6 +22,186 @@
"application/octet-stream"
],
"paths": {
"/apis/v1beta/service/{name}": {
"get": {
"operationId": "HasService",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/HasServiceResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "name",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"ServiceService"
]
}
},
"/apis/v1beta/{namespace}/field/workflow_executions/{fieldName}": {
"get": {
"operationId": "ListWorkflowExecutionsField",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/ListWorkflowExecutionsFieldResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "fieldName",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"WorkflowService"
]
}
},
"/apis/v1beta/{namespace}/field/workflow_templates/{fieldName}": {
"get": {
"operationId": "ListWorkflowTemplatesField",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/ListWorkflowTemplatesFieldResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "fieldName",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "isSystem",
"in": "query",
"required": false,
"type": "boolean"
}
],
"tags": [
"WorkflowTemplateService"
]
}
},
"/apis/v1beta/{namespace}/field/workspace_templates/{fieldName}": {
"get": {
"operationId": "ListWorkspaceTemplatesField",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/ListWorkspaceTemplatesFieldResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "fieldName",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"WorkspaceTemplateService"
]
}
},
"/apis/v1beta/{namespace}/field/workspaces/{fieldName}": {
"get": {
"operationId": "ListWorkspacesField",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/ListWorkspacesFieldResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "fieldName",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"WorkspaceService"
]
}
},
"/apis/v1beta1/auth": {
"post": {
"operationId": "IsAuthorized",
@@ -215,6 +395,36 @@
]
}
},
"/apis/v1beta1/{namespace}/config": {
"get": {
"operationId": "GetNamespaceConfig",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/GetNamespaceConfigResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"ConfigService"
]
}
},
"/apis/v1beta1/{namespace}/cron_workflow": {
"post": {
"operationId": "CreateCronWorkflow",
@@ -467,6 +677,200 @@
]
}
},
"/apis/v1beta1/{namespace}/files/list/{path}": {
"get": {
"operationId": "ListFiles",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/ListFilesResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "path",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "page",
"in": "query",
"required": false,
"type": "integer",
"format": "int32"
},
{
"name": "perPage",
"in": "query",
"required": false,
"type": "integer",
"format": "int32"
}
],
"tags": [
"FileService"
]
}
},
"/apis/v1beta1/{namespace}/files/presigned-url/{key}": {
"get": {
"operationId": "GetObjectDownloadPresignedURL",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/GetPresignedUrlResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "key",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"FileService"
]
}
},
"/apis/v1beta1/{namespace}/inferenceservice": {
"post": {
"operationId": "CreateInferenceService",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/GetInferenceServiceResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/CreateInferenceServiceRequest"
}
}
],
"tags": [
"InferenceService"
]
}
},
"/apis/v1beta1/{namespace}/inferenceservice/{name}": {
"get": {
"operationId": "GetInferenceService",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/GetInferenceServiceResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "name",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"InferenceService"
]
},
"delete": {
"operationId": "DeleteInferenceService",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"properties": {}
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "name",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"InferenceService"
]
}
},
"/apis/v1beta1/{namespace}/secrets": {
"get": {
"operationId": "ListSecrets",
@@ -1065,48 +1469,6 @@
]
}
},
"/apis/v1beta1/{namespace}/workflow_executions/{uid}/artifacts/{key}": {
"get": {
"operationId": "GetArtifact",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/ArtifactResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "uid",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "key",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"WorkflowService"
]
}
},
"/apis/v1beta1/{namespace}/workflow_executions/{uid}/cron_start_statistics": {
"post": {
"operationId": "CronStartWorkflowExecutionStatistic",
@@ -1151,48 +1513,6 @@
]
}
},
"/apis/v1beta1/{namespace}/workflow_executions/{uid}/files/{path}": {
"get": {
"operationId": "ListFiles",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/ListFilesResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "uid",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "path",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"WorkflowService"
]
}
},
"/apis/v1beta1/{namespace}/workflow_executions/{uid}/metric": {
"post": {
"operationId": "AddWorkflowExecutionMetrics",
@@ -1289,13 +1609,13 @@
"type": "object",
"properties": {
"result": {
"$ref": "#/definitions/LogEntry"
"$ref": "#/definitions/LogStreamResponse"
},
"error": {
"$ref": "#/definitions/google.rpc.Status"
}
},
"title": "Stream result of LogEntry"
"title": "Stream result of LogStreamResponse"
}
},
"default": {
@@ -2558,6 +2878,64 @@
]
}
},
"/apis/v1beta1/{namespace}/workspaces/{uid}/containers/{containerName}/logs": {
"get": {
"operationId": "GetWorkspaceContainerLogs",
"responses": {
"200": {
"description": "A successful response.(streaming responses)",
"schema": {
"type": "object",
"properties": {
"result": {
"$ref": "#/definitions/LogStreamResponse"
},
"error": {
"$ref": "#/definitions/google.rpc.Status"
}
},
"title": "Stream result of LogStreamResponse"
}
},
"default": {
"description": "An unexpected error response.",
"schema": {
"$ref": "#/definitions/google.rpc.Status"
}
}
},
"parameters": [
{
"name": "namespace",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "uid",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "containerName",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "sinceTime",
"in": "query",
"required": false,
"type": "string",
"format": "int64"
}
],
"tags": [
"WorkspaceService"
]
}
},
"/apis/v1beta1/{namespace}/workspaces/{uid}/pause": {
"put": {
"operationId": "PauseWorkspace",
@@ -2623,6 +3001,14 @@
"in": "path",
"required": true,
"type": "string"
},
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/UpdateWorkspaceBody"
}
}
],
"tags": [
@@ -2982,12 +3368,40 @@
}
}
},
"ArtifactResponse": {
"Container": {
"type": "object",
"properties": {
"data": {
"type": "string",
"format": "byte"
"image": {
"type": "string"
},
"name": {
"type": "string"
},
"env": {
"type": "array",
"items": {
"$ref": "#/definitions/Env"
}
}
}
},
"CreateInferenceServiceRequest": {
"type": "object",
"properties": {
"namespace": {
"type": "string"
},
"name": {
"type": "string"
},
"defaultTransformerImage": {
"type": "string"
},
"predictor": {
"$ref": "#/definitions/InferenceServicePredictor"
},
"transformer": {
"$ref": "#/definitions/InferenceServiceTransformer"
}
}
},
@@ -3036,6 +3450,9 @@
"items": {
"$ref": "#/definitions/KeyValue"
}
},
"captureNode": {
"type": "boolean"
}
}
},
@@ -3090,6 +3507,17 @@
}
}
},
"Env": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"value": {
"type": "string"
}
}
},
"File": {
"type": "object",
"properties": {
@@ -3159,6 +3587,23 @@
}
}
},
"GetInferenceServiceResponse": {
"type": "object",
"properties": {
"ready": {
"type": "boolean"
},
"conditions": {
"type": "array",
"items": {
"$ref": "#/definitions/InferenceServiceCondition"
}
},
"predictUrl": {
"type": "string"
}
}
},
"GetLabelsResponse": {
"type": "object",
"properties": {
@@ -3170,6 +3615,26 @@
}
}
},
"GetNamespaceConfigResponse": {
"type": "object",
"properties": {
"bucket": {
"type": "string"
}
}
},
"GetPresignedUrlResponse": {
"type": "object",
"properties": {
"url": {
"type": "string"
},
"size": {
"type": "string",
"format": "int64"
}
}
},
"GetWorkflowExecutionMetricsResponse": {
"type": "object",
"properties": {
@@ -3197,6 +3662,80 @@
}
}
},
"HasServiceResponse": {
"type": "object",
"properties": {
"hasService": {
"type": "boolean"
}
}
},
"InferenceServiceCondition": {
"type": "object",
"properties": {
"lastTransitionTime": {
"type": "string"
},
"status": {
"type": "string"
},
"type": {
"type": "string"
}
}
},
"InferenceServicePredictor": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"runtimeVersion": {
"type": "string"
},
"storageUri": {
"type": "string"
},
"nodeSelector": {
"type": "string"
},
"minCpu": {
"type": "string"
},
"minMemory": {
"type": "string"
},
"maxCpu": {
"type": "string"
},
"maxMemory": {
"type": "string"
}
}
},
"InferenceServiceTransformer": {
"type": "object",
"properties": {
"containers": {
"type": "array",
"items": {
"$ref": "#/definitions/Container"
}
},
"minCpu": {
"type": "string"
},
"minMemory": {
"type": "string"
},
"maxCpu": {
"type": "string"
},
"maxMemory": {
"type": "string"
}
}
},
"IsAuthorized": {
"type": "object",
"properties": {
@@ -3302,6 +3841,22 @@
"ListFilesResponse": {
"type": "object",
"properties": {
"count": {
"type": "integer",
"format": "int32"
},
"totalCount": {
"type": "integer",
"format": "int32"
},
"page": {
"type": "integer",
"format": "int32"
},
"pages": {
"type": "integer",
"format": "int32"
},
"files": {
"type": "array",
"items": {
@@ -3382,6 +3937,17 @@
}
}
},
"ListWorkflowExecutionsFieldResponse": {
"type": "object",
"properties": {
"values": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"ListWorkflowExecutionsResponse": {
"type": "object",
"properties": {
@@ -3428,6 +3994,17 @@
}
}
},
"ListWorkflowTemplatesFieldResponse": {
"type": "object",
"properties": {
"values": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"ListWorkflowTemplatesResponse": {
"type": "object",
"properties": {
@@ -3505,6 +4082,17 @@
}
}
},
"ListWorkspaceTemplatesFieldResponse": {
"type": "object",
"properties": {
"values": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"ListWorkspaceTemplatesResponse": {
"type": "object",
"properties": {
@@ -3532,6 +4120,17 @@
}
}
},
"ListWorkspacesFieldResponse": {
"type": "object",
"properties": {
"values": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"LogEntry": {
"type": "object",
"properties": {
@@ -3543,6 +4142,28 @@
}
}
},
"LogStreamResponse": {
"type": "object",
"properties": {
"logEntries": {
"type": "array",
"items": {
"$ref": "#/definitions/LogEntry"
}
}
}
},
"MachineType": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"value": {
"type": "string"
}
}
},
"Metric": {
"type": "object",
"properties": {
@@ -3875,6 +4496,9 @@
"items": {
"$ref": "#/definitions/Parameter"
}
},
"description": {
"type": "string"
}
}
},
@@ -3920,6 +4544,26 @@
"items": {
"$ref": "#/definitions/Parameter"
}
},
"workspaceComponents": {
"type": "array",
"items": {
"$ref": "#/definitions/WorkspaceComponent"
}
},
"machineType": {
"$ref": "#/definitions/MachineType"
}
}
},
"WorkspaceComponent": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"url": {
"type": "string"
}
}
},
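The new definitions above (GetInferenceServiceResponse, InferenceServiceCondition, GetPresignedUrlResponse, MachineType, WorkspaceComponent, and the List*FieldResponse wrappers) describe the JSON bodies returned by the corresponding endpoints. Note that 64-bit integers such as GetPresignedUrlResponse.size are declared as type string with format int64, which matches the proto3 JSON mapping. A minimal client-side sketch in Go, assuming a hand-written struct that mirrors the GetInferenceServiceResponse schema; the payload values are illustrative only, not output from a real deployment:

// Sketch only: a hypothetical client-side decoder for the GetInferenceServiceResponse
// schema above. The struct is hand-written here, not the generated type.
package main

import (
	"encoding/json"
	"fmt"
)

type InferenceServiceCondition struct {
	LastTransitionTime string `json:"lastTransitionTime"`
	Status             string `json:"status"`
	Type               string `json:"type"`
}

type GetInferenceServiceResponse struct {
	Ready      bool                        `json:"ready"`
	Conditions []InferenceServiceCondition `json:"conditions"`
	PredictURL string                      `json:"predictUrl"`
}

func main() {
	// Illustrative payload; real values come from the API.
	body := []byte(`{"ready":true,"conditions":[{"status":"True","type":"Ready"}],"predictUrl":"https://example.com/v1/models/example:predict"}`)

	var resp GetInferenceServiceResponse
	if err := json.Unmarshal(body, &resp); err != nil {
		panic(err)
	}
	fmt.Println(resp.Ready, resp.PredictURL)
}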


@@ -183,6 +183,163 @@ func (x *ParameterOption) GetValue() string {
return ""
}
type LogStreamResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
LogEntries []*LogEntry `protobuf:"bytes,1,rep,name=logEntries,proto3" json:"logEntries,omitempty"`
}
func (x *LogStreamResponse) Reset() {
*x = LogStreamResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_common_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *LogStreamResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*LogStreamResponse) ProtoMessage() {}
func (x *LogStreamResponse) ProtoReflect() protoreflect.Message {
mi := &file_common_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use LogStreamResponse.ProtoReflect.Descriptor instead.
func (*LogStreamResponse) Descriptor() ([]byte, []int) {
return file_common_proto_rawDescGZIP(), []int{2}
}
func (x *LogStreamResponse) GetLogEntries() []*LogEntry {
if x != nil {
return x.LogEntries
}
return nil
}
type LogEntry struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Timestamp string `protobuf:"bytes,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
Content string `protobuf:"bytes,2,opt,name=content,proto3" json:"content,omitempty"`
}
func (x *LogEntry) Reset() {
*x = LogEntry{}
if protoimpl.UnsafeEnabled {
mi := &file_common_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *LogEntry) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*LogEntry) ProtoMessage() {}
func (x *LogEntry) ProtoReflect() protoreflect.Message {
mi := &file_common_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use LogEntry.ProtoReflect.Descriptor instead.
func (*LogEntry) Descriptor() ([]byte, []int) {
return file_common_proto_rawDescGZIP(), []int{3}
}
func (x *LogEntry) GetTimestamp() string {
if x != nil {
return x.Timestamp
}
return ""
}
func (x *LogEntry) GetContent() string {
if x != nil {
return x.Content
}
return ""
}
type MachineType struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}
func (x *MachineType) Reset() {
*x = MachineType{}
if protoimpl.UnsafeEnabled {
mi := &file_common_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MachineType) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MachineType) ProtoMessage() {}
func (x *MachineType) ProtoReflect() protoreflect.Message {
mi := &file_common_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MachineType.ProtoReflect.Descriptor instead.
func (*MachineType) Descriptor() ([]byte, []int) {
return file_common_proto_rawDescGZIP(), []int{4}
}
func (x *MachineType) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *MachineType) GetValue() string {
if x != nil {
return x.Value
}
return ""
}
var File_common_proto protoreflect.FileDescriptor
var file_common_proto_rawDesc = []byte{
@@ -205,10 +362,22 @@ var file_common_proto_rawDesc = []byte{
0x73, 0x22, 0x3b, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x4f, 0x70,
0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75,
0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x24,
0x5a, 0x22, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65,
0x70, 0x61, 0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69,
0x2f, 0x67, 0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x42,
0x0a, 0x11, 0x4c, 0x6f, 0x67, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x52, 0x65, 0x73, 0x70, 0x6f,
0x6e, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x0a, 0x6c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x69, 0x65,
0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x6f,
0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x6c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x69,
0x65, 0x73, 0x22, 0x42, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x1c,
0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28,
0x09, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x18, 0x0a, 0x07,
0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63,
0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0x37, 0x0a, 0x0b, 0x4d, 0x61, 0x63, 0x68, 0x69, 0x6e,
0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c,
0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42,
0x24, 0x5a, 0x22, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e,
0x65, 0x70, 0x61, 0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70,
0x69, 0x2f, 0x67, 0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@@ -223,18 +392,22 @@ func file_common_proto_rawDescGZIP() []byte {
return file_common_proto_rawDescData
}
var file_common_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_common_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
var file_common_proto_goTypes = []interface{}{
(*Parameter)(nil), // 0: api.Parameter
(*ParameterOption)(nil), // 1: api.ParameterOption
(*Parameter)(nil), // 0: api.Parameter
(*ParameterOption)(nil), // 1: api.ParameterOption
(*LogStreamResponse)(nil), // 2: api.LogStreamResponse
(*LogEntry)(nil), // 3: api.LogEntry
(*MachineType)(nil), // 4: api.MachineType
}
var file_common_proto_depIdxs = []int32{
1, // 0: api.Parameter.options:type_name -> api.ParameterOption
1, // [1:1] is the sub-list for method output_type
1, // [1:1] is the sub-list for method input_type
1, // [1:1] is the sub-list for extension type_name
1, // [1:1] is the sub-list for extension extendee
0, // [0:1] is the sub-list for field type_name
3, // 1: api.LogStreamResponse.logEntries:type_name -> api.LogEntry
2, // [2:2] is the sub-list for method output_type
2, // [2:2] is the sub-list for method input_type
2, // [2:2] is the sub-list for extension type_name
2, // [2:2] is the sub-list for extension extendee
0, // [0:2] is the sub-list for field type_name
}
func init() { file_common_proto_init() }
@@ -267,6 +440,42 @@ func file_common_proto_init() {
return nil
}
}
file_common_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*LogStreamResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_common_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*LogEntry); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_common_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MachineType); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
@@ -274,7 +483,7 @@ func file_common_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_common_proto_rawDesc,
NumEnums: 0,
NumMessages: 2,
NumMessages: 5,
NumExtensions: 0,
NumServices: 0,
},
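The regenerated common.pb.go adds the LogStreamResponse, LogEntry, and MachineType messages along with nil-safe getters. A short sketch exercising them, assuming the generated types are imported from the api/gen package named in the go_package option (adjust if the module layout differs); the values are illustrative:

// Sketch only: builds the new LogStreamResponse/LogEntry/MachineType messages
// shown above and reads them back through the generated getters.
package main

import (
	"fmt"

	gen "github.com/onepanelio/core/api/gen"
)

func main() {
	stream := &gen.LogStreamResponse{
		LogEntries: []*gen.LogEntry{
			{Timestamp: "2021-07-30T12:00:00Z", Content: "starting workflow"},
		},
	}
	for _, entry := range stream.GetLogEntries() {
		fmt.Println(entry.GetTimestamp(), entry.GetContent())
	}

	machine := &gen.MachineType{Name: "CPU: 2, RAM: 8GB", Value: "cpu-small"}
	fmt.Println(machine.GetName(), machine.GetValue())
}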


@@ -27,6 +27,100 @@ const (
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type GetNamespaceConfigRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
}
func (x *GetNamespaceConfigRequest) Reset() {
*x = GetNamespaceConfigRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_config_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetNamespaceConfigRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetNamespaceConfigRequest) ProtoMessage() {}
func (x *GetNamespaceConfigRequest) ProtoReflect() protoreflect.Message {
mi := &file_config_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetNamespaceConfigRequest.ProtoReflect.Descriptor instead.
func (*GetNamespaceConfigRequest) Descriptor() ([]byte, []int) {
return file_config_proto_rawDescGZIP(), []int{0}
}
func (x *GetNamespaceConfigRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
type GetNamespaceConfigResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Bucket string `protobuf:"bytes,1,opt,name=bucket,proto3" json:"bucket,omitempty"`
}
func (x *GetNamespaceConfigResponse) Reset() {
*x = GetNamespaceConfigResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_config_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetNamespaceConfigResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetNamespaceConfigResponse) ProtoMessage() {}
func (x *GetNamespaceConfigResponse) ProtoReflect() protoreflect.Message {
mi := &file_config_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetNamespaceConfigResponse.ProtoReflect.Descriptor instead.
func (*GetNamespaceConfigResponse) Descriptor() ([]byte, []int) {
return file_config_proto_rawDescGZIP(), []int{1}
}
func (x *GetNamespaceConfigResponse) GetBucket() string {
if x != nil {
return x.Bucket
}
return ""
}
type GetConfigResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
@@ -41,7 +135,7 @@ type GetConfigResponse struct {
func (x *GetConfigResponse) Reset() {
*x = GetConfigResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_config_proto_msgTypes[0]
mi := &file_config_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -54,7 +148,7 @@ func (x *GetConfigResponse) String() string {
func (*GetConfigResponse) ProtoMessage() {}
func (x *GetConfigResponse) ProtoReflect() protoreflect.Message {
mi := &file_config_proto_msgTypes[0]
mi := &file_config_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -67,7 +161,7 @@ func (x *GetConfigResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use GetConfigResponse.ProtoReflect.Descriptor instead.
func (*GetConfigResponse) Descriptor() ([]byte, []int) {
return file_config_proto_rawDescGZIP(), []int{0}
return file_config_proto_rawDescGZIP(), []int{2}
}
func (x *GetConfigResponse) GetApiUrl() string {
@@ -110,7 +204,7 @@ type NodePoolOption struct {
func (x *NodePoolOption) Reset() {
*x = NodePoolOption{}
if protoimpl.UnsafeEnabled {
mi := &file_config_proto_msgTypes[1]
mi := &file_config_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -123,7 +217,7 @@ func (x *NodePoolOption) String() string {
func (*NodePoolOption) ProtoMessage() {}
func (x *NodePoolOption) ProtoReflect() protoreflect.Message {
mi := &file_config_proto_msgTypes[1]
mi := &file_config_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -136,7 +230,7 @@ func (x *NodePoolOption) ProtoReflect() protoreflect.Message {
// Deprecated: Use NodePoolOption.ProtoReflect.Descriptor instead.
func (*NodePoolOption) Descriptor() ([]byte, []int) {
return file_config_proto_rawDescGZIP(), []int{1}
return file_config_proto_rawDescGZIP(), []int{3}
}
func (x *NodePoolOption) GetName() string {
@@ -165,7 +259,7 @@ type NodePool struct {
func (x *NodePool) Reset() {
*x = NodePool{}
if protoimpl.UnsafeEnabled {
mi := &file_config_proto_msgTypes[2]
mi := &file_config_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -178,7 +272,7 @@ func (x *NodePool) String() string {
func (*NodePool) ProtoMessage() {}
func (x *NodePool) ProtoReflect() protoreflect.Message {
mi := &file_config_proto_msgTypes[2]
mi := &file_config_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -191,7 +285,7 @@ func (x *NodePool) ProtoReflect() protoreflect.Message {
// Deprecated: Use NodePool.ProtoReflect.Descriptor instead.
func (*NodePool) Descriptor() ([]byte, []int) {
return file_config_proto_rawDescGZIP(), []int{2}
return file_config_proto_rawDescGZIP(), []int{4}
}
func (x *NodePool) GetLabel() string {
@@ -215,34 +309,49 @@ var file_config_proto_rawDesc = []byte{
0x61, 0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f,
0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x1a, 0x1b, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62,
0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x82,
0x01, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x70, 0x69, 0x55, 0x72, 0x6c, 0x18, 0x01,
0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x70, 0x69, 0x55, 0x72, 0x6c, 0x12, 0x16, 0x0a, 0x06,
0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x6f,
0x6d, 0x61, 0x69, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x71, 0x64, 0x6e, 0x18, 0x03, 0x20, 0x01,
0x28, 0x09, 0x52, 0x04, 0x66, 0x71, 0x64, 0x6e, 0x12, 0x29, 0x0a, 0x08, 0x6e, 0x6f, 0x64, 0x65,
0x50, 0x6f, 0x6f, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x61, 0x70, 0x69,
0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x52, 0x08, 0x6e, 0x6f, 0x64, 0x65, 0x50,
0x6f, 0x6f, 0x6c, 0x22, 0x3a, 0x0a, 0x0e, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x4f,
0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c,
0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22,
0x4f, 0x0a, 0x08, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x6c,
0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65,
0x6c, 0x12, 0x2d, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x03,
0x28, 0x0b, 0x32, 0x13, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x6f, 0x6f,
0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73,
0x32, 0x6a, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63,
0x65, 0x12, 0x59, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x16,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74,
0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1c,
0x82, 0xd3, 0xe4, 0x93, 0x02, 0x16, 0x12, 0x14, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31,
0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x24, 0x5a, 0x22,
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61,
0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67,
0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x75, 0x66, 0x2f, 0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x39,
0x0a, 0x19, 0x47, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43, 0x6f,
0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e,
0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x22, 0x34, 0x0a, 0x1a, 0x47, 0x65, 0x74,
0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52,
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65,
0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x62, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x22,
0x82, 0x01, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x70, 0x69, 0x55, 0x72, 0x6c, 0x18,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x70, 0x69, 0x55, 0x72, 0x6c, 0x12, 0x16, 0x0a,
0x06, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64,
0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x66, 0x71, 0x64, 0x6e, 0x18, 0x03, 0x20,
0x01, 0x28, 0x09, 0x52, 0x04, 0x66, 0x71, 0x64, 0x6e, 0x12, 0x29, 0x0a, 0x08, 0x6e, 0x6f, 0x64,
0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x61, 0x70,
0x69, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x52, 0x08, 0x6e, 0x6f, 0x64, 0x65,
0x50, 0x6f, 0x6f, 0x6c, 0x22, 0x3a, 0x0a, 0x0e, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x6f, 0x6f, 0x6c,
0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01,
0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61,
0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,
0x22, 0x4f, 0x0a, 0x08, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x6f, 0x6f, 0x6c, 0x12, 0x14, 0x0a, 0x05,
0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6c, 0x61, 0x62,
0x65, 0x6c, 0x12, 0x2d, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20,
0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4e, 0x6f, 0x64, 0x65, 0x50, 0x6f,
0x6f, 0x6c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e,
0x73, 0x32, 0xeb, 0x01, 0x0a, 0x0d, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x53, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x12, 0x59, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67,
0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62,
0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47,
0x65, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
0x22, 0x1c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x16, 0x12, 0x14, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x7f,
0x0a, 0x12, 0x47, 0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43, 0x6f,
0x6e, 0x66, 0x69, 0x67, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x4e, 0x61,
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x4e, 0x61,
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x28, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x22, 0x12, 0x20, 0x2f,
0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61,
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42,
0x24, 0x5a, 0x22, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e,
0x65, 0x70, 0x61, 0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70,
0x69, 0x2f, 0x67, 0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@@ -257,20 +366,24 @@ func file_config_proto_rawDescGZIP() []byte {
return file_config_proto_rawDescData
}
var file_config_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
var file_config_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
var file_config_proto_goTypes = []interface{}{
(*GetConfigResponse)(nil), // 0: api.GetConfigResponse
(*NodePoolOption)(nil), // 1: api.NodePoolOption
(*NodePool)(nil), // 2: api.NodePool
(*emptypb.Empty)(nil), // 3: google.protobuf.Empty
(*GetNamespaceConfigRequest)(nil), // 0: api.GetNamespaceConfigRequest
(*GetNamespaceConfigResponse)(nil), // 1: api.GetNamespaceConfigResponse
(*GetConfigResponse)(nil), // 2: api.GetConfigResponse
(*NodePoolOption)(nil), // 3: api.NodePoolOption
(*NodePool)(nil), // 4: api.NodePool
(*emptypb.Empty)(nil), // 5: google.protobuf.Empty
}
var file_config_proto_depIdxs = []int32{
2, // 0: api.GetConfigResponse.nodePool:type_name -> api.NodePool
1, // 1: api.NodePool.options:type_name -> api.NodePoolOption
3, // 2: api.ConfigService.GetConfig:input_type -> google.protobuf.Empty
0, // 3: api.ConfigService.GetConfig:output_type -> api.GetConfigResponse
3, // [3:4] is the sub-list for method output_type
2, // [2:3] is the sub-list for method input_type
4, // 0: api.GetConfigResponse.nodePool:type_name -> api.NodePool
3, // 1: api.NodePool.options:type_name -> api.NodePoolOption
5, // 2: api.ConfigService.GetConfig:input_type -> google.protobuf.Empty
0, // 3: api.ConfigService.GetNamespaceConfig:input_type -> api.GetNamespaceConfigRequest
2, // 4: api.ConfigService.GetConfig:output_type -> api.GetConfigResponse
1, // 5: api.ConfigService.GetNamespaceConfig:output_type -> api.GetNamespaceConfigResponse
4, // [4:6] is the sub-list for method output_type
2, // [2:4] is the sub-list for method input_type
2, // [2:2] is the sub-list for extension type_name
2, // [2:2] is the sub-list for extension extendee
0, // [0:2] is the sub-list for field type_name
@@ -283,7 +396,7 @@ func file_config_proto_init() {
}
if !protoimpl.UnsafeEnabled {
file_config_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetConfigResponse); i {
switch v := v.(*GetNamespaceConfigRequest); i {
case 0:
return &v.state
case 1:
@@ -295,7 +408,7 @@ func file_config_proto_init() {
}
}
file_config_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*NodePoolOption); i {
switch v := v.(*GetNamespaceConfigResponse); i {
case 0:
return &v.state
case 1:
@@ -307,6 +420,30 @@ func file_config_proto_init() {
}
}
file_config_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetConfigResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_config_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*NodePoolOption); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_config_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*NodePool); i {
case 0:
return &v.state
@@ -325,7 +462,7 @@ func file_config_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_config_proto_rawDesc,
NumEnums: 0,
NumMessages: 3,
NumMessages: 5,
NumExtensions: 0,
NumServices: 1,
},
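config.pb.go now carries the GetNamespaceConfigRequest and GetNamespaceConfigResponse messages that back the per-namespace config lookup. A minimal sketch, again assuming the api/gen import path; the bucket value is illustrative:

// Sketch only: constructs the new request/response messages added above and
// reads them back through the generated nil-safe getters.
package main

import (
	"fmt"

	gen "github.com/onepanelio/core/api/gen"
)

func main() {
	req := &gen.GetNamespaceConfigRequest{Namespace: "default"}
	resp := &gen.GetNamespaceConfigResponse{Bucket: "onepanel-default"} // illustrative value

	fmt.Println(req.GetNamespace(), resp.GetBucket())

	// Getters are safe on nil receivers, which is why callers prefer them
	// over direct field access.
	var missing *gen.GetNamespaceConfigResponse
	fmt.Println(missing.GetBucket() == "") // prints true
}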


@@ -50,6 +50,58 @@ func local_request_ConfigService_GetConfig_0(ctx context.Context, marshaler runt
}
func request_ConfigService_GetNamespaceConfig_0(ctx context.Context, marshaler runtime.Marshaler, client ConfigServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetNamespaceConfigRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
msg, err := client.GetNamespaceConfig(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_ConfigService_GetNamespaceConfig_0(ctx context.Context, marshaler runtime.Marshaler, server ConfigServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetNamespaceConfigRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
msg, err := server.GetNamespaceConfig(ctx, &protoReq)
return msg, metadata, err
}
// RegisterConfigServiceHandlerServer registers the http handlers for service ConfigService to "mux".
// UnaryRPC :call ConfigServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
@@ -79,6 +131,29 @@ func RegisterConfigServiceHandlerServer(ctx context.Context, mux *runtime.ServeM
})
mux.Handle("GET", pattern_ConfigService_GetNamespaceConfig_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ConfigService/GetNamespaceConfig")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_ConfigService_GetNamespaceConfig_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_ConfigService_GetNamespaceConfig_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -140,13 +215,37 @@ func RegisterConfigServiceHandlerClient(ctx context.Context, mux *runtime.ServeM
})
mux.Handle("GET", pattern_ConfigService_GetNamespaceConfig_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.ConfigService/GetNamespaceConfig")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_ConfigService_GetNamespaceConfig_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_ConfigService_GetNamespaceConfig_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
var (
pattern_ConfigService_GetConfig_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "config"}, ""))
pattern_ConfigService_GetNamespaceConfig_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3}, []string{"apis", "v1beta1", "namespace", "config"}, ""))
)
var (
forward_ConfigService_GetConfig_0 = runtime.ForwardResponseMessage
forward_ConfigService_GetNamespaceConfig_0 = runtime.ForwardResponseMessage
)
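The gateway registration above maps GetNamespaceConfig onto GET /apis/v1beta1/{namespace}/config (see pattern_ConfigService_GetNamespaceConfig_0) and copies the {namespace} path segment into GetNamespaceConfigRequest.Namespace. A sketch of calling the REST mapping directly; the base URL is a placeholder and authentication is omitted for brevity:

// Sketch only: calls the HTTP route registered above and decodes the
// GetNamespaceConfigResponse body. Host name is a placeholder.
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	resp, err := http.Get("https://onepanel.example.com/apis/v1beta1/default/config")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out struct {
		Bucket string `json:"bucket"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.Bucket)
}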


@@ -19,6 +19,7 @@ const _ = grpc.SupportPackageIsVersion7
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type ConfigServiceClient interface {
GetConfig(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetConfigResponse, error)
GetNamespaceConfig(ctx context.Context, in *GetNamespaceConfigRequest, opts ...grpc.CallOption) (*GetNamespaceConfigResponse, error)
}
type configServiceClient struct {
@@ -38,11 +39,21 @@ func (c *configServiceClient) GetConfig(ctx context.Context, in *emptypb.Empty,
return out, nil
}
func (c *configServiceClient) GetNamespaceConfig(ctx context.Context, in *GetNamespaceConfigRequest, opts ...grpc.CallOption) (*GetNamespaceConfigResponse, error) {
out := new(GetNamespaceConfigResponse)
err := c.cc.Invoke(ctx, "/api.ConfigService/GetNamespaceConfig", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// ConfigServiceServer is the server API for ConfigService service.
// All implementations must embed UnimplementedConfigServiceServer
// for forward compatibility
type ConfigServiceServer interface {
GetConfig(context.Context, *emptypb.Empty) (*GetConfigResponse, error)
GetNamespaceConfig(context.Context, *GetNamespaceConfigRequest) (*GetNamespaceConfigResponse, error)
mustEmbedUnimplementedConfigServiceServer()
}
@@ -53,6 +64,9 @@ type UnimplementedConfigServiceServer struct {
func (UnimplementedConfigServiceServer) GetConfig(context.Context, *emptypb.Empty) (*GetConfigResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetConfig not implemented")
}
func (UnimplementedConfigServiceServer) GetNamespaceConfig(context.Context, *GetNamespaceConfigRequest) (*GetNamespaceConfigResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetNamespaceConfig not implemented")
}
func (UnimplementedConfigServiceServer) mustEmbedUnimplementedConfigServiceServer() {}
// UnsafeConfigServiceServer may be embedded to opt out of forward compatibility for this service.
@@ -84,6 +98,24 @@ func _ConfigService_GetConfig_Handler(srv interface{}, ctx context.Context, dec
return interceptor(ctx, in, info, handler)
}
func _ConfigService_GetNamespaceConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetNamespaceConfigRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ConfigServiceServer).GetNamespaceConfig(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.ConfigService/GetNamespaceConfig",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ConfigServiceServer).GetNamespaceConfig(ctx, req.(*GetNamespaceConfigRequest))
}
return interceptor(ctx, in, info, handler)
}
var _ConfigService_serviceDesc = grpc.ServiceDesc{
ServiceName: "api.ConfigService",
HandlerType: (*ConfigServiceServer)(nil),
@@ -92,6 +124,10 @@ var _ConfigService_serviceDesc = grpc.ServiceDesc{
MethodName: "GetConfig",
Handler: _ConfigService_GetConfig_Handler,
},
{
MethodName: "GetNamespaceConfig",
Handler: _ConfigService_GetNamespaceConfig_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "config.proto",

api/gen/files.pb.go Normal file

@@ -0,0 +1,580 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.14.0
// source: files.proto
package gen
import (
proto "github.com/golang/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type File struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
Extension string `protobuf:"bytes,3,opt,name=extension,proto3" json:"extension,omitempty"`
Size int64 `protobuf:"varint,4,opt,name=size,proto3" json:"size,omitempty"`
ContentType string `protobuf:"bytes,5,opt,name=contentType,proto3" json:"contentType,omitempty"`
LastModified string `protobuf:"bytes,6,opt,name=lastModified,proto3" json:"lastModified,omitempty"`
Directory bool `protobuf:"varint,7,opt,name=directory,proto3" json:"directory,omitempty"`
}
func (x *File) Reset() {
*x = File{}
if protoimpl.UnsafeEnabled {
mi := &file_files_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *File) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*File) ProtoMessage() {}
func (x *File) ProtoReflect() protoreflect.Message {
mi := &file_files_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use File.ProtoReflect.Descriptor instead.
func (*File) Descriptor() ([]byte, []int) {
return file_files_proto_rawDescGZIP(), []int{0}
}
func (x *File) GetPath() string {
if x != nil {
return x.Path
}
return ""
}
func (x *File) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *File) GetExtension() string {
if x != nil {
return x.Extension
}
return ""
}
func (x *File) GetSize() int64 {
if x != nil {
return x.Size
}
return 0
}
func (x *File) GetContentType() string {
if x != nil {
return x.ContentType
}
return ""
}
func (x *File) GetLastModified() string {
if x != nil {
return x.LastModified
}
return ""
}
func (x *File) GetDirectory() bool {
if x != nil {
return x.Directory
}
return false
}
type ListFilesRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"`
Page int32 `protobuf:"varint,3,opt,name=page,proto3" json:"page,omitempty"`
PerPage int32 `protobuf:"varint,4,opt,name=perPage,proto3" json:"perPage,omitempty"`
}
func (x *ListFilesRequest) Reset() {
*x = ListFilesRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_files_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListFilesRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListFilesRequest) ProtoMessage() {}
func (x *ListFilesRequest) ProtoReflect() protoreflect.Message {
mi := &file_files_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListFilesRequest.ProtoReflect.Descriptor instead.
func (*ListFilesRequest) Descriptor() ([]byte, []int) {
return file_files_proto_rawDescGZIP(), []int{1}
}
func (x *ListFilesRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
func (x *ListFilesRequest) GetPath() string {
if x != nil {
return x.Path
}
return ""
}
func (x *ListFilesRequest) GetPage() int32 {
if x != nil {
return x.Page
}
return 0
}
func (x *ListFilesRequest) GetPerPage() int32 {
if x != nil {
return x.PerPage
}
return 0
}
type ListFilesResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Count int32 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"`
TotalCount int32 `protobuf:"varint,2,opt,name=totalCount,proto3" json:"totalCount,omitempty"`
Page int32 `protobuf:"varint,3,opt,name=page,proto3" json:"page,omitempty"`
Pages int32 `protobuf:"varint,4,opt,name=pages,proto3" json:"pages,omitempty"`
Files []*File `protobuf:"bytes,5,rep,name=files,proto3" json:"files,omitempty"`
ParentPath string `protobuf:"bytes,6,opt,name=parentPath,proto3" json:"parentPath,omitempty"`
}
func (x *ListFilesResponse) Reset() {
*x = ListFilesResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_files_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListFilesResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListFilesResponse) ProtoMessage() {}
func (x *ListFilesResponse) ProtoReflect() protoreflect.Message {
mi := &file_files_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListFilesResponse.ProtoReflect.Descriptor instead.
func (*ListFilesResponse) Descriptor() ([]byte, []int) {
return file_files_proto_rawDescGZIP(), []int{2}
}
func (x *ListFilesResponse) GetCount() int32 {
if x != nil {
return x.Count
}
return 0
}
func (x *ListFilesResponse) GetTotalCount() int32 {
if x != nil {
return x.TotalCount
}
return 0
}
func (x *ListFilesResponse) GetPage() int32 {
if x != nil {
return x.Page
}
return 0
}
func (x *ListFilesResponse) GetPages() int32 {
if x != nil {
return x.Pages
}
return 0
}
func (x *ListFilesResponse) GetFiles() []*File {
if x != nil {
return x.Files
}
return nil
}
func (x *ListFilesResponse) GetParentPath() string {
if x != nil {
return x.ParentPath
}
return ""
}
type GetObjectPresignedUrlRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"`
}
func (x *GetObjectPresignedUrlRequest) Reset() {
*x = GetObjectPresignedUrlRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_files_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetObjectPresignedUrlRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetObjectPresignedUrlRequest) ProtoMessage() {}
func (x *GetObjectPresignedUrlRequest) ProtoReflect() protoreflect.Message {
mi := &file_files_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetObjectPresignedUrlRequest.ProtoReflect.Descriptor instead.
func (*GetObjectPresignedUrlRequest) Descriptor() ([]byte, []int) {
return file_files_proto_rawDescGZIP(), []int{3}
}
func (x *GetObjectPresignedUrlRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
func (x *GetObjectPresignedUrlRequest) GetKey() string {
if x != nil {
return x.Key
}
return ""
}
type GetPresignedUrlResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"`
Size int64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"`
}
func (x *GetPresignedUrlResponse) Reset() {
*x = GetPresignedUrlResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_files_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetPresignedUrlResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetPresignedUrlResponse) ProtoMessage() {}
func (x *GetPresignedUrlResponse) ProtoReflect() protoreflect.Message {
mi := &file_files_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetPresignedUrlResponse.ProtoReflect.Descriptor instead.
func (*GetPresignedUrlResponse) Descriptor() ([]byte, []int) {
return file_files_proto_rawDescGZIP(), []int{4}
}
func (x *GetPresignedUrlResponse) GetUrl() string {
if x != nil {
return x.Url
}
return ""
}
func (x *GetPresignedUrlResponse) GetSize() int64 {
if x != nil {
return x.Size
}
return 0
}
var File_files_proto protoreflect.FileDescriptor
var file_files_proto_rawDesc = []byte{
0x0a, 0x0b, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61,
0x70, 0x69, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61,
0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x22, 0xc4, 0x01, 0x0a, 0x04, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74,
0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a,
0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
0x65, 0x12, 0x1c, 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03,
0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12,
0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73,
0x69, 0x7a, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x54, 0x79,
0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e,
0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x6c, 0x61, 0x73, 0x74, 0x4d, 0x6f, 0x64,
0x69, 0x66, 0x69, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6c, 0x61, 0x73,
0x74, 0x4d, 0x6f, 0x64, 0x69, 0x66, 0x69, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72,
0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x64, 0x69,
0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x22, 0x72, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x46,
0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e,
0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74,
0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a,
0x04, 0x70, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67,
0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x65, 0x72, 0x50, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01,
0x28, 0x05, 0x52, 0x07, 0x70, 0x65, 0x72, 0x50, 0x61, 0x67, 0x65, 0x22, 0xb4, 0x01, 0x0a, 0x11,
0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05,
0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c,
0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x6f, 0x74,
0x61, 0x6c, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67, 0x65, 0x18,
0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x70,
0x61, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x70, 0x61, 0x67, 0x65,
0x73, 0x12, 0x1f, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b,
0x32, 0x09, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x52, 0x05, 0x66, 0x69, 0x6c,
0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x50, 0x61, 0x74, 0x68,
0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x50, 0x61,
0x74, 0x68, 0x22, 0x4e, 0x0a, 0x1c, 0x47, 0x65, 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x50,
0x72, 0x65, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x55, 0x72, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65,
0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b,
0x65, 0x79, 0x22, 0x3f, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x50, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e,
0x65, 0x64, 0x55, 0x72, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x10, 0x0a,
0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12,
0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73,
0x69, 0x7a, 0x65, 0x32, 0xa4, 0x02, 0x0a, 0x0b, 0x46, 0x69, 0x6c, 0x65, 0x53, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x12, 0xa0, 0x01, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x4f, 0x62, 0x6a, 0x65, 0x63,
0x74, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x50, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e,
0x65, 0x64, 0x55, 0x52, 0x4c, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x4f,
0x62, 0x6a, 0x65, 0x63, 0x74, 0x50, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x55, 0x72,
0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47,
0x65, 0x74, 0x50, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x55, 0x72, 0x6c, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x3e, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x38, 0x12, 0x36,
0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e,
0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2f,
0x70, 0x72, 0x65, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x2d, 0x75, 0x72, 0x6c, 0x2f, 0x7b, 0x6b,
0x65, 0x79, 0x3d, 0x2a, 0x2a, 0x7d, 0x12, 0x72, 0x0a, 0x09, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69,
0x6c, 0x65, 0x73, 0x12, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69,
0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69,
0x2e, 0x4c, 0x69, 0x73, 0x74, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x73, 0x65, 0x22, 0x36, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, 0x12, 0x2e, 0x2f, 0x61, 0x70, 0x69,
0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73,
0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2f, 0x6c, 0x69, 0x73, 0x74,
0x2f, 0x7b, 0x70, 0x61, 0x74, 0x68, 0x3d, 0x2a, 0x2a, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69,
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65,
0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e,
0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_files_proto_rawDescOnce sync.Once
file_files_proto_rawDescData = file_files_proto_rawDesc
)
func file_files_proto_rawDescGZIP() []byte {
file_files_proto_rawDescOnce.Do(func() {
file_files_proto_rawDescData = protoimpl.X.CompressGZIP(file_files_proto_rawDescData)
})
return file_files_proto_rawDescData
}
var file_files_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
var file_files_proto_goTypes = []interface{}{
(*File)(nil), // 0: api.File
(*ListFilesRequest)(nil), // 1: api.ListFilesRequest
(*ListFilesResponse)(nil), // 2: api.ListFilesResponse
(*GetObjectPresignedUrlRequest)(nil), // 3: api.GetObjectPresignedUrlRequest
(*GetPresignedUrlResponse)(nil), // 4: api.GetPresignedUrlResponse
}
var file_files_proto_depIdxs = []int32{
0, // 0: api.ListFilesResponse.files:type_name -> api.File
3, // 1: api.FileService.GetObjectDownloadPresignedURL:input_type -> api.GetObjectPresignedUrlRequest
1, // 2: api.FileService.ListFiles:input_type -> api.ListFilesRequest
4, // 3: api.FileService.GetObjectDownloadPresignedURL:output_type -> api.GetPresignedUrlResponse
2, // 4: api.FileService.ListFiles:output_type -> api.ListFilesResponse
3, // [3:5] is the sub-list for method output_type
1, // [1:3] is the sub-list for method input_type
1, // [1:1] is the sub-list for extension type_name
1, // [1:1] is the sub-list for extension extendee
0, // [0:1] is the sub-list for field type_name
}
func init() { file_files_proto_init() }
func file_files_proto_init() {
if File_files_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_files_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*File); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_files_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListFilesRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_files_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListFilesResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_files_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetObjectPresignedUrlRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_files_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetPresignedUrlResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_files_proto_rawDesc,
NumEnums: 0,
NumMessages: 5,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_files_proto_goTypes,
DependencyIndexes: file_files_proto_depIdxs,
MessageInfos: file_files_proto_msgTypes,
}.Build()
File_files_proto = out.File
file_files_proto_rawDesc = nil
file_files_proto_goTypes = nil
file_files_proto_depIdxs = nil
}
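files.pb.go introduces the File message plus the paginated ListFilesRequest/ListFilesResponse pair and the presigned-URL messages. A sketch assembling a paginated response from the generated types, assuming the api/gen import path; the file entries and page size are illustrative:

// Sketch only: fills the new ListFilesResponse/File messages shown above.
// The page arithmetic mirrors the count/totalCount/page/pages fields.
package main

import (
	"fmt"

	gen "github.com/onepanelio/core/api/gen"
)

func main() {
	files := []*gen.File{
		{Path: "data/train.csv", Name: "train.csv", Extension: "csv", Size: 1024},
		{Path: "data/models", Name: "models", Directory: true},
	}

	perPage := int32(15)
	totalCount := int32(len(files))
	pages := (totalCount + perPage - 1) / perPage // ceiling division

	resp := &gen.ListFilesResponse{
		Count:      int32(len(files)),
		TotalCount: totalCount,
		Page:       1,
		Pages:      pages,
		Files:      files,
		ParentPath: "data",
	}
	fmt.Println(resp.GetPages(), len(resp.GetFiles()))
}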

api/gen/files.pb.gw.go Normal file

@@ -0,0 +1,342 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: files.proto
/*
Package gen is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package gen
import (
"context"
"io"
"net/http"
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
"github.com/grpc-ecosystem/grpc-gateway/v2/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/proto"
)
// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = metadata.Join
func request_FileService_GetObjectDownloadPresignedURL_0(ctx context.Context, marshaler runtime.Marshaler, client FileServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetObjectPresignedUrlRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["key"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "key")
}
protoReq.Key, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "key", err)
}
msg, err := client.GetObjectDownloadPresignedURL(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_FileService_GetObjectDownloadPresignedURL_0(ctx context.Context, marshaler runtime.Marshaler, server FileServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetObjectPresignedUrlRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["key"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "key")
}
protoReq.Key, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "key", err)
}
msg, err := server.GetObjectDownloadPresignedURL(ctx, &protoReq)
return msg, metadata, err
}
var (
filter_FileService_ListFiles_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0, "path": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}}
)
func request_FileService_ListFiles_0(ctx context.Context, marshaler runtime.Marshaler, client FileServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListFilesRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["path"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "path")
}
protoReq.Path, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "path", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FileService_ListFiles_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.ListFiles(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_FileService_ListFiles_0(ctx context.Context, marshaler runtime.Marshaler, server FileServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListFilesRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["path"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "path")
}
protoReq.Path, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "path", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_FileService_ListFiles_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.ListFiles(ctx, &protoReq)
return msg, metadata, err
}
// RegisterFileServiceHandlerServer registers the http handlers for service FileService to "mux".
// UnaryRPC: calls FileServiceServer directly.
// StreamingRPC: currently unsupported, pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option causes many gRPC library features to stop working. Consider using RegisterFileServiceHandlerFromEndpoint instead.
func RegisterFileServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server FileServiceServer) error {
mux.Handle("GET", pattern_FileService_GetObjectDownloadPresignedURL_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.FileService/GetObjectDownloadPresignedURL")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_FileService_GetObjectDownloadPresignedURL_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_FileService_GetObjectDownloadPresignedURL_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_FileService_ListFiles_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.FileService/ListFiles")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_FileService_ListFiles_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_FileService_ListFiles_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
// RegisterFileServiceHandlerFromEndpoint is the same as RegisterFileServiceHandler but
// automatically dials "endpoint" and closes the connection when "ctx" is done.
func RegisterFileServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.Dial(endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
return RegisterFileServiceHandler(ctx, mux, conn)
}
// RegisterFileServiceHandler registers the http handlers for service FileService to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterFileServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
return RegisterFileServiceHandlerClient(ctx, mux, NewFileServiceClient(conn))
}
// RegisterFileServiceHandlerClient registers the http handlers for service FileService
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "FileServiceClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed-in "FileServiceClient"
// doesn't go through the normal gRPC flow (creating a gRPC client, etc.), then it is up to that
// "FileServiceClient" to call the correct interceptors.
func RegisterFileServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client FileServiceClient) error {
mux.Handle("GET", pattern_FileService_GetObjectDownloadPresignedURL_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.FileService/GetObjectDownloadPresignedURL")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_FileService_GetObjectDownloadPresignedURL_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_FileService_GetObjectDownloadPresignedURL_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_FileService_ListFiles_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.FileService/ListFiles")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_FileService_ListFiles_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_FileService_ListFiles_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
var (
pattern_FileService_GetObjectDownloadPresignedURL_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 3, 0, 4, 1, 5, 5}, []string{"apis", "v1beta1", "namespace", "files", "presigned-url", "key"}, ""))
pattern_FileService_ListFiles_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 3, 0, 4, 1, 5, 5}, []string{"apis", "v1beta1", "namespace", "files", "list", "path"}, ""))
)
var (
forward_FileService_GetObjectDownloadPresignedURL_0 = runtime.ForwardResponseMessage
forward_FileService_ListFiles_0 = runtime.ForwardResponseMessage
)
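Taken together, the patterns above expose FileService over REST as GET /apis/v1beta1/{namespace}/files/presigned-url/{key} and GET /apis/v1beta1/{namespace}/files/list/{path}. A minimal way to serve those routes is to point the gateway at a running gRPC server; the sketch below is illustrative only, and the addresses (gRPC on localhost:8887, HTTP on :8080) are assumptions rather than values taken from this change.

package main

import (
    "context"
    "log"
    "net/http"

    "github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
    "google.golang.org/grpc"

    gen "github.com/onepanelio/core/api/gen"
)

func main() {
    ctx, cancel := context.WithCancel(context.Background())
    defer cancel()

    // Gateway mux that translates REST calls into FileService gRPC calls.
    mux := runtime.NewServeMux()
    opts := []grpc.DialOption{grpc.WithInsecure()} // illustrative; use real credentials in deployments
    if err := gen.RegisterFileServiceHandlerFromEndpoint(ctx, mux, "localhost:8887", opts); err != nil {
        log.Fatalf("failed to register FileService gateway: %v", err)
    }

    // GET /apis/v1beta1/{namespace}/files/presigned-url/{key} and
    // GET /apis/v1beta1/{namespace}/files/list/{path} are now served here.
    log.Fatal(http.ListenAndServe(":8080", mux))
}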

api/gen/files_grpc.pb.go Normal file

@@ -0,0 +1,133 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
package gen
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion7
// FileServiceClient is the client API for FileService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type FileServiceClient interface {
GetObjectDownloadPresignedURL(ctx context.Context, in *GetObjectPresignedUrlRequest, opts ...grpc.CallOption) (*GetPresignedUrlResponse, error)
ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error)
}
type fileServiceClient struct {
cc grpc.ClientConnInterface
}
func NewFileServiceClient(cc grpc.ClientConnInterface) FileServiceClient {
return &fileServiceClient{cc}
}
func (c *fileServiceClient) GetObjectDownloadPresignedURL(ctx context.Context, in *GetObjectPresignedUrlRequest, opts ...grpc.CallOption) (*GetPresignedUrlResponse, error) {
out := new(GetPresignedUrlResponse)
err := c.cc.Invoke(ctx, "/api.FileService/GetObjectDownloadPresignedURL", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *fileServiceClient) ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error) {
out := new(ListFilesResponse)
err := c.cc.Invoke(ctx, "/api.FileService/ListFiles", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
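Each generated client method is a single Invoke call over the supplied grpc.ClientConnInterface. A caller might use it roughly as follows; the dial target, namespace, and path are placeholders, not values from this change.

package main

import (
    "context"
    "log"
    "time"

    "google.golang.org/grpc"

    gen "github.com/onepanelio/core/api/gen"
)

func main() {
    conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure()) // illustrative address and credentials
    if err != nil {
        log.Fatalf("dial: %v", err)
    }
    defer conn.Close()

    client := gen.NewFileServiceClient(conn)

    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    // List files under a path in a namespace; both values are placeholders.
    resp, err := client.ListFiles(ctx, &gen.ListFilesRequest{Namespace: "default", Path: "data/"})
    if err != nil {
        log.Fatalf("ListFiles: %v", err)
    }
    log.Printf("ListFiles response: %+v", resp)
}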
// FileServiceServer is the server API for FileService service.
// All implementations must embed UnimplementedFileServiceServer
// for forward compatibility
type FileServiceServer interface {
GetObjectDownloadPresignedURL(context.Context, *GetObjectPresignedUrlRequest) (*GetPresignedUrlResponse, error)
ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error)
mustEmbedUnimplementedFileServiceServer()
}
// UnimplementedFileServiceServer must be embedded to have forward compatible implementations.
type UnimplementedFileServiceServer struct {
}
func (UnimplementedFileServiceServer) GetObjectDownloadPresignedURL(context.Context, *GetObjectPresignedUrlRequest) (*GetPresignedUrlResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetObjectDownloadPresignedURL not implemented")
}
func (UnimplementedFileServiceServer) ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListFiles not implemented")
}
func (UnimplementedFileServiceServer) mustEmbedUnimplementedFileServiceServer() {}
// UnsafeFileServiceServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, because methods added to FileServiceServer will
// result in compilation errors.
type UnsafeFileServiceServer interface {
mustEmbedUnimplementedFileServiceServer()
}
func RegisterFileServiceServer(s grpc.ServiceRegistrar, srv FileServiceServer) {
s.RegisterService(&_FileService_serviceDesc, srv)
}
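On the server side, an implementation embeds UnimplementedFileServiceServer, so that methods added to the service later do not break the build, and overrides the methods it supports. A minimal sketch with a placeholder listen address and an empty ListFiles result:

package main

import (
    "context"
    "log"
    "net"

    "google.golang.org/grpc"

    gen "github.com/onepanelio/core/api/gen"
)

// fileServer implements gen.FileServiceServer.
type fileServer struct {
    gen.UnimplementedFileServiceServer
}

// ListFiles returns an empty response; a real implementation would read from object storage.
func (s *fileServer) ListFiles(ctx context.Context, req *gen.ListFilesRequest) (*gen.ListFilesResponse, error) {
    return &gen.ListFilesResponse{}, nil
}

func main() {
    lis, err := net.Listen("tcp", ":8887") // illustrative port
    if err != nil {
        log.Fatalf("listen: %v", err)
    }
    srv := grpc.NewServer()
    gen.RegisterFileServiceServer(srv, &fileServer{})
    log.Fatal(srv.Serve(lis))
}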
func _FileService_GetObjectDownloadPresignedURL_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetObjectPresignedUrlRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(FileServiceServer).GetObjectDownloadPresignedURL(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.FileService/GetObjectDownloadPresignedURL",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(FileServiceServer).GetObjectDownloadPresignedURL(ctx, req.(*GetObjectPresignedUrlRequest))
}
return interceptor(ctx, in, info, handler)
}
func _FileService_ListFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListFilesRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(FileServiceServer).ListFiles(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.FileService/ListFiles",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(FileServiceServer).ListFiles(ctx, req.(*ListFilesRequest))
}
return interceptor(ctx, in, info, handler)
}
var _FileService_serviceDesc = grpc.ServiceDesc{
ServiceName: "api.FileService",
HandlerType: (*FileServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GetObjectDownloadPresignedURL",
Handler: _FileService_GetObjectDownloadPresignedURL_Handler,
},
{
MethodName: "ListFiles",
Handler: _FileService_ListFiles_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "files.proto",
}


@@ -0,0 +1,999 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.14.0
// source: inference_service.proto
package gen
import (
proto "github.com/golang/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
emptypb "google.golang.org/protobuf/types/known/emptypb"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type InferenceServiceIdentifier struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
}
func (x *InferenceServiceIdentifier) Reset() {
*x = InferenceServiceIdentifier{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *InferenceServiceIdentifier) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*InferenceServiceIdentifier) ProtoMessage() {}
func (x *InferenceServiceIdentifier) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use InferenceServiceIdentifier.ProtoReflect.Descriptor instead.
func (*InferenceServiceIdentifier) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{0}
}
func (x *InferenceServiceIdentifier) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
func (x *InferenceServiceIdentifier) GetName() string {
if x != nil {
return x.Name
}
return ""
}
type Env struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}
func (x *Env) Reset() {
*x = Env{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Env) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Env) ProtoMessage() {}
func (x *Env) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Env.ProtoReflect.Descriptor instead.
func (*Env) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{1}
}
func (x *Env) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *Env) GetValue() string {
if x != nil {
return x.Value
}
return ""
}
type Container struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"`
Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
Env []*Env `protobuf:"bytes,3,rep,name=env,proto3" json:"env,omitempty"`
}
func (x *Container) Reset() {
*x = Container{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Container) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Container) ProtoMessage() {}
func (x *Container) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Container.ProtoReflect.Descriptor instead.
func (*Container) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{2}
}
func (x *Container) GetImage() string {
if x != nil {
return x.Image
}
return ""
}
func (x *Container) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *Container) GetEnv() []*Env {
if x != nil {
return x.Env
}
return nil
}
type InferenceServiceTransformer struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Containers []*Container `protobuf:"bytes,1,rep,name=containers,proto3" json:"containers,omitempty"`
MinCpu string `protobuf:"bytes,2,opt,name=minCpu,proto3" json:"minCpu,omitempty"`
MinMemory string `protobuf:"bytes,3,opt,name=minMemory,proto3" json:"minMemory,omitempty"`
MaxCpu string `protobuf:"bytes,4,opt,name=maxCpu,proto3" json:"maxCpu,omitempty"`
MaxMemory string `protobuf:"bytes,5,opt,name=maxMemory,proto3" json:"maxMemory,omitempty"`
}
func (x *InferenceServiceTransformer) Reset() {
*x = InferenceServiceTransformer{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *InferenceServiceTransformer) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*InferenceServiceTransformer) ProtoMessage() {}
func (x *InferenceServiceTransformer) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use InferenceServiceTransformer.ProtoReflect.Descriptor instead.
func (*InferenceServiceTransformer) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{3}
}
func (x *InferenceServiceTransformer) GetContainers() []*Container {
if x != nil {
return x.Containers
}
return nil
}
func (x *InferenceServiceTransformer) GetMinCpu() string {
if x != nil {
return x.MinCpu
}
return ""
}
func (x *InferenceServiceTransformer) GetMinMemory() string {
if x != nil {
return x.MinMemory
}
return ""
}
func (x *InferenceServiceTransformer) GetMaxCpu() string {
if x != nil {
return x.MaxCpu
}
return ""
}
func (x *InferenceServiceTransformer) GetMaxMemory() string {
if x != nil {
return x.MaxMemory
}
return ""
}
type InferenceServicePredictor struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
RuntimeVersion string `protobuf:"bytes,2,opt,name=runtimeVersion,proto3" json:"runtimeVersion,omitempty"`
StorageUri string `protobuf:"bytes,3,opt,name=storageUri,proto3" json:"storageUri,omitempty"`
NodeSelector string `protobuf:"bytes,4,opt,name=nodeSelector,proto3" json:"nodeSelector,omitempty"`
MinCpu string `protobuf:"bytes,5,opt,name=minCpu,proto3" json:"minCpu,omitempty"`
MinMemory string `protobuf:"bytes,6,opt,name=minMemory,proto3" json:"minMemory,omitempty"`
MaxCpu string `protobuf:"bytes,7,opt,name=maxCpu,proto3" json:"maxCpu,omitempty"`
MaxMemory string `protobuf:"bytes,8,opt,name=maxMemory,proto3" json:"maxMemory,omitempty"`
}
func (x *InferenceServicePredictor) Reset() {
*x = InferenceServicePredictor{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *InferenceServicePredictor) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*InferenceServicePredictor) ProtoMessage() {}
func (x *InferenceServicePredictor) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use InferenceServicePredictor.ProtoReflect.Descriptor instead.
func (*InferenceServicePredictor) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{4}
}
func (x *InferenceServicePredictor) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *InferenceServicePredictor) GetRuntimeVersion() string {
if x != nil {
return x.RuntimeVersion
}
return ""
}
func (x *InferenceServicePredictor) GetStorageUri() string {
if x != nil {
return x.StorageUri
}
return ""
}
func (x *InferenceServicePredictor) GetNodeSelector() string {
if x != nil {
return x.NodeSelector
}
return ""
}
func (x *InferenceServicePredictor) GetMinCpu() string {
if x != nil {
return x.MinCpu
}
return ""
}
func (x *InferenceServicePredictor) GetMinMemory() string {
if x != nil {
return x.MinMemory
}
return ""
}
func (x *InferenceServicePredictor) GetMaxCpu() string {
if x != nil {
return x.MaxCpu
}
return ""
}
func (x *InferenceServicePredictor) GetMaxMemory() string {
if x != nil {
return x.MaxMemory
}
return ""
}
type CreateInferenceServiceRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
DefaultTransformerImage string `protobuf:"bytes,3,opt,name=defaultTransformerImage,proto3" json:"defaultTransformerImage,omitempty"`
Predictor *InferenceServicePredictor `protobuf:"bytes,4,opt,name=predictor,proto3" json:"predictor,omitempty"`
Transformer *InferenceServiceTransformer `protobuf:"bytes,5,opt,name=transformer,proto3" json:"transformer,omitempty"`
}
func (x *CreateInferenceServiceRequest) Reset() {
*x = CreateInferenceServiceRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CreateInferenceServiceRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CreateInferenceServiceRequest) ProtoMessage() {}
func (x *CreateInferenceServiceRequest) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CreateInferenceServiceRequest.ProtoReflect.Descriptor instead.
func (*CreateInferenceServiceRequest) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{5}
}
func (x *CreateInferenceServiceRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
func (x *CreateInferenceServiceRequest) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *CreateInferenceServiceRequest) GetDefaultTransformerImage() string {
if x != nil {
return x.DefaultTransformerImage
}
return ""
}
func (x *CreateInferenceServiceRequest) GetPredictor() *InferenceServicePredictor {
if x != nil {
return x.Predictor
}
return nil
}
func (x *CreateInferenceServiceRequest) GetTransformer() *InferenceServiceTransformer {
if x != nil {
return x.Transformer
}
return nil
}
type DeployModelResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Status string `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"`
}
func (x *DeployModelResponse) Reset() {
*x = DeployModelResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *DeployModelResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*DeployModelResponse) ProtoMessage() {}
func (x *DeployModelResponse) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use DeployModelResponse.ProtoReflect.Descriptor instead.
func (*DeployModelResponse) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{6}
}
func (x *DeployModelResponse) GetStatus() string {
if x != nil {
return x.Status
}
return ""
}
type InferenceServiceCondition struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
LastTransitionTime string `protobuf:"bytes,1,opt,name=lastTransitionTime,proto3" json:"lastTransitionTime,omitempty"`
Status string `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"`
Type string `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"`
}
func (x *InferenceServiceCondition) Reset() {
*x = InferenceServiceCondition{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *InferenceServiceCondition) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*InferenceServiceCondition) ProtoMessage() {}
func (x *InferenceServiceCondition) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use InferenceServiceCondition.ProtoReflect.Descriptor instead.
func (*InferenceServiceCondition) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{7}
}
func (x *InferenceServiceCondition) GetLastTransitionTime() string {
if x != nil {
return x.LastTransitionTime
}
return ""
}
func (x *InferenceServiceCondition) GetStatus() string {
if x != nil {
return x.Status
}
return ""
}
func (x *InferenceServiceCondition) GetType() string {
if x != nil {
return x.Type
}
return ""
}
type GetInferenceServiceResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Ready bool `protobuf:"varint,1,opt,name=ready,proto3" json:"ready,omitempty"`
Conditions []*InferenceServiceCondition `protobuf:"bytes,2,rep,name=conditions,proto3" json:"conditions,omitempty"`
PredictUrl string `protobuf:"bytes,3,opt,name=predictUrl,proto3" json:"predictUrl,omitempty"`
}
func (x *GetInferenceServiceResponse) Reset() {
*x = GetInferenceServiceResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[8]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GetInferenceServiceResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetInferenceServiceResponse) ProtoMessage() {}
func (x *GetInferenceServiceResponse) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[8]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetInferenceServiceResponse.ProtoReflect.Descriptor instead.
func (*GetInferenceServiceResponse) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{8}
}
func (x *GetInferenceServiceResponse) GetReady() bool {
if x != nil {
return x.Ready
}
return false
}
func (x *GetInferenceServiceResponse) GetConditions() []*InferenceServiceCondition {
if x != nil {
return x.Conditions
}
return nil
}
func (x *GetInferenceServiceResponse) GetPredictUrl() string {
if x != nil {
return x.PredictUrl
}
return ""
}
type InferenceServiceEndpoints struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Predict string `protobuf:"bytes,1,opt,name=predict,proto3" json:"predict,omitempty"`
}
func (x *InferenceServiceEndpoints) Reset() {
*x = InferenceServiceEndpoints{}
if protoimpl.UnsafeEnabled {
mi := &file_inference_service_proto_msgTypes[9]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *InferenceServiceEndpoints) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*InferenceServiceEndpoints) ProtoMessage() {}
func (x *InferenceServiceEndpoints) ProtoReflect() protoreflect.Message {
mi := &file_inference_service_proto_msgTypes[9]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use InferenceServiceEndpoints.ProtoReflect.Descriptor instead.
func (*InferenceServiceEndpoints) Descriptor() ([]byte, []int) {
return file_inference_service_proto_rawDescGZIP(), []int{9}
}
func (x *InferenceServiceEndpoints) GetPredict() string {
if x != nil {
return x.Predict
}
return ""
}
var File_inference_service_proto protoreflect.FileDescriptor
var file_inference_service_proto_rawDesc = []byte{
0x0a, 0x17, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x5f, 0x73, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x61, 0x70, 0x69, 0x1a, 0x1c,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x65, 0x6d,
0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4e, 0x0a, 0x1a, 0x49, 0x6e, 0x66,
0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65,
0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73,
0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65,
0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20,
0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x2f, 0x0a, 0x03, 0x45, 0x6e, 0x76,
0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x51, 0x0a, 0x09, 0x43, 0x6f,
0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x69, 0x6d, 0x61, 0x67, 0x65, 0x12, 0x12, 0x0a,
0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
0x65, 0x12, 0x1a, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x08,
0x2e, 0x61, 0x70, 0x69, 0x2e, 0x45, 0x6e, 0x76, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x22, 0xb9, 0x01,
0x0a, 0x1b, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69,
0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x12, 0x2e, 0x0a,
0x0a, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28,
0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65,
0x72, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x12, 0x16, 0x0a,
0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70, 0x75, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d,
0x69, 0x6e, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f,
0x72, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d,
0x6f, 0x72, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x18, 0x04, 0x20,
0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d,
0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x22, 0x87, 0x02, 0x0a, 0x19, 0x49, 0x6e,
0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72,
0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x72,
0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20,
0x01, 0x28, 0x09, 0x52, 0x0e, 0x72, 0x75, 0x6e, 0x74, 0x69, 0x6d, 0x65, 0x56, 0x65, 0x72, 0x73,
0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x55, 0x72,
0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65,
0x55, 0x72, 0x69, 0x12, 0x22, 0x0a, 0x0c, 0x6e, 0x6f, 0x64, 0x65, 0x53, 0x65, 0x6c, 0x65, 0x63,
0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6e, 0x6f, 0x64, 0x65, 0x53,
0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70,
0x75, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d, 0x69, 0x6e, 0x43, 0x70, 0x75, 0x12,
0x1c, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x06, 0x20, 0x01,
0x28, 0x09, 0x52, 0x09, 0x6d, 0x69, 0x6e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x16, 0x0a,
0x06, 0x6d, 0x61, 0x78, 0x43, 0x70, 0x75, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6d,
0x61, 0x78, 0x43, 0x70, 0x75, 0x12, 0x1c, 0x0a, 0x09, 0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d, 0x6f,
0x72, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x61, 0x78, 0x4d, 0x65, 0x6d,
0x6f, 0x72, 0x79, 0x22, 0x8d, 0x02, 0x0a, 0x1d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e,
0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x17, 0x64, 0x65, 0x66, 0x61, 0x75,
0x6c, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x49, 0x6d, 0x61,
0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c,
0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x49, 0x6d, 0x61, 0x67,
0x65, 0x12, 0x3c, 0x0a, 0x09, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x04,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72,
0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x50, 0x72, 0x65, 0x64, 0x69,
0x63, 0x74, 0x6f, 0x72, 0x52, 0x09, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x6f, 0x72, 0x12,
0x42, 0x0a, 0x0b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x18, 0x05,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72,
0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73,
0x66, 0x6f, 0x72, 0x6d, 0x65, 0x72, 0x52, 0x0b, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x66, 0x6f, 0x72,
0x6d, 0x65, 0x72, 0x22, 0x2d, 0x0a, 0x13, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x4d, 0x6f, 0x64,
0x65, 0x6c, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x74,
0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74,
0x75, 0x73, 0x22, 0x77, 0x0a, 0x19, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12,
0x2e, 0x0a, 0x12, 0x6c, 0x61, 0x73, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f,
0x6e, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x6c, 0x61, 0x73,
0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x12,
0x16, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18,
0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x93, 0x01, 0x0a, 0x1b,
0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72,
0x65, 0x61, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x72, 0x65, 0x61, 0x64,
0x79, 0x12, 0x3e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18,
0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65,
0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x64,
0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e,
0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x55, 0x72, 0x6c, 0x18,
0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x55, 0x72,
0x6c, 0x22, 0x35, 0x0a, 0x19, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65,
0x72, 0x76, 0x69, 0x63, 0x65, 0x45, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x12, 0x18,
0x0a, 0x07, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
0x07, 0x70, 0x72, 0x65, 0x64, 0x69, 0x63, 0x74, 0x32, 0xcf, 0x03, 0x0a, 0x10, 0x49, 0x6e, 0x66,
0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x95, 0x01,
0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63,
0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43,
0x72, 0x65, 0x61, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65,
0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61,
0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x35,
0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2f, 0x22, 0x2a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31,
0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72, 0x76, 0x69,
0x63, 0x65, 0x3a, 0x01, 0x2a, 0x12, 0x93, 0x01, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66,
0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1f, 0x2e,
0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72,
0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x1a, 0x20,
0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63,
0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
0x22, 0x39, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x33, 0x12, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
0x63, 0x65, 0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72,
0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x8c, 0x01, 0x0a, 0x16,
0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x49, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53,
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x49, 0x6e, 0x66,
0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x65,
0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22,
0x39, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x33, 0x2a, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76,
0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
0x65, 0x7d, 0x2f, 0x69, 0x6e, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x63, 0x65, 0x73, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69,
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65,
0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e,
0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_inference_service_proto_rawDescOnce sync.Once
file_inference_service_proto_rawDescData = file_inference_service_proto_rawDesc
)
func file_inference_service_proto_rawDescGZIP() []byte {
file_inference_service_proto_rawDescOnce.Do(func() {
file_inference_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_inference_service_proto_rawDescData)
})
return file_inference_service_proto_rawDescData
}
var file_inference_service_proto_msgTypes = make([]protoimpl.MessageInfo, 10)
var file_inference_service_proto_goTypes = []interface{}{
(*InferenceServiceIdentifier)(nil), // 0: api.InferenceServiceIdentifier
(*Env)(nil), // 1: api.Env
(*Container)(nil), // 2: api.Container
(*InferenceServiceTransformer)(nil), // 3: api.InferenceServiceTransformer
(*InferenceServicePredictor)(nil), // 4: api.InferenceServicePredictor
(*CreateInferenceServiceRequest)(nil), // 5: api.CreateInferenceServiceRequest
(*DeployModelResponse)(nil), // 6: api.DeployModelResponse
(*InferenceServiceCondition)(nil), // 7: api.InferenceServiceCondition
(*GetInferenceServiceResponse)(nil), // 8: api.GetInferenceServiceResponse
(*InferenceServiceEndpoints)(nil), // 9: api.InferenceServiceEndpoints
(*emptypb.Empty)(nil), // 10: google.protobuf.Empty
}
var file_inference_service_proto_depIdxs = []int32{
1, // 0: api.Container.env:type_name -> api.Env
2, // 1: api.InferenceServiceTransformer.containers:type_name -> api.Container
4, // 2: api.CreateInferenceServiceRequest.predictor:type_name -> api.InferenceServicePredictor
3, // 3: api.CreateInferenceServiceRequest.transformer:type_name -> api.InferenceServiceTransformer
7, // 4: api.GetInferenceServiceResponse.conditions:type_name -> api.InferenceServiceCondition
5, // 5: api.InferenceService.CreateInferenceService:input_type -> api.CreateInferenceServiceRequest
0, // 6: api.InferenceService.GetInferenceService:input_type -> api.InferenceServiceIdentifier
0, // 7: api.InferenceService.DeleteInferenceService:input_type -> api.InferenceServiceIdentifier
8, // 8: api.InferenceService.CreateInferenceService:output_type -> api.GetInferenceServiceResponse
8, // 9: api.InferenceService.GetInferenceService:output_type -> api.GetInferenceServiceResponse
10, // 10: api.InferenceService.DeleteInferenceService:output_type -> google.protobuf.Empty
8, // [8:11] is the sub-list for method output_type
5, // [5:8] is the sub-list for method input_type
5, // [5:5] is the sub-list for extension type_name
5, // [5:5] is the sub-list for extension extendee
0, // [0:5] is the sub-list for field type_name
}
func init() { file_inference_service_proto_init() }
func file_inference_service_proto_init() {
if File_inference_service_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_inference_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*InferenceServiceIdentifier); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Env); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Container); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*InferenceServiceTransformer); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*InferenceServicePredictor); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*CreateInferenceServiceRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*DeployModelResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*InferenceServiceCondition); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetInferenceServiceResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_inference_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*InferenceServiceEndpoints); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_inference_service_proto_rawDesc,
NumEnums: 0,
NumMessages: 10,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_inference_service_proto_goTypes,
DependencyIndexes: file_inference_service_proto_depIdxs,
MessageInfos: file_inference_service_proto_msgTypes,
}.Build()
File_inference_service_proto = out.File
file_inference_service_proto_rawDesc = nil
file_inference_service_proto_goTypes = nil
file_inference_service_proto_depIdxs = nil
}
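The message types above are ordinary generated structs, so a request can be built as a struct literal and, if needed, serialized with protojson. All field values below (names, runtime version, storage URI, resource sizes) are illustrative only:

package main

import (
    "fmt"
    "log"

    "google.golang.org/protobuf/encoding/protojson"

    gen "github.com/onepanelio/core/api/gen"
)

func main() {
    req := &gen.CreateInferenceServiceRequest{
        Namespace: "default",  // placeholder namespace
        Name:      "my-model", // placeholder service name
        Predictor: &gen.InferenceServicePredictor{
            Name:           "tensorflow",                  // placeholder predictor
            RuntimeVersion: "2.4.1",                       // placeholder version
            StorageUri:     "s3://bucket/models/my-model", // placeholder model location
            MinCpu:         "500m",
            MinMemory:      "1Gi",
            MaxCpu:         "1",
            MaxMemory:      "2Gi",
        },
    }

    data, err := protojson.Marshal(req)
    if err != nil {
        log.Fatalf("marshal: %v", err)
    }
    fmt.Println(string(data))
}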


@@ -0,0 +1,439 @@
// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: inference_service.proto
/*
Package gen is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package gen
import (
"context"
"io"
"net/http"
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
"github.com/grpc-ecosystem/grpc-gateway/v2/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/proto"
)
// Suppress "imported and not used" errors
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = metadata.Join
func request_InferenceService_CreateInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq CreateInferenceServiceRequest
var metadata runtime.ServerMetadata
newReader, berr := utilities.IOReaderFactory(req.Body)
if berr != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
}
if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
msg, err := client.CreateInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_InferenceService_CreateInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq CreateInferenceServiceRequest
var metadata runtime.ServerMetadata
newReader, berr := utilities.IOReaderFactory(req.Body)
if berr != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
}
if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
msg, err := server.CreateInferenceService(ctx, &protoReq)
return msg, metadata, err
}
func request_InferenceService_GetInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq InferenceServiceIdentifier
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["name"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
}
protoReq.Name, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
}
msg, err := client.GetInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_InferenceService_GetInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq InferenceServiceIdentifier
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["name"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
}
protoReq.Name, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
}
msg, err := server.GetInferenceService(ctx, &protoReq)
return msg, metadata, err
}
func request_InferenceService_DeleteInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, client InferenceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq InferenceServiceIdentifier
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["name"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
}
protoReq.Name, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
}
msg, err := client.DeleteInferenceService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_InferenceService_DeleteInferenceService_0(ctx context.Context, marshaler runtime.Marshaler, server InferenceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq InferenceServiceIdentifier
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["name"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
}
protoReq.Name, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
}
msg, err := server.DeleteInferenceService(ctx, &protoReq)
return msg, metadata, err
}
// RegisterInferenceServiceHandlerServer registers the http handlers for service InferenceService to "mux".
// UnaryRPC: calls InferenceServiceServer directly.
// StreamingRPC: currently unsupported, pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option causes many gRPC library features to stop working. Consider using RegisterInferenceServiceHandlerFromEndpoint instead.
func RegisterInferenceServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server InferenceServiceServer) error {
mux.Handle("POST", pattern_InferenceService_CreateInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/CreateInferenceService")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_InferenceService_CreateInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_InferenceService_CreateInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_InferenceService_GetInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/GetInferenceService")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_InferenceService_GetInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_InferenceService_GetInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("DELETE", pattern_InferenceService_DeleteInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.InferenceService/DeleteInferenceService")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_InferenceService_DeleteInferenceService_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_InferenceService_DeleteInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
// RegisterInferenceServiceHandlerFromEndpoint is same as RegisterInferenceServiceHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterInferenceServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.Dial(endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
return RegisterInferenceServiceHandler(ctx, mux, conn)
}
// RegisterInferenceServiceHandler registers the http handlers for service InferenceService to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterInferenceServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
return RegisterInferenceServiceHandlerClient(ctx, mux, NewInferenceServiceClient(conn))
}
// RegisterInferenceServiceHandlerClient registers the http handlers for service InferenceService
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "InferenceServiceClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "InferenceServiceClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "InferenceServiceClient" to call the correct interceptors.
func RegisterInferenceServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client InferenceServiceClient) error {
mux.Handle("POST", pattern_InferenceService_CreateInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/CreateInferenceService")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_InferenceService_CreateInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_InferenceService_CreateInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_InferenceService_GetInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/GetInferenceService")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_InferenceService_GetInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_InferenceService_GetInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("DELETE", pattern_InferenceService_DeleteInferenceService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.InferenceService/DeleteInferenceService")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_InferenceService_DeleteInferenceService_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_InferenceService_DeleteInferenceService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
var (
pattern_InferenceService_CreateInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3}, []string{"apis", "v1beta1", "namespace", "inferenceservice"}, ""))
pattern_InferenceService_GetInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "inferenceservice", "name"}, ""))
pattern_InferenceService_DeleteInferenceService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "inferenceservice", "name"}, ""))
)
var (
forward_InferenceService_CreateInferenceService_0 = runtime.ForwardResponseMessage
forward_InferenceService_GetInferenceService_0 = runtime.ForwardResponseMessage
forward_InferenceService_DeleteInferenceService_0 = runtime.ForwardResponseMessage
)
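
A minimal sketch, not taken from this diff, of how the generated gateway registration above might be used. It assumes the generated package is imported as gen from github.com/onepanelio/core/api/gen (the go_package recorded in the raw descriptor further down), a plaintext gRPC server on localhost:8887, and the v1 grpc-gateway runtime import path; all of these are assumptions.

// Sketch only: serves the REST routes registered above
// (POST /apis/v1beta1/{namespace}/inferenceservice and
// GET/DELETE /apis/v1beta1/{namespace}/inferenceservice/{name})
// by proxying to a gRPC endpoint. Paths, ports, and dial options are assumptions.
package main

import (
    "context"
    "log"
    "net/http"

    "github.com/grpc-ecosystem/grpc-gateway/runtime" // assumed gateway version and import path
    "google.golang.org/grpc"

    gen "github.com/onepanelio/core/api/gen" // assumed module path for the generated code
)

func main() {
    ctx, cancel := context.WithCancel(context.Background())
    defer cancel()

    gwmux := runtime.NewServeMux()
    opts := []grpc.DialOption{grpc.WithInsecure()} // assumes a plaintext local gRPC endpoint

    // Dials the gRPC endpoint and installs the three handlers shown above.
    if err := gen.RegisterInferenceServiceHandlerFromEndpoint(ctx, gwmux, "localhost:8887", opts); err != nil {
        log.Fatal(err)
    }
    log.Fatal(http.ListenAndServe(":8080", gwmux))
}

RegisterInferenceServiceHandlerFromEndpoint dials the endpoint itself and closes the connection once ctx is done, so the sketch cancels the context on exit rather than managing the connection directly.
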


@@ -0,0 +1,170 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
package gen
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
emptypb "google.golang.org/protobuf/types/known/emptypb"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion7
// InferenceServiceClient is the client API for InferenceService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type InferenceServiceClient interface {
CreateInferenceService(ctx context.Context, in *CreateInferenceServiceRequest, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error)
GetInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error)
DeleteInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*emptypb.Empty, error)
}
type inferenceServiceClient struct {
cc grpc.ClientConnInterface
}
func NewInferenceServiceClient(cc grpc.ClientConnInterface) InferenceServiceClient {
return &inferenceServiceClient{cc}
}
func (c *inferenceServiceClient) CreateInferenceService(ctx context.Context, in *CreateInferenceServiceRequest, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error) {
out := new(GetInferenceServiceResponse)
err := c.cc.Invoke(ctx, "/api.InferenceService/CreateInferenceService", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *inferenceServiceClient) GetInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*GetInferenceServiceResponse, error) {
out := new(GetInferenceServiceResponse)
err := c.cc.Invoke(ctx, "/api.InferenceService/GetInferenceService", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *inferenceServiceClient) DeleteInferenceService(ctx context.Context, in *InferenceServiceIdentifier, opts ...grpc.CallOption) (*emptypb.Empty, error) {
out := new(emptypb.Empty)
err := c.cc.Invoke(ctx, "/api.InferenceService/DeleteInferenceService", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// InferenceServiceServer is the server API for InferenceService service.
// All implementations must embed UnimplementedInferenceServiceServer
// for forward compatibility
type InferenceServiceServer interface {
CreateInferenceService(context.Context, *CreateInferenceServiceRequest) (*GetInferenceServiceResponse, error)
GetInferenceService(context.Context, *InferenceServiceIdentifier) (*GetInferenceServiceResponse, error)
DeleteInferenceService(context.Context, *InferenceServiceIdentifier) (*emptypb.Empty, error)
mustEmbedUnimplementedInferenceServiceServer()
}
// UnimplementedInferenceServiceServer must be embedded to have forward compatible implementations.
type UnimplementedInferenceServiceServer struct {
}
func (UnimplementedInferenceServiceServer) CreateInferenceService(context.Context, *CreateInferenceServiceRequest) (*GetInferenceServiceResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method CreateInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) GetInferenceService(context.Context, *InferenceServiceIdentifier) (*GetInferenceServiceResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) DeleteInferenceService(context.Context, *InferenceServiceIdentifier) (*emptypb.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method DeleteInferenceService not implemented")
}
func (UnimplementedInferenceServiceServer) mustEmbedUnimplementedInferenceServiceServer() {}
// UnsafeInferenceServiceServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to InferenceServiceServer will
// result in compilation errors.
type UnsafeInferenceServiceServer interface {
mustEmbedUnimplementedInferenceServiceServer()
}
func RegisterInferenceServiceServer(s grpc.ServiceRegistrar, srv InferenceServiceServer) {
s.RegisterService(&_InferenceService_serviceDesc, srv)
}
func _InferenceService_CreateInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(CreateInferenceServiceRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(InferenceServiceServer).CreateInferenceService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.InferenceService/CreateInferenceService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(InferenceServiceServer).CreateInferenceService(ctx, req.(*CreateInferenceServiceRequest))
}
return interceptor(ctx, in, info, handler)
}
func _InferenceService_GetInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(InferenceServiceIdentifier)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(InferenceServiceServer).GetInferenceService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.InferenceService/GetInferenceService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(InferenceServiceServer).GetInferenceService(ctx, req.(*InferenceServiceIdentifier))
}
return interceptor(ctx, in, info, handler)
}
func _InferenceService_DeleteInferenceService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(InferenceServiceIdentifier)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(InferenceServiceServer).DeleteInferenceService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.InferenceService/DeleteInferenceService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(InferenceServiceServer).DeleteInferenceService(ctx, req.(*InferenceServiceIdentifier))
}
return interceptor(ctx, in, info, handler)
}
var _InferenceService_serviceDesc = grpc.ServiceDesc{
ServiceName: "api.InferenceService",
HandlerType: (*InferenceServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "CreateInferenceService",
Handler: _InferenceService_CreateInferenceService_Handler,
},
{
MethodName: "GetInferenceService",
Handler: _InferenceService_GetInferenceService_Handler,
},
{
MethodName: "DeleteInferenceService",
Handler: _InferenceService_DeleteInferenceService_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "inference_service.proto",
}
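
Callers that talk gRPC directly, rather than through the gateway above, can use the generated client as in this sketch; the address, namespace, and name are placeholders, and any authentication metadata the API expects is omitted.

// Sketch only: direct gRPC call using the generated InferenceService client.
package main

import (
    "context"
    "log"
    "time"

    "google.golang.org/grpc"

    gen "github.com/onepanelio/core/api/gen" // assumed module path for the generated code
)

func main() {
    conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure()) // assumed plaintext endpoint
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    defer cancel()

    client := gen.NewInferenceServiceClient(conn)
    resp, err := client.GetInferenceService(ctx, &gen.InferenceServiceIdentifier{
        Namespace: "default",  // placeholder
        Name:      "my-model", // placeholder
    })
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("inference service: %+v", resp)
}
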


@@ -136,6 +136,100 @@ func (x *GetServiceRequest) GetName() string {
return ""
}
type HasServiceRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
}
func (x *HasServiceRequest) Reset() {
*x = HasServiceRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_services_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *HasServiceRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*HasServiceRequest) ProtoMessage() {}
func (x *HasServiceRequest) ProtoReflect() protoreflect.Message {
mi := &file_services_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use HasServiceRequest.ProtoReflect.Descriptor instead.
func (*HasServiceRequest) Descriptor() ([]byte, []int) {
return file_services_proto_rawDescGZIP(), []int{2}
}
func (x *HasServiceRequest) GetName() string {
if x != nil {
return x.Name
}
return ""
}
type HasServiceResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
HasService bool `protobuf:"varint,1,opt,name=hasService,proto3" json:"hasService,omitempty"`
}
func (x *HasServiceResponse) Reset() {
*x = HasServiceResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_services_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *HasServiceResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*HasServiceResponse) ProtoMessage() {}
func (x *HasServiceResponse) ProtoReflect() protoreflect.Message {
mi := &file_services_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use HasServiceResponse.ProtoReflect.Descriptor instead.
func (*HasServiceResponse) Descriptor() ([]byte, []int) {
return file_services_proto_rawDescGZIP(), []int{3}
}
func (x *HasServiceResponse) GetHasService() bool {
if x != nil {
return x.HasService
}
return false
}
type ListServicesRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
@@ -149,7 +243,7 @@ type ListServicesRequest struct {
func (x *ListServicesRequest) Reset() {
*x = ListServicesRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_services_proto_msgTypes[2]
mi := &file_services_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -162,7 +256,7 @@ func (x *ListServicesRequest) String() string {
func (*ListServicesRequest) ProtoMessage() {}
func (x *ListServicesRequest) ProtoReflect() protoreflect.Message {
mi := &file_services_proto_msgTypes[2]
mi := &file_services_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -175,7 +269,7 @@ func (x *ListServicesRequest) ProtoReflect() protoreflect.Message {
// Deprecated: Use ListServicesRequest.ProtoReflect.Descriptor instead.
func (*ListServicesRequest) Descriptor() ([]byte, []int) {
return file_services_proto_rawDescGZIP(), []int{2}
return file_services_proto_rawDescGZIP(), []int{4}
}
func (x *ListServicesRequest) GetNamespace() string {
@@ -214,7 +308,7 @@ type ListServicesResponse struct {
func (x *ListServicesResponse) Reset() {
*x = ListServicesResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_services_proto_msgTypes[3]
mi := &file_services_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -227,7 +321,7 @@ func (x *ListServicesResponse) String() string {
func (*ListServicesResponse) ProtoMessage() {}
func (x *ListServicesResponse) ProtoReflect() protoreflect.Message {
mi := &file_services_proto_msgTypes[3]
mi := &file_services_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -240,7 +334,7 @@ func (x *ListServicesResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use ListServicesResponse.ProtoReflect.Descriptor instead.
func (*ListServicesResponse) Descriptor() ([]byte, []int) {
return file_services_proto_rawDescGZIP(), []int{3}
return file_services_proto_rawDescGZIP(), []int{5}
}
func (x *ListServicesResponse) GetCount() int32 {
@@ -291,41 +385,53 @@ var file_services_proto_rawDesc = []byte{
0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d,
0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61,
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18,
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x63, 0x0a, 0x13, 0x4c,
0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65,
0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01,
0x28, 0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x12, 0x0a, 0x04,
0x70, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65,
0x22, 0xa0, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75,
0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12,
0x28, 0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28,
0x0b, 0x32, 0x0c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52,
0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67,
0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x14, 0x0a,
0x05, 0x70, 0x61, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x70, 0x61,
0x67, 0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x43, 0x6f, 0x75, 0x6e,
0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x43, 0x6f,
0x75, 0x6e, 0x74, 0x32, 0xe6, 0x01, 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x53,
0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x64, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72,
0x76, 0x69, 0x63, 0x65, 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65,
0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x61,
0x70, 0x69, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x30, 0x82, 0xd3, 0xe4, 0x93,
0x02, 0x2a, 0x12, 0x28, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x73, 0x65,
0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x6e, 0x0a, 0x0c,
0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x18, 0x2e, 0x61,
0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73,
0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x12, 0x21, 0x2f, 0x61, 0x70, 0x69, 0x73,
0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
0x61, 0x63, 0x65, 0x7d, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x42, 0x24, 0x5a, 0x22,
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61,
0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67,
0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x27, 0x0a, 0x11, 0x48,
0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
0x6e, 0x61, 0x6d, 0x65, 0x22, 0x34, 0x0a, 0x12, 0x48, 0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69,
0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x68, 0x61,
0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a,
0x68, 0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x63, 0x0a, 0x13, 0x4c, 0x69,
0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01,
0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12,
0x1a, 0x0a, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x70,
0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x22,
0xa0, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e,
0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x28,
0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b,
0x32, 0x0c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x08,
0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x67, 0x65,
0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x70, 0x61, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05,
0x70, 0x61, 0x67, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x70, 0x61, 0x67,
0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x43, 0x6f, 0x75, 0x6e, 0x74,
0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x43, 0x6f, 0x75,
0x6e, 0x74, 0x32, 0xca, 0x02, 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x53, 0x65,
0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x64, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72,
0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0c, 0x2e, 0x61, 0x70,
0x69, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x22, 0x30, 0x82, 0xd3, 0xe4, 0x93, 0x02,
0x2a, 0x12, 0x28, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x73, 0x65, 0x72,
0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x12, 0x6e, 0x0a, 0x0c, 0x4c,
0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x18, 0x2e, 0x61, 0x70,
0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74,
0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x12, 0x21, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
0x63, 0x65, 0x7d, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x62, 0x0a, 0x0a, 0x48,
0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e,
0x48, 0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x1a, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x61, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69,
0x63, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x23, 0x82, 0xd3, 0xe4, 0x93,
0x02, 0x1d, 0x12, 0x1b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x42,
0x24, 0x5a, 0x22, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e,
0x65, 0x70, 0x61, 0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70,
0x69, 0x2f, 0x67, 0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@@ -340,21 +446,25 @@ func file_services_proto_rawDescGZIP() []byte {
return file_services_proto_rawDescData
}
var file_services_proto_msgTypes = make([]protoimpl.MessageInfo, 4)
var file_services_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
var file_services_proto_goTypes = []interface{}{
(*Service)(nil), // 0: api.Service
(*GetServiceRequest)(nil), // 1: api.GetServiceRequest
(*ListServicesRequest)(nil), // 2: api.ListServicesRequest
(*ListServicesResponse)(nil), // 3: api.ListServicesResponse
(*HasServiceRequest)(nil), // 2: api.HasServiceRequest
(*HasServiceResponse)(nil), // 3: api.HasServiceResponse
(*ListServicesRequest)(nil), // 4: api.ListServicesRequest
(*ListServicesResponse)(nil), // 5: api.ListServicesResponse
}
var file_services_proto_depIdxs = []int32{
0, // 0: api.ListServicesResponse.services:type_name -> api.Service
1, // 1: api.ServiceService.GetService:input_type -> api.GetServiceRequest
2, // 2: api.ServiceService.ListServices:input_type -> api.ListServicesRequest
0, // 3: api.ServiceService.GetService:output_type -> api.Service
3, // 4: api.ServiceService.ListServices:output_type -> api.ListServicesResponse
3, // [3:5] is the sub-list for method output_type
1, // [1:3] is the sub-list for method input_type
4, // 2: api.ServiceService.ListServices:input_type -> api.ListServicesRequest
2, // 3: api.ServiceService.HasService:input_type -> api.HasServiceRequest
0, // 4: api.ServiceService.GetService:output_type -> api.Service
5, // 5: api.ServiceService.ListServices:output_type -> api.ListServicesResponse
3, // 6: api.ServiceService.HasService:output_type -> api.HasServiceResponse
4, // [4:7] is the sub-list for method output_type
1, // [1:4] is the sub-list for method input_type
1, // [1:1] is the sub-list for extension type_name
1, // [1:1] is the sub-list for extension extendee
0, // [0:1] is the sub-list for field type_name
@@ -391,7 +501,7 @@ func file_services_proto_init() {
}
}
file_services_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListServicesRequest); i {
switch v := v.(*HasServiceRequest); i {
case 0:
return &v.state
case 1:
@@ -403,6 +513,30 @@ func file_services_proto_init() {
}
}
file_services_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*HasServiceResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListServicesRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_services_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListServicesResponse); i {
case 0:
return &v.state
@@ -421,7 +555,7 @@ func file_services_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_services_proto_rawDesc,
NumEnums: 0,
NumMessages: 4,
NumMessages: 6,
NumExtensions: 0,
NumServices: 1,
},


@@ -173,6 +173,58 @@ func local_request_ServiceService_ListServices_0(ctx context.Context, marshaler
}
func request_ServiceService_HasService_0(ctx context.Context, marshaler runtime.Marshaler, client ServiceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq HasServiceRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["name"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
}
protoReq.Name, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
}
msg, err := client.HasService(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_ServiceService_HasService_0(ctx context.Context, marshaler runtime.Marshaler, server ServiceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq HasServiceRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["name"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "name")
}
protoReq.Name, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "name", err)
}
msg, err := server.HasService(ctx, &protoReq)
return msg, metadata, err
}
// RegisterServiceServiceHandlerServer registers the http handlers for service ServiceService to "mux".
// UnaryRPC :call ServiceServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
@@ -225,6 +277,29 @@ func RegisterServiceServiceHandlerServer(ctx context.Context, mux *runtime.Serve
})
mux.Handle("GET", pattern_ServiceService_HasService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.ServiceService/HasService")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_ServiceService_HasService_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_ServiceService_HasService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -306,6 +381,26 @@ func RegisterServiceServiceHandlerClient(ctx context.Context, mux *runtime.Serve
})
mux.Handle("GET", pattern_ServiceService_HasService_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.ServiceService/HasService")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_ServiceService_HasService_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_ServiceService_HasService_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -313,10 +408,14 @@ var (
pattern_ServiceService_GetService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "service", "name"}, ""))
pattern_ServiceService_ListServices_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3}, []string{"apis", "v1beta1", "namespace", "service"}, ""))
pattern_ServiceService_HasService_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta", "service", "name"}, ""))
)
var (
forward_ServiceService_GetService_0 = runtime.ForwardResponseMessage
forward_ServiceService_ListServices_0 = runtime.ForwardResponseMessage
forward_ServiceService_HasService_0 = runtime.ForwardResponseMessage
)
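
Per the pattern above, the new HasService route is served at GET /apis/v1beta/service/{name}; note the v1beta segment, whereas GetService and ListServices sit under /apis/v1beta1/. A quick check against a running gateway might look like the sketch below, with a placeholder host, port, and service name and no auth headers.

// Sketch only: exercising the new HasService REST route.
package main

import (
    "fmt"
    "io"
    "log"
    "net/http"
)

func main() {
    // The generated pattern uses "v1beta", not "v1beta1".
    resp, err := http.Get("http://localhost:8888/apis/v1beta/service/istio") // placeholders
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        log.Fatal(err)
    }
    // The body is the JSON-marshaled HasServiceResponse.
    fmt.Println(resp.Status, string(body))
}
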


@@ -19,6 +19,7 @@ const _ = grpc.SupportPackageIsVersion7
type ServiceServiceClient interface {
GetService(ctx context.Context, in *GetServiceRequest, opts ...grpc.CallOption) (*Service, error)
ListServices(ctx context.Context, in *ListServicesRequest, opts ...grpc.CallOption) (*ListServicesResponse, error)
HasService(ctx context.Context, in *HasServiceRequest, opts ...grpc.CallOption) (*HasServiceResponse, error)
}
type serviceServiceClient struct {
@@ -47,12 +48,22 @@ func (c *serviceServiceClient) ListServices(ctx context.Context, in *ListService
return out, nil
}
func (c *serviceServiceClient) HasService(ctx context.Context, in *HasServiceRequest, opts ...grpc.CallOption) (*HasServiceResponse, error) {
out := new(HasServiceResponse)
err := c.cc.Invoke(ctx, "/api.ServiceService/HasService", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// ServiceServiceServer is the server API for ServiceService service.
// All implementations must embed UnimplementedServiceServiceServer
// for forward compatibility
type ServiceServiceServer interface {
GetService(context.Context, *GetServiceRequest) (*Service, error)
ListServices(context.Context, *ListServicesRequest) (*ListServicesResponse, error)
HasService(context.Context, *HasServiceRequest) (*HasServiceResponse, error)
mustEmbedUnimplementedServiceServiceServer()
}
@@ -66,6 +77,9 @@ func (UnimplementedServiceServiceServer) GetService(context.Context, *GetService
func (UnimplementedServiceServiceServer) ListServices(context.Context, *ListServicesRequest) (*ListServicesResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListServices not implemented")
}
func (UnimplementedServiceServiceServer) HasService(context.Context, *HasServiceRequest) (*HasServiceResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method HasService not implemented")
}
func (UnimplementedServiceServiceServer) mustEmbedUnimplementedServiceServiceServer() {}
// UnsafeServiceServiceServer may be embedded to opt out of forward compatibility for this service.
@@ -115,6 +129,24 @@ func _ServiceService_ListServices_Handler(srv interface{}, ctx context.Context,
return interceptor(ctx, in, info, handler)
}
func _ServiceService_HasService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(HasServiceRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServiceServer).HasService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.ServiceService/HasService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServiceServer).HasService(ctx, req.(*HasServiceRequest))
}
return interceptor(ctx, in, info, handler)
}
var _ServiceService_serviceDesc = grpc.ServiceDesc{
ServiceName: "api.ServiceService",
HandlerType: (*ServiceServiceServer)(nil),
@@ -127,6 +159,10 @@ var _ServiceService_serviceDesc = grpc.ServiceDesc{
MethodName: "ListServices",
Handler: _ServiceService_ListServices_Handler,
},
{
MethodName: "HasService",
Handler: _ServiceService_HasService_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "services.proto",

File diff suppressed because it is too large


@@ -709,190 +709,6 @@ func local_request_WorkflowService_TerminateWorkflowExecution_0(ctx context.Cont
}
func request_WorkflowService_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetArtifactRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["uid"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
}
protoReq.Uid, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
}
val, ok = pathParams["key"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "key")
}
protoReq.Key, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "key", err)
}
msg, err := client.GetArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_WorkflowService_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, server WorkflowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq GetArtifactRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["uid"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
}
protoReq.Uid, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
}
val, ok = pathParams["key"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "key")
}
protoReq.Key, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "key", err)
}
msg, err := server.GetArtifact(ctx, &protoReq)
return msg, metadata, err
}
func request_WorkflowService_ListFiles_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListFilesRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["uid"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
}
protoReq.Uid, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
}
val, ok = pathParams["path"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "path")
}
protoReq.Path, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "path", err)
}
msg, err := client.ListFiles(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_WorkflowService_ListFiles_0(ctx context.Context, marshaler runtime.Marshaler, server WorkflowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListFilesRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["uid"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
}
protoReq.Uid, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
}
val, ok = pathParams["path"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "path")
}
protoReq.Path, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "path", err)
}
msg, err := server.ListFiles(ctx, &protoReq)
return msg, metadata, err
}
func request_WorkflowService_AddWorkflowExecutionStatistics_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq AddWorkflowExecutionStatisticRequest
var metadata runtime.ServerMetadata
@@ -1333,6 +1149,78 @@ func local_request_WorkflowService_UpdateWorkflowExecutionMetrics_0(ctx context.
}
func request_WorkflowService_ListWorkflowExecutionsField_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListWorkflowExecutionsFieldRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["fieldName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "fieldName")
}
protoReq.FieldName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fieldName", err)
}
msg, err := client.ListWorkflowExecutionsField(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_WorkflowService_ListWorkflowExecutionsField_0(ctx context.Context, marshaler runtime.Marshaler, server WorkflowServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListWorkflowExecutionsFieldRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["fieldName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "fieldName")
}
protoReq.FieldName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fieldName", err)
}
msg, err := server.ListWorkflowExecutionsField(ctx, &protoReq)
return msg, metadata, err
}
// RegisterWorkflowServiceHandlerServer registers the http handlers for service WorkflowService to "mux".
// UnaryRPC :call WorkflowServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
@@ -1537,52 +1425,6 @@ func RegisterWorkflowServiceHandlerServer(ctx context.Context, mux *runtime.Serv
})
mux.Handle("GET", pattern_WorkflowService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.WorkflowService/GetArtifact")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_WorkflowService_GetArtifact_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkflowService_GetArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_WorkflowService_ListFiles_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.WorkflowService/ListFiles")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_WorkflowService_ListFiles_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkflowService_ListFiles_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_WorkflowService_AddWorkflowExecutionStatistics_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
@@ -1698,6 +1540,29 @@ func RegisterWorkflowServiceHandlerServer(ctx context.Context, mux *runtime.Serv
})
mux.Handle("GET", pattern_WorkflowService_ListWorkflowExecutionsField_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.WorkflowService/ListWorkflowExecutionsField")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_WorkflowService_ListWorkflowExecutionsField_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkflowService_ListWorkflowExecutionsField_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -1939,46 +1804,6 @@ func RegisterWorkflowServiceHandlerClient(ctx context.Context, mux *runtime.Serv
})
mux.Handle("GET", pattern_WorkflowService_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkflowService/GetArtifact")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_WorkflowService_GetArtifact_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkflowService_GetArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_WorkflowService_ListFiles_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkflowService/ListFiles")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_WorkflowService_ListFiles_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkflowService_ListFiles_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("POST", pattern_WorkflowService_AddWorkflowExecutionStatistics_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
@@ -2079,6 +1904,26 @@ func RegisterWorkflowServiceHandlerClient(ctx context.Context, mux *runtime.Serv
})
mux.Handle("GET", pattern_WorkflowService_ListWorkflowExecutionsField_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkflowService/ListWorkflowExecutionsField")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_WorkflowService_ListWorkflowExecutionsField_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkflowService_ListWorkflowExecutionsField_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -2103,10 +1948,6 @@ var (
pattern_WorkflowService_TerminateWorkflowExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "terminate"}, ""))
pattern_WorkflowService_GetArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 3, 0, 4, 1, 5, 6}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "artifacts", "key"}, ""))
pattern_WorkflowService_ListFiles_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 3, 0, 4, 1, 5, 6}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "files", "path"}, ""))
pattern_WorkflowService_AddWorkflowExecutionStatistics_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "statistics"}, ""))
pattern_WorkflowService_CronStartWorkflowExecutionStatistic_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "cron_start_statistics"}, ""))
@@ -2116,6 +1957,8 @@ var (
pattern_WorkflowService_AddWorkflowExecutionMetrics_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "metric"}, ""))
pattern_WorkflowService_UpdateWorkflowExecutionMetrics_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_executions", "uid", "metric"}, ""))
pattern_WorkflowService_ListWorkflowExecutionsField_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v1beta", "namespace", "field", "workflow_executions", "fieldName"}, ""))
)
var (
@@ -2139,10 +1982,6 @@ var (
forward_WorkflowService_TerminateWorkflowExecution_0 = runtime.ForwardResponseMessage
forward_WorkflowService_GetArtifact_0 = runtime.ForwardResponseMessage
forward_WorkflowService_ListFiles_0 = runtime.ForwardResponseMessage
forward_WorkflowService_AddWorkflowExecutionStatistics_0 = runtime.ForwardResponseMessage
forward_WorkflowService_CronStartWorkflowExecutionStatistic_0 = runtime.ForwardResponseMessage
@@ -2152,4 +1991,6 @@ var (
forward_WorkflowService_AddWorkflowExecutionMetrics_0 = runtime.ForwardResponseMessage
forward_WorkflowService_UpdateWorkflowExecutionMetrics_0 = runtime.ForwardResponseMessage
forward_WorkflowService_ListWorkflowExecutionsField_0 = runtime.ForwardResponseMessage
)
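
The handler block above also wires the new ListWorkflowExecutionsField RPC to GET /apis/v1beta/{namespace}/field/workflow_executions/{fieldName} (again a v1beta segment), while the GetArtifact and ListFiles routes are removed from WorkflowService. A sketch of calling the new RPC with the generated client over an already-dialed connection; the package name, namespace, and field name are placeholders.

// Sketch only: calling the new ListWorkflowExecutionsField RPC.
package workflows // hypothetical package

import (
    "context"
    "log"

    "google.golang.org/grpc"

    gen "github.com/onepanelio/core/api/gen" // assumed module path for the generated code
)

func listExecutionFields(ctx context.Context, conn *grpc.ClientConn) error {
    client := gen.NewWorkflowServiceClient(conn)
    resp, err := client.ListWorkflowExecutionsField(ctx, &gen.ListWorkflowExecutionsFieldRequest{
        Namespace: "default", // placeholder
        FieldName: "name",    // placeholder field
    })
    if err != nil {
        return err
    }
    log.Printf("field values: %+v", resp)
    return nil
}
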


@@ -30,13 +30,12 @@ type WorkflowServiceClient interface {
GetWorkflowExecutionMetrics(ctx context.Context, in *GetWorkflowExecutionMetricsRequest, opts ...grpc.CallOption) (*GetWorkflowExecutionMetricsResponse, error)
ResubmitWorkflowExecution(ctx context.Context, in *ResubmitWorkflowExecutionRequest, opts ...grpc.CallOption) (*WorkflowExecution, error)
TerminateWorkflowExecution(ctx context.Context, in *TerminateWorkflowExecutionRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*ArtifactResponse, error)
ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error)
AddWorkflowExecutionStatistics(ctx context.Context, in *AddWorkflowExecutionStatisticRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
CronStartWorkflowExecutionStatistic(ctx context.Context, in *CronStartWorkflowExecutionStatisticRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
UpdateWorkflowExecutionStatus(ctx context.Context, in *UpdateWorkflowExecutionStatusRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
AddWorkflowExecutionMetrics(ctx context.Context, in *AddWorkflowExecutionsMetricsRequest, opts ...grpc.CallOption) (*WorkflowExecutionsMetricsResponse, error)
UpdateWorkflowExecutionMetrics(ctx context.Context, in *UpdateWorkflowExecutionsMetricsRequest, opts ...grpc.CallOption) (*WorkflowExecutionsMetricsResponse, error)
ListWorkflowExecutionsField(ctx context.Context, in *ListWorkflowExecutionsFieldRequest, opts ...grpc.CallOption) (*ListWorkflowExecutionsFieldResponse, error)
}
type workflowServiceClient struct {
@@ -140,7 +139,7 @@ func (c *workflowServiceClient) GetWorkflowExecutionLogs(ctx context.Context, in
}
type WorkflowService_GetWorkflowExecutionLogsClient interface {
Recv() (*LogEntry, error)
Recv() (*LogStreamResponse, error)
grpc.ClientStream
}
@@ -148,8 +147,8 @@ type workflowServiceGetWorkflowExecutionLogsClient struct {
grpc.ClientStream
}
func (x *workflowServiceGetWorkflowExecutionLogsClient) Recv() (*LogEntry, error) {
m := new(LogEntry)
func (x *workflowServiceGetWorkflowExecutionLogsClient) Recv() (*LogStreamResponse, error) {
m := new(LogStreamResponse)
if err := x.ClientStream.RecvMsg(m); err != nil {
return nil, err
}
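
With the streamed payload renamed from LogEntry to LogStreamResponse, callers drain the log stream exactly as before. A client-side sketch, assuming a dialed connection, a request message for this RPC (its name and fields are not shown in these hunks), the usual generated New…Client constructor, and the go_package recorded in the descriptor (github.com/onepanelio/core/api/gen):

package example

import (
	"context"
	"io"
	"log"

	"google.golang.org/grpc"

	gen "github.com/onepanelio/core/api/gen" // adjust to the actual generated package path
)

func streamLogs(ctx context.Context, conn *grpc.ClientConn, req *gen.GetWorkflowExecutionLogsRequest) error {
	client := gen.NewWorkflowServiceClient(conn)
	stream, err := client.GetWorkflowExecutionLogs(ctx, req)
	if err != nil {
		return err
	}
	for {
		msg, err := stream.Recv() // msg is now a *gen.LogStreamResponse instead of *gen.LogEntry
		if err == io.EOF {
			return nil
		}
		if err != nil {
			return err
		}
		log.Println(msg)
	}
}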
@@ -183,24 +182,6 @@ func (c *workflowServiceClient) TerminateWorkflowExecution(ctx context.Context,
return out, nil
}
func (c *workflowServiceClient) GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*ArtifactResponse, error) {
out := new(ArtifactResponse)
err := c.cc.Invoke(ctx, "/api.WorkflowService/GetArtifact", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *workflowServiceClient) ListFiles(ctx context.Context, in *ListFilesRequest, opts ...grpc.CallOption) (*ListFilesResponse, error) {
out := new(ListFilesResponse)
err := c.cc.Invoke(ctx, "/api.WorkflowService/ListFiles", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *workflowServiceClient) AddWorkflowExecutionStatistics(ctx context.Context, in *AddWorkflowExecutionStatisticRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
out := new(emptypb.Empty)
err := c.cc.Invoke(ctx, "/api.WorkflowService/AddWorkflowExecutionStatistics", in, out, opts...)
@@ -246,6 +227,15 @@ func (c *workflowServiceClient) UpdateWorkflowExecutionMetrics(ctx context.Conte
return out, nil
}
func (c *workflowServiceClient) ListWorkflowExecutionsField(ctx context.Context, in *ListWorkflowExecutionsFieldRequest, opts ...grpc.CallOption) (*ListWorkflowExecutionsFieldResponse, error) {
out := new(ListWorkflowExecutionsFieldResponse)
err := c.cc.Invoke(ctx, "/api.WorkflowService/ListWorkflowExecutionsField", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// WorkflowServiceServer is the server API for WorkflowService service.
// All implementations must embed UnimplementedWorkflowServiceServer
// for forward compatibility
@@ -262,13 +252,12 @@ type WorkflowServiceServer interface {
GetWorkflowExecutionMetrics(context.Context, *GetWorkflowExecutionMetricsRequest) (*GetWorkflowExecutionMetricsResponse, error)
ResubmitWorkflowExecution(context.Context, *ResubmitWorkflowExecutionRequest) (*WorkflowExecution, error)
TerminateWorkflowExecution(context.Context, *TerminateWorkflowExecutionRequest) (*emptypb.Empty, error)
GetArtifact(context.Context, *GetArtifactRequest) (*ArtifactResponse, error)
ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error)
AddWorkflowExecutionStatistics(context.Context, *AddWorkflowExecutionStatisticRequest) (*emptypb.Empty, error)
CronStartWorkflowExecutionStatistic(context.Context, *CronStartWorkflowExecutionStatisticRequest) (*emptypb.Empty, error)
UpdateWorkflowExecutionStatus(context.Context, *UpdateWorkflowExecutionStatusRequest) (*emptypb.Empty, error)
AddWorkflowExecutionMetrics(context.Context, *AddWorkflowExecutionsMetricsRequest) (*WorkflowExecutionsMetricsResponse, error)
UpdateWorkflowExecutionMetrics(context.Context, *UpdateWorkflowExecutionsMetricsRequest) (*WorkflowExecutionsMetricsResponse, error)
ListWorkflowExecutionsField(context.Context, *ListWorkflowExecutionsFieldRequest) (*ListWorkflowExecutionsFieldResponse, error)
mustEmbedUnimplementedWorkflowServiceServer()
}
@@ -306,12 +295,6 @@ func (UnimplementedWorkflowServiceServer) ResubmitWorkflowExecution(context.Cont
func (UnimplementedWorkflowServiceServer) TerminateWorkflowExecution(context.Context, *TerminateWorkflowExecutionRequest) (*emptypb.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method TerminateWorkflowExecution not implemented")
}
func (UnimplementedWorkflowServiceServer) GetArtifact(context.Context, *GetArtifactRequest) (*ArtifactResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetArtifact not implemented")
}
func (UnimplementedWorkflowServiceServer) ListFiles(context.Context, *ListFilesRequest) (*ListFilesResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListFiles not implemented")
}
func (UnimplementedWorkflowServiceServer) AddWorkflowExecutionStatistics(context.Context, *AddWorkflowExecutionStatisticRequest) (*emptypb.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method AddWorkflowExecutionStatistics not implemented")
}
@@ -327,6 +310,9 @@ func (UnimplementedWorkflowServiceServer) AddWorkflowExecutionMetrics(context.Co
func (UnimplementedWorkflowServiceServer) UpdateWorkflowExecutionMetrics(context.Context, *UpdateWorkflowExecutionsMetricsRequest) (*WorkflowExecutionsMetricsResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method UpdateWorkflowExecutionMetrics not implemented")
}
func (UnimplementedWorkflowServiceServer) ListWorkflowExecutionsField(context.Context, *ListWorkflowExecutionsFieldRequest) (*ListWorkflowExecutionsFieldResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListWorkflowExecutionsField not implemented")
}
func (UnimplementedWorkflowServiceServer) mustEmbedUnimplementedWorkflowServiceServer() {}
// UnsafeWorkflowServiceServer may be embedded to opt out of forward compatibility for this service.
@@ -460,7 +446,7 @@ func _WorkflowService_GetWorkflowExecutionLogs_Handler(srv interface{}, stream g
}
type WorkflowService_GetWorkflowExecutionLogsServer interface {
Send(*LogEntry) error
Send(*LogStreamResponse) error
grpc.ServerStream
}
@@ -468,7 +454,7 @@ type workflowServiceGetWorkflowExecutionLogsServer struct {
grpc.ServerStream
}
func (x *workflowServiceGetWorkflowExecutionLogsServer) Send(m *LogEntry) error {
func (x *workflowServiceGetWorkflowExecutionLogsServer) Send(m *LogStreamResponse) error {
return x.ServerStream.SendMsg(m)
}
@@ -526,42 +512,6 @@ func _WorkflowService_TerminateWorkflowExecution_Handler(srv interface{}, ctx co
return interceptor(ctx, in, info, handler)
}
func _WorkflowService_GetArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetArtifactRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(WorkflowServiceServer).GetArtifact(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.WorkflowService/GetArtifact",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(WorkflowServiceServer).GetArtifact(ctx, req.(*GetArtifactRequest))
}
return interceptor(ctx, in, info, handler)
}
func _WorkflowService_ListFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListFilesRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(WorkflowServiceServer).ListFiles(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.WorkflowService/ListFiles",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(WorkflowServiceServer).ListFiles(ctx, req.(*ListFilesRequest))
}
return interceptor(ctx, in, info, handler)
}
func _WorkflowService_AddWorkflowExecutionStatistics_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(AddWorkflowExecutionStatisticRequest)
if err := dec(in); err != nil {
@@ -652,6 +602,24 @@ func _WorkflowService_UpdateWorkflowExecutionMetrics_Handler(srv interface{}, ct
return interceptor(ctx, in, info, handler)
}
func _WorkflowService_ListWorkflowExecutionsField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListWorkflowExecutionsFieldRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(WorkflowServiceServer).ListWorkflowExecutionsField(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.WorkflowService/ListWorkflowExecutionsField",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(WorkflowServiceServer).ListWorkflowExecutionsField(ctx, req.(*ListWorkflowExecutionsFieldRequest))
}
return interceptor(ctx, in, info, handler)
}
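
Every generated unary handler, including the new ListWorkflowExecutionsField one above, runs through any configured grpc.UnaryServerInterceptor with the FullMethod value set in its UnaryServerInfo. A minimal sketch of such an interceptor; the server wiring in the trailing comment is an assumption, not taken from this diff:

package example

import (
	"context"
	"log"

	"google.golang.org/grpc"
)

// loggingInterceptor prints the full method name, e.g.
// "/api.WorkflowService/ListWorkflowExecutionsField", before delegating
// to the generated handler.
func loggingInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
	log.Printf("unary call: %s", info.FullMethod)
	return handler(ctx, req)
}

// Typical wiring (assumed): grpc.NewServer(grpc.UnaryInterceptor(loggingInterceptor))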
var _WorkflowService_serviceDesc = grpc.ServiceDesc{
ServiceName: "api.WorkflowService",
HandlerType: (*WorkflowServiceServer)(nil),
@@ -688,14 +656,6 @@ var _WorkflowService_serviceDesc = grpc.ServiceDesc{
MethodName: "TerminateWorkflowExecution",
Handler: _WorkflowService_TerminateWorkflowExecution_Handler,
},
{
MethodName: "GetArtifact",
Handler: _WorkflowService_GetArtifact_Handler,
},
{
MethodName: "ListFiles",
Handler: _WorkflowService_ListFiles_Handler,
},
{
MethodName: "AddWorkflowExecutionStatistics",
Handler: _WorkflowService_AddWorkflowExecutionStatistics_Handler,
@@ -716,6 +676,10 @@ var _WorkflowService_serviceDesc = grpc.ServiceDesc{
MethodName: "UpdateWorkflowExecutionMetrics",
Handler: _WorkflowService_UpdateWorkflowExecutionMetrics_Handler,
},
{
MethodName: "ListWorkflowExecutionsField",
Handler: _WorkflowService_ListWorkflowExecutionsField_Handler,
},
},
Streams: []grpc.StreamDesc{
{

View File

@@ -842,19 +842,20 @@ type WorkflowTemplate struct {
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
CreatedAt string `protobuf:"bytes,1,opt,name=createdAt,proto3" json:"createdAt,omitempty"`
ModifiedAt string `protobuf:"bytes,2,opt,name=modifiedAt,proto3" json:"modifiedAt,omitempty"`
Uid string `protobuf:"bytes,3,opt,name=uid,proto3" json:"uid,omitempty"`
Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
Version int64 `protobuf:"varint,5,opt,name=version,proto3" json:"version,omitempty"`
Versions int64 `protobuf:"varint,6,opt,name=versions,proto3" json:"versions,omitempty"`
Manifest string `protobuf:"bytes,7,opt,name=manifest,proto3" json:"manifest,omitempty"`
IsLatest bool `protobuf:"varint,8,opt,name=isLatest,proto3" json:"isLatest,omitempty"`
IsArchived bool `protobuf:"varint,9,opt,name=isArchived,proto3" json:"isArchived,omitempty"`
Labels []*KeyValue `protobuf:"bytes,10,rep,name=labels,proto3" json:"labels,omitempty"`
Stats *WorkflowExecutionStatisticReport `protobuf:"bytes,11,opt,name=stats,proto3" json:"stats,omitempty"`
CronStats *CronWorkflowStatisticsReport `protobuf:"bytes,12,opt,name=cronStats,proto3" json:"cronStats,omitempty"`
Parameters []*Parameter `protobuf:"bytes,13,rep,name=parameters,proto3" json:"parameters,omitempty"`
CreatedAt string `protobuf:"bytes,1,opt,name=createdAt,proto3" json:"createdAt,omitempty"`
ModifiedAt string `protobuf:"bytes,2,opt,name=modifiedAt,proto3" json:"modifiedAt,omitempty"`
Uid string `protobuf:"bytes,3,opt,name=uid,proto3" json:"uid,omitempty"`
Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"`
Version int64 `protobuf:"varint,5,opt,name=version,proto3" json:"version,omitempty"`
Versions int64 `protobuf:"varint,6,opt,name=versions,proto3" json:"versions,omitempty"`
Manifest string `protobuf:"bytes,7,opt,name=manifest,proto3" json:"manifest,omitempty"`
IsLatest bool `protobuf:"varint,8,opt,name=isLatest,proto3" json:"isLatest,omitempty"`
IsArchived bool `protobuf:"varint,9,opt,name=isArchived,proto3" json:"isArchived,omitempty"`
Labels []*KeyValue `protobuf:"bytes,10,rep,name=labels,proto3" json:"labels,omitempty"`
Stats *WorkflowExecutionStatisticReport `protobuf:"bytes,11,opt,name=stats,proto3" json:"stats,omitempty"`
CronStats *CronWorkflowStatisticsReport `protobuf:"bytes,12,opt,name=cronStats,proto3" json:"cronStats,omitempty"`
Parameters []*Parameter `protobuf:"bytes,13,rep,name=parameters,proto3" json:"parameters,omitempty"`
Description string `protobuf:"bytes,14,opt,name=description,proto3" json:"description,omitempty"`
}
func (x *WorkflowTemplate) Reset() {
@@ -980,6 +981,13 @@ func (x *WorkflowTemplate) GetParameters() []*Parameter {
return nil
}
func (x *WorkflowTemplate) GetDescription() string {
if x != nil {
return x.Description
}
return ""
}
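
The new Description field is a plain proto3 string (field 14, JSON name "description") with the usual nil-safe getter. A short sketch using it, with placeholder values and the generated package import assumed from the descriptor's go_package:

package example

import (
	"fmt"

	gen "github.com/onepanelio/core/api/gen" // adjust to the actual generated package path
)

func describeTemplate() {
	wt := &gen.WorkflowTemplate{
		Name:        "pytorch-training", // placeholder values, not from the diff
		Description: "Trains a PyTorch model on the attached volume",
	}
	// GetDescription is nil-safe, like the other generated getters.
	fmt.Println(wt.Name, "-", wt.GetDescription())
}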
type GetWorkflowTemplateLabelsRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
@@ -1043,6 +1051,116 @@ func (x *GetWorkflowTemplateLabelsRequest) GetVersion() int64 {
return 0
}
type ListWorkflowTemplatesFieldRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
FieldName string `protobuf:"bytes,2,opt,name=fieldName,proto3" json:"fieldName,omitempty"`
IsSystem bool `protobuf:"varint,3,opt,name=isSystem,proto3" json:"isSystem,omitempty"`
}
func (x *ListWorkflowTemplatesFieldRequest) Reset() {
*x = ListWorkflowTemplatesFieldRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_workflow_template_proto_msgTypes[15]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListWorkflowTemplatesFieldRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListWorkflowTemplatesFieldRequest) ProtoMessage() {}
func (x *ListWorkflowTemplatesFieldRequest) ProtoReflect() protoreflect.Message {
mi := &file_workflow_template_proto_msgTypes[15]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListWorkflowTemplatesFieldRequest.ProtoReflect.Descriptor instead.
func (*ListWorkflowTemplatesFieldRequest) Descriptor() ([]byte, []int) {
return file_workflow_template_proto_rawDescGZIP(), []int{15}
}
func (x *ListWorkflowTemplatesFieldRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
func (x *ListWorkflowTemplatesFieldRequest) GetFieldName() string {
if x != nil {
return x.FieldName
}
return ""
}
func (x *ListWorkflowTemplatesFieldRequest) GetIsSystem() bool {
if x != nil {
return x.IsSystem
}
return false
}
type ListWorkflowTemplatesFieldResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"`
}
func (x *ListWorkflowTemplatesFieldResponse) Reset() {
*x = ListWorkflowTemplatesFieldResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_workflow_template_proto_msgTypes[16]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListWorkflowTemplatesFieldResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListWorkflowTemplatesFieldResponse) ProtoMessage() {}
func (x *ListWorkflowTemplatesFieldResponse) ProtoReflect() protoreflect.Message {
mi := &file_workflow_template_proto_msgTypes[16]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListWorkflowTemplatesFieldResponse.ProtoReflect.Descriptor instead.
func (*ListWorkflowTemplatesFieldResponse) Descriptor() ([]byte, []int) {
return file_workflow_template_proto_rawDescGZIP(), []int{16}
}
func (x *ListWorkflowTemplatesFieldResponse) GetValues() []string {
if x != nil {
return x.Values
}
return nil
}
var File_workflow_template_proto protoreflect.FileDescriptor
var file_workflow_template_proto_rawDesc = []byte{
@@ -1157,7 +1275,7 @@ var file_workflow_template_proto_rawDesc = []byte{
0x74, 0x65, 0x64, 0x22, 0x34, 0x0a, 0x1c, 0x43, 0x72, 0x6f, 0x6e, 0x57, 0x6f, 0x72, 0x6b, 0x66,
0x6c, 0x6f, 0x77, 0x53, 0x74, 0x61, 0x74, 0x69, 0x73, 0x74, 0x69, 0x63, 0x73, 0x52, 0x65, 0x70,
0x6f, 0x72, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x01, 0x20, 0x01,
0x28, 0x05, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x22, 0xd9, 0x03, 0x0a, 0x10, 0x57, 0x6f,
0x28, 0x05, 0x52, 0x05, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x22, 0xfb, 0x03, 0x0a, 0x10, 0x57, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x1c,
0x0a, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28,
0x09, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x1e, 0x0a, 0x0a,
@@ -1187,112 +1305,137 @@ var file_workflow_template_proto_rawDesc = []byte{
0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x2e, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74,
0x65, 0x72, 0x73, 0x18, 0x0d, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x61, 0x70, 0x69, 0x2e,
0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d,
0x65, 0x74, 0x65, 0x72, 0x73, 0x22, 0x6e, 0x0a, 0x20, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b,
0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x61, 0x62, 0x65,
0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d,
0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61,
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18,
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76,
0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65,
0x72, 0x73, 0x69, 0x6f, 0x6e, 0x32, 0xec, 0x0b, 0x0a, 0x17, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63,
0x65, 0x12, 0xae, 0x01, 0x0a, 0x18, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x57, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x24,
0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66,
0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x55, 0x82, 0xd3, 0xe4,
0x93, 0x02, 0x4f, 0x22, 0x3b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74,
0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77,
0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65,
0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65,
0x3a, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x12, 0x9b, 0x01, 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x22, 0x2e,
0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70,
0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63,
0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x6e, 0x0a, 0x20, 0x47, 0x65, 0x74, 0x57, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4c, 0x61,
0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e,
0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d,
0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a,
0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07,
0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x7b, 0x0a, 0x21, 0x4c, 0x69, 0x73, 0x74, 0x57,
0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73,
0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09,
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x66, 0x69,
0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x66,
0x69, 0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x73, 0x53, 0x79,
0x73, 0x74, 0x65, 0x6d, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x69, 0x73, 0x53, 0x79,
0x73, 0x74, 0x65, 0x6d, 0x22, 0x3c, 0x0a, 0x22, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b,
0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65,
0x6c, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61,
0x6c, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75,
0x65, 0x73, 0x32, 0xa3, 0x0d, 0x0a, 0x17, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54,
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xae,
0x01, 0x0a, 0x18, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66,
0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x24, 0x2e, 0x61, 0x70,
0x69, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x1a, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x46, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x40,
0x22, 0x2c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x55, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x4f,
0x22, 0x3b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f,
0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b,
0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x3a, 0x10,
0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65,
0x12, 0xc2, 0x01, 0x0a, 0x1d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66,
0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69,
0x6f, 0x6e, 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57,
0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x66, 0x82,
0xd3, 0xe4, 0x93, 0x02, 0x60, 0x22, 0x4c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62,
0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d,
0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x73, 0x2f, 0x7b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x2e, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69,
0x6f, 0x6e, 0x73, 0x3a, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0xd3, 0x01, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x1f, 0x2e,
0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54,
0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b,
0x75, 0x69, 0x64, 0x7d, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x3a, 0x10, 0x77,
0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12,
0x9b, 0x01, 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69,
0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54,
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15,
0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x83, 0x01, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x7d, 0x12, 0x32,
0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e,
0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69,
0x64, 0x7d, 0x5a, 0x47, 0x12, 0x45, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65,
0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f,
0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74,
0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
0x73, 0x2f, 0x7b, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x12, 0xb8, 0x01, 0x0a, 0x1c,
0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x28, 0x2e, 0x61,
0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54,
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x46, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x40, 0x22, 0x2c, 0x2f,
0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61,
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f,
0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x3a, 0x10, 0x77, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0xc2, 0x01,
0x0a, 0x1d, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12,
0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b,
0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x66, 0x82, 0xd3, 0xe4, 0x93,
0x02, 0x60, 0x22, 0x4c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61,
0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73,
0x2f, 0x7b, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x2e, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73,
0x3a, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x12, 0xd3, 0x01, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69,
0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x61, 0x70,
0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x22, 0x83, 0x01, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x7d, 0x12, 0x32, 0x2f, 0x61, 0x70,
0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65,
0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f,
0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x5a,
0x47, 0x12, 0x45, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72,
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f,
0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b,
0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x12, 0xb8, 0x01, 0x0a, 0x1c, 0x4c, 0x69, 0x73,
0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74,
0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x22, 0x43, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3d, 0x12, 0x3b, 0x2f, 0x61, 0x70, 0x69, 0x73,
0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65,
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65,
0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x94, 0x01, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x57,
0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73,
0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66,
0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x52,
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x34, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2e, 0x12,
0x2c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b,
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66,
0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x12, 0xe9, 0x01,
0x0a, 0x15, 0x43, 0x6c, 0x6f, 0x6e, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54,
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c,
0x6f, 0x6e, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c,
0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x61, 0x70, 0x69,
0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74,
0x65, 0x22, 0x95, 0x01, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x8e, 0x01, 0x12, 0x3f, 0x2f, 0x61, 0x70,
0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x28, 0x2e, 0x61, 0x70, 0x69, 0x2e,
0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65,
0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x43,
0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3d, 0x12, 0x3b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31,
0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c,
0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69,
0x6f, 0x6e, 0x73, 0x12, 0x94, 0x01, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b,
0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x12, 0x21, 0x2e,
0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x1a, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66,
0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x22, 0x34, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2e, 0x12, 0x2c, 0x2f, 0x61,
0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d,
0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x12, 0xe9, 0x01, 0x0a, 0x15, 0x43,
0x6c, 0x6f, 0x6e, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x6f, 0x6e, 0x65,
0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65,
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f,
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x95,
0x01, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x8e, 0x01, 0x12, 0x3f, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x63, 0x6c, 0x6f,
0x6e, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x5a, 0x4b, 0x12, 0x49, 0x2f, 0x61, 0x70,
0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65,
0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f,
0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f,
0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x5a, 0x4b, 0x12, 0x49,
0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e,
0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69,
0x64, 0x7d, 0x2f, 0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x2f,
0x7b, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x12, 0xa8, 0x01, 0x0a, 0x17, 0x41, 0x72,
0x63, 0x6c, 0x6f, 0x6e, 0x65, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x7d, 0x2f, 0x7b, 0x76, 0x65,
0x72, 0x73, 0x69, 0x6f, 0x6e, 0x7d, 0x12, 0xa8, 0x01, 0x0a, 0x17, 0x41, 0x72, 0x63, 0x68, 0x69,
0x76, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65,
0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65,
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x72,
0x63, 0x68, 0x69, 0x76, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x72, 0x63, 0x68,
0x69, 0x76, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c,
0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x61, 0x70, 0x69,
0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
0x22, 0x42, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3c, 0x1a, 0x3a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x72, 0x63,
0x68, 0x69, 0x76, 0x65, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63,
0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f,
0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x33,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x42, 0x82,
0xd3, 0xe4, 0x93, 0x02, 0x3c, 0x1a, 0x3a, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62,
0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d,
0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76,
0x65, 0x12, 0xb4, 0x01, 0x0a, 0x1a, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64,
0x12, 0x26, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66,
0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c,
0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c,
0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c,
0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x22, 0x45, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3f, 0x12, 0x3d, 0x2f, 0x61, 0x70, 0x69, 0x73,
0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
0x63, 0x65, 0x7d, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x66, 0x69,
0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69, 0x74, 0x68,
0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65, 0x6c, 0x69,
0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x62, 0x06,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@@ -1307,7 +1450,7 @@ func file_workflow_template_proto_rawDescGZIP() []byte {
return file_workflow_template_proto_rawDescData
}
var file_workflow_template_proto_msgTypes = make([]protoimpl.MessageInfo, 15)
var file_workflow_template_proto_msgTypes = make([]protoimpl.MessageInfo, 17)
var file_workflow_template_proto_goTypes = []interface{}{
(*GenerateWorkflowTemplateRequest)(nil), // 0: api.GenerateWorkflowTemplateRequest
(*CreateWorkflowTemplateRequest)(nil), // 1: api.CreateWorkflowTemplateRequest
@@ -1324,8 +1467,10 @@ var file_workflow_template_proto_goTypes = []interface{}{
(*CronWorkflowStatisticsReport)(nil), // 12: api.CronWorkflowStatisticsReport
(*WorkflowTemplate)(nil), // 13: api.WorkflowTemplate
(*GetWorkflowTemplateLabelsRequest)(nil), // 14: api.GetWorkflowTemplateLabelsRequest
(*KeyValue)(nil), // 15: api.KeyValue
(*Parameter)(nil), // 16: api.Parameter
(*ListWorkflowTemplatesFieldRequest)(nil), // 15: api.ListWorkflowTemplatesFieldRequest
(*ListWorkflowTemplatesFieldResponse)(nil), // 16: api.ListWorkflowTemplatesFieldResponse
(*KeyValue)(nil), // 17: api.KeyValue
(*Parameter)(nil), // 18: api.Parameter
}
var file_workflow_template_proto_depIdxs = []int32{
13, // 0: api.GenerateWorkflowTemplateRequest.workflowTemplate:type_name -> api.WorkflowTemplate
@@ -1334,10 +1479,10 @@ var file_workflow_template_proto_depIdxs = []int32{
13, // 3: api.ListWorkflowTemplateVersionsResponse.workflowTemplates:type_name -> api.WorkflowTemplate
13, // 4: api.ListWorkflowTemplatesResponse.workflowTemplates:type_name -> api.WorkflowTemplate
13, // 5: api.ArchiveWorkflowTemplateResponse.workflowTemplate:type_name -> api.WorkflowTemplate
15, // 6: api.WorkflowTemplate.labels:type_name -> api.KeyValue
17, // 6: api.WorkflowTemplate.labels:type_name -> api.KeyValue
11, // 7: api.WorkflowTemplate.stats:type_name -> api.WorkflowExecutionStatisticReport
12, // 8: api.WorkflowTemplate.cronStats:type_name -> api.CronWorkflowStatisticsReport
16, // 9: api.WorkflowTemplate.parameters:type_name -> api.Parameter
18, // 9: api.WorkflowTemplate.parameters:type_name -> api.Parameter
0, // 10: api.WorkflowTemplateService.GenerateWorkflowTemplate:input_type -> api.GenerateWorkflowTemplateRequest
1, // 11: api.WorkflowTemplateService.CreateWorkflowTemplate:input_type -> api.CreateWorkflowTemplateRequest
1, // 12: api.WorkflowTemplateService.CreateWorkflowTemplateVersion:input_type -> api.CreateWorkflowTemplateRequest
@@ -1346,16 +1491,18 @@ var file_workflow_template_proto_depIdxs = []int32{
7, // 15: api.WorkflowTemplateService.ListWorkflowTemplates:input_type -> api.ListWorkflowTemplatesRequest
4, // 16: api.WorkflowTemplateService.CloneWorkflowTemplate:input_type -> api.CloneWorkflowTemplateRequest
9, // 17: api.WorkflowTemplateService.ArchiveWorkflowTemplate:input_type -> api.ArchiveWorkflowTemplateRequest
13, // 18: api.WorkflowTemplateService.GenerateWorkflowTemplate:output_type -> api.WorkflowTemplate
13, // 19: api.WorkflowTemplateService.CreateWorkflowTemplate:output_type -> api.WorkflowTemplate
13, // 20: api.WorkflowTemplateService.CreateWorkflowTemplateVersion:output_type -> api.WorkflowTemplate
13, // 21: api.WorkflowTemplateService.GetWorkflowTemplate:output_type -> api.WorkflowTemplate
6, // 22: api.WorkflowTemplateService.ListWorkflowTemplateVersions:output_type -> api.ListWorkflowTemplateVersionsResponse
8, // 23: api.WorkflowTemplateService.ListWorkflowTemplates:output_type -> api.ListWorkflowTemplatesResponse
13, // 24: api.WorkflowTemplateService.CloneWorkflowTemplate:output_type -> api.WorkflowTemplate
10, // 25: api.WorkflowTemplateService.ArchiveWorkflowTemplate:output_type -> api.ArchiveWorkflowTemplateResponse
18, // [18:26] is the sub-list for method output_type
10, // [10:18] is the sub-list for method input_type
15, // 18: api.WorkflowTemplateService.ListWorkflowTemplatesField:input_type -> api.ListWorkflowTemplatesFieldRequest
13, // 19: api.WorkflowTemplateService.GenerateWorkflowTemplate:output_type -> api.WorkflowTemplate
13, // 20: api.WorkflowTemplateService.CreateWorkflowTemplate:output_type -> api.WorkflowTemplate
13, // 21: api.WorkflowTemplateService.CreateWorkflowTemplateVersion:output_type -> api.WorkflowTemplate
13, // 22: api.WorkflowTemplateService.GetWorkflowTemplate:output_type -> api.WorkflowTemplate
6, // 23: api.WorkflowTemplateService.ListWorkflowTemplateVersions:output_type -> api.ListWorkflowTemplateVersionsResponse
8, // 24: api.WorkflowTemplateService.ListWorkflowTemplates:output_type -> api.ListWorkflowTemplatesResponse
13, // 25: api.WorkflowTemplateService.CloneWorkflowTemplate:output_type -> api.WorkflowTemplate
10, // 26: api.WorkflowTemplateService.ArchiveWorkflowTemplate:output_type -> api.ArchiveWorkflowTemplateResponse
16, // 27: api.WorkflowTemplateService.ListWorkflowTemplatesField:output_type -> api.ListWorkflowTemplatesFieldResponse
19, // [19:28] is the sub-list for method output_type
10, // [10:19] is the sub-list for method input_type
10, // [10:10] is the sub-list for extension type_name
10, // [10:10] is the sub-list for extension extendee
0, // [0:10] is the sub-list for field type_name
@@ -1549,6 +1696,30 @@ func file_workflow_template_proto_init() {
return nil
}
}
file_workflow_template_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListWorkflowTemplatesFieldRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_workflow_template_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListWorkflowTemplatesFieldResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
@@ -1556,7 +1727,7 @@ func file_workflow_template_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_workflow_template_proto_rawDesc,
NumEnums: 0,
NumMessages: 15,
NumMessages: 17,
NumExtensions: 0,
NumServices: 1,
},

View File

@@ -893,6 +893,96 @@ func local_request_WorkflowTemplateService_ArchiveWorkflowTemplate_0(ctx context
}
var (
filter_WorkflowTemplateService_ListWorkflowTemplatesField_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0, "fieldName": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}}
)
func request_WorkflowTemplateService_ListWorkflowTemplatesField_0(ctx context.Context, marshaler runtime.Marshaler, client WorkflowTemplateServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListWorkflowTemplatesFieldRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["fieldName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "fieldName")
}
protoReq.FieldName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fieldName", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_WorkflowTemplateService_ListWorkflowTemplatesField_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.ListWorkflowTemplatesField(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_WorkflowTemplateService_ListWorkflowTemplatesField_0(ctx context.Context, marshaler runtime.Marshaler, server WorkflowTemplateServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListWorkflowTemplatesFieldRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["fieldName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "fieldName")
}
protoReq.FieldName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fieldName", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_WorkflowTemplateService_ListWorkflowTemplatesField_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.ListWorkflowTemplatesField(ctx, &protoReq)
return msg, metadata, err
}
// RegisterWorkflowTemplateServiceHandlerServer registers the http handlers for service WorkflowTemplateService to "mux".
// UnaryRPC :call WorkflowTemplateServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
@@ -1129,6 +1219,29 @@ func RegisterWorkflowTemplateServiceHandlerServer(ctx context.Context, mux *runt
})
mux.Handle("GET", pattern_WorkflowTemplateService_ListWorkflowTemplatesField_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.WorkflowTemplateService/ListWorkflowTemplatesField")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_WorkflowTemplateService_ListWorkflowTemplatesField_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkflowTemplateService_ListWorkflowTemplatesField_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -1370,6 +1483,26 @@ func RegisterWorkflowTemplateServiceHandlerClient(ctx context.Context, mux *runt
})
mux.Handle("GET", pattern_WorkflowTemplateService_ListWorkflowTemplatesField_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkflowTemplateService/ListWorkflowTemplatesField")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_WorkflowTemplateService_ListWorkflowTemplatesField_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkflowTemplateService_ListWorkflowTemplatesField_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -1393,6 +1526,8 @@ var (
pattern_WorkflowTemplateService_CloneWorkflowTemplate_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"apis", "v1beta1", "namespace", "workflow_templates", "uid", "clone", "name", "version"}, ""))
pattern_WorkflowTemplateService_ArchiveWorkflowTemplate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workflow_templates", "uid", "archive"}, ""))
pattern_WorkflowTemplateService_ListWorkflowTemplatesField_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v1beta", "namespace", "field", "workflow_templates", "fieldName"}, ""))
)
var (
@@ -1415,4 +1550,6 @@ var (
forward_WorkflowTemplateService_CloneWorkflowTemplate_1 = runtime.ForwardResponseMessage
forward_WorkflowTemplateService_ArchiveWorkflowTemplate_0 = runtime.ForwardResponseMessage
forward_WorkflowTemplateService_ListWorkflowTemplatesField_0 = runtime.ForwardResponseMessage
)
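
The new handler is wired into both registration paths, so it is reachable whether the mux talks to the server directly or through a gRPC client. A sketch of serving the gateway mux over HTTP, assuming grpc-gateway v2 and that RegisterWorkflowTemplateServiceHandlerServer keeps the (ctx, mux, server) signature partially visible above; the port is a placeholder:

package example

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"

	gen "github.com/onepanelio/core/api/gen" // adjust to the actual generated package path
)

func serveGateway(ctx context.Context, server gen.WorkflowTemplateServiceServer) error {
	mux := runtime.NewServeMux()
	// Registers the routes above, including
	// GET /apis/v1beta/{namespace}/field/workflow_templates/{fieldName}.
	if err := gen.RegisterWorkflowTemplateServiceHandlerServer(ctx, mux, server); err != nil {
		return err
	}
	log.Println("gateway listening on :8888")
	return http.ListenAndServe(":8888", mux)
}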

View File

@@ -26,6 +26,7 @@ type WorkflowTemplateServiceClient interface {
ListWorkflowTemplates(ctx context.Context, in *ListWorkflowTemplatesRequest, opts ...grpc.CallOption) (*ListWorkflowTemplatesResponse, error)
CloneWorkflowTemplate(ctx context.Context, in *CloneWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error)
ArchiveWorkflowTemplate(ctx context.Context, in *ArchiveWorkflowTemplateRequest, opts ...grpc.CallOption) (*ArchiveWorkflowTemplateResponse, error)
ListWorkflowTemplatesField(ctx context.Context, in *ListWorkflowTemplatesFieldRequest, opts ...grpc.CallOption) (*ListWorkflowTemplatesFieldResponse, error)
}
type workflowTemplateServiceClient struct {
@@ -108,6 +109,15 @@ func (c *workflowTemplateServiceClient) ArchiveWorkflowTemplate(ctx context.Cont
return out, nil
}
func (c *workflowTemplateServiceClient) ListWorkflowTemplatesField(ctx context.Context, in *ListWorkflowTemplatesFieldRequest, opts ...grpc.CallOption) (*ListWorkflowTemplatesFieldResponse, error) {
out := new(ListWorkflowTemplatesFieldResponse)
err := c.cc.Invoke(ctx, "/api.WorkflowTemplateService/ListWorkflowTemplatesField", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
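
A client-side sketch of the new RPC. The request fields (Namespace, FieldName, IsSystem) and the Values slice on the response come from the message definitions earlier in this diff; the connection, namespace, field name, and the New…Client constructor follow the usual generated-code conventions and are assumptions here:

package example

import (
	"context"
	"fmt"

	"google.golang.org/grpc"

	gen "github.com/onepanelio/core/api/gen" // adjust to the actual generated package path
)

func listTemplateFieldValues(ctx context.Context, conn *grpc.ClientConn) error {
	client := gen.NewWorkflowTemplateServiceClient(conn)
	resp, err := client.ListWorkflowTemplatesField(ctx, &gen.ListWorkflowTemplatesFieldRequest{
		Namespace: "default", // placeholder namespace
		FieldName: "name",    // placeholder field name
		IsSystem:  false,
	})
	if err != nil {
		return err
	}
	for _, v := range resp.GetValues() {
		fmt.Println(v)
	}
	return nil
}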
// WorkflowTemplateServiceServer is the server API for WorkflowTemplateService service.
// All implementations must embed UnimplementedWorkflowTemplateServiceServer
// for forward compatibility
@@ -121,6 +131,7 @@ type WorkflowTemplateServiceServer interface {
ListWorkflowTemplates(context.Context, *ListWorkflowTemplatesRequest) (*ListWorkflowTemplatesResponse, error)
CloneWorkflowTemplate(context.Context, *CloneWorkflowTemplateRequest) (*WorkflowTemplate, error)
ArchiveWorkflowTemplate(context.Context, *ArchiveWorkflowTemplateRequest) (*ArchiveWorkflowTemplateResponse, error)
ListWorkflowTemplatesField(context.Context, *ListWorkflowTemplatesFieldRequest) (*ListWorkflowTemplatesFieldResponse, error)
mustEmbedUnimplementedWorkflowTemplateServiceServer()
}
@@ -152,6 +163,9 @@ func (UnimplementedWorkflowTemplateServiceServer) CloneWorkflowTemplate(context.
func (UnimplementedWorkflowTemplateServiceServer) ArchiveWorkflowTemplate(context.Context, *ArchiveWorkflowTemplateRequest) (*ArchiveWorkflowTemplateResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ArchiveWorkflowTemplate not implemented")
}
func (UnimplementedWorkflowTemplateServiceServer) ListWorkflowTemplatesField(context.Context, *ListWorkflowTemplatesFieldRequest) (*ListWorkflowTemplatesFieldResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListWorkflowTemplatesField not implemented")
}
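
Because the server interface requires mustEmbedUnimplementedWorkflowTemplateServiceServer, existing servers that embed the Unimplemented struct keep compiling after this change and answer the new method with codes.Unimplemented until they override it. A sketch of such an override, with the actual lookup left as a stub:

package example

import (
	"context"

	gen "github.com/onepanelio/core/api/gen" // adjust to the actual generated package path
)

type templateServer struct {
	// Embedding keeps the server forward compatible; any method not
	// overridden, including ListWorkflowTemplatesField, returns Unimplemented.
	gen.UnimplementedWorkflowTemplateServiceServer
}

func (s *templateServer) ListWorkflowTemplatesField(ctx context.Context, req *gen.ListWorkflowTemplatesFieldRequest) (*gen.ListWorkflowTemplatesFieldResponse, error) {
	// Placeholder lookup: a real implementation would return the distinct
	// values of req.GetFieldName() within req.GetNamespace().
	return &gen.ListWorkflowTemplatesFieldResponse{Values: []string{}}, nil
}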
func (UnimplementedWorkflowTemplateServiceServer) mustEmbedUnimplementedWorkflowTemplateServiceServer() {
}
@@ -310,6 +324,24 @@ func _WorkflowTemplateService_ArchiveWorkflowTemplate_Handler(srv interface{}, c
return interceptor(ctx, in, info, handler)
}
func _WorkflowTemplateService_ListWorkflowTemplatesField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListWorkflowTemplatesFieldRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(WorkflowTemplateServiceServer).ListWorkflowTemplatesField(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.WorkflowTemplateService/ListWorkflowTemplatesField",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(WorkflowTemplateServiceServer).ListWorkflowTemplatesField(ctx, req.(*ListWorkflowTemplatesFieldRequest))
}
return interceptor(ctx, in, info, handler)
}
var _WorkflowTemplateService_serviceDesc = grpc.ServiceDesc{
ServiceName: "api.WorkflowTemplateService",
HandlerType: (*WorkflowTemplateServiceServer)(nil),
@@ -346,6 +378,10 @@ var _WorkflowTemplateService_serviceDesc = grpc.ServiceDesc{
MethodName: "ArchiveWorkflowTemplate",
Handler: _WorkflowTemplateService_ArchiveWorkflowTemplate_Handler,
},
{
MethodName: "ListWorkflowTemplatesField",
Handler: _WorkflowTemplateService_ListWorkflowTemplatesField_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "workflow_template.proto",

File diff suppressed because it is too large

View File

@@ -545,6 +545,14 @@ func request_WorkspaceService_ResumeWorkspace_0(ctx context.Context, marshaler r
var protoReq ResumeWorkspaceRequest
var metadata runtime.ServerMetadata
newReader, berr := utilities.IOReaderFactory(req.Body)
if berr != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
}
if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Body); err != nil && err != io.EOF {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
var (
val string
ok bool
@@ -581,6 +589,14 @@ func local_request_WorkspaceService_ResumeWorkspace_0(ctx context.Context, marsh
var protoReq ResumeWorkspaceRequest
var metadata runtime.ServerMetadata
newReader, berr := utilities.IOReaderFactory(req.Body)
if berr != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
}
if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Body); err != nil && err != io.EOF {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
var (
val string
ok bool
@@ -757,6 +773,143 @@ func local_request_WorkspaceService_RetryLastWorkspaceAction_0(ctx context.Conte
}
var (
filter_WorkspaceService_GetWorkspaceContainerLogs_0 = &utilities.DoubleArray{Encoding: map[string]int{"namespace": 0, "uid": 1, "containerName": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}}
)
func request_WorkspaceService_GetWorkspaceContainerLogs_0(ctx context.Context, marshaler runtime.Marshaler, client WorkspaceServiceClient, req *http.Request, pathParams map[string]string) (WorkspaceService_GetWorkspaceContainerLogsClient, runtime.ServerMetadata, error) {
var protoReq GetWorkspaceContainerLogsRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["uid"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "uid")
}
protoReq.Uid, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "uid", err)
}
val, ok = pathParams["containerName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "containerName")
}
protoReq.ContainerName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "containerName", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_WorkspaceService_GetWorkspaceContainerLogs_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
stream, err := client.GetWorkspaceContainerLogs(ctx, &protoReq)
if err != nil {
return nil, metadata, err
}
header, err := stream.Header()
if err != nil {
return nil, metadata, err
}
metadata.HeaderMD = header
return stream, metadata, nil
}
func request_WorkspaceService_ListWorkspacesField_0(ctx context.Context, marshaler runtime.Marshaler, client WorkspaceServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListWorkspacesFieldRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["fieldName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "fieldName")
}
protoReq.FieldName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fieldName", err)
}
msg, err := client.ListWorkspacesField(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_WorkspaceService_ListWorkspacesField_0(ctx context.Context, marshaler runtime.Marshaler, server WorkspaceServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListWorkspacesFieldRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["fieldName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "fieldName")
}
protoReq.FieldName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fieldName", err)
}
msg, err := server.ListWorkspacesField(ctx, &protoReq)
return msg, metadata, err
}
// RegisterWorkspaceServiceHandlerServer registers the http handlers for service WorkspaceService to "mux".
// UnaryRPC :call WorkspaceServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
@@ -993,6 +1146,36 @@ func RegisterWorkspaceServiceHandlerServer(ctx context.Context, mux *runtime.Ser
})
mux.Handle("GET", pattern_WorkspaceService_GetWorkspaceContainerLogs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
err := status.Error(codes.Unimplemented, "streaming calls are not yet supported in the in-process transport")
_, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
})
mux.Handle("GET", pattern_WorkspaceService_ListWorkspacesField_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.WorkspaceService/ListWorkspacesField")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_WorkspaceService_ListWorkspacesField_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkspaceService_ListWorkspacesField_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -1234,6 +1417,46 @@ func RegisterWorkspaceServiceHandlerClient(ctx context.Context, mux *runtime.Ser
})
mux.Handle("GET", pattern_WorkspaceService_GetWorkspaceContainerLogs_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkspaceService/GetWorkspaceContainerLogs")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_WorkspaceService_GetWorkspaceContainerLogs_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkspaceService_GetWorkspaceContainerLogs_0(ctx, mux, outboundMarshaler, w, req, func() (proto.Message, error) { return resp.Recv() }, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_WorkspaceService_ListWorkspacesField_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkspaceService/ListWorkspacesField")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_WorkspaceService_ListWorkspacesField_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkspaceService_ListWorkspacesField_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -1257,6 +1480,10 @@ var (
pattern_WorkspaceService_DeleteWorkspace_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"apis", "v1beta1", "namespace", "workspaces", "uid"}, ""))
pattern_WorkspaceService_RetryLastWorkspaceAction_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workspaces", "uid", "retry"}, ""))
pattern_WorkspaceService_GetWorkspaceContainerLogs_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 1, 0, 4, 1, 5, 6, 2, 7}, []string{"apis", "v1beta1", "namespace", "workspaces", "uid", "containers", "containerName", "logs"}, ""))
pattern_WorkspaceService_ListWorkspacesField_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v1beta", "namespace", "field", "workspaces", "fieldName"}, ""))
)
var (
@@ -1279,4 +1506,8 @@ var (
forward_WorkspaceService_DeleteWorkspace_0 = runtime.ForwardResponseMessage
forward_WorkspaceService_RetryLastWorkspaceAction_0 = runtime.ForwardResponseMessage
forward_WorkspaceService_GetWorkspaceContainerLogs_0 = runtime.ForwardResponseStream
forward_WorkspaceService_ListWorkspacesField_0 = runtime.ForwardResponseMessage
)
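The pattern above maps ListWorkspacesField to GET /apis/v1beta/{namespace}/field/workspaces/{fieldName} on the HTTP gateway. A minimal sketch of calling that route, assuming the gateway is reachable at localhost:8888 and that "default" and "name" are placeholder namespace and field values; auth headers are omitted, and the response is assumed to carry the same values array as the other field endpoints in this changeset.

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Route taken from pattern_WorkspaceService_ListWorkspacesField_0 above.
	// Host, port, namespace and fieldName are illustrative assumptions.
	url := "http://localhost:8888/apis/v1beta/default/field/workspaces/name"

	resp, err := http.Get(url) // auth headers omitted for brevity
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // expected shape: {"values": ["..."]}
}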

View File

@@ -28,6 +28,8 @@ type WorkspaceServiceClient interface {
ResumeWorkspace(ctx context.Context, in *ResumeWorkspaceRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
DeleteWorkspace(ctx context.Context, in *DeleteWorkspaceRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
RetryLastWorkspaceAction(ctx context.Context, in *RetryActionWorkspaceRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
GetWorkspaceContainerLogs(ctx context.Context, in *GetWorkspaceContainerLogsRequest, opts ...grpc.CallOption) (WorkspaceService_GetWorkspaceContainerLogsClient, error)
ListWorkspacesField(ctx context.Context, in *ListWorkspacesFieldRequest, opts ...grpc.CallOption) (*ListWorkspacesFieldResponse, error)
}
type workspaceServiceClient struct {
@@ -128,6 +130,47 @@ func (c *workspaceServiceClient) RetryLastWorkspaceAction(ctx context.Context, i
return out, nil
}
func (c *workspaceServiceClient) GetWorkspaceContainerLogs(ctx context.Context, in *GetWorkspaceContainerLogsRequest, opts ...grpc.CallOption) (WorkspaceService_GetWorkspaceContainerLogsClient, error) {
stream, err := c.cc.NewStream(ctx, &_WorkspaceService_serviceDesc.Streams[0], "/api.WorkspaceService/GetWorkspaceContainerLogs", opts...)
if err != nil {
return nil, err
}
x := &workspaceServiceGetWorkspaceContainerLogsClient{stream}
if err := x.ClientStream.SendMsg(in); err != nil {
return nil, err
}
if err := x.ClientStream.CloseSend(); err != nil {
return nil, err
}
return x, nil
}
type WorkspaceService_GetWorkspaceContainerLogsClient interface {
Recv() (*LogStreamResponse, error)
grpc.ClientStream
}
type workspaceServiceGetWorkspaceContainerLogsClient struct {
grpc.ClientStream
}
func (x *workspaceServiceGetWorkspaceContainerLogsClient) Recv() (*LogStreamResponse, error) {
m := new(LogStreamResponse)
if err := x.ClientStream.RecvMsg(m); err != nil {
return nil, err
}
return m, nil
}
func (c *workspaceServiceClient) ListWorkspacesField(ctx context.Context, in *ListWorkspacesFieldRequest, opts ...grpc.CallOption) (*ListWorkspacesFieldResponse, error) {
out := new(ListWorkspacesFieldResponse)
err := c.cc.Invoke(ctx, "/api.WorkspaceService/ListWorkspacesField", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// WorkspaceServiceServer is the server API for WorkspaceService service.
// All implementations must embed UnimplementedWorkspaceServiceServer
// for forward compatibility
@@ -142,6 +185,8 @@ type WorkspaceServiceServer interface {
ResumeWorkspace(context.Context, *ResumeWorkspaceRequest) (*emptypb.Empty, error)
DeleteWorkspace(context.Context, *DeleteWorkspaceRequest) (*emptypb.Empty, error)
RetryLastWorkspaceAction(context.Context, *RetryActionWorkspaceRequest) (*emptypb.Empty, error)
GetWorkspaceContainerLogs(*GetWorkspaceContainerLogsRequest, WorkspaceService_GetWorkspaceContainerLogsServer) error
ListWorkspacesField(context.Context, *ListWorkspacesFieldRequest) (*ListWorkspacesFieldResponse, error)
mustEmbedUnimplementedWorkspaceServiceServer()
}
@@ -179,6 +224,12 @@ func (UnimplementedWorkspaceServiceServer) DeleteWorkspace(context.Context, *Del
func (UnimplementedWorkspaceServiceServer) RetryLastWorkspaceAction(context.Context, *RetryActionWorkspaceRequest) (*emptypb.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method RetryLastWorkspaceAction not implemented")
}
func (UnimplementedWorkspaceServiceServer) GetWorkspaceContainerLogs(*GetWorkspaceContainerLogsRequest, WorkspaceService_GetWorkspaceContainerLogsServer) error {
return status.Errorf(codes.Unimplemented, "method GetWorkspaceContainerLogs not implemented")
}
func (UnimplementedWorkspaceServiceServer) ListWorkspacesField(context.Context, *ListWorkspacesFieldRequest) (*ListWorkspacesFieldResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListWorkspacesField not implemented")
}
func (UnimplementedWorkspaceServiceServer) mustEmbedUnimplementedWorkspaceServiceServer() {}
// UnsafeWorkspaceServiceServer may be embedded to opt out of forward compatibility for this service.
@@ -372,6 +423,45 @@ func _WorkspaceService_RetryLastWorkspaceAction_Handler(srv interface{}, ctx con
return interceptor(ctx, in, info, handler)
}
func _WorkspaceService_GetWorkspaceContainerLogs_Handler(srv interface{}, stream grpc.ServerStream) error {
m := new(GetWorkspaceContainerLogsRequest)
if err := stream.RecvMsg(m); err != nil {
return err
}
return srv.(WorkspaceServiceServer).GetWorkspaceContainerLogs(m, &workspaceServiceGetWorkspaceContainerLogsServer{stream})
}
type WorkspaceService_GetWorkspaceContainerLogsServer interface {
Send(*LogStreamResponse) error
grpc.ServerStream
}
type workspaceServiceGetWorkspaceContainerLogsServer struct {
grpc.ServerStream
}
func (x *workspaceServiceGetWorkspaceContainerLogsServer) Send(m *LogStreamResponse) error {
return x.ServerStream.SendMsg(m)
}
func _WorkspaceService_ListWorkspacesField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListWorkspacesFieldRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(WorkspaceServiceServer).ListWorkspacesField(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.WorkspaceService/ListWorkspacesField",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(WorkspaceServiceServer).ListWorkspacesField(ctx, req.(*ListWorkspacesFieldRequest))
}
return interceptor(ctx, in, info, handler)
}
var _WorkspaceService_serviceDesc = grpc.ServiceDesc{
ServiceName: "api.WorkspaceService",
HandlerType: (*WorkspaceServiceServer)(nil),
@@ -416,7 +506,17 @@ var _WorkspaceService_serviceDesc = grpc.ServiceDesc{
MethodName: "RetryLastWorkspaceAction",
Handler: _WorkspaceService_RetryLastWorkspaceAction_Handler,
},
{
MethodName: "ListWorkspacesField",
Handler: _WorkspaceService_ListWorkspacesField_Handler,
},
},
Streams: []grpc.StreamDesc{
{
StreamName: "GetWorkspaceContainerLogs",
Handler: _WorkspaceService_GetWorkspaceContainerLogs_Handler,
ServerStreams: true,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "workspace.proto",
}
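GetWorkspaceContainerLogs is server-streaming, so a client reads it with a Recv loop until io.EOF. A minimal sketch against the generated client, assuming the generated package is importable from the go_package path shown in the proto files below and that the server address and identifiers are placeholders.

package main

import (
	"context"
	"fmt"
	"io"
	"log"

	api "github.com/onepanelio/core/api/gen" // assumed import path, taken from the go_package option below
	"google.golang.org/grpc"
)

func main() {
	// Address is an assumption; point it at a running core gRPC server. Plaintext for brevity.
	conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := api.NewWorkspaceServiceClient(conn)
	stream, err := client.GetWorkspaceContainerLogs(context.Background(), &api.GetWorkspaceContainerLogsRequest{
		Namespace:     "default",      // illustrative values
		Uid:           "my-workspace",
		ContainerName: "main",
	})
	if err != nil {
		log.Fatal(err)
	}
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			break // server closed the stream
		}
		if err != nil {
			log.Fatal(err)
		}
		for _, entry := range resp.LogEntries {
			fmt.Println(entry.Timestamp, entry.Content)
		}
	}
}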

View File

@@ -720,6 +720,108 @@ func (x *ListWorkspaceTemplateVersionsResponse) GetWorkspaceTemplates() []*Works
return nil
}
type ListWorkspaceTemplatesFieldRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"`
FieldName string `protobuf:"bytes,2,opt,name=fieldName,proto3" json:"fieldName,omitempty"`
}
func (x *ListWorkspaceTemplatesFieldRequest) Reset() {
*x = ListWorkspaceTemplatesFieldRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_workspace_template_proto_msgTypes[10]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListWorkspaceTemplatesFieldRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListWorkspaceTemplatesFieldRequest) ProtoMessage() {}
func (x *ListWorkspaceTemplatesFieldRequest) ProtoReflect() protoreflect.Message {
mi := &file_workspace_template_proto_msgTypes[10]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListWorkspaceTemplatesFieldRequest.ProtoReflect.Descriptor instead.
func (*ListWorkspaceTemplatesFieldRequest) Descriptor() ([]byte, []int) {
return file_workspace_template_proto_rawDescGZIP(), []int{10}
}
func (x *ListWorkspaceTemplatesFieldRequest) GetNamespace() string {
if x != nil {
return x.Namespace
}
return ""
}
func (x *ListWorkspaceTemplatesFieldRequest) GetFieldName() string {
if x != nil {
return x.FieldName
}
return ""
}
type ListWorkspaceTemplatesFieldResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"`
}
func (x *ListWorkspaceTemplatesFieldResponse) Reset() {
*x = ListWorkspaceTemplatesFieldResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_workspace_template_proto_msgTypes[11]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListWorkspaceTemplatesFieldResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListWorkspaceTemplatesFieldResponse) ProtoMessage() {}
func (x *ListWorkspaceTemplatesFieldResponse) ProtoReflect() protoreflect.Message {
mi := &file_workspace_template_proto_msgTypes[11]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListWorkspaceTemplatesFieldResponse.ProtoReflect.Descriptor instead.
func (*ListWorkspaceTemplatesFieldResponse) Descriptor() ([]byte, []int) {
return file_workspace_template_proto_rawDescGZIP(), []int{11}
}
func (x *ListWorkspaceTemplatesFieldResponse) GetValues() []string {
if x != nil {
return x.Values
}
return nil
}
var File_workspace_template_proto protoreflect.FileDescriptor
var file_workspace_template_proto_rawDesc = []byte{
@@ -828,87 +930,109 @@ var file_workspace_template_proto_rawDesc = []byte{
0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28,
0x0b, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73,
0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x32, 0xce, 0x09,
0x0a, 0x18, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c,
0x61, 0x74, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xdb, 0x01, 0x0a, 0x29, 0x47,
0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x35, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47,
0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54, 0x65,
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x60, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x5a, 0x22, 0x45,
0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e,
0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
0x61, 0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75,
0x69, 0x64, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0xa0, 0x01, 0x0a, 0x17, 0x43, 0x72, 0x65,
0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74,
0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e,
0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74,
0x65, 0x22, 0x48, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x42, 0x22, 0x2d, 0x2f, 0x61, 0x70, 0x69, 0x73,
0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74,
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x3a, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70,
0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0xa6, 0x01, 0x0a, 0x17,
0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54,
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55, 0x70,
0x64, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61,
0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x22, 0x4e, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x48, 0x1a, 0x33, 0x2f, 0x61,
0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d,
0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64,
0x7d, 0x3a, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x12, 0x9d, 0x01, 0x0a, 0x18, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65,
0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74,
0x65, 0x12, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x57,
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65,
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f,
0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22,
0x43, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3d, 0x1a, 0x3b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76,
0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x72, 0x63,
0x68, 0x69, 0x76, 0x65, 0x12, 0x8d, 0x01, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b,
0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x20, 0x2e,
0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54,
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3b, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x35, 0x12,
0x33, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b,
0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x22, 0x60, 0x0a,
0x22, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65,
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
0x65, 0x12, 0x1c, 0x0a, 0x09, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x18, 0x02,
0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x22,
0x3d, 0x0a, 0x23, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73,
0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x06, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x32, 0x89,
0x0b, 0x0a, 0x18, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xdb, 0x01, 0x0a, 0x29,
0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f,
0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x35, 0x2e, 0x61, 0x70, 0x69, 0x2e,
0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f,
0x77, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x1a, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x54,
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x60, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x5a, 0x22,
0x45, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b,
0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73,
0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b,
0x75, 0x69, 0x64, 0x7d, 0x12, 0x98, 0x01, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72,
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x12,
0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70,
0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f,
0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x35, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2f,
0x12, 0x2d, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f,
0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b,
0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x12,
0xbc, 0x01, 0x0a, 0x1d, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
0x73, 0x12, 0x29, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b,
0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72,
0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x61,
0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x44, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3e,
0x12, 0x3c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f,
0x75, 0x69, 0x64, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x5f, 0x74, 0x65,
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x3a, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0xa0, 0x01, 0x0a, 0x17, 0x43, 0x72,
0x65, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x72, 0x65, 0x61,
0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c,
0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69,
0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x22, 0x48, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x42, 0x22, 0x2d, 0x2f, 0x61, 0x70, 0x69,
0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73,
0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f,
0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x3a, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x73,
0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0xa6, 0x01, 0x0a,
0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x55,
0x70, 0x64, 0x61, 0x74, 0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65,
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e,
0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x4e, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x48, 0x1a, 0x33, 0x2f,
0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61,
0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69,
0x64, 0x7d, 0x3a, 0x11, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d,
0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x9d, 0x01, 0x0a, 0x18, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76,
0x65, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61,
0x74, 0x65, 0x12, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65,
0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74,
0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57,
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65,
0x22, 0x43, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x3d, 0x1a, 0x3b, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f,
0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x65,
0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x61, 0x72,
0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x8d, 0x01, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72,
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x12, 0x20,
0x2e, 0x61, 0x70, 0x69, 0x2e, 0x47, 0x65, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65,
0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x22, 0x3b, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x35,
0x12, 0x33, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31, 0x2f,
0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72, 0x6b,
0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f,
0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x24,
0x5a, 0x22, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65,
0x70, 0x61, 0x6e, 0x65, 0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69,
0x2f, 0x67, 0x65, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x7b, 0x75, 0x69, 0x64, 0x7d, 0x12, 0x98, 0x01, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f,
0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73,
0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73,
0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57,
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65,
0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x35, 0x82, 0xd3, 0xe4, 0x93, 0x02,
0x2f, 0x12, 0x2d, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72,
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73,
0x12, 0xbc, 0x01, 0x0a, 0x1d, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61,
0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f,
0x6e, 0x73, 0x12, 0x29, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72,
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65,
0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e,
0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x44, 0x82, 0xd3, 0xe4, 0x93, 0x02,
0x3e, 0x12, 0x3c, 0x2f, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x31,
0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x7d, 0x2f, 0x77, 0x6f, 0x72,
0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73,
0x2f, 0x7b, 0x75, 0x69, 0x64, 0x7d, 0x2f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x12,
0xb8, 0x01, 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63,
0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x12,
0x27, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70,
0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c,
0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x4c,
0x69, 0x73, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x65, 0x6d, 0x70,
0x6c, 0x61, 0x74, 0x65, 0x73, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x73, 0x65, 0x22, 0x46, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x40, 0x12, 0x3e, 0x2f, 0x61, 0x70, 0x69,
0x73, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2f, 0x7b, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70,
0x61, 0x63, 0x65, 0x7d, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x2f, 0x77, 0x6f, 0x72, 0x6b, 0x73,
0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x73, 0x2f, 0x7b,
0x66, 0x69, 0x65, 0x6c, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x7d, 0x42, 0x24, 0x5a, 0x22, 0x67, 0x69,
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6f, 0x6e, 0x65, 0x70, 0x61, 0x6e, 0x65,
0x6c, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x67, 0x65, 0x6e,
0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@@ -923,7 +1047,7 @@ func file_workspace_template_proto_rawDescGZIP() []byte {
return file_workspace_template_proto_rawDescData
}
var file_workspace_template_proto_msgTypes = make([]protoimpl.MessageInfo, 10)
var file_workspace_template_proto_msgTypes = make([]protoimpl.MessageInfo, 12)
var file_workspace_template_proto_goTypes = []interface{}{
(*WorkspaceTemplate)(nil), // 0: api.WorkspaceTemplate
(*GenerateWorkspaceTemplateWorkflowTemplateRequest)(nil), // 1: api.GenerateWorkspaceTemplateWorkflowTemplateRequest
@@ -935,12 +1059,14 @@ var file_workspace_template_proto_goTypes = []interface{}{
(*ListWorkspaceTemplatesResponse)(nil), // 7: api.ListWorkspaceTemplatesResponse
(*ListWorkspaceTemplateVersionsRequest)(nil), // 8: api.ListWorkspaceTemplateVersionsRequest
(*ListWorkspaceTemplateVersionsResponse)(nil), // 9: api.ListWorkspaceTemplateVersionsResponse
(*WorkflowTemplate)(nil), // 10: api.WorkflowTemplate
(*KeyValue)(nil), // 11: api.KeyValue
(*ListWorkspaceTemplatesFieldRequest)(nil), // 10: api.ListWorkspaceTemplatesFieldRequest
(*ListWorkspaceTemplatesFieldResponse)(nil), // 11: api.ListWorkspaceTemplatesFieldResponse
(*WorkflowTemplate)(nil), // 12: api.WorkflowTemplate
(*KeyValue)(nil), // 13: api.KeyValue
}
var file_workspace_template_proto_depIdxs = []int32{
10, // 0: api.WorkspaceTemplate.workflowTemplate:type_name -> api.WorkflowTemplate
11, // 1: api.WorkspaceTemplate.labels:type_name -> api.KeyValue
12, // 0: api.WorkspaceTemplate.workflowTemplate:type_name -> api.WorkflowTemplate
13, // 1: api.WorkspaceTemplate.labels:type_name -> api.KeyValue
0, // 2: api.GenerateWorkspaceTemplateWorkflowTemplateRequest.workspaceTemplate:type_name -> api.WorkspaceTemplate
0, // 3: api.CreateWorkspaceTemplateRequest.workspaceTemplate:type_name -> api.WorkspaceTemplate
0, // 4: api.UpdateWorkspaceTemplateRequest.workspaceTemplate:type_name -> api.WorkspaceTemplate
@@ -953,15 +1079,17 @@ var file_workspace_template_proto_depIdxs = []int32{
4, // 11: api.WorkspaceTemplateService.GetWorkspaceTemplate:input_type -> api.GetWorkspaceTemplateRequest
6, // 12: api.WorkspaceTemplateService.ListWorkspaceTemplates:input_type -> api.ListWorkspaceTemplatesRequest
8, // 13: api.WorkspaceTemplateService.ListWorkspaceTemplateVersions:input_type -> api.ListWorkspaceTemplateVersionsRequest
10, // 14: api.WorkspaceTemplateService.GenerateWorkspaceTemplateWorkflowTemplate:output_type -> api.WorkflowTemplate
0, // 15: api.WorkspaceTemplateService.CreateWorkspaceTemplate:output_type -> api.WorkspaceTemplate
0, // 16: api.WorkspaceTemplateService.UpdateWorkspaceTemplate:output_type -> api.WorkspaceTemplate
0, // 17: api.WorkspaceTemplateService.ArchiveWorkspaceTemplate:output_type -> api.WorkspaceTemplate
0, // 18: api.WorkspaceTemplateService.GetWorkspaceTemplate:output_type -> api.WorkspaceTemplate
7, // 19: api.WorkspaceTemplateService.ListWorkspaceTemplates:output_type -> api.ListWorkspaceTemplatesResponse
9, // 20: api.WorkspaceTemplateService.ListWorkspaceTemplateVersions:output_type -> api.ListWorkspaceTemplateVersionsResponse
14, // [14:21] is the sub-list for method output_type
7, // [7:14] is the sub-list for method input_type
10, // 14: api.WorkspaceTemplateService.ListWorkspaceTemplatesField:input_type -> api.ListWorkspaceTemplatesFieldRequest
12, // 15: api.WorkspaceTemplateService.GenerateWorkspaceTemplateWorkflowTemplate:output_type -> api.WorkflowTemplate
0, // 16: api.WorkspaceTemplateService.CreateWorkspaceTemplate:output_type -> api.WorkspaceTemplate
0, // 17: api.WorkspaceTemplateService.UpdateWorkspaceTemplate:output_type -> api.WorkspaceTemplate
0, // 18: api.WorkspaceTemplateService.ArchiveWorkspaceTemplate:output_type -> api.WorkspaceTemplate
0, // 19: api.WorkspaceTemplateService.GetWorkspaceTemplate:output_type -> api.WorkspaceTemplate
7, // 20: api.WorkspaceTemplateService.ListWorkspaceTemplates:output_type -> api.ListWorkspaceTemplatesResponse
9, // 21: api.WorkspaceTemplateService.ListWorkspaceTemplateVersions:output_type -> api.ListWorkspaceTemplateVersionsResponse
11, // 22: api.WorkspaceTemplateService.ListWorkspaceTemplatesField:output_type -> api.ListWorkspaceTemplatesFieldResponse
15, // [15:23] is the sub-list for method output_type
7, // [7:15] is the sub-list for method input_type
7, // [7:7] is the sub-list for extension type_name
7, // [7:7] is the sub-list for extension extendee
0, // [0:7] is the sub-list for field type_name
@@ -1095,6 +1223,30 @@ func file_workspace_template_proto_init() {
return nil
}
}
file_workspace_template_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListWorkspaceTemplatesFieldRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_workspace_template_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListWorkspaceTemplatesFieldResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
@@ -1102,7 +1254,7 @@ func file_workspace_template_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_workspace_template_proto_rawDesc,
NumEnums: 0,
NumMessages: 10,
NumMessages: 12,
NumExtensions: 0,
NumServices: 1,
},

View File

@@ -579,6 +579,78 @@ func local_request_WorkspaceTemplateService_ListWorkspaceTemplateVersions_0(ctx
}
func request_WorkspaceTemplateService_ListWorkspaceTemplatesField_0(ctx context.Context, marshaler runtime.Marshaler, client WorkspaceTemplateServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListWorkspaceTemplatesFieldRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["fieldName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "fieldName")
}
protoReq.FieldName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fieldName", err)
}
msg, err := client.ListWorkspaceTemplatesField(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_WorkspaceTemplateService_ListWorkspaceTemplatesField_0(ctx context.Context, marshaler runtime.Marshaler, server WorkspaceTemplateServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ListWorkspaceTemplatesFieldRequest
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["namespace"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace")
}
protoReq.Namespace, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err)
}
val, ok = pathParams["fieldName"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "fieldName")
}
protoReq.FieldName, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "fieldName", err)
}
msg, err := server.ListWorkspaceTemplatesField(ctx, &protoReq)
return msg, metadata, err
}
// RegisterWorkspaceTemplateServiceHandlerServer registers the http handlers for service WorkspaceTemplateService to "mux".
// UnaryRPC :call WorkspaceTemplateServiceServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
@@ -746,6 +818,29 @@ func RegisterWorkspaceTemplateServiceHandlerServer(ctx context.Context, mux *run
})
mux.Handle("GET", pattern_WorkspaceTemplateService_ListWorkspaceTemplatesField_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/api.WorkspaceTemplateService/ListWorkspaceTemplatesField")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_WorkspaceTemplateService_ListWorkspaceTemplatesField_0(rctx, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkspaceTemplateService_ListWorkspaceTemplatesField_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -927,6 +1022,26 @@ func RegisterWorkspaceTemplateServiceHandlerClient(ctx context.Context, mux *run
})
mux.Handle("GET", pattern_WorkspaceTemplateService_ListWorkspaceTemplatesField_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req, "/api.WorkspaceTemplateService/ListWorkspaceTemplatesField")
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_WorkspaceTemplateService_ListWorkspaceTemplatesField_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_WorkspaceTemplateService_ListWorkspaceTemplatesField_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -944,6 +1059,8 @@ var (
pattern_WorkspaceTemplateService_ListWorkspaceTemplates_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3}, []string{"apis", "v1beta1", "namespace", "workspace_templates"}, ""))
pattern_WorkspaceTemplateService_ListWorkspaceTemplateVersions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"apis", "v1beta1", "namespace", "workspace_templates", "uid", "versions"}, ""))
pattern_WorkspaceTemplateService_ListWorkspaceTemplatesField_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5}, []string{"apis", "v1beta", "namespace", "field", "workspace_templates", "fieldName"}, ""))
)
var (
@@ -960,4 +1077,6 @@ var (
forward_WorkspaceTemplateService_ListWorkspaceTemplates_0 = runtime.ForwardResponseMessage
forward_WorkspaceTemplateService_ListWorkspaceTemplateVersions_0 = runtime.ForwardResponseMessage
forward_WorkspaceTemplateService_ListWorkspaceTemplatesField_0 = runtime.ForwardResponseMessage
)

View File

@@ -29,6 +29,7 @@ type WorkspaceTemplateServiceClient interface {
GetWorkspaceTemplate(ctx context.Context, in *GetWorkspaceTemplateRequest, opts ...grpc.CallOption) (*WorkspaceTemplate, error)
ListWorkspaceTemplates(ctx context.Context, in *ListWorkspaceTemplatesRequest, opts ...grpc.CallOption) (*ListWorkspaceTemplatesResponse, error)
ListWorkspaceTemplateVersions(ctx context.Context, in *ListWorkspaceTemplateVersionsRequest, opts ...grpc.CallOption) (*ListWorkspaceTemplateVersionsResponse, error)
ListWorkspaceTemplatesField(ctx context.Context, in *ListWorkspaceTemplatesFieldRequest, opts ...grpc.CallOption) (*ListWorkspaceTemplatesFieldResponse, error)
}
type workspaceTemplateServiceClient struct {
@@ -102,6 +103,15 @@ func (c *workspaceTemplateServiceClient) ListWorkspaceTemplateVersions(ctx conte
return out, nil
}
func (c *workspaceTemplateServiceClient) ListWorkspaceTemplatesField(ctx context.Context, in *ListWorkspaceTemplatesFieldRequest, opts ...grpc.CallOption) (*ListWorkspaceTemplatesFieldResponse, error) {
out := new(ListWorkspaceTemplatesFieldResponse)
err := c.cc.Invoke(ctx, "/api.WorkspaceTemplateService/ListWorkspaceTemplatesField", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// WorkspaceTemplateServiceServer is the server API for WorkspaceTemplateService service.
// All implementations must embed UnimplementedWorkspaceTemplateServiceServer
// for forward compatibility
@@ -118,6 +128,7 @@ type WorkspaceTemplateServiceServer interface {
GetWorkspaceTemplate(context.Context, *GetWorkspaceTemplateRequest) (*WorkspaceTemplate, error)
ListWorkspaceTemplates(context.Context, *ListWorkspaceTemplatesRequest) (*ListWorkspaceTemplatesResponse, error)
ListWorkspaceTemplateVersions(context.Context, *ListWorkspaceTemplateVersionsRequest) (*ListWorkspaceTemplateVersionsResponse, error)
ListWorkspaceTemplatesField(context.Context, *ListWorkspaceTemplatesFieldRequest) (*ListWorkspaceTemplatesFieldResponse, error)
mustEmbedUnimplementedWorkspaceTemplateServiceServer()
}
@@ -146,6 +157,9 @@ func (UnimplementedWorkspaceTemplateServiceServer) ListWorkspaceTemplates(contex
func (UnimplementedWorkspaceTemplateServiceServer) ListWorkspaceTemplateVersions(context.Context, *ListWorkspaceTemplateVersionsRequest) (*ListWorkspaceTemplateVersionsResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListWorkspaceTemplateVersions not implemented")
}
func (UnimplementedWorkspaceTemplateServiceServer) ListWorkspaceTemplatesField(context.Context, *ListWorkspaceTemplatesFieldRequest) (*ListWorkspaceTemplatesFieldResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListWorkspaceTemplatesField not implemented")
}
func (UnimplementedWorkspaceTemplateServiceServer) mustEmbedUnimplementedWorkspaceTemplateServiceServer() {
}
@@ -286,6 +300,24 @@ func _WorkspaceTemplateService_ListWorkspaceTemplateVersions_Handler(srv interfa
return interceptor(ctx, in, info, handler)
}
func _WorkspaceTemplateService_ListWorkspaceTemplatesField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListWorkspaceTemplatesFieldRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(WorkspaceTemplateServiceServer).ListWorkspaceTemplatesField(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/api.WorkspaceTemplateService/ListWorkspaceTemplatesField",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(WorkspaceTemplateServiceServer).ListWorkspaceTemplatesField(ctx, req.(*ListWorkspaceTemplatesFieldRequest))
}
return interceptor(ctx, in, info, handler)
}
var _WorkspaceTemplateService_serviceDesc = grpc.ServiceDesc{
ServiceName: "api.WorkspaceTemplateService",
HandlerType: (*WorkspaceTemplateServiceServer)(nil),
@@ -318,6 +350,10 @@ var _WorkspaceTemplateService_serviceDesc = grpc.ServiceDesc{
MethodName: "ListWorkspaceTemplateVersions",
Handler: _WorkspaceTemplateService_ListWorkspaceTemplateVersions_Handler,
},
{
MethodName: "ListWorkspaceTemplatesField",
Handler: _WorkspaceTemplateService_ListWorkspaceTemplatesField_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "workspace_template.proto",
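ListWorkspaceTemplatesField, by contrast, is a plain unary call. A minimal sketch with the generated client, under the same assumptions about import path and server address; namespace and fieldName are illustrative.

package main

import (
	"context"
	"fmt"
	"log"

	api "github.com/onepanelio/core/api/gen" // assumed import path, taken from the go_package option below
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure()) // address is an assumption
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := api.NewWorkspaceTemplateServiceClient(conn)
	resp, err := client.ListWorkspaceTemplatesField(context.Background(), &api.ListWorkspaceTemplatesFieldRequest{
		Namespace: "default", // illustrative values
		FieldName: "name",
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Values) // distinct values for the requested field
}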

View File

@@ -18,4 +18,18 @@ message Parameter {
message ParameterOption {
string name = 1;
string value = 2;
}
message LogStreamResponse {
repeated LogEntry logEntries = 1;
}
message LogEntry {
string timestamp = 1;
string content = 2;
}
message MachineType {
string name = 1;
string value = 2;
}

View File

@@ -12,6 +12,20 @@ service ConfigService {
get: "/apis/v1beta1/config"
};
}
rpc GetNamespaceConfig (GetNamespaceConfigRequest) returns (GetNamespaceConfigResponse) {
option (google.api.http) = {
get: "/apis/v1beta1/{namespace}/config"
};
}
}
message GetNamespaceConfigRequest {
string namespace = 1;
}
message GetNamespaceConfigResponse {
string bucket = 1;
}
message GetConfigResponse {

api/proto/files.proto Normal file
View File

@@ -0,0 +1,56 @@
syntax = "proto3";
package api;
option go_package = "github.com/onepanelio/core/api/gen";
import "google/api/annotations.proto";
service FileService {
rpc GetObjectDownloadPresignedURL (GetObjectPresignedUrlRequest) returns (GetPresignedUrlResponse) {
option (google.api.http) = {
get: "/apis/v1beta1/{namespace}/files/presigned-url/{key=**}"
};
}
rpc ListFiles (ListFilesRequest) returns (ListFilesResponse) {
option (google.api.http) = {
get: "/apis/v1beta1/{namespace}/files/list/{path=**}"
};
}
}
message File {
string path = 1;
string name = 2;
string extension = 3;
int64 size = 4;
string contentType = 5;
string lastModified = 6;
bool directory = 7;
}
message ListFilesRequest {
string namespace = 1;
string path = 2;
int32 page = 3;
int32 perPage = 4;
}
message ListFilesResponse {
int32 count = 1;
int32 totalCount = 2;
int32 page = 3;
int32 pages = 4;
repeated File files = 5;
string parentPath = 6;
}
message GetObjectPresignedUrlRequest {
string namespace = 1;
string key = 2;
}
message GetPresignedUrlResponse {
string url = 1;
int64 size = 2;
}
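GetObjectDownloadPresignedURL returns a URL and the object size, so a client can fetch the presigned URL first and then download the object directly from it. A minimal sketch over the HTTP gateway, assuming host, namespace and key are placeholders and auth headers are omitted; field names follow GetPresignedUrlResponse above.

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Route from FileService.GetObjectDownloadPresignedURL above; host, namespace and key are assumptions.
	endpoint := "http://localhost:8888/apis/v1beta1/default/files/presigned-url/artifacts/output.txt"

	resp, err := http.Get(endpoint) // auth headers omitted for brevity
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var presigned struct {
		URL  string      `json:"url"`
		Size interface{} `json:"size"` // int64 may arrive as a JSON string under protobuf JSON rules
	}
	if err := json.NewDecoder(resp.Body).Decode(&presigned); err != nil {
		panic(err)
	}
	fmt.Println("object size:", presigned.Size)

	// Download the object directly from the presigned URL.
	obj, err := http.Get(presigned.URL)
	if err != nil {
		panic(err)
	}
	defer obj.Body.Close()
	data, _ := io.ReadAll(obj.Body)
	fmt.Println("downloaded", len(data), "bytes")
}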

View File

@@ -0,0 +1,92 @@
syntax = "proto3";
package api;
option go_package = "github.com/onepanelio/core/api/gen";
import "google/api/annotations.proto";
import "google/protobuf/empty.proto";
service InferenceService {
rpc CreateInferenceService (CreateInferenceServiceRequest) returns (GetInferenceServiceResponse) {
option (google.api.http) = {
post: "/apis/v1beta1/{namespace}/inferenceservice"
body: "*"
};
}
rpc GetInferenceService(InferenceServiceIdentifier) returns (GetInferenceServiceResponse) {
option (google.api.http) = {
get: "/apis/v1beta1/{namespace}/inferenceservice/{name}"
};
}
rpc DeleteInferenceService (InferenceServiceIdentifier) returns (google.protobuf.Empty) {
option (google.api.http) = {
delete: "/apis/v1beta1/{namespace}/inferenceservice/{name}"
};
}
}
message InferenceServiceIdentifier {
string namespace = 1;
string name = 2;
}
message Env {
string name = 1;
string value = 2;
}
message Container {
string image = 1;
string name = 2;
repeated Env env = 3;
}
message InferenceServiceTransformer {
repeated Container containers = 1;
string minCpu = 2;
string minMemory = 3;
string maxCpu = 4;
string maxMemory = 5;
}
message InferenceServicePredictor {
string name = 1;
string runtimeVersion = 2;
string storageUri = 3;
string nodeSelector = 4;
string minCpu = 5;
string minMemory = 6;
string maxCpu = 7;
string maxMemory = 8;
}
message CreateInferenceServiceRequest {
string namespace = 1;
string name = 2;
string defaultTransformerImage = 3;
InferenceServicePredictor predictor = 4;
InferenceServiceTransformer transformer = 5;
}
message DeployModelResponse {
string status = 1;
}
message InferenceServiceCondition {
string lastTransitionTime = 1;
string status = 2;
string type = 3;
}
message GetInferenceServiceResponse {
bool ready = 1;
repeated InferenceServiceCondition conditions = 2;
string predictUrl = 3;
}
message InferenceServiceEndpoints {
string predict = 1;
}
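CreateInferenceService is mapped to a POST with body: "*", so the whole CreateInferenceServiceRequest is sent as the JSON payload. A minimal sketch of posting one through the gateway; the host, model name, storageUri and resource values are illustrative placeholders, not values taken from this repository.

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Payload mirrors CreateInferenceServiceRequest and InferenceServicePredictor above; all values are illustrative.
	payload := []byte(`{
		"name": "my-model",
		"predictor": {
			"name": "tensorflow",
			"storageUri": "s3://my-bucket/models/my-model",
			"minCpu": "1",
			"minMemory": "1Gi"
		}
	}`)

	// Route from InferenceService.CreateInferenceService above; host and namespace are assumptions.
	url := "http://localhost:8888/apis/v1beta1/default/inferenceservice"
	resp, err := http.Post(url, "application/json", bytes.NewReader(payload)) // auth headers omitted
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // GetInferenceServiceResponse: ready, conditions, predictUrl
}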

View File

@@ -17,6 +17,12 @@ service ServiceService {
get: "/apis/v1beta1/{namespace}/service"
};
}
rpc HasService(HasServiceRequest) returns (HasServiceResponse) {
option (google.api.http) = {
get: "/apis/v1beta/service/{name}"
};
}
}
message Service {
@@ -29,6 +35,14 @@ message GetServiceRequest {
string name = 2;
}
message HasServiceRequest {
string name = 1;
}
message HasServiceResponse {
bool hasService = 1;
}
message ListServicesRequest {
string namespace = 1;
int32 pageSize = 2;

View File

@@ -50,7 +50,7 @@ service WorkflowService {
};
}
rpc GetWorkflowExecutionLogs (GetWorkflowExecutionLogsRequest) returns (stream LogEntry) {
rpc GetWorkflowExecutionLogs (GetWorkflowExecutionLogsRequest) returns (stream LogStreamResponse) {
option (google.api.http) = {
get: "/apis/v1beta1/{namespace}/workflow_executions/{uid}/pods/{podName}/containers/{containerName}/logs"
};
@@ -74,18 +74,6 @@ service WorkflowService {
};
}
rpc GetArtifact (GetArtifactRequest) returns (ArtifactResponse) {
option (google.api.http) = {
get: "/apis/v1beta1/{namespace}/workflow_executions/{uid}/artifacts/{key=**}"
};
}
rpc ListFiles (ListFilesRequest) returns (ListFilesResponse) {
option (google.api.http) = {
get: "/apis/v1beta1/{namespace}/workflow_executions/{uid}/files/{path=**}"
};
}
rpc AddWorkflowExecutionStatistics (AddWorkflowExecutionStatisticRequest) returns (google.protobuf.Empty) {
option (google.api.http) = {
post: "/apis/v1beta1/{namespace}/workflow_executions/{uid}/statistics"
@@ -120,6 +108,12 @@ service WorkflowService {
body: "*"
};
}
rpc ListWorkflowExecutionsField (ListWorkflowExecutionsFieldRequest) returns (ListWorkflowExecutionsFieldResponse) {
option (google.api.http) = {
get: "/apis/v1beta/{namespace}/field/workflow_executions/{fieldName}"
};
}
}
message CreateWorkflowExecutionBody {
@@ -205,11 +199,6 @@ message ListWorkflowExecutionsResponse {
int32 totalAvailableCount = 6;
}
message LogEntry {
string timestamp = 1;
string content = 2;
}
message WorkflowExecutionMetadata {
string url = 1;
}
@@ -234,31 +223,6 @@ message WorkflowExecution {
repeated Metric metrics = 12;
}
message ArtifactResponse {
bytes data = 1;
}
message File {
string path = 1;
string name = 2;
string extension = 3;
int64 size = 4;
string contentType = 5;
string lastModified = 6;
bool directory = 7;
}
message ListFilesRequest {
string namespace = 1;
string uid = 2;
string path = 3;
}
message ListFilesResponse {
repeated File files = 1;
string parentPath = 2;
}
message Statistics {
string workflowStatus = 1;
int64 workflowTemplateId = 2;
@@ -315,4 +279,13 @@ message UpdateWorkflowExecutionsMetricsRequest {
message WorkflowExecutionsMetricsResponse {
repeated Metric metrics = 4;
}
message ListWorkflowExecutionsFieldRequest {
string namespace = 1;
string fieldName = 2;
}
message ListWorkflowExecutionsFieldResponse {
repeated string values = 1;
}
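
The new "field" endpoint returns the values recorded for a given field of workflow executions, which a UI can use, for example, to populate a filter dropdown. A minimal sketch of calling it is below; the host, namespace, and field name are placeholder assumptions. The workflow template, workspace, and workspace template variants added further down in this diff return the same response shape.

package main

import (
    "encoding/json"
    "fmt"
    "net/http"
)

// listFieldResponse mirrors ListWorkflowExecutionsFieldResponse above; the other
// field endpoints return the same shape.
type listFieldResponse struct {
    Values []string `json:"values"`
}

func main() {
    // Assumed values: host, namespace, and fieldName ("name" here) are illustrative.
    url := "https://onepanel.example.com/apis/v1beta/default/field/workflow_executions/name"

    resp, err := http.Get(url)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    var out listFieldResponse
    if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
        panic(err)
    }
    for _, v := range out.Values {
        fmt.Println(v)
    }
}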

View File

@@ -65,6 +65,12 @@ service WorkflowTemplateService {
put: "/apis/v1beta1/{namespace}/workflow_templates/{uid}/archive"
};
}
rpc ListWorkflowTemplatesField (ListWorkflowTemplatesFieldRequest) returns (ListWorkflowTemplatesFieldResponse) {
option (google.api.http) = {
get: "/apis/v1beta/{namespace}/field/workflow_templates/{fieldName}"
};
}
}
message GenerateWorkflowTemplateRequest {
@@ -160,10 +166,21 @@ message WorkflowTemplate {
WorkflowExecutionStatisticReport stats = 11;
CronWorkflowStatisticsReport cronStats = 12;
repeated Parameter parameters = 13;
string description = 14;
}
message GetWorkflowTemplateLabelsRequest {
string namespace = 1;
string name = 2;
int64 version = 3;
}
message ListWorkflowTemplatesFieldRequest {
string namespace = 1;
string fieldName = 2;
bool isSystem = 3;
}
message ListWorkflowTemplatesFieldResponse {
repeated string values = 1;
}

View File

@@ -59,6 +59,7 @@ service WorkspaceService {
rpc ResumeWorkspace (ResumeWorkspaceRequest) returns (google.protobuf.Empty) {
option (google.api.http) = {
put: "/apis/v1beta1/{namespace}/workspaces/{uid}/resume"
body: "body"
};
}
@@ -73,6 +74,23 @@ service WorkspaceService {
put: "/apis/v1beta1/{namespace}/workspaces/{uid}/retry"
};
}
rpc GetWorkspaceContainerLogs (GetWorkspaceContainerLogsRequest) returns (stream LogStreamResponse) {
option (google.api.http) = {
get: "/apis/v1beta1/{namespace}/workspaces/{uid}/containers/{containerName}/logs"
};
}
rpc ListWorkspacesField (ListWorkspacesFieldRequest) returns (ListWorkspacesFieldResponse) {
option (google.api.http) = {
get: "/apis/v1beta/{namespace}/field/workspaces/{fieldName}"
};
}
}
message WorkspaceComponent {
string name = 1;
string url = 2;
}
message Workspace {
@@ -86,6 +104,8 @@ message Workspace {
repeated KeyValue labels = 8;
string url = 9;
repeated Parameter templateParameters = 10;
repeated WorkspaceComponent workspaceComponents = 11;
MachineType machineType = 12;
}
message WorkspaceStatus {
@@ -101,6 +121,7 @@ message CreateWorkspaceBody {
repeated Parameter parameters = 3;
repeated KeyValue labels = 4;
bool captureNode = 5;
}
message CreateWorkspaceRequest {
@@ -157,6 +178,7 @@ message PauseWorkspaceRequest {
message ResumeWorkspaceRequest {
string namespace = 1;
string uid = 2;
UpdateWorkspaceBody body = 3;
}
message DeleteWorkspaceRequest {
@@ -193,4 +215,20 @@ message GetWorkspaceStatisticsForNamespaceRequest {
message GetWorkspaceStatisticsForNamespaceResponse {
WorkspaceStatisticReport stats = 1;
}
message GetWorkspaceContainerLogsRequest {
string namespace = 1;
string uid = 2;
string containerName = 3;
int64 sinceTime = 4;
}
message ListWorkspacesFieldRequest {
string namespace = 1;
string fieldName = 2;
}
message ListWorkspacesFieldResponse {
repeated string values = 1;
}
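
GetWorkspaceContainerLogs is declared as a server-streaming RPC, so through the HTTP gateway it typically surfaces as a long-lived response whose body is a stream of newline-delimited JSON messages. The sketch below follows such a stream line by line; the host, namespace, workspace uid, and container name are placeholder assumptions, and the newline-delimited framing is the usual grpc-gateway convention rather than something spelled out in this diff.

package main

import (
    "bufio"
    "fmt"
    "net/http"
)

func main() {
    // Assumed values: host, namespace, workspace uid, and container name are illustrative.
    url := "https://onepanel.example.com/apis/v1beta1/default/workspaces/my-workspace/containers/main/logs"

    resp, err := http.Get(url)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    // The gateway keeps the response open and emits one JSON-encoded message per line,
    // so a line scanner is enough to follow the log stream as it grows.
    scanner := bufio.NewScanner(resp.Body)
    scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024) // allow long log lines
    for scanner.Scan() {
        fmt.Println(scanner.Text())
    }
    if err := scanner.Err(); err != nil {
        panic(err)
    }
}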

View File

@@ -57,6 +57,12 @@ service WorkspaceTemplateService {
get: "/apis/v1beta1/{namespace}/workspace_templates/{uid}/versions"
};
}
rpc ListWorkspaceTemplatesField (ListWorkspaceTemplatesFieldRequest) returns (ListWorkspaceTemplatesFieldResponse) {
option (google.api.http) = {
get: "/apis/v1beta/{namespace}/field/workspace_templates/{fieldName}"
};
}
}
message WorkspaceTemplate {
@@ -127,4 +133,11 @@ message ListWorkspaceTemplateVersionsResponse {
repeated WorkspaceTemplate workspaceTemplates = 2;
}
message ListWorkspaceTemplatesFieldRequest {
string namespace = 1;
string fieldName = 2;
}
message ListWorkspaceTemplatesFieldResponse {
repeated string values = 1;
}

View File

@@ -52,7 +52,7 @@ See https://docs.onepanel.ai
` + "```" + `
# Download the binary
curl -sLO https://github.com/onepanelio/core/releases/download/v%s/opctl-linux-amd64
curl -sLO https://github.com/onepanelio/onepanel/releases/download/v%s/opctl-linux-amd64
# Make binary executable
chmod +x opctl-linux-amd64
@@ -68,7 +68,7 @@ opctl version
` + "```" + `
# Download the binary
curl -sLO https://github.com/onepanelio/core/releases/download/v%s/opctl-macos-amd64
curl -sLO https://github.com/onepanelio/onepanel/releases/download/v%s/opctl-macos-amd64
# Make binary executable
chmod +x opctl-macos-amd64
@@ -82,7 +82,7 @@ opctl version
## Windows
Download the [attached executable](https://github.com/onepanelio/core/releases/download/v%s/opctl-windows-amd64.exe), rename it to "opctl" and move it to a folder that is in your PATH environment variable.
Download the [attached executable](https://github.com/onepanelio/onepanel/releases/download/v%s/opctl-windows-amd64.exe), rename it to "opctl" and move it to a folder that is in your PATH environment variable.
`
var repositories = []string{

View File

@@ -86,7 +86,7 @@ templates:
optional: true
`
const pytorchMnistWorkflowTemplateName = "PyTorch Training"
const pytorchWorkflowTemplateName = "PyTorch Training"
func initialize20200605090509() {
if _, ok := initializedMigrations[20200605090509]; !ok {
@@ -120,7 +120,7 @@ func Up20200605090509(tx *sql.Tx) error {
}
workflowTemplate := &v1.WorkflowTemplate{
Name: pytorchMnistWorkflowTemplateName,
Name: pytorchWorkflowTemplateName,
Manifest: pytorchMnistWorkflowTemplate,
}
@@ -150,7 +150,7 @@ func Down20200605090509(tx *sql.Tx) error {
return err
}
uid, err := uid2.GenerateUID(pytorchMnistWorkflowTemplateName, 30)
uid, err := uid2.GenerateUID(pytorchWorkflowTemplateName, 30)
if err != nil {
return err
}

View File

@@ -25,6 +25,6 @@ func Up20201115133046(tx *sql.Tx) error {
func Down20201115133046(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20201113094916"),
filepath.Join("workspaces", "cvat", "20201113094916.yaml"),
cvatTemplateName)
}

View File

@@ -17,7 +17,7 @@ func initialize20201221194344() {
func Up20201221194344(tx *sql.Tx) error {
return updateWorkflowTemplateManifest(
filepath.Join("workflows", "pytorch-mnist-training", "20201221194344.yaml"),
pytorchMnistWorkflowTemplateName,
pytorchWorkflowTemplateName,
map[string]string{
"created-by": "system",
},
@@ -29,7 +29,7 @@ func Down20201221194344(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
return updateWorkflowTemplateManifest(
filepath.Join("workflows", "pytorch-mnist-training", "20200605090509.yaml"),
pytorchMnistWorkflowTemplateName,
pytorchWorkflowTemplateName,
map[string]string{
"created-by": "system",
},

View File

@@ -21,7 +21,6 @@ func Up20201223062947(tx *sql.Tx) error {
tensorflowWorkflowTemplateName,
map[string]string{
"created-by": "system",
"used-by": "cvat",
},
)
}

View File

@@ -0,0 +1,30 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210107094725() {
if _, ok := initializedMigrations[20210107094725]; !ok {
goose.AddMigration(Up20210107094725, Down20210107094725)
initializedMigrations[20210107094725] = true
}
}
// Up20210107094725 updates CVAT to the latest image
func Up20210107094725(tx *sql.Tx) error {
// This code is executed when the migration is applied.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210107094725.yaml"),
cvatTemplateName)
}
// Down20210107094725 reverts to the previous CVAT image
func Down20210107094725(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20201211161117.yaml"),
cvatTemplateName)
}

View File

@@ -0,0 +1,129 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210118175809() {
if _, ok := initializedMigrations[20210118175809]; !ok {
goose.AddMigration(Up20210118175809, Down20210118175809)
initializedMigrations[20210118175809] = true
}
}
// Up20210118175809 updates workflows so that the nodePoolSelector label is based on k8s config
func Up20210118175809(tx *sql.Tx) error {
// This code is executed when the migration is applied.
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "hyperparameter-tuning", "20210118175809.yaml"),
hyperparameterTuningTemplateName,
map[string]string{
"framework": "tensorflow",
"tuner": "TPE",
"created-by": "system",
},
); err != nil {
return err
}
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "maskrcnn-training", "20210118175809.yaml"),
maskRCNNWorkflowTemplateName,
map[string]string{
"created-by": "system",
"used-by": "cvat",
},
); err != nil {
return err
}
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "pytorch-mnist-training", "20210118175809.yaml"),
pytorchWorkflowTemplateName,
map[string]string{
"created-by": "system",
"framework": "pytorch",
},
); err != nil {
return err
}
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "tensorflow-mnist-training", "20210118175809.yaml"),
tensorflowWorkflowTemplateName,
map[string]string{
"created-by": "system",
"framework": "tensorflow",
},
); err != nil {
return err
}
return updateWorkflowTemplateManifest(
filepath.Join("workflows", "tf-object-detection-training", "20210118175809.yaml"),
tensorflowObjectDetectionWorkflowTemplateName,
map[string]string{
"created-by": "system",
"used-by": "cvat",
},
)
}
// Down20210118175809 reverts the migration
func Down20210118175809(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "tf-object-detection-training", "20201223202929.yaml"),
tensorflowObjectDetectionWorkflowTemplateName,
map[string]string{
"created-by": "system",
"used-by": "cvat",
},
); err != nil {
return err
}
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "tensorflow-mnist-training", "20201223062947.yaml"),
tensorflowWorkflowTemplateName,
map[string]string{
"created-by": "system",
"framework": "tensorflow",
},
); err != nil {
return err
}
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "pytorch-mnist-training", "20201221194344.yaml"),
pytorchWorkflowTemplateName,
map[string]string{
"created-by": "system",
},
); err != nil {
return err
}
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "maskrcnn-training", "20201221195937.yaml"),
maskRCNNWorkflowTemplateName,
map[string]string{
"created-by": "system",
"used-by": "cvat",
},
); err != nil {
return err
}
return updateWorkflowTemplateManifest(
filepath.Join("workflows", "hyperparameter-tuning", "20201225172926.yaml"),
hyperparameterTuningTemplateName,
map[string]string{
"framework": "tensorflow",
"tuner": "TPE",
"created-by": "system",
},
)
}

View File

@@ -0,0 +1,30 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210129134326() {
if _, ok := initializedMigrations[20210129134326]; !ok {
goose.AddMigration(Up20210129134326, Down20210129134326)
initializedMigrations[20210129134326] = true
}
}
// Up20210129134326 updates CVAT to the latest image
func Up20210129134326(tx *sql.Tx) error {
// This code is executed when the migration is applied.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210129134326.yaml"),
cvatTemplateName)
}
// Down20210129134326 reverts to the previous CVAT image
func Down20210129134326(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210107094725.yaml"),
cvatTemplateName)
}

View File

@@ -0,0 +1,30 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210129142057() {
if _, ok := initializedMigrations[20210129142057]; !ok {
goose.AddMigration(Up20210129142057, Down20210129142057)
initializedMigrations[20210129142057] = true
}
}
// Up20210129142057 updates the jupyterlab workspace template
func Up20210129142057(tx *sql.Tx) error {
// This code is executed when the migration is applied.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "jupyterlab", "20210129142057.yaml"),
jupyterLabTemplateName)
}
// Down20210129142057 rolls back the jupyterlab workspace template update
func Down20210129142057(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "jupyterlab", "20201229205644.yaml"),
jupyterLabTemplateName)
}

View File

@@ -0,0 +1,33 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210129152427() {
if _, ok := initializedMigrations[20210129152427]; !ok {
goose.AddMigration(Up20210129152427, Down20210129152427)
initializedMigrations[20210129152427] = true
}
}
// Up20210129152427 adds lifecycle hooks to the VSCode workspace template.
// The hooks export the installed conda, pip, and vscode packages to a text file,
// and reinstall those packages when the workspace is resumed or started.
func Up20210129152427(tx *sql.Tx) error {
// This code is executed when the migration is applied.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vscode", "20210129152427.yaml"),
vscodeWorkspaceTemplateName)
}
// Down20210129152427 removes the lifecycle hooks from the VSCode workspace template.
func Down20210129152427(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vscode", "20201028145443.yaml"),
vscodeWorkspaceTemplateName)
}

View File

@@ -0,0 +1,54 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210224180017() {
if _, ok := initializedMigrations[20210224180017]; !ok {
goose.AddMigration(Up20210224180017, Down20210224180017)
initializedMigrations[20210224180017] = true
}
}
// Up20210224180017 updates workspace templates with the latest filesyncer image
func Up20210224180017(tx *sql.Tx) error {
// This code is executed when the migration is applied.
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210224180017.yaml"),
cvatTemplateName); err != nil {
return err
}
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "jupyterlab", "20210224180017.yaml"),
jupyterLabTemplateName); err != nil {
return err
}
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vscode", "20210224180017.yaml"),
vscodeWorkspaceTemplateName)
}
// Down20210224180017 rolls back the filesyncer image updates
func Down20210224180017(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vscode", "20210224180017.yaml"),
vscodeWorkspaceTemplateName); err != nil {
return err
}
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "jupyterlab", "20210224180017.yaml"),
jupyterLabTemplateName); err != nil {
return err
}
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210224180017.yaml"),
cvatTemplateName)
}

View File

@@ -0,0 +1,58 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210323175655() {
if _, ok := initializedMigrations[20210323175655]; !ok {
goose.AddMigration(Up20210323175655, Down20210323175655)
initializedMigrations[20210323175655] = true
}
}
// Up20210323175655 updates workflows to support the new PNS mode
func Up20210323175655(tx *sql.Tx) error {
// This code is executed when the migration is applied.
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "pytorch-mnist-training", "20210323175655.yaml"),
pytorchWorkflowTemplateName,
map[string]string{
"created-by": "system",
"framework": "pytorch",
}); err != nil {
return err
}
return updateWorkflowTemplateManifest(
filepath.Join("workflows", "tensorflow-mnist-training", "20210323175655.yaml"),
tensorflowWorkflowTemplateName,
map[string]string{
"created-by": "system",
"framework": "tensorflow",
})
}
// Down20210323175655 reverts the workflow updates for PNS support
func Down20210323175655(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
if err := updateWorkflowTemplateManifest(
filepath.Join("workflows", "tensorflow-mnist-training", "20210118175809.yaml"),
tensorflowWorkflowTemplateName,
map[string]string{
"created-by": "system",
"framework": "tensorflow",
}); err != nil {
return err
}
return updateWorkflowTemplateManifest(
filepath.Join("workflows", "pytorch-mnist-training", "20210118175809.yaml"),
pytorchWorkflowTemplateName,
map[string]string{
"created-by": "system",
"framework": "pytorch",
})
}

View File

@@ -0,0 +1,55 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210329171739() {
if _, ok := initializedMigrations[20210329171739]; !ok {
goose.AddMigration(Up20210329171739, Down20210329171739)
initializedMigrations[20210329171739] = true
}
}
// Up20210329171739 updates workspace templates to use new images
func Up20210329171739(tx *sql.Tx) error {
// This code is executed when the migration is applied.
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210323175655.yaml"),
cvatTemplateName); err != nil {
return err
}
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "jupyterlab", "20210323175655.yaml"),
jupyterLabTemplateName); err != nil {
return err
}
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vscode", "20210323175655.yaml"),
vscodeWorkspaceTemplateName)
}
// Down20210329171739 rolls back the image updates for workspace templates
func Down20210329171739(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210224180017.yaml"),
cvatTemplateName); err != nil {
return err
}
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "jupyterlab", "20210224180017.yaml"),
jupyterLabTemplateName); err != nil {
return err
}
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vscode", "20210224180017.yaml"),
vscodeWorkspaceTemplateName)
}

View File

@@ -0,0 +1,109 @@
package migration
import (
"database/sql"
uid2 "github.com/onepanelio/core/pkg/util/uid"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210329194731() {
if _, ok := initializedMigrations[20210329194731]; !ok {
goose.AddMigration(Up20210329194731, Down20210329194731)
initializedMigrations[20210329194731] = true
}
}
// Up20210329194731 archives the hyperparameter-tuning workflow template if it has no executions or cron workflows
func Up20210329194731(tx *sql.Tx) error {
// This code is executed when the migration is applied.
client, err := getClient()
if err != nil {
return err
}
defer client.DB.Close()
namespaces, err := client.ListOnepanelEnabledNamespaces()
if err != nil {
return err
}
uid, err := uid2.GenerateUID(hyperparameterTuningTemplateName, 30)
if err != nil {
return err
}
for _, namespace := range namespaces {
workflowTemplate, err := client.GetWorkflowTemplateRaw(namespace.Name, uid)
if err != nil {
return err
}
if workflowTemplate == nil {
continue
}
workflowExecutionsCount, err := client.CountWorkflowExecutionsForWorkflowTemplate(workflowTemplate.ID)
if err != nil {
return err
}
cronWorkflowsCount, err := client.CountCronWorkflows(namespace.Name, uid)
if err != nil {
return err
}
// Archive the template if we have no resources associated with it
if workflowExecutionsCount == 0 && cronWorkflowsCount == 0 {
if _, err := client.ArchiveWorkflowTemplate(namespace.Name, uid); err != nil {
return err
}
}
}
return nil
}
// Down20210329194731 restores the hyperparameter-tuning workflow template if it was archived
func Down20210329194731(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
client, err := getClient()
if err != nil {
return err
}
defer client.DB.Close()
namespaces, err := client.ListOnepanelEnabledNamespaces()
if err != nil {
return err
}
uid, err := uid2.GenerateUID("hyperparameter-tuning", 30)
if err != nil {
return err
}
for _, namespace := range namespaces {
workflowTemplate, err := client.GetWorkflowTemplateRaw(namespace.Name, uid)
if err != nil {
return err
}
if workflowTemplate == nil {
err := createWorkflowTemplate(
filepath.Join("workflows", "hyperparameter-tuning", "20210118175809.yaml"),
hyperparameterTuningTemplateName,
map[string]string{
"framework": "tensorflow",
"tuner": "TPE",
"created-by": "system",
},
)
if err != nil {
return err
}
}
}
return nil
}

View File

@@ -0,0 +1,31 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
var deepLearningDesktopTemplateName = "Deep Learning Desktop"
func initialize20210414165510() {
if _, ok := initializedMigrations[20210414165510]; !ok {
goose.AddMigration(Up20210414165510, Down20210414165510)
initializedMigrations[20210414165510] = true
}
}
// Up20210414165510 creates the Deep Learning Desktop Workspace Template
func Up20210414165510(tx *sql.Tx) error {
// This code is executed when the migration is applied.
return createWorkspaceTemplate(
filepath.Join("workspaces", "vnc", "20210414165510.yaml"),
deepLearningDesktopTemplateName,
"Deep learning desktop with VNC")
}
// Down20210414165510 removes the Deep Learning Desktop Workspace Template
func Down20210414165510(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
return archiveWorkspaceTemplate(deepLearningDesktopTemplateName)
}

View File

@@ -0,0 +1,66 @@
package migration
import (
"database/sql"
"github.com/pressly/goose"
"path/filepath"
)
func initialize20210719190719() {
if _, ok := initializedMigrations[20210719190719]; !ok {
goose.AddMigration(Up20210719190719, Down20210719190719)
initializedMigrations[20210719190719] = true
}
}
// Up20210719190719 updates the workspace templates to use filesyncer v1.0.0
func Up20210719190719(tx *sql.Tx) error {
// This code is executed when the migration is applied.
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210719190719.yaml"),
cvatTemplateName); err != nil {
return err
}
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "jupyterlab", "20210719190719.yaml"),
jupyterLabTemplateName); err != nil {
return err
}
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vnc", "20210719190719.yaml"),
deepLearningDesktopTemplateName); err != nil {
return err
}
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vscode", "20210719190719.yaml"),
vscodeWorkspaceTemplateName)
}
// Down20210719190719 rolls back the filesyncer update
func Down20210719190719(tx *sql.Tx) error {
// This code is executed when the migration is rolled back.
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "cvat", "20210323175655.yaml"),
cvatTemplateName); err != nil {
return err
}
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "jupyterlab", "20210323175655.yaml"),
jupyterLabTemplateName); err != nil {
return err
}
if err := updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vnc", "20210414165510.yaml"),
deepLearningDesktopTemplateName); err != nil {
return err
}
return updateWorkspaceTemplateManifest(
filepath.Join("workspaces", "vscode", "20210323175655.yaml"),
vscodeWorkspaceTemplateName)
}

View File

@@ -85,6 +85,17 @@ func Initialize() {
initialize20201223202929()
initialize20201225172926()
initialize20201229205644()
initialize20210107094725()
initialize20210118175809()
initialize20210129134326()
initialize20210129142057()
initialize20210129152427()
initialize20210224180017()
initialize20210323175655()
initialize20210329171739()
initialize20210329194731()
initialize20210414165510()
initialize20210719190719()
if err := client.DB.Close(); err != nil {
log.Printf("[error] closing db %v", err)
@@ -191,6 +202,7 @@ func ReplaceRuntimeVariablesInManifest(client *v1.Client, namespace string, mani
replaceMap := map[string]string{
"{{.ArtifactRepositoryType}}": artifactRepositoryType,
"{{.NodePoolLabel}}": *sysConfig.NodePoolLabel(),
"{{.DefaultNodePoolOption}}": nodePoolOptions[0].Value,
}
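
The workflow templates further below rely on these placeholders: {{.NodePoolLabel}} becomes the cluster's node pool label key, and {{.DefaultNodePoolOption}} becomes the first available node pool value. The substitution code itself is not shown in this hunk, but conceptually it is a plain string replacement over the manifest, along the lines of this sketch (values are illustrative, not the server's actual configuration):

package main

import (
    "fmt"
    "strings"
)

func main() {
    // A fragment of a template manifest. Argo expressions such as
    // {{workflow.parameters.sys-node-pool}} are left untouched; only the
    // Onepanel placeholders are replaced.
    manifest := "nodeSelector:\n  {{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'\n"

    // Illustrative values; in the server these come from system config and node pool options.
    replaceMap := map[string]string{
        "{{.ArtifactRepositoryType}}": "s3",
        "{{.NodePoolLabel}}":          "node.kubernetes.io/instance-type",
        "{{.DefaultNodePoolOption}}":  "default-pool",
    }

    for placeholder, value := range replaceMap {
        manifest = strings.ReplaceAll(manifest, placeholder, value)
    }
    fmt.Print(manifest)
}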

View File

@@ -1,10 +1,92 @@
package migration
import (
"fmt"
v1 "github.com/onepanelio/core/pkg"
uid2 "github.com/onepanelio/core/pkg/util/uid"
)
// createWorkspaceTemplate creates the workspace template named {{templateName}} with the contents
// given by {{filename}}.
// It does so for all Onepanel-enabled namespaces.
func createWorkspaceTemplate(filename, templateName, description string) error {
client, err := getClient()
if err != nil {
return err
}
defer client.DB.Close()
namespaces, err := client.ListOnepanelEnabledNamespaces()
if err != nil {
return err
}
newManifest, err := readDataFile(filename)
if err != nil {
return err
}
uid, err := uid2.GenerateUID(templateName, 30)
if err != nil {
return err
}
for _, namespace := range namespaces {
workspaceTemplate := &v1.WorkspaceTemplate{
UID: uid,
Name: templateName,
Manifest: newManifest,
Description: description,
}
err = ReplaceArtifactRepositoryType(client, namespace, nil, workspaceTemplate)
if err != nil {
return err
}
if _, err := client.CreateWorkspaceTemplate(namespace.Name, workspaceTemplate); err != nil {
return err
}
}
return nil
}
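// archiveWorkspaceTemplate archives the workspace template named {{templateName}} in every
// Onepanel-enabled namespace. It refuses to archive the template if any namespace still has
// running workspaces that use it.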
func archiveWorkspaceTemplate(templateName string) error {
client, err := getClient()
if err != nil {
return err
}
defer client.DB.Close()
namespaces, err := client.ListOnepanelEnabledNamespaces()
if err != nil {
return err
}
uid, err := uid2.GenerateUID(templateName, 30)
if err != nil {
return err
}
for _, namespace := range namespaces {
hasRunning, err := client.WorkspaceTemplateHasRunningWorkspaces(namespace.Name, uid)
if err != nil {
return fmt.Errorf("Unable to get check running workspaces")
}
if hasRunning {
return fmt.Errorf("unable to archive workspace template. There are running workspaces that use it")
}
_, err = client.ArchiveWorkspaceTemplate(namespace.Name, uid)
if err != nil {
return err
}
}
return nil
}
// updateWorkspaceTemplateManifest updates the workspace template named {{templateName}} with the contents
// given by {{filename}}.
// It does so for all Onepanel-enabled namespaces.
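
Taken together, these helpers (createWorkspaceTemplate, archiveWorkspaceTemplate, and updateWorkspaceTemplateManifest) give new migrations a small, repeatable shape: install or update a template in every Onepanel-enabled namespace on the way up, and archive or restore it on the way down. A hypothetical migration using them might look like the sketch below; the timestamp, manifest path, and template name are placeholders and do not correspond to real files in the repository.

package migration

import (
    "database/sql"
    "path/filepath"

    "github.com/pressly/goose"
)

// exampleTemplateName is a placeholder; a real migration would use an existing template name.
const exampleTemplateName = "Example Workspace"

func initialize20991231000000() {
    if _, ok := initializedMigrations[20991231000000]; !ok {
        goose.AddMigration(Up20991231000000, Down20991231000000)
        initializedMigrations[20991231000000] = true
    }
}

// Up20991231000000 creates the example workspace template in every Onepanel-enabled namespace.
func Up20991231000000(tx *sql.Tx) error {
    return createWorkspaceTemplate(
        filepath.Join("workspaces", "example", "20991231000000.yaml"),
        exampleTemplateName,
        "An illustrative workspace template")
}

// Down20991231000000 archives the example workspace template again.
func Down20991231000000(tx *sql.Tx) error {
    return archiveWorkspaceTemplate(exampleTemplateName)
}

As with the real migrations above, the new initialize function would also need to be registered in Initialize(), which the hunk further below shows for the migrations added in this change.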

View File

@@ -0,0 +1,7 @@
-- +goose Up
-- SQL in this section is executed when the migration is applied.
ALTER TABLE workflow_template_versions ADD COLUMN description TEXT DEFAULT '';
-- +goose Down
-- SQL in this section is executed when the migration is rolled back.
ALTER TABLE workflow_template_versions DROP COLUMN description;

View File

@@ -0,0 +1,7 @@
-- +goose Up
-- SQL in this section is executed when the migration is applied.
ALTER TABLE workspaces ADD COLUMN capture_node boolean;
UPDATE workspaces SET capture_node = false;
-- +goose Down
ALTER TABLE workspaces DROP COLUMN capture_node;

View File

@@ -100,8 +100,6 @@ templates:
- name: output
path: /mnt/output
optional: true
archive:
none: {}
container:
image: onepanel/dl:0.17.0
args:

View File

@@ -0,0 +1,194 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/nni-hyperparameter-tuning/mnist/
# Workflow Template example for hyperparameter tuning
# Documentation: https://docs.onepanel.ai/docs/reference/workflows/hyperparameter-tuning
#
# Only change the fields marked with [CHANGE]
entrypoint: main
arguments:
parameters:
# [CHANGE] Path to your training/model architecture code repository
# Change this value and revision value to your code repository and branch respectively
- name: source
value: https://github.com/onepanelio/templates
# [CHANGE] Revision is the branch or tag that you want to use
# You can change this to any tag or branch name in your repository
- name: revision
value: v0.18.0
# [CHANGE] Default configuration for the NNI tuner
# See https://docs.onepanel.ai/docs/reference/workflows/hyperparameter-tuning#understanding-the-configurations
- name: config
displayName: Configuration
required: true
hint: NNI configuration
type: textarea.textarea
value: |-
authorName: Onepanel, Inc.
experimentName: MNIST TF v2.x
trialConcurrency: 1
maxExecDuration: 1h
maxTrialNum: 10
trainingServicePlatform: local
searchSpacePath: search_space.json
useAnnotation: false
tuner:
# gpuIndices: '0' # uncomment and update to the GPU indices to assign this tuner
builtinTunerName: TPE # choices: TPE, Random, Anneal, Evolution, BatchTuner, MetisTuner, GPTuner
classArgs:
optimize_mode: maximize # choices: maximize, minimize
trial:
command: python main.py --output /mnt/output
codeDir: .
# gpuNum: 1 # uncomment and update to number of GPUs
# [CHANGE] Search space configuration
# Change according to your hyperparameters and ranges
- name: search-space
displayName: Search space configuration
required: true
type: textarea.textarea
value: |-
{
"dropout_rate": { "_type": "uniform", "_value": [0.5, 0.9] },
"conv_size": { "_type": "choice", "_value": [2, 3, 5, 7] },
"hidden_size": { "_type": "choice", "_value": [124, 512, 1024] },
"batch_size": { "_type": "choice", "_value": [16, 32] },
"learning_rate": { "_type": "choice", "_value": [0.0001, 0.001, 0.01, 0.1] },
"epochs": { "_type": "choice", "_value": [10] }
}
# Node pool dropdown (Node group in EKS)
# You can add more of these if you have additional tasks that can run on different node pools
- displayName: Node pool
hint: Name of node pool or group to run this workflow task
type: select.nodepool
name: sys-node-pool
value: {{.DefaultNodePoolOption}}
required: true
templates:
- name: main
dag:
tasks:
- name: hyperparameter-tuning
template: hyperparameter-tuning
- name: metrics-writer
template: metrics-writer
dependencies: [hyperparameter-tuning]
arguments:
# Use sys-metrics artifact output from hyperparameter-tuning Task
# This writes the best metrics to the Workflow
artifacts:
- name: sys-metrics
from: "{{tasks.hyperparameter-tuning.outputs.artifacts.sys-metrics}}"
- name: hyperparameter-tuning
inputs:
artifacts:
- name: src
# Clone the above repository into '/mnt/data/src'
# See https://docs.onepanel.ai/docs/reference/workflows/artifacts#git for private repositories
git:
repo: '{{workflow.parameters.source}}'
revision: '{{workflow.parameters.revision}}'
path: /mnt/data/src
# [CHANGE] Path where config.yaml will be generated or already exists
# Update the path below so that config.yaml is written to the same directory as your main.py file
# Note that your source code is cloned to /mnt/data/src
- name: config
path: /mnt/data/src/workflows/hyperparameter-tuning/mnist/config.yaml
raw:
data: '{{workflow.parameters.config}}'
# [CHANGE] Path where search_space.json will be generated or already exists
# Update the path below so that search_space.json is written to the same directory as your main.py file
# Note that your source code is cloned to /mnt/data/src
- name: search-space
path: /mnt/data/src/workflows/hyperparameter-tuning/mnist/search_space.json
raw:
data: '{{workflow.parameters.search-space}}'
outputs:
artifacts:
- name: output
path: /mnt/output
optional: true
container:
image: onepanel/dl:0.17.0
command:
- sh
- -c
args:
# [CHANGE] Update the config path below to point to config.yaml path as described above
# Note that you can `pip install` additional tools here if necessary
- |
python -u /opt/onepanel/nni/start.py \
--config /mnt/data/src/workflows/hyperparameter-tuning/mnist/config.yaml
workingDir: /mnt
volumeMounts:
- name: hyperparamtuning-data
mountPath: /mnt/data
- name: hyperparamtuning-output
mountPath: /mnt/output
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
sidecars:
- name: nni-web-ui
image: onepanel/nni-web-ui:0.17.0
env:
- name: ONEPANEL_INTERACTIVE_SIDECAR
value: 'true'
ports:
- containerPort: 9000
name: nni
- name: tensorboard
image: onepanel/dl:0.17.0
command:
- sh
- '-c'
env:
- name: ONEPANEL_INTERACTIVE_SIDECAR
value: 'true'
args:
# Read logs from /mnt/output/tensorboard - /mnt/output is auto-mounted from volumeMounts
- tensorboard --logdir /mnt/output/tensorboard
ports:
- containerPort: 6006
name: tensorboard
# Use the metrics-writer tasks to write best metrics to Workflow
- name: metrics-writer
inputs:
artifacts:
- name: sys-metrics
path: /tmp/sys-metrics.json
- git:
repo: https://github.com/onepanelio/templates.git
revision: v0.18.0
name: src
path: /mnt/src
container:
image: onepanel/python-sdk:v0.16.0
command:
- python
- -u
args:
- /mnt/src/tasks/metrics-writer/main.py
- --from_file=/tmp/sys-metrics.json
# [CHANGE] Volumes that will mount to /mnt/data (annotated data) and /mnt/output (models, checkpoints, logs)
# Update this depending on your annotation data, model, checkpoint, logs, etc. sizes
# Example values: 250Mi, 500Gi, 1Ti
volumeClaimTemplates:
- metadata:
name: hyperparamtuning-data
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 20Gi
- metadata:
name: hyperparamtuning-output
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 20Gi

View File

@@ -0,0 +1,208 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/maskrcnn-training/
arguments:
parameters:
- name: cvat-annotation-path
value: 'artifacts/{{workflow.namespace}}/annotations/'
hint: Path to annotated data (COCO format) in default object storage. In CVAT, this parameter will be pre-populated.
displayName: Dataset path
visibility: internal
- name: val-split
value: 10
displayName: Validation split size
type: input.number
visibility: public
hint: Enter the validation set size as a percentage of the full dataset (0 - 100).
- name: num-augmentation-cycles
value: 1
displayName: Number of augmentation cycles
type: input.number
visibility: public
hint: Number of augmentation cycles; zero means no data augmentation.
- name: preprocessing-parameters
value: |-
RandomBrightnessContrast:
p: 0.2
GaussianBlur:
p: 0.3
GaussNoise:
p: 0.4
HorizontalFlip:
p: 0.5
VerticalFlip:
p: 0.3
displayName: Preprocessing parameters
visibility: public
type: textarea.textarea
hint: 'See <a href="https://albumentations.ai/docs/api_reference/augmentations/transforms/" target="_blank">documentation</a> for more information on parameters.'
- name: cvat-num-classes
displayName: Number of classes
hint: Number of classes. In CVAT, this parameter will be pre-populated.
value: '10'
visibility: internal
- name: hyperparameters
displayName: Hyperparameters
visibility: public
type: textarea.textarea
value: |-
stage_1_epochs: 1 # Epochs for network heads
stage_2_epochs: 1 # Epochs for finetune layers
stage_3_epochs: 1 # Epochs for all layers
num_steps: 1000 # Num steps per epoch
hint: 'See <a href="https://docs.onepanel.ai/docs/reference/workflows/training#maskrcnn-hyperparameters" target="_blank">documentation</a> for more information on parameters.'
- name: dump-format
value: cvat_coco
displayName: CVAT dump format
visibility: private
- name: cvat-finetune-checkpoint
value: ''
hint: Path to the last fine-tune checkpoint for this model in default object storage. Leave empty if this is the first time you're training this model.
displayName: Checkpoint path
visibility: public
- displayName: Node pool
hint: Name of node pool or group to run this workflow task
type: select.nodepool
visibility: public
name: sys-node-pool
value: {{.DefaultNodePoolOption}}
required: true
entrypoint: main
templates:
- dag:
tasks:
- name: preprocessing
template: preprocessing
- name: train-model
template: tensorflow
dependencies: [preprocessing]
arguments:
artifacts:
- name: data
from: "{{tasks.preprocessing.outputs.artifacts.processed-data}}"
name: main
- container:
args:
- |
pip install pycocotools scikit-image==0.16.2 && \
cd /mnt/src/train/workflows/maskrcnn-training && \
python -u main.py train --dataset=/mnt/data/datasets/train_set/ \
--model=workflow_maskrcnn \
--extras="{{workflow.parameters.hyperparameters}}" \
--ref_model_path="{{workflow.parameters.cvat-finetune-checkpoint}}" \
--num_classes="{{workflow.parameters.cvat-num-classes}}" \
--val_dataset=/mnt/data/datasets/eval_set/ \
--use_validation=True
command:
- sh
- -c
image: onepanel/dl:v0.20.0
volumeMounts:
- mountPath: /mnt/data
name: processed-data
- mountPath: /mnt/output
name: output
workingDir: /mnt/src
sidecars:
- name: tensorboard
image: onepanel/dl:v0.20.0
command: [ sh, -c ]
env:
- name: ONEPANEL_INTERACTIVE_SIDECAR
value: 'true'
args: [ "tensorboard --logdir /mnt/output/tensorboard" ]
ports:
- containerPort: 6006
name: tensorboard
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
inputs:
artifacts:
- name: data
path: /mnt/data/datasets/
- name: models
path: /mnt/data/models/
optional: true
s3:
key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
- git:
repo: https://github.com/onepanelio/templates.git
revision: v0.18.0
name: src
path: /mnt/src/train
name: tensorflow
outputs:
artifacts:
- name: model
optional: true
path: /mnt/output
- container:
args:
- |
pip install pycocotools && \
cd /mnt/src/preprocessing/workflows/albumentations-preprocessing && \
python -u main.py \
--data_aug_params="{{workflow.parameters.preprocessing-parameters}}" \
--val_split={{workflow.parameters.val-split}} \
--aug_steps={{workflow.parameters.num-augmentation-cycles}}
command:
- sh
- -c
image: onepanel/dl:v0.20.0
volumeMounts:
- mountPath: /mnt/data
name: data
- mountPath: /mnt/output
name: processed-data
workingDir: /mnt/src
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
inputs:
artifacts:
- name: data
path: /mnt/data/datasets/
s3:
key: '{{workflow.parameters.cvat-annotation-path}}'
- git:
repo: https://github.com/onepanelio/templates.git
revision: v0.18.0
name: src
path: /mnt/src/preprocessing
name: preprocessing
outputs:
artifacts:
- name: processed-data
optional: true
path: /mnt/output
volumeClaimTemplates:
- metadata:
name: data
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 200Gi
- metadata:
name: processed-data
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 200Gi
- metadata:
name: output
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 200Gi

View File

@@ -24,8 +24,6 @@ templates:
- name: output
path: /mnt/output
optional: true
archive:
none: {}
script:
image: onepanel/dl:0.17.0
command:

View File

@@ -0,0 +1,207 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/pytorch-mnist-training/
arguments:
parameters:
- name: epochs
value: '10'
- displayName: Node pool
hint: Name of node pool or group to run this workflow task
type: select.nodepool
name: sys-node-pool
value: {{.DefaultNodePoolOption}}
visibility: public
required: true
entrypoint: main
templates:
- name: main
dag:
tasks:
- name: train-model
template: train-model
- name: train-model
# Indicates that we want to push files in /mnt/output to object storage
outputs:
artifacts:
- name: output
path: /mnt/output
optional: true
script:
image: onepanel/dl:0.17.0
command:
- python
- '-u'
source: |
import json
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
from torch.utils.tensorboard import SummaryWriter
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 32, 3, 1)
self.conv2 = nn.Conv2d(32, 64, 3, 1)
self.dropout1 = nn.Dropout(0.25)
self.dropout2 = nn.Dropout(0.5)
self.fc1 = nn.Linear(9216, 128)
self.fc2 = nn.Linear(128, 10)
def forward(self, x):
x = self.conv1(x)
x = F.relu(x)
x = self.conv2(x)
x = F.relu(x)
x = F.max_pool2d(x, 2)
x = self.dropout1(x)
x = torch.flatten(x, 1)
x = self.fc1(x)
x = F.relu(x)
x = self.dropout2(x)
x = self.fc2(x)
output = F.log_softmax(x, dim=1)
return output
def train(model, device, train_loader, optimizer, epoch, batch_size, writer):
model.train()
for batch_idx, (data, target) in enumerate(train_loader):
data, target = data.to(device), target.to(device)
optimizer.zero_grad()
output = model(data)
loss = F.nll_loss(output, target)
loss.backward()
optimizer.step()
if batch_idx % 10 == 0:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
writer.add_scalar('training loss', loss.item(), epoch)
def test(model, device, test_loader, epoch, writer):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
loss = test_loss / len(test_loader.dataset)
accuracy = correct / len(test_loader.dataset)
print('\nTest set: Average loss: {}, Accuracy: {}\n'.format(
loss, accuracy))
# Store metrics for this task
metrics = [
{'name': 'accuracy', 'value': accuracy},
{'name': 'loss', 'value': loss}
]
with open('/tmp/sys-metrics.json', 'w') as f:
json.dump(metrics, f)
def main(params):
writer = SummaryWriter(log_dir='/mnt/output/tensorboard')
use_cuda = torch.cuda.is_available()
torch.manual_seed(params['seed'])
device = torch.device('cuda' if use_cuda else 'cpu')
train_kwargs = {'batch_size': params['batch_size']}
test_kwargs = {'batch_size': params['test_batch_size']}
if use_cuda:
cuda_kwargs = {'num_workers': 1,
'pin_memory': True,
'shuffle': True}
train_kwargs.update(cuda_kwargs)
test_kwargs.update(cuda_kwargs)
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])
dataset1 = datasets.MNIST('/mnt/data', train=True, download=True,
transform=transform)
dataset2 = datasets.MNIST('/mnt/data', train=False,
transform=transform)
train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)
model = Net().to(device)
optimizer = optim.Adadelta(model.parameters(), lr=params['lr'])
scheduler = StepLR(optimizer, step_size=1, gamma=params['gamma'])
for epoch in range(1, params['epochs'] + 1):
train(model, device, train_loader, optimizer, epoch, params['batch_size'], writer)
test(model, device, test_loader, epoch, writer)
scheduler.step()
# Save model
torch.save(model.state_dict(), '/mnt/output/model.pt')
writer.close()
if __name__ == '__main__':
params = {
'seed': 1,
'batch_size': 64,
'test_batch_size': 1000,
'epochs': {{workflow.parameters.epochs}},
'lr': 0.001,
'gamma': 0.7,
}
main(params)
volumeMounts:
# TensorBoard sidecar will automatically mount these volumes
# The `data` volume is mounted for saving datasets
# The `output` volume is mounted to save model output and share TensorBoard logs
- name: data
mountPath: /mnt/data
- name: output
mountPath: /mnt/output
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
sidecars:
- name: tensorboard
image: onepanel/dl:0.17.0
command:
- sh
- '-c'
env:
- name: ONEPANEL_INTERACTIVE_SIDECAR
value: 'true'
args:
# Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
- tensorboard --logdir /mnt/output/tensorboard
ports:
- containerPort: 6006
name: tensorboard
volumeClaimTemplates:
# Provision volumes for storing data and output
- metadata:
name: data
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 2Gi
- metadata:
name: output
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 2Gi

View File

@@ -0,0 +1,207 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/pytorch-mnist-training/
arguments:
parameters:
- name: epochs
value: '10'
- displayName: Node pool
hint: Name of node pool or group to run this workflow task
type: select.nodepool
name: sys-node-pool
value: {{.DefaultNodePoolOption}}
visibility: public
required: true
entrypoint: main
templates:
- name: main
dag:
tasks:
- name: train-model
template: train-model
- name: train-model
# Indicates that we want to push files in /mnt/output to object storage
outputs:
artifacts:
- name: output
path: /mnt/output
optional: true
script:
image: onepanel/dl:v0.20.0
command:
- python
- '-u'
source: |
import json
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
from torch.utils.tensorboard import SummaryWriter
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 32, 3, 1)
self.conv2 = nn.Conv2d(32, 64, 3, 1)
self.dropout1 = nn.Dropout(0.25)
self.dropout2 = nn.Dropout(0.5)
self.fc1 = nn.Linear(9216, 128)
self.fc2 = nn.Linear(128, 10)
def forward(self, x):
x = self.conv1(x)
x = F.relu(x)
x = self.conv2(x)
x = F.relu(x)
x = F.max_pool2d(x, 2)
x = self.dropout1(x)
x = torch.flatten(x, 1)
x = self.fc1(x)
x = F.relu(x)
x = self.dropout2(x)
x = self.fc2(x)
output = F.log_softmax(x, dim=1)
return output
def train(model, device, train_loader, optimizer, epoch, batch_size, writer):
model.train()
for batch_idx, (data, target) in enumerate(train_loader):
data, target = data.to(device), target.to(device)
optimizer.zero_grad()
output = model(data)
loss = F.nll_loss(output, target)
loss.backward()
optimizer.step()
if batch_idx % 10 == 0:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
writer.add_scalar('training loss', loss.item(), epoch)
def test(model, device, test_loader, epoch, writer):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
loss = test_loss / len(test_loader.dataset)
accuracy = correct / len(test_loader.dataset)
print('\nTest set: Average loss: {}, Accuracy: {}\n'.format(
loss, accuracy))
# Store metrics for this task
metrics = [
{'name': 'accuracy', 'value': accuracy},
{'name': 'loss', 'value': loss}
]
with open('/mnt/tmp/sys-metrics.json', 'w') as f:
json.dump(metrics, f)
def main(params):
writer = SummaryWriter(log_dir='/mnt/output/tensorboard')
use_cuda = torch.cuda.is_available()
torch.manual_seed(params['seed'])
device = torch.device('cuda' if use_cuda else 'cpu')
train_kwargs = {'batch_size': params['batch_size']}
test_kwargs = {'batch_size': params['test_batch_size']}
if use_cuda:
cuda_kwargs = {'num_workers': 1,
'pin_memory': True,
'shuffle': True}
train_kwargs.update(cuda_kwargs)
test_kwargs.update(cuda_kwargs)
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307,), (0.3081,))
])
dataset1 = datasets.MNIST('/mnt/data', train=True, download=True,
transform=transform)
dataset2 = datasets.MNIST('/mnt/data', train=False,
transform=transform)
train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)
model = Net().to(device)
optimizer = optim.Adadelta(model.parameters(), lr=params['lr'])
scheduler = StepLR(optimizer, step_size=1, gamma=params['gamma'])
for epoch in range(1, params['epochs'] + 1):
train(model, device, train_loader, optimizer, epoch, params['batch_size'], writer)
test(model, device, test_loader, epoch, writer)
scheduler.step()
# Save model
torch.save(model.state_dict(), '/mnt/output/model.pt')
writer.close()
if __name__ == '__main__':
params = {
'seed': 1,
'batch_size': 64,
'test_batch_size': 1000,
'epochs': {{workflow.parameters.epochs}},
'lr': 0.001,
'gamma': 0.7,
}
main(params)
volumeMounts:
# TensorBoard sidecar will automatically mount these volumes
# The `data` volume is mounted for saving datasets
# The `output` volume is mounted to save model output and share TensorBoard logs
- name: data
mountPath: /mnt/data
- name: output
mountPath: /mnt/output
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
sidecars:
- name: tensorboard
image: onepanel/dl:v0.20.0
command:
- sh
- '-c'
env:
- name: ONEPANEL_INTERACTIVE_SIDECAR
value: 'true'
args:
# Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
- tensorboard --logdir /mnt/output/tensorboard
ports:
- containerPort: 6006
name: tensorboard
volumeClaimTemplates:
# Provision volumes for storing data and output
- metadata:
name: data
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 2Gi
- metadata:
name: output
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 2Gi

View File

@@ -24,8 +24,6 @@ templates:
- name: output
path: /mnt/output
optional: true
archive:
none: {}
script:
image: onepanel/dl:0.17.0
command:

View File

@@ -0,0 +1,118 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/
arguments:
parameters:
- name: epochs
value: '10'
- displayName: Node pool
hint: Name of node pool or group to run this workflow task
type: select.nodepool
name: sys-node-pool
value: {{.DefaultNodePoolOption}}
visibility: public
required: true
entrypoint: main
templates:
- name: main
dag:
tasks:
- name: train-model
template: train-model
- name: train-model
# Indicates that we want to push files in /mnt/output to object storage
outputs:
artifacts:
- name: output
path: /mnt/output
optional: true
script:
image: onepanel/dl:0.17.0
command:
- python
- '-u'
source: |
import json
import tensorflow as tf
mnist = tf.keras.datasets.mnist
(x_train, y_train),(x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
x_train = x_train[..., tf.newaxis]
x_test = x_test[..., tf.newaxis]
model = tf.keras.Sequential([
tf.keras.layers.Conv2D(filters=32, kernel_size=5, activation='relu'),
tf.keras.layers.MaxPool2D(pool_size=2),
tf.keras.layers.Conv2D(filters=64, kernel_size=5, activation='relu'),
tf.keras.layers.MaxPool2D(pool_size=2),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(units=124, activation='relu'),
tf.keras.layers.Dropout(rate=0.75),
tf.keras.layers.Dense(units=10, activation='softmax')
])
model.compile(optimizer=tf.keras.optimizers.Adam(lr=0.001),
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
# Write TensorBoard logs to /mnt/output
log_dir = '/mnt/output/tensorboard/'
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
model.fit(x=x_train,
y=y_train,
epochs={{workflow.parameters.epochs}},
validation_data=(x_test, y_test),
callbacks=[tensorboard_callback])
# Store metrics for this task
loss, accuracy = model.evaluate(x_test, y_test)
metrics = [
{'name': 'accuracy', 'value': accuracy},
{'name': 'loss', 'value': loss}
]
with open('/tmp/sys-metrics.json', 'w') as f:
json.dump(metrics, f)
# Save model
model.save('/mnt/output/model.h5')
volumeMounts:
# TensorBoard sidecar will automatically mount these volumes
# The `data` volume is mounted to support Keras datasets
# The `output` volume is mounted to save model output and share TensorBoard logs
- name: data
mountPath: /home/root/.keras/datasets
- name: output
mountPath: /mnt/output
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
sidecars:
- name: tensorboard
image: onepanel/dl:0.17.0
command:
- sh
- '-c'
env:
- name: ONEPANEL_INTERACTIVE_SIDECAR
value: 'true'
args:
# Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
- tensorboard --logdir /mnt/output/tensorboard
ports:
- containerPort: 6006
name: tensorboard
volumeClaimTemplates:
# Provision volumes for storing data and output
- metadata:
name: data
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 2Gi
- metadata:
name: output
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 2Gi

View File

@@ -0,0 +1,118 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tensorflow-mnist-training/
arguments:
parameters:
- name: epochs
value: '10'
- displayName: Node pool
hint: Name of node pool or group to run this workflow task
type: select.nodepool
name: sys-node-pool
value: {{.DefaultNodePoolOption}}
visibility: public
required: true
entrypoint: main
templates:
- name: main
dag:
tasks:
- name: train-model
template: train-model
- name: train-model
# Indicates that we want to push files in /mnt/output to object storage
outputs:
artifacts:
- name: output
path: /mnt/output
optional: true
script:
image: onepanel/dl:v0.20.0
command:
- python
- '-u'
source: |
import json
import tensorflow as tf
mnist = tf.keras.datasets.mnist
(x_train, y_train),(x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
x_train = x_train[..., tf.newaxis]
x_test = x_test[..., tf.newaxis]
model = tf.keras.Sequential([
tf.keras.layers.Conv2D(filters=32, kernel_size=5, activation='relu'),
tf.keras.layers.MaxPool2D(pool_size=2),
tf.keras.layers.Conv2D(filters=64, kernel_size=5, activation='relu'),
tf.keras.layers.MaxPool2D(pool_size=2),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(units=124, activation='relu'),
tf.keras.layers.Dropout(rate=0.75),
tf.keras.layers.Dense(units=10, activation='softmax')
])
model.compile(optimizer=tf.keras.optimizers.Adam(lr=0.001),
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
# Write TensorBoard logs to /mnt/output
log_dir = '/mnt/output/tensorboard/'
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
model.fit(x=x_train,
y=y_train,
epochs={{workflow.parameters.epochs}},
validation_data=(x_test, y_test),
callbacks=[tensorboard_callback])
# Store metrics for this task
loss, accuracy = model.evaluate(x_test, y_test)
metrics = [
{'name': 'accuracy', 'value': accuracy},
{'name': 'loss', 'value': loss}
]
with open('/mnt/tmp/sys-metrics.json', 'w') as f:
json.dump(metrics, f)
# Save model
model.save('/mnt/output/model.h5')
volumeMounts:
# TensorBoard sidecar will automatically mount these volumes
# The `data` volume is mounted to support Keras datasets
# The `output` volume is mounted to save model output and share TensorBoard logs
- name: data
mountPath: /home/root/.keras/datasets
- name: output
mountPath: /mnt/output
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
sidecars:
- name: tensorboard
image: onepanel/dl:v0.20.0
command:
- sh
- '-c'
env:
- name: ONEPANEL_INTERACTIVE_SIDECAR
value: 'true'
args:
# Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
- tensorboard --logdir /mnt/output/tensorboard
ports:
- containerPort: 6006
name: tensorboard
volumeClaimTemplates:
# Provision volumes for storing data and output
- metadata:
name: data
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 2Gi
- metadata:
name: output
spec:
accessModes: [ "ReadWriteOnce" ]
resources:
requests:
storage: 2Gi

View File

@@ -101,7 +101,7 @@ templates:
cd /mnt/src/tf/research/ && \
/mnt/src/protoc/bin/protoc object_detection/protos/*.proto --python_out=. && \
cd /mnt/src/train/workflows/tf-object-detection-training && \
python train.py \
python main.py \
--extras="{{workflow.parameters.hyperparameters}}" \
--model="{{workflow.parameters.cvat-model}}" \
--num_classes="{{workflow.parameters.cvat-num-classes}}" \

View File

@@ -0,0 +1,260 @@
# source: https://github.com/onepanelio/templates/blob/master/workflows/tf-object-detection-training/
arguments:
parameters:
- name: cvat-annotation-path
value: 'artifacts/{{workflow.namespace}}/annotations/'
hint: Path to annotated data (COCO format) in default object storage. In CVAT, this parameter will be pre-populated.
displayName: Dataset path
visibility: internal
- name: val-split
value: 10
displayName: Validation split size
type: input.number
visibility: public
hint: Enter validation set size as a percentage of the full dataset (0 - 100).
- name: num-augmentation-cycles
value: 1
displayName: Number of augmentation cycles
type: input.number
visibility: public
hint: Number of augmentation cycles; zero means no data augmentation
- name: preprocessing-parameters
value: |-
RandomBrightnessContrast:
p: 0.2
GaussianBlur:
p: 0.3
GaussNoise:
p: 0.4
HorizontalFlip:
p: 0.5
VerticalFlip:
p: 0.3
displayName: Preprocessing parameters
visibility: public
type: textarea.textarea
hint: 'See <a href="https://albumentations.ai/docs/api_reference/augmentations/transforms/" target="_blank">documentation</a> for more information on parameters.'
- name: cvat-model
value: frcnn-res50-coco
displayName: Model
hint: TF Detection API's model to use for training.
type: select.select
visibility: public
options:
- name: 'Faster RCNN-ResNet 101-COCO'
value: frcnn-res101-coco
- name: 'Faster RCNN-ResNet 101-Low Proposal-COCO'
value: frcnn-res101-low
- name: 'Faster RCNN-ResNet 50-COCO'
value: frcnn-res50-coco
- name: 'Faster RCNN-NAS-COCO'
value: frcnn-nas-coco
- name: 'SSD MobileNet V1-COCO'
value: ssd-mobilenet-v1-coco2
- name: 'SSD MobileNet V2-COCO'
value: ssd-mobilenet-v2-coco
- name: 'SSDLite MobileNet-COCO'
value: ssdlite-mobilenet-coco
- name: cvat-num-classes
value: '10'
hint: Number of classes. In CVAT, this parameter will be pre-populated.
displayName: Number of classes
visibility: internal
- name: hyperparameters
value: |-
num_steps: 10000
displayName: Hyperparameters
visibility: public
type: textarea.textarea
hint: 'See <a href="https://docs.onepanel.ai/docs/reference/workflows/training#tfod-hyperparameters" target="_blank">documentation</a> for more information on parameters.'
- name: dump-format
value: cvat_coco
displayName: CVAT dump format
visibility: private
- name: cvat-finetune-checkpoint
value: ''
hint: Path to the last fine-tune checkpoint for this model in default object storage. Leave empty if this is the first time you're training this model.
displayName: Checkpoint path
visibility: public
- name: tf-image
value: tensorflow/tensorflow:1.13.1-py3
type: select.select
displayName: Select TensorFlow image
visibility: public
hint: Select the GPU image if you are running on a GPU node pool
options:
- name: 'TensorFlow 1.13.1 CPU Image'
value: 'tensorflow/tensorflow:1.13.1-py3'
- name: 'TensorFlow 1.13.1 GPU Image'
value: 'tensorflow/tensorflow:1.13.1-gpu-py3'
- displayName: Node pool
hint: Name of node pool or group to run this workflow task
type: select.nodepool
name: sys-node-pool
value: {{.DefaultNodePoolOption}}
visibility: public
required: true
entrypoint: main
templates:
- dag:
tasks:
- name: preprocessing
template: preprocessing
- name: train-model
template: tensorflow
dependencies: [preprocessing]
arguments:
artifacts:
- name: data
from: "{{tasks.preprocessing.outputs.artifacts.processed-data}}"
name: main
- container:
args:
- |
apt-get update && \
apt-get install -y python3-pip git wget unzip libglib2.0-0 libsm6 libxext6 libxrender-dev && \
pip install --upgrade pip && \
pip install pillow lxml Cython contextlib2 matplotlib numpy scipy pycocotools pyyaml test-generator && \
cd /mnt/src/tf/research && \
export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim && \
mkdir -p /mnt/src/protoc && \
wget -P /mnt/src/protoc https://github.com/protocolbuffers/protobuf/releases/download/v3.10.1/protoc-3.10.1-linux-x86_64.zip && \
cd /mnt/src/protoc/ && \
unzip protoc-3.10.1-linux-x86_64.zip && \
cd /mnt/src/tf/research/ && \
/mnt/src/protoc/bin/protoc object_detection/protos/*.proto --python_out=. && \
cd /mnt/src/train/workflows/tf-object-detection-training && \
python main.py \
--extras="{{workflow.parameters.hyperparameters}}" \
--model="{{workflow.parameters.cvat-model}}" \
--num_classes="{{workflow.parameters.cvat-num-classes}}" \
--sys_finetune_checkpoint="{{workflow.parameters.cvat-finetune-checkpoint}}" \
--from_preprocessing=True
command:
- sh
- -c
image: '{{workflow.parameters.tf-image}}'
volumeMounts:
- mountPath: /mnt/data
name: processed-data
- mountPath: /mnt/output
name: output
workingDir: /mnt/src
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
inputs:
artifacts:
- name: data
path: /mnt/data/datasets/
- name: models
path: /mnt/data/models/
optional: true
s3:
key: '{{workflow.parameters.cvat-finetune-checkpoint}}'
- git:
repo: https://github.com/tensorflow/models.git
revision: v1.13.0
name: src
path: /mnt/src/tf
- git:
repo: https://github.com/onepanelio/templates.git
revision: v0.18.0
name: tsrc
path: /mnt/src/train
name: tensorflow
outputs:
artifacts:
- name: model
optional: true
path: /mnt/output
sidecars:
- name: tensorboard
image: '{{workflow.parameters.tf-image}}'
command:
- sh
- '-c'
env:
- name: ONEPANEL_INTERACTIVE_SIDECAR
value: 'true'
args:
# Read logs from /mnt/output - this directory is auto-mounted from volumeMounts
- tensorboard --logdir /mnt/output/checkpoints/
ports:
- containerPort: 6006
name: tensorboard
- container:
args:
- |
pip install --upgrade pip &&\
pip install opencv-python albumentations tqdm pyyaml pycocotools && \
cd /mnt/src/preprocessing/workflows/albumentations-preprocessing && \
python -u main.py \
--data_aug_params="{{workflow.parameters.preprocessing-parameters}}" \
--format="tfrecord" \
--val_split={{workflow.parameters.val-split}} \
--aug_steps={{workflow.parameters.num-augmentation-cycles}}
command:
- sh
- -c
image: '{{workflow.parameters.tf-image}}'
volumeMounts:
- mountPath: /mnt/data
name: data
- mountPath: /mnt/output
name: processed-data
workingDir: /mnt/src
nodeSelector:
{{.NodePoolLabel}}: '{{workflow.parameters.sys-node-pool}}'
inputs:
artifacts:
- name: data
path: /mnt/data/datasets/
s3:
key: '{{workflow.parameters.cvat-annotation-path}}'
- git:
repo: https://github.com/onepanelio/templates.git
revision: v0.18.0
name: src
path: /mnt/src/preprocessing
name: preprocessing
outputs:
artifacts:
- name: processed-data
optional: true
path: /mnt/output
volumeClaimTemplates:
- metadata:
name: data
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 200Gi
- metadata:
name: processed-data
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 200Gi
- metadata:
name: output
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 200Gi


@@ -0,0 +1,163 @@
# Workspace arguments
arguments:
parameters:
- name: sync-directory
displayName: Directory to sync raw input and training output
value: workflow-data
hint: Location (relative to current namespace) to sync raw input, models and checkpoints from default object storage to '/share'.
containers:
- name: cvat-db
image: postgres:10-alpine
env:
- name: POSTGRES_USER
value: root
- name: POSTGRES_DB
value: cvat
- name: POSTGRES_HOST_AUTH_METHOD
value: trust
- name: PGDATA
value: /var/lib/psql/data
ports:
- containerPort: 5432
name: tcp
volumeMounts:
- name: db
mountPath: /var/lib/psql
- name: cvat-redis
image: redis:4.0-alpine
ports:
- containerPort: 6379
name: tcp
- name: cvat
image: onepanel/cvat:0.17.0_cvat.1.0.0
env:
- name: DJANGO_MODWSGI_EXTRA_ARGS
value: ""
- name: ALLOWED_HOSTS
value: '*'
- name: CVAT_REDIS_HOST
value: localhost
- name: CVAT_POSTGRES_HOST
value: localhost
- name: CVAT_SHARE_URL
value: /cvat/data
- name: CVAT_SHARE_DIR
value: /share
- name: CVAT_DATA_DIR
value: /cvat/data
- name: CVAT_MEDIA_DATA_DIR
value: /cvat/data/data
- name: CVAT_KEYS_DIR
value: /cvat/data/keys
- name: CVAT_MODELS_DIR
value: /cvat/data/models
- name: CVAT_LOGS_DIR
value: /cvat/logs
- name: ONEPANEL_SYNC_DIRECTORY
value: '{{workspace.parameters.sync-directory}}'
- name: NVIDIA_VISIBLE_DEVICES
value: all
- name: NVIDIA_DRIVER_CAPABILITIES
value: compute,utility
- name: NVIDIA_REQUIRE_CUDA
value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: http
volumeMounts:
- name: cvat-data
mountPath: /cvat
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
- name: cvat-ui
image: onepanel/cvat-ui:0.17.0_cvat.1.0.0
ports:
- containerPort: 80
name: http
# You can add multiple FileSyncer sidecar containers if needed
- name: filesyncer
image: onepanel/filesyncer:0.17.0
imagePullPolicy: Always
args:
- download
- -server-prefix=/sys/filesyncer
env:
- name: FS_PATH
value: /mnt/share
- name: FS_PREFIX
value: '{{workflow.namespace}}/{{workspace.parameters.sync-directory}}'
volumeMounts:
- name: share
mountPath: /mnt/share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: cvat-ui
port: 80
protocol: TCP
targetPort: 80
- name: cvat
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
- queryParams:
id:
regex: \d+.*
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80
volumeClaimTemplates:
- metadata:
name: db
spec:
accessModes: ["ReadWriteOnce"]
resources:
requests:
storage: 20Gi
# DAG Workflow to be executed once a Workspace action completes (optional)
# Uncomment the lines below if you want to send Slack notifications
#postExecutionWorkflow:
# entrypoint: main
# templates:
# - name: main
# dag:
# tasks:
# - name: slack-notify
# template: slack-notify
# - name: slack-notify
# container:
# image: technosophos/slack-notify
# args:
# - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
# command:
# - sh
# - -c


@@ -0,0 +1,134 @@
containers:
- name: cvat-db
image: postgres:10-alpine
env:
- name: POSTGRES_USER
value: root
- name: POSTGRES_DB
value: cvat
- name: POSTGRES_HOST_AUTH_METHOD
value: trust
- name: PGDATA
value: /var/lib/psql/data
ports:
- containerPort: 5432
name: tcp
volumeMounts:
- name: db
mountPath: /var/lib/psql
- name: cvat-redis
image: redis:4.0-alpine
ports:
- containerPort: 6379
name: tcp
- name: cvat
image: onepanel/cvat:v0.18.0_cvat.1.0.0
env:
- name: DJANGO_MODWSGI_EXTRA_ARGS
value: ""
- name: ALLOWED_HOSTS
value: '*'
- name: CVAT_REDIS_HOST
value: localhost
- name: CVAT_POSTGRES_HOST
value: localhost
- name: CVAT_SHARE_URL
value: /cvat/data
- name: CVAT_SHARE_DIR
value: /share
- name: CVAT_DATA_DIR
value: /cvat/data
- name: CVAT_MEDIA_DATA_DIR
value: /cvat/data/data
- name: CVAT_KEYS_DIR
value: /cvat/data/keys
- name: CVAT_MODELS_DIR
value: /cvat/data/models
- name: CVAT_LOGS_DIR
value: /cvat/logs
- name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
- name: CVAT_ONEPANEL_WORKFLOWS_LABEL
value: 'key=used-by,value=cvat'
- name: NVIDIA_VISIBLE_DEVICES
value: all
- name: NVIDIA_DRIVER_CAPABILITIES
value: compute,utility
- name: NVIDIA_REQUIRE_CUDA
value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: http
volumeMounts:
- name: cvat-data
mountPath: /cvat
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
- name: cvat-ui
image: onepanel/cvat-ui:v0.18.0_cvat.1.0.0
ports:
- containerPort: 80
name: http
- name: sys-filesyncer
image: onepanel/filesyncer:v0.18.0
imagePullPolicy: Always
args:
- server
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: cvat-ui
port: 80
protocol: TCP
targetPort: 80
- name: cvat
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
- queryParams:
id:
regex: \d+.*
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80
volumeClaimTemplates:
- metadata:
name: db
spec:
accessModes: ["ReadWriteOnce"]
resources:
requests:
storage: 20Gi


@@ -0,0 +1,134 @@
containers:
- name: cvat-db
image: postgres:10-alpine
env:
- name: POSTGRES_USER
value: root
- name: POSTGRES_DB
value: cvat
- name: POSTGRES_HOST_AUTH_METHOD
value: trust
- name: PGDATA
value: /var/lib/psql/data
ports:
- containerPort: 5432
name: tcp
volumeMounts:
- name: db
mountPath: /var/lib/psql
- name: cvat-redis
image: redis:4.0-alpine
ports:
- containerPort: 6379
name: tcp
- name: cvat
image: onepanel/cvat:v0.19.0_cvat.1.0.0
env:
- name: DJANGO_MODWSGI_EXTRA_ARGS
value: ""
- name: ALLOWED_HOSTS
value: '*'
- name: CVAT_REDIS_HOST
value: localhost
- name: CVAT_POSTGRES_HOST
value: localhost
- name: CVAT_SHARE_URL
value: /cvat/data
- name: CVAT_SHARE_DIR
value: /share
- name: CVAT_DATA_DIR
value: /cvat/data
- name: CVAT_MEDIA_DATA_DIR
value: /cvat/data/data
- name: CVAT_KEYS_DIR
value: /cvat/data/keys
- name: CVAT_MODELS_DIR
value: /cvat/data/models
- name: CVAT_LOGS_DIR
value: /cvat/logs
- name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
- name: CVAT_ONEPANEL_WORKFLOWS_LABEL
value: 'key=used-by,value=cvat'
- name: NVIDIA_VISIBLE_DEVICES
value: all
- name: NVIDIA_DRIVER_CAPABILITIES
value: compute,utility
- name: NVIDIA_REQUIRE_CUDA
value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: http
volumeMounts:
- name: cvat-data
mountPath: /cvat
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
- name: cvat-ui
image: onepanel/cvat-ui:v0.19.0_cvat.1.0.0
ports:
- containerPort: 80
name: http
- name: sys-filesyncer
image: onepanel/filesyncer:v0.19.0
imagePullPolicy: Always
args:
- server
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: cvat-ui
port: 80
protocol: TCP
targetPort: 80
- name: cvat
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
- queryParams:
id:
regex: \d+.*
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80
volumeClaimTemplates:
- metadata:
name: db
spec:
accessModes: ["ReadWriteOnce"]
resources:
requests:
storage: 20Gi


@@ -0,0 +1,134 @@
containers:
- name: cvat-db
image: postgres:10-alpine
env:
- name: POSTGRES_USER
value: root
- name: POSTGRES_DB
value: cvat
- name: POSTGRES_HOST_AUTH_METHOD
value: trust
- name: PGDATA
value: /var/lib/psql/data
ports:
- containerPort: 5432
name: tcp
volumeMounts:
- name: db
mountPath: /var/lib/psql
- name: cvat-redis
image: redis:4.0-alpine
ports:
- containerPort: 6379
name: tcp
- name: cvat
image: onepanel/cvat:v0.19.0_cvat.1.0.0
env:
- name: DJANGO_MODWSGI_EXTRA_ARGS
value: ""
- name: ALLOWED_HOSTS
value: '*'
- name: CVAT_REDIS_HOST
value: localhost
- name: CVAT_POSTGRES_HOST
value: localhost
- name: CVAT_SHARE_URL
value: /cvat/data
- name: CVAT_SHARE_DIR
value: /share
- name: CVAT_DATA_DIR
value: /cvat/data
- name: CVAT_MEDIA_DATA_DIR
value: /cvat/data/data
- name: CVAT_KEYS_DIR
value: /cvat/data/keys
- name: CVAT_MODELS_DIR
value: /cvat/data/models
- name: CVAT_LOGS_DIR
value: /cvat/logs
- name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
- name: CVAT_ONEPANEL_WORKFLOWS_LABEL
value: 'key=used-by,value=cvat'
- name: NVIDIA_VISIBLE_DEVICES
value: all
- name: NVIDIA_DRIVER_CAPABILITIES
value: compute,utility
- name: NVIDIA_REQUIRE_CUDA
value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: http
volumeMounts:
- name: cvat-data
mountPath: /cvat
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
- name: cvat-ui
image: onepanel/cvat-ui:v0.19.0_cvat.1.0.0
ports:
- containerPort: 80
name: http
- name: sys-filesyncer
image: onepanel/filesyncer:v0.20.0
imagePullPolicy: Always
args:
- server
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: cvat-ui
port: 80
protocol: TCP
targetPort: 80
- name: cvat
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
- queryParams:
id:
regex: \d+.*
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80
volumeClaimTemplates:
- metadata:
name: db
spec:
accessModes: ["ReadWriteOnce"]
resources:
requests:
storage: 20Gi


@@ -0,0 +1,134 @@
containers:
- name: cvat-db
image: postgres:10-alpine
env:
- name: POSTGRES_USER
value: root
- name: POSTGRES_DB
value: cvat
- name: POSTGRES_HOST_AUTH_METHOD
value: trust
- name: PGDATA
value: /var/lib/psql/data
ports:
- containerPort: 5432
name: tcp
volumeMounts:
- name: db
mountPath: /var/lib/psql
- name: cvat-redis
image: redis:4.0-alpine
ports:
- containerPort: 6379
name: tcp
- name: cvat
image: onepanel/cvat:v0.19.0_cvat.1.0.0
env:
- name: DJANGO_MODWSGI_EXTRA_ARGS
value: ""
- name: ALLOWED_HOSTS
value: '*'
- name: CVAT_REDIS_HOST
value: localhost
- name: CVAT_POSTGRES_HOST
value: localhost
- name: CVAT_SHARE_URL
value: /cvat/data
- name: CVAT_SHARE_DIR
value: /share
- name: CVAT_DATA_DIR
value: /cvat/data
- name: CVAT_MEDIA_DATA_DIR
value: /cvat/data/data
- name: CVAT_KEYS_DIR
value: /cvat/data/keys
- name: CVAT_MODELS_DIR
value: /cvat/data/models
- name: CVAT_LOGS_DIR
value: /cvat/logs
- name: CVAT_ANNOTATIONS_OBJECT_STORAGE_PREFIX
value: 'artifacts/$(ONEPANEL_RESOURCE_NAMESPACE)/annotations/'
- name: CVAT_ONEPANEL_WORKFLOWS_LABEL
value: 'key=used-by,value=cvat'
- name: NVIDIA_VISIBLE_DEVICES
value: all
- name: NVIDIA_DRIVER_CAPABILITIES
value: compute,utility
- name: NVIDIA_REQUIRE_CUDA
value: "cuda>=10.0 brand=tesla,driver>=384,driver<385 brand=tesla,driver>=410,driver<411"
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: http
volumeMounts:
- name: cvat-data
mountPath: /cvat
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
- name: cvat-ui
image: onepanel/cvat-ui:v0.19.0_cvat.1.0.0
ports:
- containerPort: 80
name: http
- name: sys-filesyncer
image: onepanel/filesyncer:v1.0.0
imagePullPolicy: Always
args:
- server
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: share
mountPath: /share
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: cvat-ui
port: 80
protocol: TCP
targetPort: 80
- name: cvat
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
regex: /api/.*|/git/.*|/tensorflow/.*|/onepanelio/.*|/tracking/.*|/auto_annotation/.*|/analytics/.*|/static/.*|/admin/.*|/documentation/.*|/dextr/.*|/reid/.*
- queryParams:
id:
regex: \d+.*
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80
volumeClaimTemplates:
- metadata:
name: db
spec:
accessModes: ["ReadWriteOnce"]
resources:
requests:
storage: 20Gi


@@ -0,0 +1,101 @@
containers:
- name: jupyterlab
image: onepanel/dl:0.17.0
command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
workingDir: /data
env:
- name: tornado
value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
- name: TENSORBOARD_PROXY_URL
value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8888
name: jupyterlab
- containerPort: 6006
name: tensorboard
- containerPort: 8080
name: nni
volumeMounts:
- name: data
mountPath: /data
lifecycle:
postStart:
exec:
command:
- /bin/sh
- -c
- >
condayml="/data/.environment.yml";
jupytertxt="/data/.jupexported.txt";
if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
preStop:
exec:
command:
- /bin/sh
- -c
- >
conda env export > /data/.environment.yml -n base;
jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
- name: sys-filesyncer
image: onepanel/filesyncer:v0.18.0
imagePullPolicy: Always
args:
- server
- -host=localhost:8889
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: jupyterlab
port: 80
protocol: TCP
targetPort: 8888
- name: tensorboard
port: 6006
protocol: TCP
targetPort: 6006
- name: nni
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8889
protocol: TCP
targetPort: 8889
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8889
- match:
- uri:
prefix: /tensorboard
route:
- destination:
port:
number: 6006
- match:
- uri:
prefix: /nni
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80


@@ -0,0 +1,101 @@
containers:
- name: jupyterlab
image: onepanel/dl:0.17.0
command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
workingDir: /data
env:
- name: tornado
value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
- name: TENSORBOARD_PROXY_URL
value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8888
name: jupyterlab
- containerPort: 6006
name: tensorboard
- containerPort: 8080
name: nni
volumeMounts:
- name: data
mountPath: /data
lifecycle:
postStart:
exec:
command:
- /bin/sh
- -c
- >
condayml="/data/.environment.yml";
jupytertxt="/data/.jupexported.txt";
if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
preStop:
exec:
command:
- /bin/sh
- -c
- >
conda env export > /data/.environment.yml -n base;
jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
- name: sys-filesyncer
image: onepanel/filesyncer:v0.19.0
imagePullPolicy: Always
args:
- server
- -host=localhost:8889
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: jupyterlab
port: 80
protocol: TCP
targetPort: 8888
- name: tensorboard
port: 6006
protocol: TCP
targetPort: 6006
- name: nni
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8889
protocol: TCP
targetPort: 8889
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8889
- match:
- uri:
prefix: /tensorboard
route:
- destination:
port:
number: 6006
- match:
- uri:
prefix: /nni
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80


@@ -0,0 +1,101 @@
containers:
- name: jupyterlab
image: onepanel/dl:v0.20.0
command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
workingDir: /data
env:
- name: tornado
value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
- name: TENSORBOARD_PROXY_URL
value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8888
name: jupyterlab
- containerPort: 6006
name: tensorboard
- containerPort: 8080
name: nni
volumeMounts:
- name: data
mountPath: /data
lifecycle:
postStart:
exec:
command:
- /bin/sh
- -c
- >
condayml="/data/.environment.yml";
jupytertxt="/data/.jupexported.txt";
if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
preStop:
exec:
command:
- /bin/sh
- -c
- >
conda env export > /data/.environment.yml -n base;
jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
- name: sys-filesyncer
image: onepanel/filesyncer:v0.20.0
imagePullPolicy: Always
args:
- server
- -host=localhost:8889
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: jupyterlab
port: 80
protocol: TCP
targetPort: 8888
- name: tensorboard
port: 6006
protocol: TCP
targetPort: 6006
- name: nni
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8889
protocol: TCP
targetPort: 8889
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8889
- match:
- uri:
prefix: /tensorboard
route:
- destination:
port:
number: 6006
- match:
- uri:
prefix: /nni
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80


@@ -0,0 +1,101 @@
containers:
- name: jupyterlab
image: onepanel/dl:v0.20.0
command: ["/bin/bash", "-c", "pip install onepanel-sdk && start.sh LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64 jupyter lab --LabApp.token='' --LabApp.allow_remote_access=True --LabApp.allow_origin=\"*\" --LabApp.disable_check_xsrf=True --LabApp.trust_xheaders=True --LabApp.base_url=/ --LabApp.tornado_settings='{\"headers\":{\"Content-Security-Policy\":\"frame-ancestors * 'self'\"}}' --notebook-dir='/data' --allow-root"]
workingDir: /data
env:
- name: tornado
value: "'{'headers':{'Content-Security-Policy':\"frame-ancestors\ *\ 'self'\"}}'"
- name: TENSORBOARD_PROXY_URL
value: '//$(ONEPANEL_RESOURCE_UID)--$(ONEPANEL_RESOURCE_NAMESPACE).$(ONEPANEL_DOMAIN)/tensorboard'
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8888
name: jupyterlab
- containerPort: 6006
name: tensorboard
- containerPort: 8080
name: nni
volumeMounts:
- name: data
mountPath: /data
lifecycle:
postStart:
exec:
command:
- /bin/sh
- -c
- >
condayml="/data/.environment.yml";
jupytertxt="/data/.jupexported.txt";
if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
if [ -f "$jupytertxt" ]; then cat $jupytertxt | xargs -n 1 jupyter labextension install --no-build && jupyter lab build --minimize=False; fi;
preStop:
exec:
command:
- /bin/sh
- -c
- >
conda env export > /data/.environment.yml -n base;
jupyter labextension list 1>/dev/null 2> /data/.jup.txt;
cat /data/.jup.txt | sed -n '2,$p' | awk 'sub(/v/,"@", $2){print $1$2}' > /data/.jupexported.txt;
- name: sys-filesyncer
image: onepanel/filesyncer:v1.0.0
imagePullPolicy: Always
args:
- server
- -host=localhost:8889
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: jupyterlab
port: 80
protocol: TCP
targetPort: 8888
- name: tensorboard
port: 6006
protocol: TCP
targetPort: 6006
- name: nni
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8889
protocol: TCP
targetPort: 8889
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8889
- match:
- uri:
prefix: /tensorboard
route:
- destination:
port:
number: 6006
- match:
- uri:
prefix: /nni
route:
- destination:
port:
number: 8080
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80


@@ -0,0 +1,57 @@
arguments:
parameters:
# parameter screen-resolution allows users to select screen resolution
- name: screen-resolution
value: 1680x1050
type: select.select
displayName: Screen Resolution
options:
- name: 1280x1024
value: 1280x1024
- name: 1680x1050
value: 1680x1050
- name: 2880x1800
value: 2880x1800
containers:
- name: ubuntu
image: onepanel/vnc:dl-vnc
env:
- name: VNC_PASSWORDLESS
value: true
- name: VNC_RESOLUTION
value: '{{workflow.parameters.screen-resolution}}'
ports:
- containerPort: 6901
name: vnc
volumeMounts:
- name: data
mountPath: /data
ports:
- name: vnc
port: 80
protocol: TCP
targetPort: 6901
routes:
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
# entrypoint: main
# templates:
# - name: main
# dag:
# tasks:
# - name: slack-notify
# template: slack-notify
# - name: slack-notify
# container:
# image: technosophos/slack-notify
# args:
# - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
# command:
# - sh


@@ -0,0 +1,81 @@
arguments:
parameters:
# parameter screen-resolution allows users to select screen resolution
- name: screen-resolution
value: 1680x1050
type: select.select
displayName: Screen Resolution
options:
- name: 1280x1024
value: 1280x1024
- name: 1680x1050
value: 1680x1050
- name: 2880x1800
value: 2880x1800
containers:
- name: ubuntu
image: onepanel/vnc:dl-vnc
env:
- name: VNC_PASSWORDLESS
value: true
- name: VNC_RESOLUTION
value: '{{workflow.parameters.screen-resolution}}'
ports:
- containerPort: 6901
name: vnc
volumeMounts:
- name: data
mountPath: /data
- name: sys-filesyncer
image: onepanel/filesyncer:v1.0.0
imagePullPolicy: Always
args:
- server
- -host=localhost:8889
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: vnc
port: 80
protocol: TCP
targetPort: 6901
- name: fs
port: 8889
protocol: TCP
targetPort: 8889
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8889
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 80
# DAG Workflow to be executed once a Workspace action completes (optional)
#postExecutionWorkflow:
# entrypoint: main
# templates:
# - name: main
# dag:
# tasks:
# - name: slack-notify
# template: slack-notify
# - name: slack-notify
# container:
# image: technosophos/slack-notify
# args:
# - SLACK_USERNAME=onepanel SLACK_TITLE="Your workspace is ready" SLACK_ICON=https://www.gravatar.com/avatar/5c4478592fe00878f62f0027be59c1bd SLACK_MESSAGE="Your workspace is now running" ./slack-notify
# command:
# - sh


@@ -0,0 +1,68 @@
containers:
- name: vscode
image: onepanel/vscode:1.0.0
command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
env:
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: vscode
volumeMounts:
- name: data
mountPath: /data
lifecycle:
postStart:
exec:
command:
- /bin/sh
- -c
- >
condayml="/data/.environment.yml";
vscodetxt="/data/.vscode-extensions.txt";
if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
preStop:
exec:
command:
- /bin/sh
- -c
- >
conda env export > /data/.environment.yml -n base;
code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
- name: sys-filesyncer
image: onepanel/filesyncer:v0.18.0
imagePullPolicy: Always
args:
- server
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: vscode
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 8080


@@ -0,0 +1,68 @@
containers:
- name: vscode
image: onepanel/vscode:1.0.0
command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
env:
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: vscode
volumeMounts:
- name: data
mountPath: /data
lifecycle:
postStart:
exec:
command:
- /bin/sh
- -c
- >
condayml="/data/.environment.yml";
vscodetxt="/data/.vscode-extensions.txt";
if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
preStop:
exec:
command:
- /bin/sh
- -c
- >
conda env export > /data/.environment.yml -n base;
code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
- name: sys-filesyncer
image: onepanel/filesyncer:v0.19.0
imagePullPolicy: Always
args:
- server
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: vscode
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 8080


@@ -0,0 +1,68 @@
containers:
- name: vscode
image: onepanel/vscode:v0.20.0_code-server.3.9.1
command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
env:
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: vscode
volumeMounts:
- name: data
mountPath: /data
lifecycle:
postStart:
exec:
command:
- /bin/sh
- -c
- >
condayml="/data/.environment.yml";
vscodetxt="/data/.vscode-extensions.txt";
if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
preStop:
exec:
command:
- /bin/sh
- -c
- >
conda env export > /data/.environment.yml -n base;
code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
- name: sys-filesyncer
image: onepanel/filesyncer:v0.20.0
imagePullPolicy: Always
args:
- server
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: vscode
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 8080


@@ -0,0 +1,68 @@
containers:
- name: vscode
image: onepanel/vscode:v0.20.0_code-server.3.9.1
command: ["/bin/bash", "-c", "pip install onepanel-sdk && /usr/bin/entrypoint.sh --bind-addr 0.0.0.0:8080 --auth none ."]
env:
- name: ONEPANEL_MAIN_CONTAINER
value: 'true'
ports:
- containerPort: 8080
name: vscode
volumeMounts:
- name: data
mountPath: /data
lifecycle:
postStart:
exec:
command:
- /bin/sh
- -c
- >
condayml="/data/.environment.yml";
vscodetxt="/data/.vscode-extensions.txt";
if [ -f "$condayml" ]; then conda env update -f $condayml; fi;
if [ -f "$vscodetxt" ]; then cat $vscodetxt | xargs -n 1 code-server --install-extension; fi;
preStop:
exec:
command:
- /bin/sh
- -c
- >
conda env export > /data/.environment.yml -n base;
code-server --list-extensions | tail -n +2 > /data/.vscode-extensions.txt;
- name: sys-filesyncer
image: onepanel/filesyncer:v1.0.0
imagePullPolicy: Always
args:
- server
- -server-prefix=/sys/filesyncer
volumeMounts:
- name: data
mountPath: /data
- name: sys-namespace-config
mountPath: /etc/onepanel
readOnly: true
ports:
- name: vscode
port: 8080
protocol: TCP
targetPort: 8080
- name: fs
port: 8888
protocol: TCP
targetPort: 8888
routes:
- match:
- uri:
prefix: /sys/filesyncer
route:
- destination:
port:
number: 8888
- match:
- uri:
prefix: /
route:
- destination:
port:
number: 8080

go.mod (27 changed lines)

@@ -7,18 +7,17 @@ require (
github.com/Azure/go-autorest v14.0.0+incompatible // indirect
github.com/Azure/go-autorest/autorest/adal v0.8.2 // indirect
github.com/Masterminds/squirrel v1.1.0
github.com/argoproj/argo v0.0.0-20200331233432-4d1175eb68f6
github.com/argoproj/pkg v0.0.0-20200318225345-d3be5f29b1a8
github.com/argoproj/argo v0.0.0-20210112203504-f97bef5d0036
github.com/argoproj/pkg v0.2.0
github.com/asaskevich/govalidator v0.0.0-20200428143746-21a406dcc535
github.com/elazarl/goproxy v0.0.0-20191011121108-aa519ddbe484 // indirect
github.com/evanphx/json-patch v4.5.0+incompatible // indirect
github.com/ghodss/yaml v1.0.0
github.com/go-sql-driver/mysql v1.5.0 // indirect
github.com/golang/protobuf v1.4.3
github.com/google/uuid v1.1.2
github.com/gorilla/handlers v1.4.2
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0
github.com/grpc-ecosystem/grpc-gateway v1.14.4
github.com/grpc-ecosystem/grpc-gateway v1.14.6
github.com/grpc-ecosystem/grpc-gateway/v2 v2.0.1
github.com/hashicorp/go-uuid v1.0.2 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
@@ -28,11 +27,10 @@ require (
github.com/minio/minio-go/v6 v6.0.45
github.com/pkg/errors v0.9.1
github.com/pressly/goose v2.6.0+incompatible
github.com/sirupsen/logrus v1.4.2
github.com/spf13/cobra v0.0.5 // indirect
github.com/stretchr/testify v1.4.0
github.com/sirupsen/logrus v1.6.0
github.com/stretchr/testify v1.6.1
github.com/tmc/grpc-websocket-proxy v0.0.0-20200122045848-3419fae592fc
golang.org/x/net v0.0.0-20200822124328-c89045814202
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b
golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43
google.golang.org/api v0.30.0
google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154
@@ -41,8 +39,15 @@ require (
gopkg.in/yaml.v2 v2.2.8
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776
istio.io/api v0.0.0-20200107183329-ed4b507c54e1
k8s.io/api v0.16.4
k8s.io/apimachinery v0.16.7-beta.0
k8s.io/client-go v0.16.4
k8s.io/api v0.18.2
k8s.io/apimachinery v0.18.2
k8s.io/client-go v0.18.2
sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e // indirect
sigs.k8s.io/yaml v1.2.0
)
replace (
k8s.io/api => k8s.io/api v0.17.8
k8s.io/apimachinery => k8s.io/apimachinery v0.17.8
k8s.io/client-go => k8s.io/client-go v0.17.8
)

go.sum (547 changed lines): diff suppressed because it is too large.

Binary image files changed (previews not shown): img/features.png added (302 KiB); two images updated (75 KiB → 4.1 KiB, 7.7 MiB → 3.8 MiB); four images removed (1.8 MiB, 2.0 MiB, 657 KiB, 620 KiB).

main.go (68 changed lines)

@@ -4,6 +4,14 @@ import (
"context"
"flag"
"fmt"
migrations "github.com/onepanelio/core/db/go"
"github.com/pressly/goose"
"math"
"net"
"net/http"
"path/filepath"
"strings"
"github.com/gorilla/handlers"
grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
grpc_logrus "github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus"
@@ -11,12 +19,10 @@ import (
"github.com/grpc-ecosystem/grpc-gateway/v2/runtime"
"github.com/jmoiron/sqlx"
api "github.com/onepanelio/core/api/gen"
migrations "github.com/onepanelio/core/db/go"
v1 "github.com/onepanelio/core/pkg"
"github.com/onepanelio/core/pkg/util/env"
"github.com/onepanelio/core/server"
"github.com/onepanelio/core/server/auth"
"github.com/pressly/goose"
log "github.com/sirupsen/logrus"
"github.com/tmc/grpc-websocket-proxy/wsproxy"
"google.golang.org/grpc"
@@ -29,11 +35,6 @@ import (
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/tools/cache"
"math"
"net"
"net/http"
"path/filepath"
"strings"
)
var (
@@ -56,6 +57,30 @@ func main() {
log.Fatalf("Failed to connect to Kubernetes cluster: %v", err)
}
client.ClearSystemConfigCache()
sysConfig, err := client.GetSystemConfig()
if err != nil {
log.Fatalf("Failed to get system config: %v", err)
}
dbDriverName, databaseDataSourceName := sysConfig.DatabaseConnection()
// sqlx.MustConnect will panic when it can't connect to DB. In that case, this whole application will crash.
// This is okay, as the pod will restart and try connecting to DB again.
// dbDriverName may be nil, but sqlx will then panic.
db := sqlx.MustConnect(dbDriverName, databaseDataSourceName)
goose.SetTableName("goose_db_version")
if err := goose.Run("up", db.DB, filepath.Join("db", "sql")); err != nil {
log.Fatalf("Failed to run database sql migrations: %v", err)
db.Close()
}
goose.SetTableName("goose_db_go_version")
migrations.Initialize()
if err := goose.Run("up", db.DB, filepath.Join("db", "go")); err != nil {
log.Fatalf("Failed to run database go migrations: %v", err)
db.Close()
}
go watchConfigmapChanges("onepanel", stopCh, func(configMap *corev1.ConfigMap) error {
log.Printf("Configmap changed")
stopCh <- struct{}{}
@@ -65,28 +90,13 @@ func main() {
for {
client.ClearSystemConfigCache()
sysConfig, err := client.GetSystemConfig()
sysConfig, err = client.GetSystemConfig()
if err != nil {
log.Fatalf("Failed to get system config: %v", err)
}
dbDriverName, databaseDataSourceName := sysConfig.DatabaseConnection()
// sqlx.MustConnect will panic when it can't connect to DB. In that case, this whole application will crash.
// This is okay, as the pod will restart and try connecting to DB again.
// dbDriverName may be nil, but sqlx will then panic.
db := sqlx.MustConnect(dbDriverName, databaseDataSourceName)
goose.SetTableName("goose_db_version")
if err := goose.Run("up", db.DB, filepath.Join("db", "sql")); err != nil {
log.Fatalf("Failed to run database sql migrations: %v", err)
db.Close()
}
goose.SetTableName("goose_db_go_version")
migrations.Initialize()
if err := goose.Run("up", db.DB, filepath.Join("db", "go")); err != nil {
log.Fatalf("Failed to run database go migrations: %v", err)
db.Close()
}
dbDriverName, databaseDataSourceName = sysConfig.DatabaseConnection()
db = sqlx.MustConnect(dbDriverName, databaseDataSourceName)
s := startRPCServer(v1.NewDB(db), kubeConfig, sysConfig, stopCh)
@@ -94,7 +104,7 @@ func main() {
s.Stop()
if err := db.Close(); err != nil {
log.Printf("[error] closing db connection")
log.Printf("[error] closing db connection %v", err.Error())
}
}
}()
@@ -147,6 +157,8 @@ func startRPCServer(db *v1.DB, kubeConfig *v1.Config, sysConfig v1.SystemConfig,
api.RegisterWorkspaceServiceServer(s, server.NewWorkspaceServer())
api.RegisterConfigServiceServer(s, server.NewConfigServer())
api.RegisterServiceServiceServer(s, server.NewServiceServer())
api.RegisterFileServiceServer(s, server.NewFileServer())
api.RegisterInferenceServiceServer(s, server.NewInferenceService())
go func() {
if err := s.Serve(lis); err != nil {
@@ -182,6 +194,8 @@ func startHTTPProxy() {
registerHandler(api.RegisterWorkspaceServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
registerHandler(api.RegisterConfigServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
registerHandler(api.RegisterServiceServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
registerHandler(api.RegisterFileServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
registerHandler(api.RegisterInferenceServiceHandlerFromEndpoint, ctx, mux, endpoint, opts)
log.Printf("Starting HTTP proxy on port %v", *httpPort)
@@ -274,6 +288,8 @@ func customHeaderMatcher(key string) (string, bool) {
switch lowerCaseKey {
case "onepanel-auth-token":
return lowerCaseKey, true
case "onepanel-access-token":
return lowerCaseKey, true
case "cookie":
return lowerCaseKey, true
default:

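The customHeaderMatcher change above means the HTTP gateway now forwards an onepanel-access-token header in addition to the existing onepanel-auth-token. Below is a minimal client-side sketch; the host, path, and token are placeholders and only the header names come from this diff.

package examples

import (
	"log"
	"net/http"
)

// callAPIWithAccessToken is an illustrative sketch only. The host, path and token
// are placeholders; only the header names are taken from the diff above.
func callAPIWithAccessToken() {
	req, err := http.NewRequest(http.MethodGet, "https://onepanel.example.com/apis/v1beta1/example", nil)
	if err != nil {
		log.Fatal(err)
	}
	// Either header is now matched and forwarded by the gRPC gateway.
	req.Header.Set("onepanel-access-token", "<token>")
	// req.Header.Set("onepanel-auth-token", "<token>") // previously the only supported header

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	log.Println(resp.Status)
}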

@@ -125,6 +125,26 @@ func (c *Client) GetS3Client(namespace string, config *ArtifactRepositoryS3Provi
return
}
// GetPublicS3Client initializes a client to Amazon Cloud Storage with the endpoint being public accessible (if available)
func (c *Client) GetPublicS3Client(namespace string, config *ArtifactRepositoryS3Provider) (s3Client *s3.Client, err error) {
s3Client, err = s3.NewClient(s3.Config{
Endpoint: config.PublicEndpoint,
Region: config.Region,
AccessKey: config.AccessKey,
SecretKey: config.Secretkey,
InSecure: config.PublicInsecure,
})
if err != nil {
log.WithFields(log.Fields{
"Namespace": namespace,
"ConfigMap": config,
"Error": err.Error(),
}).Error("getS3Client failed when initializing a new S3 client.")
return
}
return
}
// GetGCSClient initializes a client to Google Cloud Storage.
func (c *Client) GetGCSClient(namespace string, config *ArtifactRepositoryGCSProvider) (gcsClient *gcs.Client, err error) {
return gcs.NewClient(namespace, config.ServiceAccountJSON)
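A minimal sketch of how GetPublicS3Client might be used, assuming an already-initialized *v1.Client (its construction is not part of this diff). The bucket, endpoints, region, and credentials below are illustrative; the field names, including the new PublicEndpoint and PublicInsecure, come from the provider struct changed later in this diff.

package examples

import (
	"log"

	v1 "github.com/onepanelio/core/pkg"
)

// demoPublicS3 is an illustrative sketch; c would normally be created by the server.
func demoPublicS3(c *v1.Client) {
	cfg := &v1.ArtifactRepositoryS3Provider{
		Bucket:         "onepanel",                             // illustrative
		Endpoint:       "minio.default.svc.cluster.local:9000", // in-cluster endpoint, illustrative
		PublicEndpoint: "minio.example.com",                    // new field: publicly reachable endpoint
		PublicInsecure: false,                                  // new field
		Region:         "us-west-2",
		AccessKey:      "<access-key>",
		Secretkey:      "<secret-key>", // field name casing as used in the codebase
	}

	// Connects through the public endpoint instead of the in-cluster one.
	s3Client, err := c.GetPublicS3Client("my-namespace", cfg)
	if err != nil {
		log.Fatal(err)
	}
	_ = s3Client // used by the files API later in this diff to build presigned URLs
}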


@@ -62,6 +62,7 @@ func (c *Client) GetDefaultConfig() (config *ConfigMap, err error) {
return
}
// GetNamespaceConfig returns the NamespaceConfig given a namespace
func (c *Client) GetNamespaceConfig(namespace string) (config *NamespaceConfig, err error) {
configMap, err := c.getConfigMap(namespace, "onepanel")
if err != nil {
@@ -97,11 +98,6 @@ func (c *Client) GetNamespaceConfig(namespace string) (config *NamespaceConfig,
secretKey, _ := base64.StdEncoding.DecodeString(secret.Data[config.ArtifactRepository.S3.SecretKeySecret.Key])
config.ArtifactRepository.S3.Secretkey = string(secretKey)
}
case config.ArtifactRepository.GCS != nil:
{
serviceJSON, _ := base64.StdEncoding.DecodeString(secret.Data[config.ArtifactRepository.GCS.ServiceAccountKeySecret.Key])
config.ArtifactRepository.GCS.ServiceAccountJSON = string(serviceJSON)
}
default:
return nil, util.NewUserError(codes.NotFound, "Artifact repository config not found.")
}


@@ -3,12 +3,13 @@ package v1
import (
"encoding/base64"
"fmt"
"strings"
"github.com/onepanelio/core/pkg/util/ptr"
log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v3"
corev1 "k8s.io/api/core/v1"
k8yaml "sigs.k8s.io/yaml"
"strings"
)
// SystemConfig is configuration loaded from kubernetes config and secrets that includes information about the
@@ -131,6 +132,28 @@ func (s SystemConfig) NodePoolOptionsAsParameters() (result []*ParameterOption,
return
}
// NodePoolOptionsMap returns a map where each key is a node pool value and the value is a NodePoolOption
func (s SystemConfig) NodePoolOptionsMap() (result map[string]*NodePoolOption, err error) {
data := s.GetValue("applicationNodePoolOptions")
if data == nil {
return nil, fmt.Errorf("no nodePoolOptions in config")
}
options := make([]*NodePoolOption, 0)
if err = k8yaml.Unmarshal([]byte(*data), &options); err != nil {
return
}
result = make(map[string]*NodePoolOption)
for i := range options {
val := options[i]
result[val.Value] = val
}
return
}
// NodePoolOptionByValue returns the nodePoolOption based on a given value
func (s SystemConfig) NodePoolOptionByValue(value string) (option *NodePoolOption, err error) {
options, err := s.NodePoolOptions()
@@ -223,6 +246,8 @@ type ArtifactRepositoryS3Provider struct {
KeyFormat string `yaml:"keyFormat"`
Bucket string
Endpoint string
PublicEndpoint string `yaml:"publicEndpoint"`
PublicInsecure bool `yaml:"publicInsecure"`
Insecure bool
Region string
AccessKeySecret ArtifactRepositorySecret `yaml:"accessKeySecret"`
@@ -350,6 +375,7 @@ func (g *ArtifactRepositoryGCSProvider) FormatKey(namespace, workflowName, podNa
return keyFormat
}
// NamespaceConfig represents configuration for the namespace
type NamespaceConfig struct {
ArtifactRepository ArtifactRepositoryProvider
}
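A small usage sketch of the NodePoolOptionsMap helper added above, assuming a SystemConfig obtained from client.GetSystemConfig() elsewhere; the "default-pool" value is a placeholder.

package examples

import (
	"log"

	v1 "github.com/onepanelio/core/pkg"
)

// lookupNodePool is an illustrative sketch; sysConfig would normally come from
// client.GetSystemConfig(), and the pool value here is a placeholder.
func lookupNodePool(sysConfig v1.SystemConfig) {
	options, err := sysConfig.NodePoolOptionsMap()
	if err != nil {
		log.Fatal(err)
	}
	if opt, ok := options["default-pool"]; ok {
		log.Printf("node pool option: %+v", opt)
	}
}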


@@ -354,7 +354,7 @@ func (c *Client) buildCronWorkflowDefinition(namespace string, workflowTemplateI
for _, param := range opts.Parameters {
newParams = append(newParams, wfv1.Parameter{
Name: param.Name,
Value: param.Value,
Value: wfv1.AnyStringPtr(*param.Value),
})
passedParams[param.Name] = true
}

pkg/files.go (new file, 110 lines)

@@ -0,0 +1,110 @@
package v1
import (
"fmt"
"github.com/minio/minio-go/v6"
"github.com/onepanelio/core/pkg/util"
log "github.com/sirupsen/logrus"
"google.golang.org/grpc/codes"
"net/url"
"strings"
"time"
)
// GetPresignedURLDownload represents the information available when downloading an object
type GetPresignedURLDownload struct {
URL string
Size int64
}
// ListFiles returns an array of files for the given namespace/key
func (c *Client) ListFiles(namespace, key string) (files []*File, err error) {
config, err := c.GetNamespaceConfig(namespace)
if err != nil {
return
}
if config.ArtifactRepository.S3 == nil {
return nil, util.NewUserError(codes.Internal, "S3 compatible artifact repository not set")
}
files = make([]*File, 0)
if len(key) > 0 && strings.HasPrefix(key, "/") {
key = key[1:]
}
if len(key) > 0 {
if string(key[len(key)-1]) != "/" {
key += "/"
}
}
s3Client, err := c.GetS3Client(namespace, config.ArtifactRepository.S3)
if err != nil {
return nil, err
}
doneCh := make(chan struct{})
defer close(doneCh)
for objInfo := range s3Client.ListObjects(config.ArtifactRepository.S3.Bucket, key, false, doneCh) {
if objInfo.Key == key {
continue
}
isDirectory := (objInfo.ETag == "" || strings.HasSuffix(objInfo.Key, "/")) && objInfo.Size == 0
newFile := &File{
Path: objInfo.Key,
Name: FilePathToName(objInfo.Key),
Extension: FilePathToExtension(objInfo.Key),
Size: objInfo.Size,
LastModified: objInfo.LastModified,
ContentType: objInfo.ContentType,
Directory: isDirectory,
}
files = append(files, newFile)
}
return
}
// GetObjectPresignedURL generates a presigned url for the object that is valid for 24 hours.
func (c *Client) GetObjectPresignedURL(namespace, key string) (download *GetPresignedURLDownload, err error) {
config, err := c.GetNamespaceConfig(namespace)
if err != nil {
return
}
s3Client, err := c.GetPublicS3Client(namespace, config.ArtifactRepository.S3)
if err != nil {
return
}
objInfo, err := s3Client.StatObject(config.ArtifactRepository.S3.Bucket, key, minio.StatObjectOptions{})
if err != nil {
log.WithFields(log.Fields{
"Namespace": namespace,
"Key": key,
"Error": err.Error(),
}).Error("StatObject")
return
}
reqParams := make(url.Values)
reqParams.Set("response-content-disposition", fmt.Sprintf("attachment; filename=\"%s\"", key))
presignedURL, err := s3Client.PresignedGetObject(config.ArtifactRepository.S3.Bucket, key, time.Hour*24, reqParams)
if err != nil {
log.WithFields(log.Fields{
"Namespace": namespace,
"Key": key,
"Error": err.Error(),
}).Error("PresignedGetObject")
return
}
return &GetPresignedURLDownload{
URL: presignedURL.String(),
Size: objInfo.Size,
}, nil
}
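A short usage sketch of the new files API above, assuming an already-initialized *v1.Client; the key prefix and object name are placeholders.

package examples

import (
	"fmt"
	"log"

	v1 "github.com/onepanelio/core/pkg"
)

// demoFiles is an illustrative sketch of ListFiles and GetObjectPresignedURL.
func demoFiles(c *v1.Client, namespace string) {
	// List objects under a key prefix in the namespace's artifact repository.
	files, err := c.ListFiles(namespace, "artifacts/"+namespace+"/annotations/")
	if err != nil {
		log.Fatal(err)
	}
	for _, f := range files {
		fmt.Printf("%s (directory=%v, %d bytes)\n", f.Path, f.Directory, f.Size)
	}

	// Generate a presigned download URL (valid for 24 hours) for a single object.
	download, err := c.GetObjectPresignedURL(namespace, "artifacts/"+namespace+"/annotations/data.zip") // object name is a placeholder
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(download.URL, download.Size)
}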

Some files were not shown because too many files have changed in this diff Show More