mirror of https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-17 11:01:37 -05:00

Compare commits

287 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 77b9a708df | |
| | a4ee871b88 | |
| | 3f4e19fc08 | |
| | 571fca6de5 | |
| | 5a2fdc4034 | |
| | cc6d84e7f6 | |
| | 99e11d2e22 | |
| | 9a85123e21 | |
| | 56669db6b6 | |
| | 8782eeb32f | |
| | 7f3d5bac0a | |
| | 7a1e0a7d2e | |
| | 681ecbd946 | |
| | e7798a8e32 | |
| | b158180ef4 | |
| | 7ad9da7fda | |
| | 94e2016a16 | |
| | 21bb577da8 | |
| | 5e8325ba28 | |
| | 25b7ccade3 | |
| | 57940c581c | |
| | 82f9e4e24b | |
| | 3e22fcfd2d | |
| | 0bc81e1078 | |
| | 7b6328dccf | |
| | 05124273ea | |
| | b1d4444522 | |
| | 4ee2d369cf | |
| | fb28b69bb0 | |
| | f2709c7100 | |
| | 3476f5ae38 | |
| | b937fdee7a | |
| | dd9ac2e362 | |
| | 403ff6cfec | |
| | 4a6226974e | |
| | 6a2c47f511 | |
| | 3d9a316f4b | |
| | cea9194595 | |
| | 08050956c9 | |
| | e16d3ed827 | |
| | 8cc8dcc89c | |
| | 173a715a4d | |
| | 4fee0210f6 | |
| | f99adf3de4 | |
| | 3d81da9762 | |
| | a567ff6de4 | |
| | 61ceb72cea | |
| | 58d3b969a2 | |
| | f8d725e15b | |
| | a823e44393 | |
| | 6929a803dc | |
| | 8a6c776a5f | |
| | d1ba3e23f4 | |
| | 6670289057 | |
| | 0f8fa0ccef | |
| | 02ddfc20f1 | |
| | 1f0cdf82e4 | |
| | baa12d725f | |
| | 133ff73a43 | |
| | 7df132b307 | |
| | cd8102535b | |
| | 974e832f78 | |
| | e3ec6bf9c5 | |
| | 1528c642d1 | |
| | 5730940492 | |
| | 3ff9df8e0b | |
| | ff61eae164 | |
| | 773ff0e62a | |
| | 869d071f73 | |
| | d01dc5a5c7 | |
| | adefa213e2 | |
| | 2d29ef7eca | |
| | dad31bc387 | |
| | afb872b3e1 | |
| | 6dcc832983 | |
| | 7a744bc7f2 | |
| | 8a8137e96c | |
| | 5ae9495bc6 | |
| | 3a0be70783 | |
| | a840057cd8 | |
| | f56e09cfa1 | |
| | 9ed274fb39 | |
| | e2b5e9bd66 | |
| | f6c63f2dcb | |
| | 2b33c7c27f | |
| | d9b4af1217 | |
| | 7231df34ce | |
| | 68709c02fe | |
| | 3134449b38 | |
| | 2b2b9ff44d | |
| | c5d0c6f623 | |
| | 3d7d1b23cb | |
| | 7cbf471913 | |
| | 3b2cd653a7 | |
| | 6718c7565d | |
| | b8d0e2a9e3 | |
| | fa5bfee0cf | |
| | 704222b8d7 | |
| | 503090856a | |
| | bbe5b98a2c | |
| | 84ca18b428 | |
| | f6b46a1c5c | |
| | 59bb827d2e | |
| | c2320831f7 | |
| | 1d12823f09 | |
| | a69ce7ec62 | |
| | 1e49e47a37 | |
| | 4cc3a3f788 | |
| | a57379be49 | |
| | 787245b058 | |
| | 1e5ab4d9f0 | |
| | 9004769865 | |
| | b1ce53ade3 | |
| | 1b4f6f8934 | |
| | 4eb98a9dcc | |
| | a63578e6f7 | |
| | 39d9eed585 | |
| | 78ff0c7d93 | |
| | 80b85ad74c | |
| | 1c4e65ebe1 | |
| | b7c2eaa65d | |
| | 528fbb14ea | |
| | f29127d515 | |
| | d97993e518 | |
| | 887ce88100 | |
| | 6be8c1d54b | |
| | c5e90d0236 | |
| | 65e253ae0d | |
| | a6314b57b7 | |
| | e6d250c640 | |
| | c982ea338d | |
| | 8b8bef0401 | |
| | bfe694763b | |
| | f174dc1c77 | |
| | 2d33c8dd82 | |
| | d8dcc90857 | |
| | 9dde80ce1c | |
| | 7b099e703a | |
| | 5d4afae616 | |
| | 6cbfc0fb1c | |
| | 2b59cc0185 | |
| | b0211b434a | |
| | 7ceee95f52 | |
| | 34e317a559 | |
| | 177d9bef39 | |
| | 8f8d01cee2 | |
| | 5cba0b4f7c | |
| | 48d4b806ad | |
| | 3c6796938d | |
| | 53081cfca9 | |
| | 96a6d0674a | |
| | 2bc1a41ec4 | |
| | 635da9a2b2 | |
| | 23bd05ea68 | |
| | 85e6d785ff | |
| | 02248fc065 | |
| | a5b1e027c1 | |
| | a35c267214 | |
| | 98d8c19b07 | |
| | b6e45b69a6 | |
| | 2692df3cc7 | |
| | 22c88cdd2e | |
| | 81e81f1c49 | |
| | f3befb0f4d | |
| | f7c7274463 | |
| | cb1d892747 | |
| | 7495259e13 | |
| | 61e5003931 | |
| | 8d88791a0b | |
| | 4ad73d381c | |
| | 404260b8a0 | |
| | 3d0dd0de98 | |
| | 1984c4ca97 | |
| | ae830d9e64 | |
| | b4a3325a21 | |
| | 48e437c4a5 | |
| | e9b00062a4 | |
| | 47c1204e89 | |
| | ceb4df5b8b | |
| | d8aa9b8d7f | |
| | 94958236cd | |
| | d59063ebab | |
| | 795300347b | |
| | 4789f2165b | |
| | e9c5a6dfbd | |
| | 100072cabd | |
| | 57ce9fae6f | |
| | c04f2aa110 | |
| | 70399be699 | |
| | bf278c39bd | |
| | 9824118a07 | |
| | 00c7600b05 | |
| | b715e6ed82 | |
| | 75812c71df | |
| | 21c8609eb7 | |
| | fa664bc92f | |
| | c4e06f4db3 | |
| | 5a301f9073 | |
| | 2f94307635 | |
| | f7279cb1f5 | |
| | a3121c0b2d | |
| | 15017917b3 | |
| | 4659d727b7 | |
| | 8b52e7200c | |
| | b95e20b6ee | |
| | 82132583af | |
| | 7b54abbc96 | |
| | 8e74ce1881 | |
| | 52ec96e4f1 | |
| | 969932743a | |
| | 54826b5fe3 | |
| | bd2ab6071d | |
| | 18a6879d64 | |
| | 384553c231 | |
| | 0e4e8c304c | |
| | d96e83c3f0 | |
| | d8f9cdeacb | |
| | 3e515f2f59 | |
| | 91065ebc38 | |
| | 9133bef329 | |
| | 1eb2e5f41a | |
| | 9a472cf8ec | |
| | 9d680be37a | |
| | f4f4271115 | |
| | faff61bb82 | |
| | 061052e4f3 | |
| | a6efd56844 | |
| | b4d61a067e | |
| | d073cd595b | |
| | 1b02fb6906 | |
| | 31323d6efb | |
| | 84d21b1e76 | |
| | b2a1738836 | |
| | 9c81db574e | |
| | 18dc7e75ff | |
| | adfdd56907 | |
| | ca2b893c2c | |
| | 7029276f16 | |
| | bcce80094d | |
| | 9861ca77e0 | |
| | 9327624930 | |
| | 69a96a7d5d | |
| | 29fbc7d952 | |
| | fefb47da83 | |
| | de3c6e6e99 | |
| | a3ae2df0ce | |
| | 3c538d1c2d | |
| | d2e4eeac88 | |
| | 97b6d71a06 | |
| | 43f08e7efb | |
| | 60c62c0668 | |
| | e90341b3d2 | |
| | 0f063d3a2e | |
| | 62d60f62f3 | |
| | 2bfba53e21 | |
| | 6153b7ad06 | |
| | c4b524237c | |
| | 17856855f6 | |
| | b16c6f3faa | |
| | fc9d08a62b | |
| | 92db8df000 | |
| | b7e12cb8a7 | |
| | 3192d056f2 | |
| | 273f6b658a | |
| | e90578694b | |
| | 3b2e53d270 | |
| | a34a72795b | |
| | ff2d2d0339 | |
| | 1ee4af52b4 | |
| | 28aa9b7912 | |
| | 8cf82c1f8b | |
| | a69b4bf662 | |
| | f3a48da3fa | |
| | d68433ec22 | |
| | c66726cdb5 | |
| | d547dd8ba3 | |
| | cc8989c2a5 | |
| | 053a2c7446 | |
| | 6e306461d7 | |
| | e40a81e8e1 | |
| | 094ee7c50c | |
| | d96fe82fbc | |
| | bc44014532 | |
| | 8cbf73dff6 | |
| | 28b0d827b9 | |
| | 01ea89a461 | |
| | 00af075fb3 | |
@@ -5,6 +5,7 @@
**/vs/loader.js
**/insane/**
**/marked/**
**/markjs/**
**/test/**/*.js
**/node_modules/**
**/vscode-api-tests/testWorkspace/**

@@ -682,6 +682,20 @@
"**/{vs,sql}/workbench/services/**/{common,browser}/**"
]
},
{
"target": "**/{vs,sql}/workbench/contrib/notebook/common/**",
"restrictions": [
"vs/nls",
"vs/css!./**/*",
"**/{vs,sql}/base/**/{common,worker}/**",
"**/{vs,sql}/platform/**/common/**",
"**/{vs,sql}/editor/**",
"**/{vs,sql}/workbench/common/**",
"**/{vs,sql}/workbench/api/common/**",
"**/{vs,sql}/workbench/services/**/common/**",
"**/{vs,sql}/workbench/contrib/**/common/**"
]
},
{
"target": "**/{vs,sql}/workbench/contrib/**/common/**",
"restrictions": [

@@ -717,7 +731,9 @@
"chart.js",
"plotly.js-dist-min",
"angular2-grid",
"html-query-plan"
"html-query-plan",
"turndown",
"mark.js"
]
},
{
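The new "target"/"restrictions" entry above appears to extend the import-layering lint rules to notebook common code: a file whose path matches the target glob may only import modules that match one of the listed restriction globs. A minimal sketch of what that means for a hypothetical file under src/sql/workbench/contrib/notebook/common/ (the file name, imports, and helpers here are illustrative, not taken from the diff):

```typescript
// Hypothetical file: src/sql/workbench/contrib/notebook/common/notebookUtils.ts
// Allowed by the restrictions above: 'vs/nls' and base/platform "common" layers.
import * as nls from 'vs/nls';
import { Emitter } from 'vs/base/common/event';
import { URI } from 'vs/base/common/uri';

// Not allowed from a */common/** target: browser- or electron-layer modules,
// e.g. 'vs/base/browser/dom' would be flagged by the layering rule.

export const onDidChangeCells = new Emitter<URI>();

export function localizedCellLabel(resource: URI): string {
	return nls.localize('cellLabel', "Notebook cell: {0}", resource.toString());
}
```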
.vscode/launch.json (15 changes, vendored)

@@ -19,7 +19,8 @@
"timeout": 30000,
"port": 5870,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
"${workspaceFolder}/out/**/*.js",
"${workspaceFolder}/extensions/*/out/**/*.js"
]
},
{

@@ -68,10 +69,9 @@
}
},
{
"type": "pwa-chrome",
"type": "chrome",
"request": "attach",
"name": "Attach to azuredatastudio",
"timeout": 50000,
"port": 9222
},
{

@@ -100,7 +100,9 @@
"--no-cached-data",
],
"webRoot": "${workspaceFolder}",
// Settings for js-debug:
"cascadeTerminateToConfigurations": [
"Attach to Extension Host"
],
"userDataDir": false,
"pauseForSourceMap": false,
"outFiles": [

@@ -110,10 +112,10 @@
"preLaunchTask": "Ensure Prelaunch Dependencies",
},
{
"type": "chrome",
"type": "node",
"request": "launch",
"name": "Launch ADS (Web) (TBD)",
"program": "${workspaceFolder}/resources/serverless/code-web.js",
"program": "${workspaceFolder}/resources/web/code-web.js",
"presentation": {
"group": "0_vscode",
"order": 2

@@ -274,6 +276,7 @@
"Attach to Extension Host",
"Attach to Shared Process",
],
"preLaunchTask": "Ensure Prelaunch Dependencies",
"presentation": {
"group": "0_vscode",
"order": 1
.vscode/notebooks/api.github-issues (2 changes, vendored)

@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"July 2020\"",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"August 2020\"",
"editable": true
},
{
.vscode/notebooks/inbox.github-issues (9 changes, vendored)

@@ -8,17 +8,20 @@
{
"kind": 2,
"language": "github-issues",
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item "
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item ",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Inbox tracking and Issue triage"
"value": "## Inbox tracking and Issue triage",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/master/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/Microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions."
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/master/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/Microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
"editable": true
},
{
"kind": 1,
.vscode/notebooks/my-work.github-issues (2 changes, vendored)

@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks\n\n// current milestone name\n$milestone=milestone:\"June 2020\"",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks\n\n// current milestone name\n$milestone=milestone:\"August 2020\"",
"editable": true
},
{
.vscode/notebooks/verification.github-issues (5 changes, vendored)

@@ -14,7 +14,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"June 2020\"",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"July 2020\"",
"editable": true
},
{

@@ -44,7 +44,8 @@
{
"kind": 1,
"language": "markdown",
"value": "### All"
"value": "### All",
"editable": true
},
{
"kind": 2,
.vscode/searches/es6.code-search (10 changes, vendored)

@@ -34,11 +34,11 @@ src/vs/base/common/arrays.ts:
420 */
421 export function first<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean, notFoundValue: T): T;

569
570 /**
571: * @deprecated ES6: use `Array.find`
572 */
573 export function find<T>(arr: ArrayLike<T>, predicate: (value: T, index: number, arr: ArrayLike<T>) => any): T | undefined {
568
569 /**
570: * @deprecated ES6: use `Array.find`
571 */
572 export function find<T>(arr: ArrayLike<T>, predicate: (value: T, index: number, arr: ArrayLike<T>) => any): T | undefined {

src/vs/base/common/objects.ts:
115
.vscode/tasks.json (18 changes, vendored)

@@ -139,7 +139,7 @@
"label": "Kill Build Web Extensions",
"group": "build",
"presentation": {
"reveal": "never",
"reveal": "never"
},
"problemMatcher": "$tsc"
},

@@ -203,11 +203,25 @@
},
{
"type": "shell",
"command": "node build/lib/prelaunch.js",
"command": "node build/lib/preLaunch.js",
"label": "Ensure Prelaunch Dependencies",
"presentation": {
"reveal": "silent"
}
},
{
"type": "npm",
"script": "tsec-compile-check",
"problemMatcher": [
{
"base": "$tsc",
"applyTo": "allDocuments",
"owner": "tsec"
},
],
"group": "build",
"label": "npm: tsec-compile-check",
"detail": "node_modules/tsec/bin/tsec -p src/tsconfig.json --noEmit"
}
]
}
.yarnrc (2 changes)

@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "7.3.2"
target "9.2.1"
runtime "electron"
CHANGELOG.md (10 changes)

@@ -1,5 +1,15 @@
# Change Log

## Version 1.21.0
* Release date: August 12, 2020
* Release status: General Availability
* New Notebook Features
* Move cell locations changd
* Added action to convert cells to Text Cell or Code cell
* Jupyter Books picker to open Jupyter Books directly from Github
* Search bar added to Notebooks Viewlet for searching through Jupyter Books
* Address issues in [August 2020 Milestone](https://github.com/microsoft/azuredatastudio/milestone/59?closed=1)

## Version 1.20.1
* Release date: July 17, 2020
* Release status: General Availability
README.md (14 changes)

@@ -129,10 +129,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.

Licensed under the [Source EULA](LICENSE.txt).

[win-user]: https://go.microsoft.com/fwlink/?linkid=2135512
[win-system]: https://go.microsoft.com/fwlink/?linkid=2135513
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2135514
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2135266
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2135267
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2135268
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2135515
[win-user]: https://go.microsoft.com/fwlink/?linkid=2138608
[win-system]: https://go.microsoft.com/fwlink/?linkid=2138704
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2138705
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2138609
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2138706
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2138507
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2138508
@@ -41,6 +41,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
JupyterLab: https://github.com/jupyterlab/jupyterlab
keytar: https://github.com/atom/node-keytar
make-error: https://github.com/JsCommunity/make-error
mark.js: https://github.com/julmot/mark.js
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
native-keymap: https://github.com/Microsoft/node-native-keymap

@@ -63,6 +64,8 @@ expressly granted herein, whether by implication, estoppel or otherwise.
svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write
turndown: https://github.com/domchristie/turndown
turndown-plugin-gfm: https://github.com/domchristie/turndown-plugin-gfm
underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode

@@ -1254,6 +1257,32 @@ ISC © Julien Fontanet
=========================================
END OF make-error NOTICES AND INFORMATION

%% mark.js NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)

Copyright (c) 2014–2019 Julian Kühnel

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF mark.js NOTICES AND INFORMATION

%% minimist NOTICES AND INFORMATION BEGIN HERE
=========================================
This software is released under the MIT license:

@@ -2002,6 +2031,58 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
=========================================
END OF temp-write NOTICES AND INFORMATION

%% turndown NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License

Copyright (c) 2017 Dom Christie

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF turndown NOTICES AND INFORMATION

%% turndown-plugin-gfm NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License

Copyright (c) 2017 Dom Christie

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF turndown-plugin-gfm NOTICES AND INFORMATION

%% underscore NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
@@ -286,9 +286,9 @@ nice-try@^1.0.4:
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==

node-fetch@^2.3.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
version "2.6.1"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==

npm-run-path@^2.0.0:
version "2.0.2"
@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:

@@ -50,7 +50,7 @@ steps:
displayName: Run Unit Tests (Electron)

# - script: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser webkit --browser firefox
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)

# - script: | {{SQL CARBON EDIT}} disable

@@ -21,7 +21,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -101,7 +101,7 @@ steps:

- script: |
set -e
yarn test-browser --build --browser chromium --browser webkit --browser firefox
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

@@ -118,6 +118,13 @@ steps:
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin

@@ -128,13 +135,6 @@ steps:
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin

@@ -160,6 +160,13 @@ steps:
continueOnError: true
condition: failed()

- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

- script: |
set -e
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain

@@ -204,13 +211,6 @@ steps:
zip -d $(agent.builddirectory)/VSCode-darwin.zip "*.pkg"
displayName: Clean Archive

- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/createAsset.js darwin-unnotarized archive "VSCode-darwin-$VSCODE_QUALITY.zip" $(agent.builddirectory)/VSCode-darwin.zip
displayName: Publish Unnotarized Build

- script: |
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"

@@ -127,16 +127,17 @@ steps:
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots"
displayName: Run smoke tests (Electron)
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

# - script: |
# set -e
# node ./node_modules/playwright/install.js
# VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-web-darwin" \
# yarn smoketest --web --headless --screenshots "$(build.artifactstagingdirectory)/smokeshots"
# displayName: Run smoke tests (Browser)
# continueOnError: true
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
node ./node_modules/playwright/install.js
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-web-darwin" \
yarn smoketest --web --headless --screenshots "$(build.artifactstagingdirectory)/smokeshots"
displayName: Run smoke tests (Browser)
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
set -e

@@ -11,7 +11,7 @@ pr:
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

@@ -11,7 +11,7 @@ pr:
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

@@ -10,7 +10,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:

@@ -44,8 +44,8 @@ steps:

- script: | # {{SQL CARBON EDIT}} add strict null check
yarn strict-vscode

displayName: Run Strict Null Check

# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks

@@ -67,7 +67,7 @@ steps:
displayName: Run Unit Tests (Electron)

# - script: | {{SQL CARBON EDIT}} disable
# DISPLAY=:10 yarn test-browser --browser chromium
# DISPLAY=:10 yarn test-browser --browser chromium --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)

# - script: | {{SQL CARBON EDIT}} disable
@@ -21,7 +21,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -21,7 +21,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -106,7 +106,7 @@ steps:

- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

@@ -123,6 +123,13 @@ steps:
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-x64" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64

@@ -133,13 +140,6 @@ steps:
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-x64" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-linux

@@ -148,6 +148,13 @@ steps:
continueOnError: true
condition: failed()

- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

- script: |
set -e
yarn gulp "vscode-linux-x64-build-deb"

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -159,6 +159,45 @@ steps:
yarn gulp vscode-linux-x64-build-rpm
displayName: Build Rpm

- task: UseDotNet@2
displayName: 'Install .NET Core sdk for signing'
inputs:
packageType: sdk
version: 2.1.x
installationPath: $(Agent.ToolsDirectory)/dotnet

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(Build.SourcesDirectory)/.build'
Pattern: 'extensions/*.vsix'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-233016",
"operationSetCode": "OpcSign",
"parameters": [
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-233016",
"operationSetCode": "OpcVerify",
"parameters": [],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: 'Signing Extensions'
condition: and(succeeded(), eq(variables['signed'], true))

- script: |
set -e
./build/azure-pipelines/linux/createDrop.sh

@@ -170,6 +209,7 @@ steps:
mkdir -p $(Build.ArtifactStagingDirectory)/test-results/coverage
cp --parents -r $(Build.SourcesDirectory)/extensions/*/coverage/** $(Build.ArtifactStagingDirectory)/test-results/coverage
displayName: Copy Coverage
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
@@ -1,157 +1,3 @@
resources:
containers:
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
endpoint: VSCodeHub
- container: snapcraft
image: snapcore/snapcraft:stable

jobs:
- job: Compile
pool:
vmImage: 'Ubuntu-16.04'
container: vscode-x64
steps:
- template: product-compile.yml

- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: x64
dependsOn:
- Compile
steps:
- template: win32/product-build-win32.yml

- job: Windows32
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: ia32
dependsOn:
- Compile
steps:
- template: win32/product-build-win32.yml

- job: WindowsARM64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: arm64
dependsOn:
- Compile
steps:
- template: win32/product-build-win32-arm64.yml

- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
container: vscode-x64
dependsOn:
- Compile
steps:
- template: linux/product-build-linux.yml

- job: LinuxSnap
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
container: snapcraft
dependsOn: Linux
steps:
- template: linux/snap-build-linux.yml

- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: armhf
dependsOn:
- Compile
steps:
- template: linux/product-build-linux-multiarch.yml

- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: arm64
dependsOn:
- Compile
steps:
- template: linux/product-build-linux-multiarch.yml

- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: alpine
dependsOn:
- Compile
steps:
- template: linux/product-build-linux-multiarch.yml

- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WEB'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
dependsOn:
- Compile
steps:
- template: web/product-build-web.yml

- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
pool:
vmImage: macOS-latest
dependsOn:
- Compile
steps:
- template: darwin/product-build-darwin.yml

- job: Release
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: 'Ubuntu-16.04'
dependsOn:
- Windows
- Windows32
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxArm64
- LinuxAlpine
- macOS
steps:
- template: release.yml

- job: Mooncake
pool:
vmImage: 'Ubuntu-16.04'
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
dependsOn:
- Windows
- Windows32
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxArm64
- LinuxAlpine
- LinuxWeb
- macOS
steps:
- template: sync-mooncake.yml

trigger: none
pr: none
@@ -160,4 +6,139 @@ schedules:
displayName: Mon-Fri at 7:00
branches:
include:
- main
- master

resources:
containers:
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
endpoint: VSCodeHub
- container: snapcraft
image: snapcore/snapcraft:stable

stages:
- stage: Compile
jobs:
- job: Compile
pool:
vmImage: 'Ubuntu-16.04'
container: vscode-x64
steps:
- template: product-compile.yml

- stage: Windows
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: VS2017-Win2016
jobs:
- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml

- job: Windows32
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
variables:
VSCODE_ARCH: ia32
steps:
- template: win32/product-build-win32.yml

- job: WindowsARM64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
variables:
VSCODE_ARCH: arm64
steps:
- template: win32/product-build-win32-arm64.yml

- stage: Linux
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: 'Ubuntu-16.04'
jobs:
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: vscode-x64
steps:
- template: linux/product-build-linux.yml

- job: LinuxSnap
dependsOn:
- Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: snapcraft
steps:
- template: linux/snap-build-linux.yml

- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
variables:
VSCODE_ARCH: armhf
steps:
- template: linux/product-build-linux-multiarch.yml

- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
variables:
VSCODE_ARCH: arm64
steps:
- template: linux/product-build-linux-multiarch.yml

- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
variables:
VSCODE_ARCH: alpine
steps:
- template: linux/product-build-linux-multiarch.yml

- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
variables:
VSCODE_ARCH: x64
steps:
- template: web/product-build-web.yml

- stage: macOS
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: macOS-latest
jobs:
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
steps:
- template: darwin/product-build-darwin.yml

- stage: Mooncake
dependsOn:
- Windows
- Linux
- macOS
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: 'Ubuntu-16.04'
jobs:
- job: SyncMooncake
displayName: Sync Mooncake
steps:
- template: sync-mooncake.yml

- stage: Publish
dependsOn:
- Windows
- Linux
- macOS
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: 'Ubuntu-16.04'
jobs:
- job: BuildService
displayName: Build Service
steps:
- template: release.yml
@@ -16,7 +16,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2

@@ -9,7 +9,7 @@ pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -45,7 +45,7 @@ function repeat(str: string, times: number): string {
}

function convertTabsToSpaces(str: string): string {
return str.replace(/^\t+/gm, value => repeat(' ', value.length));
return str.replace(/\t/gm, value => repeat(' ', value.length));
}

function getNewFileContent(content: string, tag: string) {
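The one-line change above appears to swap the pattern /^\t+/gm (runs of tabs at the start of a line only) for /\t/gm (every tab in the string). A small standalone sketch of the behavioral difference, using trimmed-down copies of the two helpers rather than the actual build script; the exact replacement string is not legible in this view, so the sketch assumes four spaces per tab:

```typescript
// Minimal reproduction of the two variants of convertTabsToSpaces.
function repeat(str: string, times: number): string {
	return new Array(times + 1).join(str);
}

// Old behavior: only runs of tabs at the start of a line are expanded.
function convertLeadingTabs(str: string): string {
	return str.replace(/^\t+/gm, value => repeat('    ', value.length));
}

// New behavior: every tab in the string is expanded.
function convertAllTabs(str: string): string {
	return str.replace(/\t/gm, value => repeat('    ', value.length));
}

const sample = '\tconst a = 1;\t// trailing comment';
console.log(JSON.stringify(convertLeadingTabs(sample)));
// "    const a = 1;\t// trailing comment"  (the tab inside the line is untouched)
console.log(JSON.stringify(convertAllTabs(sample)));
// "    const a = 1;    // trailing comment"  (all tabs expanded)
```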
@@ -46,7 +46,7 @@ jobs:
steps:
- template: linux/sql-product-build-linux.yml
parameters:
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects"]
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
timeoutInMinutes: 70

- job: LinuxWeb
@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -21,7 +21,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:

@@ -57,7 +57,7 @@ steps:
displayName: Run Unit Tests (Electron)

# - powershell: | {{SQL CARBON EDIT}} disable
# yarn test-browser --browser chromium --browser firefox
# yarn test-browser --browser chromium --browser firefox --tfs "Browser Unit Tests"
# displayName: Run Unit Tests (Browser)

# - powershell: | {{SQL CARBON EDIT}} disable

@@ -21,7 +21,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -21,7 +21,7 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
versionSpec: "12.14.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

@@ -115,7 +115,7 @@ steps:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn test-browser --build --browser chromium --browser firefox }
exec { yarn test-browser --build --browser chromium --browser firefox --tfs "Browser Unit Tests" }
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

@@ -135,18 +135,18 @@ steps:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
displayName: Run remote integration tests (Electron)
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
displayName: Run integration tests (Browser)
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- task: PublishPipelineArtifact@0

@@ -157,6 +157,13 @@ steps:
continueOnError: true
condition: failed()

- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
@@ -96,6 +96,7 @@ const indentationFilter = [
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
// {{SQL CARBON EDIT}}
'!**/*.gif',
'!build/actions/**/*.js',
'!**/*.{xlf,docx,sql,vsix,bacpac,ipynb,jpg}',
'!extensions/mssql/sqltoolsservice/**',

@@ -138,6 +139,7 @@ const copyrightFilter = [
'!resources/linux/snap/snapcraft.yaml',
'!resources/linux/snap/electron-launch',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
'!resources/completions/**',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',

@@ -145,6 +147,7 @@ const copyrightFilter = [
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
'!resources/serverless/code-web.js',
'!src/vs/editor/test/node/classification/typescript-test.ts',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',

@@ -168,10 +171,12 @@ const copyrightFilter = [
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
'!extensions/mssql/src/prompts/**',
'!extensions/kusto/src/prompts/**',
'!extensions/notebook/resources/jupyter_config/**',
'!extensions/query-history/images/**',
'!**/*.gif',
@@ -42,6 +42,7 @@ const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
buildfile.base,
buildfile.workerExtensionHost,
buildfile.workerNotebook,
buildfile.workbenchDesktop,
buildfile.code
]);

@@ -76,8 +77,7 @@ const vscodeResources = [
'out-build/vs/platform/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
'out-build/sql/workbench/electron-browser/splashscreen/*', // {{SQL CARBON EDIT}} STart
'out-build/vs/code/electron-sandbox/issue/issueReporter.js',
'out-build/sql/**/*.{svg,png,cur,html}',
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',

@@ -97,7 +97,7 @@ const vscodeResources = [
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
'out-build/vs/code/electron-sandbox/processExplorer/processExplorer.js',
'out-build/vs/platform/auth/common/auth.css',
'out-build/vs/code/electron-sandbox/proxy/auth.js',
'!**/test/**'
];
@@ -53,7 +53,9 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
const insertFile = (relativePath, stat, shouldUnpack) => {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
// Do not pass `onFileInserted` directly because it gets overwritten below.
// Create a closure capturing `onFileInserted`.
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}).then(() => onFileInserted(), () => onFileInserted());
};
return es.through(function (file) {
if (file.stat.isDirectory()) {
@@ -8,10 +8,17 @@
import * as path from 'path';
import * as es from 'event-stream';
const pickle = require('chromium-pickle-js');
const Filesystem = require('asar/lib/filesystem');
const Filesystem = <typeof AsarFilesystem>require('asar/lib/filesystem');
import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch';

declare class AsarFilesystem {
readonly header: unknown;
constructor(src: string);
insertDirectory(path: string, shouldUnpack?: boolean): unknown;
insertFile(path: string, shouldUnpack: boolean, file: { stat: { size: number; mode: number; }; }, options: {}): Promise<void>;
}

export function createAsar(folderPath: string, unpackGlobs: string[], destFilename: string): NodeJS.ReadWriteStream {

const shouldUnpackFile = (file: VinylFile): boolean => {
@@ -61,7 +68,9 @@ export function createAsar(folderPath: string, unpackGlobs: string[], destFilena
const insertFile = (relativePath: string, stat: { size: number; mode: number; }, shouldUnpack: boolean) => {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
// Do not pass `onFileInserted` directly because it gets overwritten below.
// Create a closure capturing `onFileInserted`.
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}).then(() => onFileInserted(), () => onFileInserted());
};

return es.through(function (file) {

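Both asar hunks above replace the callback argument to `insertFile` with the promise the asar filesystem API returns, and the new comments state why: the local `onFileInserted` variable is reassigned later in the function, so the call has to look it up lazily. The following is a minimal TypeScript sketch of that pattern, not the build script itself; `insert`, `pendingInserts` and the final reassignment are illustrative stand-ins.

// Sketch only: `insert` stands in for filesystem.insertFile(...); it is not the asar API.
let pendingInserts = 0;
let onFileInserted: () => void = () => { pendingInserts--; };

const insert = (_relativePath: string): Promise<void> => Promise.resolve();

function insertFileSketch(relativePath: string): void {
	pendingInserts++;
	// Passing `onFileInserted` by value would pin whatever function the variable
	// holds right now. The arrow wrappers defer the lookup until the promise
	// settles, so a later reassignment still takes effect, and both fulfilment
	// and rejection decrement the pending counter.
	insert(relativePath).then(() => onFileInserted(), () => onFileInserted());
}

// Later the script swaps the callback, e.g. once the input stream has ended.
onFileInserted = () => {
	pendingInserts--;
	if (pendingInserts === 0) {
		console.log('all pending inserts flushed');
	}
};

insertFileSketch('out/main.js');
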
@@ -18,7 +18,9 @@ const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');

const root = path.dirname(path.dirname(__dirname));
const builtInExtensions = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')).builtInExtensions;
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
const builtInExtensions = productjson.builtInExtensions;
const webBuiltInExtensions = productjson.webBuiltInExtensions;
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];

@@ -107,7 +109,7 @@ exports.getBuiltInExtensions = function getBuiltInExtensions() {
const control = readControlFile();
const streams = [];

for (const extension of builtInExtensions) {
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
let controlState = control[extension.name] || 'marketplace';
control[extension.name] = controlState;

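The builtInExtensions.js hunk parses product.json once and then iterates the desktop and web built-in extension lists together. A hedged sketch of that shape (the `processExtension` helper and the `?? []` fallbacks are illustrative, not part of the real script):

import * as fs from 'fs';
import * as path from 'path';

interface BuiltInExtension { name: string; version?: string; }

// Read product.json a single time and pull both lists from the same object.
const productJson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
const builtInExtensions: BuiltInExtension[] = productJson.builtInExtensions ?? [];
const webBuiltInExtensions: BuiltInExtension[] = productJson.webBuiltInExtensions ?? [];

// Placeholder for the per-extension work the real task does (download, stream, control file).
const processExtension = (extension: BuiltInExtension) => console.log(`would fetch ${extension.name}`);

// Desktop and web built-ins share the same handling, so they are walked as one list.
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
	processExtension(extension);
}
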
@@ -218,12 +218,14 @@ const externalExtensions = [
'schema-compare',
'cms',
'query-history',
'kusto',
'liveshare',
'sql-database-projects',
'machine-learning',
'sql-assessment',
'asde-deployment',
'sql-migration'
'sql-migration',
'data-workspace'
];
// extensions that require a rebuild since they have native parts
const rebuildExtensions = [
@@ -254,7 +256,6 @@ function packageLocalExtensionsStream(forWeb) {
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
})
.filter(({ name }) => (name === 'vscode-web-playground' ? forWeb : true)) // package vscode-web-playground only for web
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package

@@ -252,12 +252,14 @@ const externalExtensions = [
'schema-compare',
'cms',
'query-history',
'kusto',
'liveshare',
'sql-database-projects',
'machine-learning',
'sql-assessment',
'asde-deployment',
'sql-migration'
'sql-migration',
'data-workspace'
];

// extensions that require a rebuild since they have native parts
@@ -307,7 +309,6 @@ export function packageLocalExtensionsStream(forWeb: boolean): Stream {
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
})
.filter(({ name }) => (name === 'vscode-web-playground' ? forWeb : true)) // package vscode-web-playground only for web
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package

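Both extension build hunks add `data-workspace` to the external-extension list; the filter chain that follows then drops those names when packaging local extensions, since they ship as separate packages. A small sketch of that chain with illustrative list contents:

interface LocalExtension { name: string; path: string; }

const excludedExtensions = ['integration-tests'];              // illustrative
const externalExtensions = ['sql-migration', 'data-workspace']; // packaged separately
const builtInExtensions = [{ name: 'ms-vscode.node-debug2' }];  // illustrative

function selectPackagedExtensions(candidates: LocalExtension[], forWeb: boolean): LocalExtension[] {
	return candidates
		// vscode-web-playground is only packaged for the web build
		.filter(({ name }) => (name === 'vscode-web-playground' ? forWeb : true))
		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
		// external extensions are built and published on their own, so skip them here
		.filter(({ name }) => externalExtensions.indexOf(name) === -1);
}

console.log(selectPackagedExtensions(
	[{ name: 'data-workspace', path: 'extensions/data-workspace' },
	 { name: 'arc', path: 'extensions/arc' }],
	false)); // -> only the 'arc' entry survives
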
@@ -94,6 +94,10 @@
"name": "vs/workbench/contrib/issue",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/keybindings",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/markers",
"project": "vscode-workbench"
@@ -246,10 +250,6 @@
"name": "vs/workbench/services/configurationResolver",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/crashReporter",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/dialogs",
"project": "vscode-workbench"

@@ -50,7 +50,7 @@
"rollup-plugin-commonjs": "^10.1.0",
"rollup-plugin-node-resolve": "^5.2.0",
"terser": "4.3.8",
"typescript": "^4.0.0-dev.20200803",
"typescript": "^4.1.0-dev.20200824",
"vsce": "1.48.0",
"vscode-telemetry-extractor": "^1.6.0",
"xml2js": "^0.4.17"

@@ -88,7 +88,7 @@ Source: "{#ProductJsonPath}"; DestDir: "{code:GetDestDir}\resources\app"; Flags:

[Icons]
Name: "{group}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; AppUserModelID: "{#AppUserId}"
Name: "{commondesktop}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: desktopicon; AppUserModelID: "{#AppUserId}"
Name: "{autodesktop}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: desktopicon; AppUserModelID: "{#AppUserId}"
Name: "{userappdata}\Microsoft\Internet Explorer\Quick Launch\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: quicklaunchicon; AppUserModelID: "{#AppUserId}"

[Run]

@@ -2553,9 +2553,9 @@ node-abort-controller@^1.0.4:
|
||||
integrity sha512-7cNtLKTAg0LrW3ViS2C7UfIzbL3rZd8L0++5MidbKqQVJ8yrH6+1VRSHl33P0ZjBTbOJd37d9EYekvHyKkB0QQ==
|
||||
|
||||
node-fetch@^2.6.0:
|
||||
version "2.6.0"
|
||||
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
|
||||
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
|
||||
version "2.6.1"
|
||||
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
|
||||
integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
|
||||
|
||||
node-pre-gyp@^0.10.0:
|
||||
version "0.10.3"
|
||||
@@ -3544,10 +3544,10 @@ typescript@^3.0.1:
|
||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977"
|
||||
integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g==
|
||||
|
||||
typescript@^4.0.0-dev.20200803:
|
||||
version "4.0.0-dev.20200803"
|
||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.0.0-dev.20200803.tgz#ea8b0e9fb2ee3085598ff200c8568f04f4cbb2ba"
|
||||
integrity sha512-f/jDkFqCs0gbUd5MCUijO9u3AOMx1x1HdRDDHSidlc6uPVEkRduxjeTFhIXbGutO7ivzv+aC2sxH+1FQwsyBcg==
|
||||
typescript@^4.1.0-dev.20200824:
|
||||
version "4.1.0-dev.20200824"
|
||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.1.0-dev.20200824.tgz#34c92d9b6e5124600658c0d4e9b8c125beaf577d"
|
||||
integrity sha512-hTJfocmebnMKoqRw/xs3bL61z87XXtvOUwYtM7zaCX9mAvnfdo1x1bzQlLZAsvdzRIgAHPJQYbqYHKygWkDw6g==
|
||||
|
||||
typical@^4.0.0:
|
||||
version "4.0.0"
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"git": {
|
||||
"name": "chromium",
|
||||
"repositoryUrl": "https://chromium.googlesource.com/chromium/src",
|
||||
"commitHash": "e4745133a1d3745f066e068b8033c6a269b59caf"
|
||||
"commitHash": "894fb9eb56c6cbda65e3c3ae9ada6d4cb5850cc9"
|
||||
}
|
||||
},
|
||||
"licenseDetail": [
|
||||
@@ -40,7 +40,7 @@
|
||||
"SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
|
||||
],
|
||||
"isOnlyProductionDependency": true,
|
||||
"version": "78.0.3904.130"
|
||||
"version": "83.0.4103.122"
|
||||
},
|
||||
{
|
||||
"component": {
|
||||
@@ -48,11 +48,11 @@
|
||||
"git": {
|
||||
"name": "nodejs",
|
||||
"repositoryUrl": "https://github.com/nodejs/node",
|
||||
"commitHash": "787378879acfb212ed4ff824bf9f767a24a5cb43a"
|
||||
"commitHash": "9622fed3fb2cffcea9efff6c8cb4cc2def99d75d"
|
||||
}
|
||||
},
|
||||
"isOnlyProductionDependency": true,
|
||||
"version": "12.8.1"
|
||||
"version": "12.14.1"
|
||||
},
|
||||
{
|
||||
"component": {
|
||||
@@ -60,12 +60,12 @@
|
||||
"git": {
|
||||
"name": "electron",
|
||||
"repositoryUrl": "https://github.com/electron/electron",
|
||||
"commitHash": "5f93e889020d279d5a9cd1ecab080ab467312447"
|
||||
"commitHash": "03c7a54dc534ce1867d4393b9b1a6989d4a7e005"
|
||||
}
|
||||
},
|
||||
"isOnlyProductionDependency": true,
|
||||
"license": "MIT",
|
||||
"version": "7.3.2"
|
||||
"version": "9.2.1"
|
||||
},
|
||||
{
|
||||
"component": {
|
||||
|
||||
@@ -102,7 +102,7 @@
"mocha-junit-reporter": "^1.17.0",
"mocha-multi-reporters": "^1.1.7",
"should": "^13.2.3",
"vscodetestcover": "^1.0.9"
"vscodetestcover": "^1.1.0"
},
"__metadata": {
"id": "41",

@@ -986,10 +986,10 @@ vscode-nls@^3.2.1:
|
||||
resolved "https://registry.yarnpkg.com/vscode-nls/-/vscode-nls-3.2.5.tgz#25520c1955108036dec607c85e00a522f247f1a4"
|
||||
integrity sha512-ITtoh3V4AkWXMmp3TB97vsMaHRgHhsSFPsUdzlueSL+dRZbSNTZeOmdQv60kjCV306ghPxhDeoNUEm3+EZMuyw==
|
||||
|
||||
vscodetestcover@^1.0.9:
|
||||
version "1.0.9"
|
||||
resolved "https://registry.yarnpkg.com/vscodetestcover/-/vscodetestcover-1.0.9.tgz#0191f403dd59ba1153fc57979e281e992ce63731"
|
||||
integrity sha512-8z2961KF9Tuz5XdHAC6RMV3CrzAoUcfIK7wLYjLIXD4dbHIT7ceZMhoxToW1olyi3pFnThlS4lRXtx8Q5iyMMQ==
|
||||
vscodetestcover@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/vscodetestcover/-/vscodetestcover-1.1.0.tgz#ea2bc2fb0c54ca4084057883e7e1614a20533e14"
|
||||
integrity sha512-b/5mYqWC4yPxPUM1G8MD8ZnRt7eYd1IxAg/vdTE6JiNZlpGtxkDv91eXbF4TbQVlOPoqTzfhpY5GxbZbHVv+DQ==
|
||||
dependencies:
|
||||
decache "^4.4.0"
|
||||
glob "^7.1.2"
|
||||
|
||||
@@ -90,7 +90,7 @@
"mocha-multi-reporters": "^1.1.7",
"should": "^13.2.1",
"typemoq": "^2.1.0",
"vscodetestcover": "^1.0.9"
"vscodetestcover": "^1.1.0"
},
"__metadata": {
"id": "10",

@@ -769,10 +769,10 @@ vscode-nls@^3.2.1:
|
||||
resolved "https://registry.yarnpkg.com/vscode-nls/-/vscode-nls-3.2.5.tgz#25520c1955108036dec607c85e00a522f247f1a4"
|
||||
integrity sha512-ITtoh3V4AkWXMmp3TB97vsMaHRgHhsSFPsUdzlueSL+dRZbSNTZeOmdQv60kjCV306ghPxhDeoNUEm3+EZMuyw==
|
||||
|
||||
vscodetestcover@^1.0.9:
|
||||
version "1.0.9"
|
||||
resolved "https://registry.yarnpkg.com/vscodetestcover/-/vscodetestcover-1.0.9.tgz#0191f403dd59ba1153fc57979e281e992ce63731"
|
||||
integrity sha512-8z2961KF9Tuz5XdHAC6RMV3CrzAoUcfIK7wLYjLIXD4dbHIT7ceZMhoxToW1olyi3pFnThlS4lRXtx8Q5iyMMQ==
|
||||
vscodetestcover@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/vscodetestcover/-/vscodetestcover-1.1.0.tgz#ea2bc2fb0c54ca4084057883e7e1614a20533e14"
|
||||
integrity sha512-b/5mYqWC4yPxPUM1G8MD8ZnRt7eYd1IxAg/vdTE6JiNZlpGtxkDv91eXbF4TbQVlOPoqTzfhpY5GxbZbHVv+DQ==
|
||||
dependencies:
|
||||
decache "^4.4.0"
|
||||
glob "^7.1.2"
|
||||
|
||||
@@ -191,8 +191,8 @@
"os.environ[\"DOCKER_USERNAME\"] = arc_docker_username\n",
"os.environ[\"DOCKER_PASSWORD\"] = arc_docker_password\n",
"if os.name == 'nt':\n",
" print(f'If you don\\'t see output produced by azdata, you can run the following command in a terminal window to check the deployment status:\\n\\t {os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -A')\n",
"run_command(f'azdata arc dc create --connectivity-mode {arc_data_controller_connectivity_mode} -n {arc_data_controller_name} -ns {arc_data_controller_namespace} -s {arc_subscription} -g {arc_resource_group} -l {arc_data_controller_location} -p {arc_profile}')\n",
" print(f'If you don\\'t see output produced by azdata, you can run the following command in a terminal window to check the deployment status:\\n\\t {os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -n {arc_data_controller_namespace}')\n",
"run_command(f'azdata arc dc create --connectivity-mode {arc_data_controller_connectivity_mode} -n {arc_data_controller_name} -ns {arc_data_controller_namespace} -s {arc_subscription} -g {arc_resource_group} -l {arc_data_controller_location} -sc {arc_data_controller_storage_class} --profile-name {arc_profile}')\n",
"print(f'Azure Arc Data controller cluster: {arc_data_controller_name} created.') "
],
"metadata": {
@@ -247,4 +247,4 @@
"execution_count": null
}
]
}
}

@@ -75,7 +75,7 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"#### **Get required and optional parameters for the PostgreSQL server group**"
|
||||
"#### **Ensure Postgres Server Group name and password exist**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
||||
@@ -85,26 +85,78 @@
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"# Required Values\n",
|
||||
"server_group_name = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME\"]\n",
|
||||
"server_group_namespace = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAMESPACE\"]\n",
|
||||
"server_group_workers = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS\"]\n",
|
||||
"server_group_service_type = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_SERVICE_TYPE\"]\n",
|
||||
"server_group_data_size = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_DATA_SIZE\"]\n",
|
||||
"env_var = \"AZDATA_NB_VAR_CONTROLLER_ENDPOINT\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" controller_endpoint = os.environ[\"AZDATA_NB_VAR_CONTROLLER_ENDPOINT\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_ENDPOINT was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"# Optional Values\n",
|
||||
"server_group_data_class = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_DATA_CLASS\")\n",
|
||||
"env_var = \"AZDATA_NB_VAR_CONTROLLER_USERNAME\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" controller_username = os.environ[\"AZDATA_NB_VAR_CONTROLLER_USERNAME\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_USERNAME was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_CONTROLLER_PASSWORD\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" controller_password = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_PASSWORD was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" server_group_name = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" postgres_password = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD was not defined. Exiting\\n') \n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" postgres_storage_class_data = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA was not defined. Exiting\\n') \n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" postgres_storage_class_logs = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS was not defined. Exiting\\n') \n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" postgres_storage_class_backups = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS was not defined. Exiting\\n') \n",
|
||||
""
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"#### **Get optional parameters for the PostgreSQL server group**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"server_group_workers = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS\"]\n",
|
||||
"server_group_port = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PORT\")\n",
|
||||
"server_group_extensions = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_EXTENSIONS\")\n",
|
||||
"server_group_cpu_min = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CPU_MIN\")\n",
|
||||
"server_group_cpu_max = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CPU_MAX\")\n",
|
||||
"server_group_memory_min = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_MIN\")\n",
|
||||
"server_group_memory_max = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_MAX\")\n",
|
||||
"server_group_backup_classes = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_CLASSES\")\n",
|
||||
"server_group_backup_sizes = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_SIZES\")\n",
|
||||
"server_group_backup_full_interval = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_FULL_INTERVAL\")\n",
|
||||
"server_group_backup_delta_interval = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_DELTA_INTERVAL\")\n",
|
||||
"server_group_backup_retention_min = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_RETENTION_MIN\")\n",
|
||||
"server_group_backup_retention_max = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_RETENTION_MAX\")"
|
||||
"server_group_cores_request = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_REQUEST\")\n",
|
||||
"server_group_cores_limit = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_LIMIT\")\n",
|
||||
"server_group_memory_request = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_REQUEST\")\n",
|
||||
"server_group_memory_limit = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_LIMIT\")"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
|
||||
@@ -122,25 +174,35 @@
|
||||
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"# Login to the data controller.\n",
|
||||
"#\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = controller_password\n",
|
||||
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
|
||||
"out=run_command()"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "71366399-5963-4e24-b2f2-6bb5bffba4ec"
|
||||
},
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"print (f'Creating a PostgreSQL server group on Azure Arc')\n",
|
||||
"\n",
|
||||
"data_class_option = f' --dataClass \"{server_group_data_class}\"' if server_group_data_class else \"\"\n",
|
||||
"workers_option = f' -w {server_group_workers}' if server_group_workers else \"\"\n",
|
||||
"port_option = f' --port \"{server_group_port}\"' if server_group_port else \"\"\n",
|
||||
"extensions_option = f' --extensions \"{server_group_extensions}\"' if server_group_extensions else \"\"\n",
|
||||
"cpu_min_option = f' --minCpu \"{server_group_cpu_min}\"' if server_group_cpu_min else \"\"\n",
|
||||
"cpu_max_option = f' --maxCpu \"{server_group_cpu_max}\"' if server_group_cpu_max else \"\"\n",
|
||||
"memory_min_option = f' --minMemoryMb \"{server_group_memory_min}\"' if server_group_memory_min else \"\"\n",
|
||||
"memory_max_option = f' --maxMemoryMb \"{server_group_memory_max}\"' if server_group_memory_max else \"\"\n",
|
||||
"backup_classes_option = f' --backupClasses \"{server_group_backup_classes}\"' if server_group_backup_classes else \"\"\n",
|
||||
"backup_sizes_option = f' --backupSizesMb \"{server_group_backup_sizes}\"' if server_group_backup_sizes else \"\"\n",
|
||||
"backup_full_interval_option = f' --fullBackupInterval \"{server_group_backup_full_interval}\"' if server_group_backup_full_interval else \"\"\n",
|
||||
"backup_delta_interval_option = f' --deltaBackupInterval \"{server_group_backup_delta_interval}\"' if server_group_backup_delta_interval else \"\"\n",
|
||||
"backup_retention_min_option = f' --retentionMin \"{server_group_backup_retention_min}\"' if server_group_backup_retention_min else \"\"\n",
|
||||
"backup_retention_max_option = f' --retentionMax \"{server_group_backup_retention_max}\"' if server_group_backup_retention_max else \"\"\n",
|
||||
"cmd = f'azdata postgres server create --name {server_group_name} --namespace {server_group_namespace} --workers {server_group_workers} --serviceType {server_group_service_type} --dataSizeMb {server_group_data_size}{data_class_option}{port_option}{extensions_option}{cpu_min_option}{cpu_max_option}{memory_min_option}{memory_max_option}{backup_classes_option}{backup_sizes_option}{backup_full_interval_option}{backup_delta_interval_option}{backup_retention_min_option}{backup_retention_max_option}'\n",
|
||||
"cores_request_option = f' -cr \"{server_group_cores_request}\"' if server_group_cores_request else \"\"\n",
|
||||
"cores_limit_option = f' -cl \"{server_group_cores_limit}\"' if server_group_cores_limit else \"\"\n",
|
||||
"memory_request_option = f' -mr \"{server_group_memory_request}Mi\"' if server_group_memory_request else \"\"\n",
|
||||
"memory_limit_option = f' -ml \"{server_group_memory_limit}Mi\"' if server_group_memory_limit else \"\"\n",
|
||||
"\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = postgres_password\n",
|
||||
"cmd = f'azdata arc postgres server create -n {server_group_name} -scd {postgres_storage_class_data} -scl {postgres_storage_class_logs} -scb {postgres_storage_class_backups}{workers_option}{port_option}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
|
||||
"out=run_command()"
|
||||
],
|
||||
"metadata": {
|
||||
@@ -150,4 +212,4 @@
|
||||
"execution_count": null
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -75,7 +75,7 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"#### **Ensure SQL instance name, username, password, subscription id and resource group name**"
|
||||
"#### **Ensure SQL instance name, username and password exist**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
||||
@@ -85,20 +85,48 @@
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"# Required Values\n",
|
||||
"env_var = \"AZDATA_NB_VAR_CONTROLLER_ENDPOINT\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" controller_endpoint = os.environ[\"AZDATA_NB_VAR_CONTROLLER_ENDPOINT\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_ENDPOINT was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_CONTROLLER_USERNAME\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" controller_username = os.environ[\"AZDATA_NB_VAR_CONTROLLER_USERNAME\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_USERNAME was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_CONTROLLER_PASSWORD\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" controller_password = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_CONTROLLER_PASSWORD was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_SQL_INSTANCE_NAME\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" mssql_instance_name = os.environ[\"AZDATA_NB_VAR_SQL_INSTANCE_NAME\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_INSTANCE_NAME was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_SQL_PASSWORD\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" mssql_password = os.environ[\"AZDATA_NB_VAR_SQL_PASSWORD\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_PASSWORD was not defined. Exiting\\n') \n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_PASSWORD was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"# Optional Values\n",
|
||||
"subscription = os.environ[\"AZDATA_NB_VAR_ARC_SUBSCRIPTION\"] \n",
|
||||
"resource_group_name = os.environ[\"AZDATA_NB_VAR_ARC_RESOURCE_GROUP_NAME\"]\n"
|
||||
"env_var = \"AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" mssql_storage_class_data = os.environ[\"AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA was not defined. Exiting\\n')\n",
|
||||
"\n",
|
||||
"env_var = \"AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" mssql_storage_class_logs = os.environ[\"AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS was not defined. Exiting\\n') \n",
|
||||
""
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
|
||||
@@ -116,15 +144,28 @@
|
||||
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"# Login to the data controller.\n",
|
||||
"#\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = controller_password\n",
|
||||
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
|
||||
"out=run_command()"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "1437c536-17e8-4a7f-80c1-aa43ad02686c"
|
||||
},
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"print (f'Creating Managed SQL Server instance on Azure Arc')\n",
|
||||
"\n",
|
||||
"os.environ[\"MSSQL_SA_PASSWORD\"] = mssql_password\n",
|
||||
"subscription_option = f' -s \"{subscription}\"' if subscription else \"\"\n",
|
||||
"resource_group_option = f' -r \"{resource_group_name}\"' if resource_group_name else \"\"\n",
|
||||
"cmd = f'azdata sql instance create -n {mssql_instance_name}{subscription_option}{resource_group_option}'\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = mssql_password\n",
|
||||
"cmd = f'azdata arc sql mi create -n {mssql_instance_name} -scd {mssql_storage_class_data} -scl {mssql_storage_class_logs}'\n",
|
||||
"out=run_command()"
|
||||
],
|
||||
"metadata": {
|
||||
@@ -134,4 +175,4 @@
|
||||
"execution_count": null
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -2,21 +2,23 @@
|
||||
"name": "arc",
|
||||
"displayName": "%arc.displayName%",
|
||||
"description": "%arc.description%",
|
||||
"version": "0.2.0",
|
||||
"version": "0.3.5",
|
||||
"publisher": "Microsoft",
|
||||
"preview": true,
|
||||
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/main/LICENSE.txt",
|
||||
"icon": "images/extension.png",
|
||||
"engines": {
|
||||
"vscode": "*",
|
||||
"azdata": ">=1.20.0"
|
||||
"azdata": ">=1.22.0"
|
||||
},
|
||||
"activationEvents": [
|
||||
"onCommand:arc.manageArcController",
|
||||
"onCommand:arc.manageMiaa",
|
||||
"onCommand:arc.managePostgres",
|
||||
"onCommand:arc.connectToController",
|
||||
"onCommand:arc.createController",
|
||||
"onView:azureArc"
|
||||
],
|
||||
"extensionDependencies": [
|
||||
"Microsoft.azdata"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Microsoft/azuredatastudio.git"
|
||||
@@ -33,18 +35,6 @@
|
||||
]
|
||||
},
|
||||
"commands": [
|
||||
{
|
||||
"command": "arc.manageArcController",
|
||||
"title": "%arc.manageArcController%"
|
||||
},
|
||||
{
|
||||
"command": "arc.manageMiaa",
|
||||
"title": "%arc.manageMiaa%"
|
||||
},
|
||||
{
|
||||
"command": "arc.managePostgres",
|
||||
"title": "%arc.managePostgres%"
|
||||
},
|
||||
{
|
||||
"command": "arc.openDashboard",
|
||||
"title": "%arc.openDashboard%"
|
||||
@@ -66,22 +56,14 @@
|
||||
{
|
||||
"command": "arc.refresh",
|
||||
"title": "%command.refresh.title%"
|
||||
},
|
||||
{
|
||||
"command": "arc.editConnection",
|
||||
"title": "%command.editConnection.title%"
|
||||
}
|
||||
],
|
||||
"menus": {
|
||||
"commandPalette": [
|
||||
{
|
||||
"command": "arc.manageArcController",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "arc.manageMiaa",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "arc.managePostgres",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "arc.openDashboard",
|
||||
"when": "false"
|
||||
@@ -93,6 +75,10 @@
|
||||
{
|
||||
"command": "arc.refresh",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "arc.editConnection",
|
||||
"when": "false"
|
||||
}
|
||||
],
|
||||
"view/title": [
|
||||
@@ -110,32 +96,26 @@
|
||||
"view/item/context": [
|
||||
{
|
||||
"command": "arc.openDashboard",
|
||||
"when": "view == azureArc && viewItem != loading",
|
||||
"when": "view == azureArc && viewItem != postgresInstances",
|
||||
"group": "navigation@1"
|
||||
},
|
||||
{
|
||||
"command": "arc.refresh",
|
||||
"command": "arc.editConnection",
|
||||
"when": "view == azureArc && viewItem == dataControllers",
|
||||
"group": "navigation@2"
|
||||
},
|
||||
{
|
||||
"command": "arc.removeController",
|
||||
"command": "arc.refresh",
|
||||
"when": "view == azureArc && viewItem == dataControllers",
|
||||
"group": "navigation@3"
|
||||
},
|
||||
{
|
||||
"command": "arc.removeController",
|
||||
"when": "view == azureArc && viewItem == dataControllers",
|
||||
"group": "navigation@4"
|
||||
}
|
||||
]
|
||||
},
|
||||
"configuration": {
|
||||
"type": "object",
|
||||
"title": "%arc.configuration.title%",
|
||||
"properties": {
|
||||
"arc.ignoreSslVerification": {
|
||||
"type": "boolean",
|
||||
"default": true,
|
||||
"description": "%arc.ignoreSslVerification.desc%"
|
||||
}
|
||||
}
|
||||
},
|
||||
"viewsWelcome": [
|
||||
{
|
||||
"view": "azureArc",
|
||||
@@ -223,11 +203,15 @@
|
||||
"editable": false,
|
||||
"options": {
|
||||
"values": [
|
||||
"azure-arc-aks-private-preview",
|
||||
"azure-arc-eks-private-preview",
|
||||
"azure-arc-kubeadm-private-preview"
|
||||
"azure-arc-ake",
|
||||
"azure-arc-aks-default-storage",
|
||||
"azure-arc-aks-premium-storage",
|
||||
"azure-arc-azure-openshift",
|
||||
"azure-arc-eks",
|
||||
"azure-arc-kubeadm",
|
||||
"azure-arc-openshift"
|
||||
],
|
||||
"defaultValue": "azure-arc-aks-private-preview",
|
||||
"defaultValue": "azure-arc-aks-default-storage",
|
||||
"optionsType": "radio"
|
||||
}
|
||||
}
|
||||
@@ -283,6 +267,13 @@
|
||||
"required": true,
|
||||
"variableName": "AZDATA_NB_VAR_ARC_DATA_CONTROLLER_NAME"
|
||||
},
|
||||
{
|
||||
"label": "%arc.storage-class.dc.label%",
|
||||
"description": "%arc.sql.storage-class.dc.description%",
|
||||
"variableName": "AZDATA_NB_VAR_ARC_DATA_CONTROLLER_STORAGE_CLASS",
|
||||
"type": "kube_storage_class",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "azure_locations",
|
||||
"label": "%arc.control.plane.arc.data.controller.location%",
|
||||
@@ -310,6 +301,7 @@
|
||||
"defaultValue": "Indirect",
|
||||
"optionsType": "radio"
|
||||
},
|
||||
"enabled": false,
|
||||
"required": true,
|
||||
"variableName": "AZDATA_NB_VAR_ARC_DATA_CONTROLLER_CONNECTIVITY_MODE"
|
||||
}
|
||||
@@ -550,25 +542,12 @@
|
||||
},
|
||||
{
|
||||
"name": "azdata",
|
||||
"version": "20.0.0"
|
||||
"version": "20.1.0"
|
||||
}
|
||||
],
|
||||
"when": true
|
||||
}
|
||||
],
|
||||
"agreement": {
|
||||
"template": "%arc.control.plane.arc.data.controller.agreement%",
|
||||
"links": [
|
||||
{
|
||||
"text": "%microsoft.agreement.privacy.statement%",
|
||||
"url": "https://go.microsoft.com/fwlink/?LinkId=853010"
|
||||
},
|
||||
{
|
||||
"text": "%arc.agreement.azdata.eula%",
|
||||
"url": "https://aka.ms/eula-azdata-en"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "arc.sql",
|
||||
@@ -579,18 +558,6 @@
|
||||
"light": "./images/miaa.svg",
|
||||
"dark": "./images/miaa.svg"
|
||||
},
|
||||
"options": [
|
||||
{
|
||||
"name": "resourceType",
|
||||
"displayName": "%resource.type.picker.display.name%",
|
||||
"values": [
|
||||
{
|
||||
"name": "sql.managed.instance",
|
||||
"displayName": "%sql.managed.instance.display.name%"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"providers": [
|
||||
{
|
||||
"dialog": {
|
||||
@@ -604,26 +571,35 @@
|
||||
{
|
||||
"title": "",
|
||||
"sections": [
|
||||
{
|
||||
"title": "%arc.azure.section.title%",
|
||||
"fields": [
|
||||
{
|
||||
"subscriptionVariableName": "AZDATA_NB_VAR_ARC_SUBSCRIPTION",
|
||||
"resourceGroupVariableName": "AZDATA_NB_VAR_ARC_RESOURCE_GROUP_NAME",
|
||||
"type": "azure_account",
|
||||
"required": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "%arc.sql.settings.section.title%",
|
||||
"fields": [
|
||||
{
|
||||
"label": "%arc.controller%",
|
||||
"variableName": "AZDATA_NB_VAR_ARC_CONTROLLER",
|
||||
"type": "options",
|
||||
"editable": false,
|
||||
"required": true,
|
||||
"options": {
|
||||
"source": {
|
||||
"type": "ArcControllersOptionsSource",
|
||||
"variableNames": {
|
||||
"endpoint": "AZDATA_NB_VAR_CONTROLLER_ENDPOINT",
|
||||
"username": "AZDATA_NB_VAR_CONTROLLER_USERNAME",
|
||||
"password": "AZDATA_NB_VAR_CONTROLLER_PASSWORD"
|
||||
}
|
||||
},
|
||||
"optionsType": "dropdown"
|
||||
},
|
||||
"labelWidth": "100%"
|
||||
},
|
||||
{
|
||||
"label": "%arc.sql.instance.name%",
|
||||
"variableName": "AZDATA_NB_VAR_SQL_INSTANCE_NAME",
|
||||
"type": "text",
|
||||
"defaultValue": "sqlinstance1",
|
||||
"required": true
|
||||
"required": true,
|
||||
"labelWidth": "100%"
|
||||
},
|
||||
{
|
||||
"label": "%arc.sql.username%",
|
||||
@@ -633,7 +609,7 @@
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"label": "%arc.sql.password%",
|
||||
"label": "%arc.password%",
|
||||
"variableName": "AZDATA_NB_VAR_SQL_PASSWORD",
|
||||
"type": "sql_password",
|
||||
"userName": "sa",
|
||||
@@ -641,6 +617,20 @@
|
||||
"confirmationLabel": "%arc.confirm.password%",
|
||||
"defaultValue": "",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"label": "%arc.storage-class.data.label%",
|
||||
"description": "%arc.sql.storage-class.data.description%",
|
||||
"variableName": "AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA",
|
||||
"type": "kube_storage_class",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"label": "%arc.storage-class.logs.label%",
|
||||
"description": "%arc.sql.storage-class.logs.description%",
|
||||
"variableName": "AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS",
|
||||
"type": "kube_storage_class",
|
||||
"required": true
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -654,10 +644,10 @@
|
||||
},
|
||||
{
|
||||
"name": "azdata",
|
||||
"version": "20.0.0"
|
||||
"version": "20.1.0"
|
||||
}
|
||||
],
|
||||
"when": "resourceType=sql.managed.instance"
|
||||
"when": "true"
|
||||
}
|
||||
],
|
||||
"agreement": {
|
||||
@@ -670,10 +660,6 @@
|
||||
{
|
||||
"text": "%arc.agreement.sql.terms.conditions%",
|
||||
"url": "https://go.microsoft.com/fwlink/?linkid=2045708"
|
||||
},
|
||||
{
|
||||
"text": "%arc.agreement.azdata.eula%",
|
||||
"url": "https://aka.ms/eula-azdata-en"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -687,18 +673,6 @@
|
||||
"light": "./images/postgres.svg",
|
||||
"dark": "./images/postgres.svg"
|
||||
},
|
||||
"options": [
|
||||
{
|
||||
"name": "resourceType",
|
||||
"displayName": "%resource.type.picker.display.name%",
|
||||
"values": [
|
||||
{
|
||||
"name": "postgres",
|
||||
"displayName": "%postgres.server.group.display.name%"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"providers": [
|
||||
{
|
||||
"dialog": {
|
||||
@@ -715,6 +689,25 @@
|
||||
{
|
||||
"title": "%arc.postgres.settings.section.title%",
|
||||
"fields": [
|
||||
{
|
||||
"label": "%arc.controller%",
|
||||
"variableName": "AZDATA_NB_VAR_ARC_CONTROLLER",
|
||||
"type": "options",
|
||||
"editable": false,
|
||||
"required": true,
|
||||
"options": {
|
||||
"source": {
|
||||
"type": "ArcControllersOptionsSource",
|
||||
"variableNames": {
|
||||
"endpoint": "AZDATA_NB_VAR_CONTROLLER_ENDPOINT",
|
||||
"username": "AZDATA_NB_VAR_CONTROLLER_USERNAME",
|
||||
"password": "AZDATA_NB_VAR_CONTROLLER_PASSWORD"
|
||||
}
|
||||
},
|
||||
"optionsType": "dropdown"
|
||||
},
|
||||
"labelWidth": "100%"
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.name%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME",
|
||||
@@ -726,10 +719,12 @@
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.namespace%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAMESPACE",
|
||||
"type": "text",
|
||||
"defaultValue": "default",
|
||||
"label": "%arc.password%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD",
|
||||
"type": "password",
|
||||
"confirmationRequired": true,
|
||||
"confirmationLabel": "%arc.confirm.password%",
|
||||
"defaultValue": "",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
@@ -737,36 +732,8 @@
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS",
|
||||
"type": "number",
|
||||
"defaultValue": "1",
|
||||
"required": true,
|
||||
"min": 1
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.service.type%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_SERVICE_TYPE",
|
||||
"type": "options",
|
||||
"options": [
|
||||
"ClusterIP",
|
||||
"NodePort",
|
||||
"LoadBalancer"
|
||||
],
|
||||
"defaultValue": "NodePort",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.data.size%",
|
||||
"description": "%arc.postgres.server.group.data.size.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_DATA_SIZE",
|
||||
"type": "number",
|
||||
"defaultValue": "1024",
|
||||
"required": true,
|
||||
"min": 1
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.data.class%",
|
||||
"description": "%arc.postgres.server.group.data.class.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_DATA_CLASS",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.port%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PORT",
|
||||
@@ -776,100 +743,56 @@
|
||||
"max": 65535
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.extensions%",
|
||||
"description": "%arc.postgres.server.group.extensions.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_EXTENSIONS",
|
||||
"type": "text",
|
||||
"textValidationRequired": true,
|
||||
"textValidationRegex": "^(?:(?:pg_cron|postgis|postgis_raster|postgis_topology)(?!\\s*,\\s*$)(?:\\s*,\\s*|$))*$",
|
||||
"textValidationDescription": "%arc.postgres.server.group.extensions.validation.description%",
|
||||
"defaultValue": ""
|
||||
"label": "%arc.storage-class.data.label%",
|
||||
"description": "%arc.postgres.storage-class.data.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA",
|
||||
"type": "kube_storage_class",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"label": "%arc.storage-class.logs.label%",
|
||||
"description": "%arc.postgres.storage-class.logs.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS",
|
||||
"type": "kube_storage_class",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"label": "%arc.storage-class.backups.label%",
|
||||
"description": "%arc.postgres.storage-class.backups.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS",
|
||||
"type": "kube_storage_class",
|
||||
"required": true
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "%arc.postgres.settings.scheduling.title%",
|
||||
"title": "%arc.postgres.settings.resource.title%",
|
||||
"fields": [
|
||||
{
|
||||
"label": "%arc.postgres.server.group.cpu.min%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CPU_MIN",
|
||||
"label": "%arc.postgres.server.group.cores.request%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_REQUEST",
|
||||
"type": "number",
|
||||
"min": 0
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.cpu.max%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CPU_MAX",
|
||||
"label": "%arc.postgres.server.group.cores.limit%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_LIMIT",
|
||||
"type": "number",
|
||||
"min": 0
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.memory.min%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_MIN",
|
||||
"label": "%arc.postgres.server.group.memory.request%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_REQUEST",
|
||||
"type": "number",
|
||||
"min": 0
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.memory.max%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_MAX",
|
||||
"label": "%arc.postgres.server.group.memory.limit%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_LIMIT",
|
||||
"type": "number",
|
||||
"min": 0
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"title": "%arc.postgres.settings.backups.title%",
|
||||
"fields": [
|
||||
{
|
||||
"label": "%arc.postgres.server.group.backup.classes%",
|
||||
"description": "%arc.postgres.server.group.backup.classes.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_CLASSES",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.backup.sizes%",
|
||||
"description": "%arc.postgres.server.group.backup.sizes.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_SIZES",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.backup.claims%",
|
||||
"description": "%arc.postgres.server.group.backup.claims.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_CLAIMS",
|
||||
"type": "text"
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.backup.full.interval%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_FULL_INTERVAL",
|
||||
"type": "number",
|
||||
"min": 0
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.backup.delta.interval%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_DELTA_INTERVAL",
|
||||
"type": "number",
|
||||
"min": 0
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.backup.retention.min%",
|
||||
"description": "%arc.postgres.server.group.backup.retention.min.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_RETENTION_MIN",
|
||||
"type": "text",
|
||||
"textValidationRequired": true,
|
||||
"textValidationRegex": "^(?:(?:\\d+|(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[mMhHdDwW]|[mMgGtTpPeE][bB]))(?!\\s*[,;]\\s*$)(?:\\s*[,;]\\s*|$))*$",
|
||||
"textValidationDescription": "%arc.postgres.server.group.backup.retention.min.validation.description%",
|
||||
"defaultValue": ""
|
||||
},
|
||||
{
|
||||
"label": "%arc.postgres.server.group.backup.retention.max%",
|
||||
"description": "%arc.postgres.server.group.backup.retention.max.description%",
|
||||
"variableName": "AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_RETENTION_MAX",
|
||||
"type": "text",
|
||||
"textValidationRequired": true,
|
||||
"textValidationRegex": "^(?:(?:\\d+|(?:\\d+(?:\\.\\d*)?|\\.\\d+)(?:[mMhHdDwW]|[mMgGtTpPeE][bB]))(?!\\s*[,;]\\s*$)(?:\\s*[,;]\\s*|$))*$",
|
||||
"textValidationDescription": "%arc.postgres.server.group.backup.retention.max.validation.description%",
|
||||
"defaultValue": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -881,10 +804,10 @@
|
||||
},
|
||||
{
|
||||
"name": "azdata",
|
||||
"version": "20.0.0"
|
||||
"version": "20.1.0"
|
||||
}
|
||||
],
|
||||
"when": "resourceType=postgres"
|
||||
"when": "true"
|
||||
}
|
||||
],
|
||||
"agreement": {
|
||||
@@ -897,10 +820,6 @@
|
||||
{
|
||||
"text": "%arc.agreement.postgres.terms.conditions%",
|
||||
"url": "https://go.microsoft.com/fwlink/?linkid=2045708"
|
||||
},
|
||||
{
|
||||
"text": "%arc.agreement.azdata.eula%",
|
||||
"url": "https://aka.ms/eula-azdata-en"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -909,6 +828,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"request": "^2.88.0",
|
||||
"uuid": "^8.3.0",
|
||||
"vscode-nls": "^4.1.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -916,12 +836,13 @@
|
||||
"@types/node": "^12.11.7",
|
||||
"@types/request": "^2.48.3",
|
||||
"@types/sinon": "^9.0.4",
|
||||
"@types/uuid": "^8.3.0",
|
||||
"mocha": "^5.2.0",
|
||||
"mocha-junit-reporter": "^1.17.0",
|
||||
"mocha-multi-reporters": "^1.1.7",
|
||||
"should": "^13.2.3",
|
||||
"sinon": "^9.0.2",
|
||||
"typemoq": "2.1.0",
|
||||
"vscodetestcover": "^1.0.9"
|
||||
"vscodetestcover": "^1.1.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,17 +2,14 @@
|
||||
"arc.displayName": "Azure Arc",
|
||||
"arc.description": "Support for Azure Arc",
|
||||
"arc.configuration.title": "Azure Arc",
|
||||
"arc.ignoreSslVerification.desc" : "Ignore SSL verification errors against the controller endpoint if true",
|
||||
"arc.manageMiaa": "Manage MIAA",
|
||||
"arc.managePostgres": "Manage Postgres",
|
||||
"arc.manageArcController": "Manage Arc Controller",
|
||||
"arc.view.title" : "Azure Arc Controllers",
|
||||
"arc.view.welcome.connect" : "No Azure Arc controllers registered. [Learn More](https://azure.microsoft.com/services/azure-arc/)\n[Connect Controller](command:arc.connectToController)",
|
||||
"arc.view.welcome.loading" : "Loading controllers...",
|
||||
"command.createController.title" : "Create New Controller",
|
||||
"command.connectToController.title": "Connect to Existing Controller",
|
||||
"command.createController.title" : "Create New Azure Arc Controller",
|
||||
"command.connectToController.title": "Connect to Existing Azure Arc Controller",
|
||||
"command.removeController.title": "Remove Controller",
|
||||
"command.refresh.title": "Refresh",
|
||||
"command.editConnection.title": "Edit Connection",
|
||||
"arc.openDashboard": "Manage",
|
||||
|
||||
"resource.type.azure.arc.display.name": "Azure Arc data controller",
|
||||
@@ -20,14 +17,14 @@
|
||||
|
||||
"arc.control.plane.new.wizard.title": "Create Azure Arc data controller",
|
||||
"arc.control.plane.cluster.environment.title": "What is your target existing Kubernetes cluster environment?",
|
||||
"arc.control.plane.select.cluster.title": "Select from installed existing Kubernetes clusters",
|
||||
"arc.control.plane.select.cluster.title": "Select from existing Kubernetes clusters",
|
||||
"arc.control.plane.kube.cluster.context": "Cluster context",
|
||||
"arc.control.plane.container.registry.title": "Container registry details",
|
||||
"arc.control.plane.container.registry.name": "Container registry login",
|
||||
"arc.control.plane.container.registry.password": "Container registry password",
|
||||
"arc.control.plane.cluster.config.profile.title": "Choose the config profile",
|
||||
"arc.control.plane.cluster.config.profile": "Config profile",
|
||||
"arc.control.plane.data.controller.create.title": "Provide details to create Azure Arc data controller and register it with Azure",
|
||||
"arc.control.plane.data.controller.create.title": "Provide details to create Azure Arc data controller",
|
||||
"arc.control.plane.project.details.title": "Project details",
|
||||
"arc.control.plane.project.details.description": "Select the subscription to manage deployed resources and costs. Use resource groups like folders to organize and manage all your resources.",
|
||||
"arc.control.plane.data.controller.details.title": "Data controller details",
|
||||
@@ -68,7 +65,6 @@
|
||||
"arc.control.plane.summary.location": "Location",
|
||||
"arc.control.plane.arc.data.controller.agreement": "I accept {0} and {1}.",
|
||||
"microsoft.agreement.privacy.statement":"Microsoft Privacy Statement",
|
||||
"arc.agreement.azdata.eula":"azdata license terms",
|
||||
"deploy.arc.control.plane.action":"Script to notebook",
|
||||
|
||||
|
||||
@@ -76,15 +72,25 @@
|
||||
"resource.type.arc.postgres.display.name": "PostgreSQL server groups - Azure Arc (preview)",
|
||||
"resource.type.arc.sql.description": "Managed SQL Instance service for app developers in a customer-managed environment",
|
||||
"resource.type.arc.postgres.description": "Deploy PostgreSQL server groups into an Azure Arc environment",
|
||||
"resource.type.picker.display.name": "Resource Type",
|
||||
"sql.managed.instance.display.name": "Azure SQL managed instance - Azure Arc",
|
||||
"postgres.server.group.display.name": "PostgreSQL server groups - Azure Arc",
|
||||
"arc.controller": "Target Azure Arc Controller",
|
||||
|
||||
|
||||
"arc.sql.new.dialog.title": "Deploy Azure SQL managed instance - Azure Arc (preview)",
|
||||
"arc.sql.settings.section.title": "SQL Connection information",
|
||||
"arc.azure.section.title": "Azure information",
|
||||
"arc.sql.instance.name": "Instance name (lower case letters and digits only)",
|
||||
"arc.sql.username": "Username",
|
||||
"arc.sql.password": "Password",
|
||||
"arc.storage-class.dc.label": "Storage Class",
|
||||
"arc.sql.storage-class.dc.description": "The storage class to be used for all data and logs persistent volumes for all data controller pods that require them.",
|
||||
"arc.storage-class.data.label": "Storage Class (Data)",
|
||||
"arc.sql.storage-class.data.description": "The storage class to be used for data (.mdf)",
|
||||
"arc.postgres.storage-class.data.description": "The storage class to be used for data persistent volumes",
|
||||
"arc.storage-class.logs.label": "Storage Class (Logs)",
|
||||
"arc.sql.storage-class.logs.description": "The storage class to be used for logs (/var/log)",
|
||||
"arc.postgres.storage-class.logs.description": "The storage class to be used for logs persistent volumes",
|
||||
"arc.storage-class.backups.label": "Storage Class (Backups)",
|
||||
"arc.postgres.storage-class.backups.description": "The storage class to be used for backup persistent volumes",
|
||||
"arc.password": "Password",
|
||||
"arc.confirm.password": "Confirm password",
|
||||
"arc.azure.account": "Azure account",
|
||||
"arc.azure.subscription": "Azure subscription",
|
||||
@@ -92,40 +98,16 @@
|
||||
"arc.azure.location": "Azure location",
|
||||
"arc.postgres.new.dialog.title": "Deploy a PostgreSQL server group on Azure Arc (preview)",
|
||||
"arc.postgres.settings.section.title": "PostgreSQL server group settings",
|
||||
"arc.postgres.settings.backups.title": "PostgreSQL server group backup settings",
|
||||
"arc.postgres.settings.scheduling.title": "PostgreSQL server group scheduling settings",
|
||||
"arc.postgres.settings.resource.title": "PostgreSQL server group resource settings",
|
||||
"arc.postgres.server.group.name": "Server group name",
|
||||
"arc.postgres.server.group.name.validation.description": "Server group name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 10 characters or fewer in length.",
|
||||
"arc.postgres.server.group.namespace": "Kubernetes namespace",
|
||||
"arc.postgres.server.group.workers": "Number of workers",
|
||||
"arc.postgres.server.group.service.type": "Kubernetes service type",
|
||||
"arc.postgres.server.group.data.size": "Data volume size (MB)",
|
||||
"arc.postgres.server.group.data.size.description": "The number of megabytes (per node) that will be requested for the PostgreSQL server group's data volumes.",
|
||||
"arc.postgres.server.group.data.class": "Data volume storage class",
|
||||
"arc.postgres.server.group.data.class.description": "The Kubernetes storage class to use for the PostgreSQL server group's data volumes, or empty to use the default storage class.",
|
||||
"arc.postgres.server.group.port": "Port",
|
||||
"arc.postgres.server.group.extensions": "PostgreSQL extensions",
|
||||
"arc.postgres.server.group.extensions.description": "A comma-separated list of the PostgreSQL extensions that should be added. Supported values: pg_cron, postgis, postgis_raster, postgis_topology.",
|
||||
"arc.postgres.server.group.extensions.validation.description": "Supported PostgreSQL extensions: pg_cron, postgis, postgis_raster, postgis_topology.",
|
||||
"arc.postgres.server.group.cpu.min": "Min CPU cores (per node) to reserve",
|
||||
"arc.postgres.server.group.cpu.max": "Max CPU cores (per node) to allow",
|
||||
"arc.postgres.server.group.memory.min": "Min memory MB (per node) to reserve",
|
||||
"arc.postgres.server.group.memory.max": "Max memory MB (per node) to allow",
|
||||
"arc.postgres.server.group.backup.classes": "Backup volume storage classes",
|
||||
"arc.postgres.server.group.backup.classes.description": "A comma-separated list of existing Kubernetes storage classes to use for the PostgreSQL server group's backup volumes, one per backup tier. If provided, backup volume sizes must also be provided.",
|
||||
"arc.postgres.server.group.backup.sizes": "Backup volume sizes (MB)",
|
||||
"arc.postgres.server.group.backup.sizes.description": "A comma-separated list of the number of megabytes (per node) that will be requested for the PostgreSQL server group's backup volumes, one per backup tier. If specified, backups will be enabled. In this configuration a separate backup volume is used for each node.",
|
||||
"arc.postgres.server.group.backup.claims": "Backup volume claims",
|
||||
"arc.postgres.server.group.backup.claims.description": "A comma-separated list of existing Kubernetes persistent volume claims (in the same namespace) to use for the PostgreSQL server group's backups, one per backup tier. If specified, backups will be enabled. In this configuration the backup volumes are shared across all nodes.",
|
||||
"arc.postgres.server.group.backup.full.interval": "Minutes between full backups",
|
||||
"arc.postgres.server.group.backup.delta.interval": "Minutes between delta backups",
|
||||
"arc.postgres.server.group.backup.retention.min": "Minimum trim settings",
|
||||
"arc.postgres.server.group.backup.retention.min.description": "A list of trim settings that specifies the minimum number of days/size/counts of backups to preserve per tier. Each trim setting contains 1 or more trim values separated by commas and each tier is separated by a semicolon. Possible trim values include '7d', '10GB', or '50'.",
|
||||
"arc.postgres.server.group.backup.retention.min.validation.description": "Minimum trim settings must contain 1 or more trim values separated by commas with each tier separated by a semicolon.",
|
||||
"arc.postgres.server.group.backup.retention.max": "Maximum trim settings",
|
||||
"arc.postgres.server.group.backup.retention.max.description": "A list of trim settings that specifies the maximum number of days/size/counts of backups to preserve per tier. Each trim setting contains 1 or more trim values separated by commas and each tier is separated by a semicolon. Possible trim values include '7d', '10GB', or '50'.",
|
||||
"arc.postgres.server.group.backup.retention.max.validation.description": "Maximum trim settings must contain 1 or more trim values separated by commas with each tier separated by a semicolon.",
|
||||
"arc.agreement": "I accept {0}, {1} and {2}.",
|
||||
"arc.postgres.server.group.cores.request": "Min CPU cores (per node) to reserve",
|
||||
"arc.postgres.server.group.cores.limit": "Max CPU cores (per node) to allow",
|
||||
"arc.postgres.server.group.memory.request": "Min memory MB (per node) to reserve",
|
||||
"arc.postgres.server.group.memory.limit": "Max memory MB (per node) to allow",
|
||||
"arc.agreement": "I accept {0} and {1}.",
|
||||
"arc.agreement.sql.terms.conditions":"Azure SQL managed instance - Azure Arc terms and conditions",
|
||||
"arc.agreement.postgres.terms.conditions":"PostgreSQL server groups - Azure Arc terms and conditions",
|
||||
"arc.deploy.action":"Deploy"
|
||||
|
||||
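The retention strings above describe a compact textual format: tiers are separated by semicolons, trim values within a tier by commas, and each value is a day count ('7d'), a size ('10GB'), or a plain count ('50'). As an illustration of that format only (this is not the extension's actual validation code), a shape check could look like this:

```typescript
// Illustrative only: checks the trim-settings shape described above
// ("7d,50;10GB" style values); not the extension's validation logic.
function looksLikeTrimSettings(value: string): boolean {
	return value
		.split(';')                                        // one segment per backup tier
		.every(tier => tier
			.split(',')                                    // one or more trim values per tier
			.every(v => /^\d+(d|GB)?$/.test(v.trim())));   // '7d', '10GB', or '50'
}

console.log(looksLikeTrimSettings('7d,50;10GB')); // true
console.log(looksLikeTrimSettings('7d,,50'));     // false (empty trim value)
```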
@@ -3,10 +3,11 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { ResourceType } from 'arc';
import * as azurecore from 'azurecore';
import * as vscode from 'vscode';
import * as azurecore from '../../../azurecore/src/azurecore';
import { ConnectionMode, IconPath, IconPathHelper } from '../constants';
import * as loc from '../localizedConstants';
import { IconPathHelper, IconPath, ResourceType, ConnectionMode } from '../constants';

export class UserCancelledError extends Error { }

@@ -148,12 +149,11 @@ async function promptInputBox(title: string, options: vscode.InputBoxOptions): P

/**
 * Opens an input box prompting the user to enter in the name of a resource to delete
 * @param namespace The namespace of the resource to delete
 * @param name The name of the resource to delete
 * @returns Promise resolving to true if the user confirmed the name, false if the input box was closed for any other reason
 */
export async function promptForResourceDeletion(namespace: string, name: string): Promise<boolean> {
	const title = loc.resourceDeletionWarning(namespace, name);
export async function promptForResourceDeletion(name: string): Promise<boolean> {
	const title = loc.resourceDeletionWarning(name);
	const options: vscode.InputBoxOptions = {
		placeHolder: name,
		validateInput: input => input !== name ? loc.invalidResourceDeletionName(name) : ''
@@ -191,19 +191,7 @@ export async function promptAndConfirmPassword(validate: (input: string) => stri
 * @param error The error object
 */
export function getErrorMessage(error: any): string {
	if (error.body?.reason) {
		// For HTTP Errors with a body pull out the reason message since that's usually the most helpful
		return error.body.reason;
	} else if (error.message) {
		if (error.response?.statusMessage) {
			// Some Http errors just have a status message as additional detail, but it's not enough on its
			// own to be useful so append to the message as well
			return `${error.message} (${error.response.statusMessage})`;
		}
		return error.message;
	} else {
		return error;
	}
	return error.message ?? error;
}

/**
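The hunk above appears to collapse the HTTP-specific branches so that getErrorMessage simply prefers error.message and otherwise falls back to the raw value. A minimal sketch of that behaviour (illustrative, not copied verbatim from the extension):

```typescript
// Minimal sketch of the simplified behaviour shown in the hunk above.
function getErrorMessage(error: any): string {
	return error.message ?? error;
}

console.log(getErrorMessage(new Error('connection refused'))); // "connection refused"
console.log(getErrorMessage('self-signed certificate'));       // "self-signed certificate"
```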
@@ -222,3 +210,18 @@ export function parseInstanceName(instanceName: string | undefined): string {
	}
	return instanceName;
}

/**
 * Parses an address into its separate ip and port values. Address must be in the form <ip>:<port>
 * @param address The address to parse
 */
export function parseIpAndPort(address: string): { ip: string, port: string } {
	const sections = address.split(':');
	if (sections.length !== 2) {
		throw new Error(`Invalid address format for ${address}. Address must be in the form <ip>:<port>`);
	}
	return {
		ip: sections[0],
		port: sections[1]
	};
}

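For reference, parseIpAndPort above splits on ':' and accepts only exactly two parts, so IPv6 literals are rejected; a usage sketch:

```typescript
// Usage of parseIpAndPort as defined above.
const { ip, port } = parseIpAndPort('10.135.16.138:30080');
console.log(ip);   // "10.135.16.138"
console.log(port); // "30080"

// parseIpAndPort('10.135.16.138');   // would throw: no port
// parseIpAndPort('[::1]:30080');     // would also throw: more than one ':'
```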
@@ -118,11 +118,6 @@ export class IconPathHelper {
	}
}

export const enum ResourceType {
	dataControllers = 'dataControllers',
	postgresInstances = 'postgresInstances',
	sqlManagedInstances = 'sqlManagedInstances'
}

export const enum Endpoints {
	mgmtproxy = 'mgmtproxy',

@@ -1,20 +0,0 @@
# Updating the Swagger generated clients

The TypeScript clients used to communicate with the controller are generated from the controller's Swagger specification. To update the clients:

1. Get the Swagger specification from a running controller, and save it locally:
   * `https://<controller_ip>:30080/api/<api_name>/swagger.json`

2. Generate the clients:
   * At the time of writing, [editor.swagger.io](https://editor.swagger.io) does not support typescript-node client generation from OpenAPI 3.x specifications, so we'll use [openapi-generator.tech](https://openapi-generator.tech) instead.

   * Run openapi-generator:
     * Either by [installing it](https://openapi-generator.tech/docs/installation) (requires Java) and running:
       * `openapi-generator generate -i swagger.json -g typescript-node -o out --additional-properties supportsES6=true`

     * Or by running the Docker image (works in Linux or PowerShell):
       * `docker run --rm -v ${PWD}:/local openapitools/openapi-generator-cli generate -i /local/swagger.json -g typescript-node -o /local/out --additional-properties supportsES6=true`

3. Copy the generated clients (api.ts, api/, model/) to ./generated/<api_name>.

4. The generated clients have some unused imports, so they will not compile as-is. VS Code's "Organize Imports" command (Shift + Alt + O) fixes this, but only for a single file. To organize imports for all files in a folder, you can use the [Folder Source Actions extension](https://marketplace.visualstudio.com/items?itemName=bierner.folder-source-actions), followed by File -> Save All.

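Once regenerated and copied, the clients are plain TypeScript classes (their sources appear later in this diff). A sketch of driving one of them, assuming the clients were copied to ./generated/dusky per step 3 (the api_name, endpoint and credentials here are placeholders, not values from a real deployment):

```typescript
// Illustrative sketch only; import path, endpoint and credentials are assumptions.
import { OperatorRouterApi } from './generated/dusky/api/operatorRouterApi';

async function printOperatorStatus(): Promise<void> {
	// The generated constructor accepts (username, password, basePath),
	// as shown in the generated sources below.
	const api = new OperatorRouterApi('controller-admin', 'placeholder-password', 'https://<controller_ip>:30080');
	const { body } = await api.getDuskyOperatorStatus();
	console.log(body);
}
```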
@@ -1,48 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as request from 'request';
import * as vscode from 'vscode';

export interface Authentication {
	applyToRequest(requestOptions: request.Options): Promise<void> | void;
}

class SslAuth implements Authentication {
	constructor() { }

	applyToRequest(requestOptions: request.Options): void {
		requestOptions['agentOptions'] = {
			rejectUnauthorized: !getIgnoreSslVerificationConfigSetting()
		};
	}
}

export class BasicAuth extends SslAuth implements Authentication {
	constructor(public username: string, public password: string) {
		super();
	}

	applyToRequest(requestOptions: request.Options): void {
		super.applyToRequest(requestOptions);
		requestOptions.auth = {
			username: this.username, password: this.password
		};
	}
}

/* Retrieves the current setting for whether to ignore SSL verification errors */
export function getIgnoreSslVerificationConfigSetting(): boolean {
	const arcConfigSectionName = 'arc';
	const ignoreSslConfigName = 'ignoreSslVerification';

	try {
		const config = vscode.workspace.getConfiguration(arcConfigSectionName);
		return config.get<boolean>(ignoreSslConfigName, true);
	} catch (error) {
		console.error(`Unexpected error retrieving ${arcConfigSectionName}.${ignoreSslConfigName} setting : ${error}`);
	}
	return true;
}

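As a usage sketch of the deleted auth.ts above (the import path, endpoint and credentials are assumptions for illustration, and the options object is built by hand here):

```typescript
// Illustrative sketch of the auth classes shown above; placeholders throughout.
import * as request from 'request';
import { BasicAuth } from './auth';

const auth = new BasicAuth('controller-admin', 'placeholder-password');
const options: request.Options = {
	uri: 'https://<controller_ip>:30080/api/v1/metrics',
	method: 'POST',
	json: true
};

// Adds the Basic credentials and sets agentOptions.rejectUnauthorized based on
// the arc.ignoreSslVerification setting before the request is issued.
auth.applyToRequest(options);
```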
@@ -1,3 +0,0 @@
// This is the entrypoint for the package
export * from './api/apis';
export * from './model/models';

@@ -1,31 +0,0 @@
export * from './databaseRouterApi';
export * from './databaseValidateRouterApi';
export * from './logsRouterApi';
export * from './metricRouterApi';
export * from './operatorRouterApi';
import * as fs from 'fs';
import * as http from 'http';
import { DatabaseRouterApi } from './databaseRouterApi';
import { DatabaseValidateRouterApi } from './databaseValidateRouterApi';
import { LogsRouterApi } from './logsRouterApi';
import { MetricRouterApi } from './metricRouterApi';
import { OperatorRouterApi } from './operatorRouterApi';

export class HttpError extends Error {
	constructor (public response: http.IncomingMessage, public body: any, public statusCode?: number) {
		super('HTTP request failed');
		this.name = 'HttpError';
	}
}

export interface RequestDetailedFile {
	value: Buffer;
	options?: {
		filename?: string;
		contentType?: string;
	}
}

export type RequestFile = string | Buffer | fs.ReadStream | RequestDetailedFile;

export const APIS = [DatabaseRouterApi, DatabaseValidateRouterApi, LogsRouterApi, MetricRouterApi, OperatorRouterApi];

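For context, the generated clients reject non-2xx responses with the HttpError class above. A hedged sketch of surfacing a useful message from one (the `{ reason: ... }` body shape is an assumption for this example, not taken from the controller API):

```typescript
// Illustrative only: logging a failure from a generated client call.
import { HttpError } from './apis';

async function logFailure(call: () => Promise<unknown>): Promise<void> {
	try {
		await call();
	} catch (error) {
		if (error instanceof HttpError) {
			// error.body is `any`; controller errors often carry a reason field
			console.error(`Request failed (${error.statusCode}): ${error.body?.reason ?? error.message}`);
		} else {
			console.error(error);
		}
	}
}
```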
File diff suppressed because it is too large
@@ -1,260 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
import localVarRequest = require('request');
|
||||
import http = require('http');
|
||||
|
||||
/* tslint:disable:no-unused-locals */
|
||||
import { DuskyObjectModelsDatabaseService } from '../model/duskyObjectModelsDatabaseService';
|
||||
import { DuskyObjectModelsDuskyValidationResult } from '../model/duskyObjectModelsDuskyValidationResult';
|
||||
import { Authentication, HttpBasicAuth, HttpBearerAuth, Interceptor, ObjectSerializer, VoidAuth } from '../model/models';
|
||||
import { HttpError } from './apis';
|
||||
|
||||
|
||||
|
||||
let defaultBasePath = 'https://10.135.16.138:30080';
|
||||
|
||||
// ===============================================
|
||||
// This file is autogenerated - Please do not edit
|
||||
// ===============================================
|
||||
|
||||
export enum DatabaseValidateRouterApiApiKeys {
|
||||
}
|
||||
|
||||
export class DatabaseValidateRouterApi {
|
||||
protected _basePath = defaultBasePath;
|
||||
protected _defaultHeaders : any = {};
|
||||
protected _useQuerystring : boolean = false;
|
||||
|
||||
protected authentications = {
|
||||
'default': <Authentication>new VoidAuth(),
|
||||
'BasicAuth': new HttpBasicAuth(),
|
||||
'BearerAuth': new HttpBearerAuth(),
|
||||
}
|
||||
|
||||
protected interceptors: Interceptor[] = [];
|
||||
|
||||
constructor(basePath?: string);
|
||||
constructor(username: string, password: string, basePath?: string);
|
||||
constructor(basePathOrUsername: string, password?: string, basePath?: string) {
|
||||
if (password) {
|
||||
this.username = basePathOrUsername;
|
||||
this.password = password
|
||||
if (basePath) {
|
||||
this.basePath = basePath;
|
||||
}
|
||||
} else {
|
||||
if (basePathOrUsername) {
|
||||
this.basePath = basePathOrUsername
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set useQuerystring(value: boolean) {
|
||||
this._useQuerystring = value;
|
||||
}
|
||||
|
||||
set basePath(basePath: string) {
|
||||
this._basePath = basePath;
|
||||
}
|
||||
|
||||
set defaultHeaders(defaultHeaders: any) {
|
||||
this._defaultHeaders = defaultHeaders;
|
||||
}
|
||||
|
||||
get defaultHeaders() {
|
||||
return this._defaultHeaders;
|
||||
}
|
||||
|
||||
get basePath() {
|
||||
return this._basePath;
|
||||
}
|
||||
|
||||
public setDefaultAuthentication(auth: Authentication) {
|
||||
this.authentications.default = auth;
|
||||
}
|
||||
|
||||
public setApiKey(key: DatabaseValidateRouterApiApiKeys, value: string) {
|
||||
(this.authentications as any)[DatabaseValidateRouterApiApiKeys[key]].apiKey = value;
|
||||
}
|
||||
|
||||
set username(username: string) {
|
||||
this.authentications.BasicAuth.username = username;
|
||||
}
|
||||
|
||||
set password(password: string) {
|
||||
this.authentications.BasicAuth.password = password;
|
||||
}
|
||||
|
||||
set accessToken(accessToken: string | (() => string)) {
|
||||
this.authentications.BearerAuth.accessToken = accessToken;
|
||||
}
|
||||
|
||||
public addInterceptor(interceptor: Interceptor) {
|
||||
this.interceptors.push(interceptor);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @summary Validate database service creation.
|
||||
* @param duskyObjectModelsDatabaseService
|
||||
*/
|
||||
public async validateCreateDatabaseService (duskyObjectModelsDatabaseService?: DuskyObjectModelsDatabaseService, options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsDuskyValidationResult; }> {
|
||||
const localVarPath = this.basePath + '/dusky/databases/validate';
|
||||
let localVarQueryParameters: any = {};
|
||||
let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
|
||||
const produces = ['application/json'];
|
||||
// give precedence to 'application/json'
|
||||
if (produces.indexOf('application/json') >= 0) {
|
||||
localVarHeaderParams.Accept = 'application/json';
|
||||
} else {
|
||||
localVarHeaderParams.Accept = produces.join(',');
|
||||
}
|
||||
let localVarFormParams: any = {};
|
||||
|
||||
(<any>Object).assign(localVarHeaderParams, options.headers);
|
||||
|
||||
let localVarUseFormData = false;
|
||||
|
||||
let localVarRequestOptions: localVarRequest.Options = {
|
||||
method: 'POST',
|
||||
qs: localVarQueryParameters,
|
||||
headers: localVarHeaderParams,
|
||||
uri: localVarPath,
|
||||
useQuerystring: this._useQuerystring,
|
||||
json: true,
|
||||
body: ObjectSerializer.serialize(duskyObjectModelsDatabaseService, "DuskyObjectModelsDatabaseService")
|
||||
};
|
||||
|
||||
let authenticationPromise = Promise.resolve();
|
||||
if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
if (this.authentications.BearerAuth.accessToken) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));
|
||||
|
||||
let interceptorPromise = authenticationPromise;
|
||||
for (const interceptor of this.interceptors) {
|
||||
interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
|
||||
}
|
||||
|
||||
return interceptorPromise.then(() => {
|
||||
if (Object.keys(localVarFormParams).length) {
|
||||
if (localVarUseFormData) {
|
||||
(<any>localVarRequestOptions).formData = localVarFormParams;
|
||||
} else {
|
||||
localVarRequestOptions.form = localVarFormParams;
|
||||
}
|
||||
}
|
||||
return new Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsDuskyValidationResult; }>((resolve, reject) => {
|
||||
localVarRequest(localVarRequestOptions, (error, response, body) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
body = ObjectSerializer.deserialize(body, "DuskyObjectModelsDuskyValidationResult");
|
||||
if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
|
||||
resolve({ response: response, body: body });
|
||||
} else {
|
||||
reject(new HttpError(response, body, response.statusCode));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @summary Validate database service update.
|
||||
* @param ns The namespace of the database service.
|
||||
* @param name The name of the database service to update.
|
||||
* @param duskyObjectModelsDatabaseService
|
||||
*/
|
||||
public async validateUpdateDatabaseService (ns: string, name: string, duskyObjectModelsDatabaseService?: DuskyObjectModelsDatabaseService, options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsDuskyValidationResult; }> {
|
||||
const localVarPath = this.basePath + '/dusky/databases/validate/{ns}/{name}'
|
||||
.replace('{' + 'ns' + '}', encodeURIComponent(String(ns)))
|
||||
.replace('{' + 'name' + '}', encodeURIComponent(String(name)));
|
||||
let localVarQueryParameters: any = {};
|
||||
let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
|
||||
const produces = ['application/json'];
|
||||
// give precedence to 'application/json'
|
||||
if (produces.indexOf('application/json') >= 0) {
|
||||
localVarHeaderParams.Accept = 'application/json';
|
||||
} else {
|
||||
localVarHeaderParams.Accept = produces.join(',');
|
||||
}
|
||||
let localVarFormParams: any = {};
|
||||
|
||||
// verify required parameter 'ns' is not null or undefined
|
||||
if (ns === null || ns === undefined) {
|
||||
throw new Error('Required parameter ns was null or undefined when calling validateUpdateDatabaseService.');
|
||||
}
|
||||
|
||||
// verify required parameter 'name' is not null or undefined
|
||||
if (name === null || name === undefined) {
|
||||
throw new Error('Required parameter name was null or undefined when calling validateUpdateDatabaseService.');
|
||||
}
|
||||
|
||||
(<any>Object).assign(localVarHeaderParams, options.headers);
|
||||
|
||||
let localVarUseFormData = false;
|
||||
|
||||
let localVarRequestOptions: localVarRequest.Options = {
|
||||
method: 'POST',
|
||||
qs: localVarQueryParameters,
|
||||
headers: localVarHeaderParams,
|
||||
uri: localVarPath,
|
||||
useQuerystring: this._useQuerystring,
|
||||
json: true,
|
||||
body: ObjectSerializer.serialize(duskyObjectModelsDatabaseService, "DuskyObjectModelsDatabaseService")
|
||||
};
|
||||
|
||||
let authenticationPromise = Promise.resolve();
|
||||
if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
if (this.authentications.BearerAuth.accessToken) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));
|
||||
|
||||
let interceptorPromise = authenticationPromise;
|
||||
for (const interceptor of this.interceptors) {
|
||||
interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
|
||||
}
|
||||
|
||||
return interceptorPromise.then(() => {
|
||||
if (Object.keys(localVarFormParams).length) {
|
||||
if (localVarUseFormData) {
|
||||
(<any>localVarRequestOptions).formData = localVarFormParams;
|
||||
} else {
|
||||
localVarRequestOptions.form = localVarFormParams;
|
||||
}
|
||||
}
|
||||
return new Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsDuskyValidationResult; }>((resolve, reject) => {
|
||||
localVarRequest(localVarRequestOptions, (error, response, body) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
body = ObjectSerializer.deserialize(body, "DuskyObjectModelsDuskyValidationResult");
|
||||
if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
|
||||
resolve({ response: response, body: body });
|
||||
} else {
|
||||
reject(new HttpError(response, body, response.statusCode));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,175 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
import localVarRequest = require('request');
|
||||
import http = require('http');
|
||||
|
||||
/* tslint:disable:no-unused-locals */
|
||||
import { LogsRequest } from '../model/logsRequest';
|
||||
import { Authentication, HttpBasicAuth, HttpBearerAuth, Interceptor, ObjectSerializer, VoidAuth } from '../model/models';
|
||||
import { HttpError } from './apis';
|
||||
|
||||
|
||||
|
||||
let defaultBasePath = 'https://10.135.16.138:30080';
|
||||
|
||||
// ===============================================
|
||||
// This file is autogenerated - Please do not edit
|
||||
// ===============================================
|
||||
|
||||
export enum LogsRouterApiApiKeys {
|
||||
}
|
||||
|
||||
export class LogsRouterApi {
|
||||
protected _basePath = defaultBasePath;
|
||||
protected _defaultHeaders : any = {};
|
||||
protected _useQuerystring : boolean = false;
|
||||
|
||||
protected authentications = {
|
||||
'default': <Authentication>new VoidAuth(),
|
||||
'BasicAuth': new HttpBasicAuth(),
|
||||
'BearerAuth': new HttpBearerAuth(),
|
||||
}
|
||||
|
||||
protected interceptors: Interceptor[] = [];
|
||||
|
||||
constructor(basePath?: string);
|
||||
constructor(username: string, password: string, basePath?: string);
|
||||
constructor(basePathOrUsername: string, password?: string, basePath?: string) {
|
||||
if (password) {
|
||||
this.username = basePathOrUsername;
|
||||
this.password = password
|
||||
if (basePath) {
|
||||
this.basePath = basePath;
|
||||
}
|
||||
} else {
|
||||
if (basePathOrUsername) {
|
||||
this.basePath = basePathOrUsername
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set useQuerystring(value: boolean) {
|
||||
this._useQuerystring = value;
|
||||
}
|
||||
|
||||
set basePath(basePath: string) {
|
||||
this._basePath = basePath;
|
||||
}
|
||||
|
||||
set defaultHeaders(defaultHeaders: any) {
|
||||
this._defaultHeaders = defaultHeaders;
|
||||
}
|
||||
|
||||
get defaultHeaders() {
|
||||
return this._defaultHeaders;
|
||||
}
|
||||
|
||||
get basePath() {
|
||||
return this._basePath;
|
||||
}
|
||||
|
||||
public setDefaultAuthentication(auth: Authentication) {
|
||||
this.authentications.default = auth;
|
||||
}
|
||||
|
||||
public setApiKey(key: LogsRouterApiApiKeys, value: string) {
|
||||
(this.authentications as any)[LogsRouterApiApiKeys[key]].apiKey = value;
|
||||
}
|
||||
|
||||
set username(username: string) {
|
||||
this.authentications.BasicAuth.username = username;
|
||||
}
|
||||
|
||||
set password(password: string) {
|
||||
this.authentications.BasicAuth.password = password;
|
||||
}
|
||||
|
||||
set accessToken(accessToken: string | (() => string)) {
|
||||
this.authentications.BearerAuth.accessToken = accessToken;
|
||||
}
|
||||
|
||||
public addInterceptor(interceptor: Interceptor) {
|
||||
this.interceptors.push(interceptor);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @summary Gets logs from Elasticsearch.
|
||||
* @param logsRequest
|
||||
*/
|
||||
public async apiV1LogsPost (logsRequest?: LogsRequest, options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body: object; }> {
|
||||
const localVarPath = this.basePath + '/api/v1/logs';
|
||||
let localVarQueryParameters: any = {};
|
||||
let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
|
||||
const produces = ['application/json'];
|
||||
// give precedence to 'application/json'
|
||||
if (produces.indexOf('application/json') >= 0) {
|
||||
localVarHeaderParams.Accept = 'application/json';
|
||||
} else {
|
||||
localVarHeaderParams.Accept = produces.join(',');
|
||||
}
|
||||
let localVarFormParams: any = {};
|
||||
|
||||
(<any>Object).assign(localVarHeaderParams, options.headers);
|
||||
|
||||
let localVarUseFormData = false;
|
||||
|
||||
let localVarRequestOptions: localVarRequest.Options = {
|
||||
method: 'POST',
|
||||
qs: localVarQueryParameters,
|
||||
headers: localVarHeaderParams,
|
||||
uri: localVarPath,
|
||||
useQuerystring: this._useQuerystring,
|
||||
json: true,
|
||||
body: ObjectSerializer.serialize(logsRequest, "LogsRequest")
|
||||
};
|
||||
|
||||
let authenticationPromise = Promise.resolve();
|
||||
if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
if (this.authentications.BearerAuth.accessToken) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));
|
||||
|
||||
let interceptorPromise = authenticationPromise;
|
||||
for (const interceptor of this.interceptors) {
|
||||
interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
|
||||
}
|
||||
|
||||
return interceptorPromise.then(() => {
|
||||
if (Object.keys(localVarFormParams).length) {
|
||||
if (localVarUseFormData) {
|
||||
(<any>localVarRequestOptions).formData = localVarFormParams;
|
||||
} else {
|
||||
localVarRequestOptions.form = localVarFormParams;
|
||||
}
|
||||
}
|
||||
return new Promise<{ response: http.IncomingMessage; body: object; }>((resolve, reject) => {
|
||||
localVarRequest(localVarRequestOptions, (error, response, body) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
body = ObjectSerializer.deserialize(body, "object");
|
||||
if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
|
||||
resolve({ response: response, body: body });
|
||||
} else {
|
||||
reject(new HttpError(response, body, response.statusCode));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,171 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
import localVarRequest = require('request');
|
||||
import http = require('http');
|
||||
|
||||
/* tslint:disable:no-unused-locals */
|
||||
|
||||
import { Authentication, HttpBasicAuth, HttpBearerAuth, Interceptor, ObjectSerializer, VoidAuth } from '../model/models';
|
||||
import { HttpError } from './apis';
|
||||
|
||||
|
||||
let defaultBasePath = 'https://10.135.16.138:30080';
|
||||
|
||||
// ===============================================
|
||||
// This file is autogenerated - Please do not edit
|
||||
// ===============================================
|
||||
|
||||
export enum MetricRouterApiApiKeys {
|
||||
}
|
||||
|
||||
export class MetricRouterApi {
|
||||
protected _basePath = defaultBasePath;
|
||||
protected _defaultHeaders : any = {};
|
||||
protected _useQuerystring : boolean = false;
|
||||
|
||||
protected authentications = {
|
||||
'default': <Authentication>new VoidAuth(),
|
||||
'BasicAuth': new HttpBasicAuth(),
|
||||
'BearerAuth': new HttpBearerAuth(),
|
||||
}
|
||||
|
||||
protected interceptors: Interceptor[] = [];
|
||||
|
||||
constructor(basePath?: string);
|
||||
constructor(username: string, password: string, basePath?: string);
|
||||
constructor(basePathOrUsername: string, password?: string, basePath?: string) {
|
||||
if (password) {
|
||||
this.username = basePathOrUsername;
|
||||
this.password = password
|
||||
if (basePath) {
|
||||
this.basePath = basePath;
|
||||
}
|
||||
} else {
|
||||
if (basePathOrUsername) {
|
||||
this.basePath = basePathOrUsername
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set useQuerystring(value: boolean) {
|
||||
this._useQuerystring = value;
|
||||
}
|
||||
|
||||
set basePath(basePath: string) {
|
||||
this._basePath = basePath;
|
||||
}
|
||||
|
||||
set defaultHeaders(defaultHeaders: any) {
|
||||
this._defaultHeaders = defaultHeaders;
|
||||
}
|
||||
|
||||
get defaultHeaders() {
|
||||
return this._defaultHeaders;
|
||||
}
|
||||
|
||||
get basePath() {
|
||||
return this._basePath;
|
||||
}
|
||||
|
||||
public setDefaultAuthentication(auth: Authentication) {
|
||||
this.authentications.default = auth;
|
||||
}
|
||||
|
||||
public setApiKey(key: MetricRouterApiApiKeys, value: string) {
|
||||
(this.authentications as any)[MetricRouterApiApiKeys[key]].apiKey = value;
|
||||
}
|
||||
|
||||
set username(username: string) {
|
||||
this.authentications.BasicAuth.username = username;
|
||||
}
|
||||
|
||||
set password(password: string) {
|
||||
this.authentications.BasicAuth.password = password;
|
||||
}
|
||||
|
||||
set accessToken(accessToken: string | (() => string)) {
|
||||
this.authentications.BearerAuth.accessToken = accessToken;
|
||||
}
|
||||
|
||||
public addInterceptor(interceptor: Interceptor) {
|
||||
this.interceptors.push(interceptor);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public async apiV1MetricsPost (options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body: object; }> {
|
||||
const localVarPath = this.basePath + '/api/v1/metrics';
|
||||
let localVarQueryParameters: any = {};
|
||||
let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
|
||||
const produces = ['application/json'];
|
||||
// give precedence to 'application/json'
|
||||
if (produces.indexOf('application/json') >= 0) {
|
||||
localVarHeaderParams.Accept = 'application/json';
|
||||
} else {
|
||||
localVarHeaderParams.Accept = produces.join(',');
|
||||
}
|
||||
let localVarFormParams: any = {};
|
||||
|
||||
(<any>Object).assign(localVarHeaderParams, options.headers);
|
||||
|
||||
let localVarUseFormData = false;
|
||||
|
||||
let localVarRequestOptions: localVarRequest.Options = {
|
||||
method: 'POST',
|
||||
qs: localVarQueryParameters,
|
||||
headers: localVarHeaderParams,
|
||||
uri: localVarPath,
|
||||
useQuerystring: this._useQuerystring,
|
||||
json: true,
|
||||
};
|
||||
|
||||
let authenticationPromise = Promise.resolve();
|
||||
if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
if (this.authentications.BearerAuth.accessToken) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));
|
||||
|
||||
let interceptorPromise = authenticationPromise;
|
||||
for (const interceptor of this.interceptors) {
|
||||
interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
|
||||
}
|
||||
|
||||
return interceptorPromise.then(() => {
|
||||
if (Object.keys(localVarFormParams).length) {
|
||||
if (localVarUseFormData) {
|
||||
(<any>localVarRequestOptions).formData = localVarFormParams;
|
||||
} else {
|
||||
localVarRequestOptions.form = localVarFormParams;
|
||||
}
|
||||
}
|
||||
return new Promise<{ response: http.IncomingMessage; body: object; }>((resolve, reject) => {
|
||||
localVarRequest(localVarRequestOptions, (error, response, body) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
body = ObjectSerializer.deserialize(body, "object");
|
||||
if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
|
||||
resolve({ response: response, body: body });
|
||||
} else {
|
||||
reject(new HttpError(response, body, response.statusCode));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,236 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
import localVarRequest = require('request');
|
||||
import http = require('http');
|
||||
|
||||
/* tslint:disable:no-unused-locals */
|
||||
import { ClusterPatchModel } from '../model/clusterPatchModel';
|
||||
import { DuskyObjectModelsOperatorStatus } from '../model/duskyObjectModelsOperatorStatus';
|
||||
import { Authentication, HttpBasicAuth, HttpBearerAuth, Interceptor, ObjectSerializer, VoidAuth } from '../model/models';
|
||||
import { HttpError } from './apis';
|
||||
|
||||
|
||||
|
||||
let defaultBasePath = 'https://10.135.16.138:30080';
|
||||
|
||||
// ===============================================
|
||||
// This file is autogenerated - Please do not edit
|
||||
// ===============================================
|
||||
|
||||
export enum OperatorRouterApiApiKeys {
|
||||
}
|
||||
|
||||
export class OperatorRouterApi {
|
||||
protected _basePath = defaultBasePath;
|
||||
protected _defaultHeaders : any = {};
|
||||
protected _useQuerystring : boolean = false;
|
||||
|
||||
protected authentications = {
|
||||
'default': <Authentication>new VoidAuth(),
|
||||
'BasicAuth': new HttpBasicAuth(),
|
||||
'BearerAuth': new HttpBearerAuth(),
|
||||
}
|
||||
|
||||
protected interceptors: Interceptor[] = [];
|
||||
|
||||
constructor(basePath?: string);
|
||||
constructor(username: string, password: string, basePath?: string);
|
||||
constructor(basePathOrUsername: string, password?: string, basePath?: string) {
|
||||
if (password) {
|
||||
this.username = basePathOrUsername;
|
||||
this.password = password
|
||||
if (basePath) {
|
||||
this.basePath = basePath;
|
||||
}
|
||||
} else {
|
||||
if (basePathOrUsername) {
|
||||
this.basePath = basePathOrUsername
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set useQuerystring(value: boolean) {
|
||||
this._useQuerystring = value;
|
||||
}
|
||||
|
||||
set basePath(basePath: string) {
|
||||
this._basePath = basePath;
|
||||
}
|
||||
|
||||
set defaultHeaders(defaultHeaders: any) {
|
||||
this._defaultHeaders = defaultHeaders;
|
||||
}
|
||||
|
||||
get defaultHeaders() {
|
||||
return this._defaultHeaders;
|
||||
}
|
||||
|
||||
get basePath() {
|
||||
return this._basePath;
|
||||
}
|
||||
|
||||
public setDefaultAuthentication(auth: Authentication) {
|
||||
this.authentications.default = auth;
|
||||
}
|
||||
|
||||
public setApiKey(key: OperatorRouterApiApiKeys, value: string) {
|
||||
(this.authentications as any)[OperatorRouterApiApiKeys[key]].apiKey = value;
|
||||
}
|
||||
|
||||
set username(username: string) {
|
||||
this.authentications.BasicAuth.username = username;
|
||||
}
|
||||
|
||||
set password(password: string) {
|
||||
this.authentications.BasicAuth.password = password;
|
||||
}
|
||||
|
||||
set accessToken(accessToken: string | (() => string)) {
|
||||
this.authentications.BearerAuth.accessToken = accessToken;
|
||||
}
|
||||
|
||||
public addInterceptor(interceptor: Interceptor) {
|
||||
this.interceptors.push(interceptor);
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @summary Gets the status of the Dusky operator.
|
||||
*/
|
||||
public async getDuskyOperatorStatus (options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsOperatorStatus; }> {
|
||||
const localVarPath = this.basePath + '/dusky/operator/status';
|
||||
let localVarQueryParameters: any = {};
|
||||
let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
|
||||
const produces = ['application/json'];
|
||||
// give precedence to 'application/json'
|
||||
if (produces.indexOf('application/json') >= 0) {
|
||||
localVarHeaderParams.Accept = 'application/json';
|
||||
} else {
|
||||
localVarHeaderParams.Accept = produces.join(',');
|
||||
}
|
||||
let localVarFormParams: any = {};
|
||||
|
||||
(<any>Object).assign(localVarHeaderParams, options.headers);
|
||||
|
||||
let localVarUseFormData = false;
|
||||
|
||||
let localVarRequestOptions: localVarRequest.Options = {
|
||||
method: 'GET',
|
||||
qs: localVarQueryParameters,
|
||||
headers: localVarHeaderParams,
|
||||
uri: localVarPath,
|
||||
useQuerystring: this._useQuerystring,
|
||||
json: true,
|
||||
};
|
||||
|
||||
let authenticationPromise = Promise.resolve();
|
||||
if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
if (this.authentications.BearerAuth.accessToken) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));
|
||||
|
||||
let interceptorPromise = authenticationPromise;
|
||||
for (const interceptor of this.interceptors) {
|
||||
interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
|
||||
}
|
||||
|
||||
return interceptorPromise.then(() => {
|
||||
if (Object.keys(localVarFormParams).length) {
|
||||
if (localVarUseFormData) {
|
||||
(<any>localVarRequestOptions).formData = localVarFormParams;
|
||||
} else {
|
||||
localVarRequestOptions.form = localVarFormParams;
|
||||
}
|
||||
}
|
||||
return new Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsOperatorStatus; }>((resolve, reject) => {
|
||||
localVarRequest(localVarRequestOptions, (error, response, body) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
body = ObjectSerializer.deserialize(body, "DuskyObjectModelsOperatorStatus");
|
||||
if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
|
||||
resolve({ response: response, body: body });
|
||||
} else {
|
||||
reject(new HttpError(response, body, response.statusCode));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
/**
|
||||
*
|
||||
* @summary Upgrades the Dusky operator.
|
||||
* @param clusterPatchModel
|
||||
*/
|
||||
public async upgradeDuskyOperator (clusterPatchModel?: ClusterPatchModel, options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body?: any; }> {
|
||||
const localVarPath = this.basePath + '/dusky/operator/upgrade';
|
||||
let localVarQueryParameters: any = {};
|
||||
let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
|
||||
let localVarFormParams: any = {};
|
||||
|
||||
(<any>Object).assign(localVarHeaderParams, options.headers);
|
||||
|
||||
let localVarUseFormData = false;
|
||||
|
||||
let localVarRequestOptions: localVarRequest.Options = {
|
||||
method: 'PATCH',
|
||||
qs: localVarQueryParameters,
|
||||
headers: localVarHeaderParams,
|
||||
uri: localVarPath,
|
||||
useQuerystring: this._useQuerystring,
|
||||
json: true,
|
||||
body: ObjectSerializer.serialize(clusterPatchModel, "ClusterPatchModel")
|
||||
};
|
||||
|
||||
let authenticationPromise = Promise.resolve();
|
||||
if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
if (this.authentications.BearerAuth.accessToken) {
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
|
||||
}
|
||||
authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));
|
||||
|
||||
let interceptorPromise = authenticationPromise;
|
||||
for (const interceptor of this.interceptors) {
|
||||
interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
|
||||
}
|
||||
|
||||
return interceptorPromise.then(() => {
|
||||
if (Object.keys(localVarFormParams).length) {
|
||||
if (localVarUseFormData) {
|
||||
(<any>localVarRequestOptions).formData = localVarFormParams;
|
||||
} else {
|
||||
localVarRequestOptions.form = localVarFormParams;
|
||||
}
|
||||
}
|
||||
return new Promise<{ response: http.IncomingMessage; body?: any; }>((resolve, reject) => {
|
||||
localVarRequest(localVarRequestOptions, (error, response, body) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
|
||||
resolve({ response: response, body: body });
|
||||
} else {
|
||||
reject(new HttpError(response, body, response.statusCode));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,36 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
export class ClusterPatchModel {
|
||||
'targetVersion': string;
|
||||
'targetRepository'?: string;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "targetVersion",
|
||||
"baseName": "targetVersion",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "targetRepository",
|
||||
"baseName": "targetRepository",
|
||||
"type": "string"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return ClusterPatchModel.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
import { DuskyObjectModelsError } from './duskyObjectModelsError';
|
||||
|
||||
export class DuskyObjectModelsBackup {
|
||||
'error'?: DuskyObjectModelsError;
|
||||
'id'?: string;
|
||||
'name'?: string;
|
||||
'timestamp'?: Date;
|
||||
'size'?: number | null;
|
||||
'state'?: string;
|
||||
'tiers'?: number | null;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "error",
|
||||
"baseName": "error",
|
||||
"type": "DuskyObjectModelsError"
|
||||
},
|
||||
{
|
||||
"name": "id",
|
||||
"baseName": "id",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "name",
|
||||
"baseName": "name",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "timestamp",
|
||||
"baseName": "timestamp",
|
||||
"type": "Date"
|
||||
},
|
||||
{
|
||||
"name": "size",
|
||||
"baseName": "size",
|
||||
"type": "number"
|
||||
},
|
||||
{
|
||||
"name": "state",
|
||||
"baseName": "state",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "tiers",
|
||||
"baseName": "tiers",
|
||||
"type": "number"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsBackup.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
export class DuskyObjectModelsBackupCopySchedule {
|
||||
'interval'?: string;
|
||||
'offset'?: string;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "interval",
|
||||
"baseName": "interval",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "offset",
|
||||
"baseName": "offset",
|
||||
"type": "string"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsBackupCopySchedule.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
export class DuskyObjectModelsBackupRetention {
|
||||
'maximums'?: Array<string>;
|
||||
'minimums'?: Array<string>;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "maximums",
|
||||
"baseName": "maximums",
|
||||
"type": "Array<string>"
|
||||
},
|
||||
{
|
||||
"name": "minimums",
|
||||
"baseName": "minimums",
|
||||
"type": "Array<string>"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsBackupRetention.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
import { DuskyObjectModelsBackupCopySchedule } from './duskyObjectModelsBackupCopySchedule';
|
||||
import { DuskyObjectModelsBackupTier } from './duskyObjectModelsBackupTier';
|
||||
|
||||
export class DuskyObjectModelsBackupSpec {
|
||||
'deltaMinutes'?: number | null;
|
||||
'fullMinutes'?: number | null;
|
||||
'copySchedule'?: DuskyObjectModelsBackupCopySchedule;
|
||||
'tiers'?: Array<DuskyObjectModelsBackupTier>;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "deltaMinutes",
|
||||
"baseName": "deltaMinutes",
|
||||
"type": "number"
|
||||
},
|
||||
{
|
||||
"name": "fullMinutes",
|
||||
"baseName": "fullMinutes",
|
||||
"type": "number"
|
||||
},
|
||||
{
|
||||
"name": "copySchedule",
|
||||
"baseName": "copySchedule",
|
||||
"type": "DuskyObjectModelsBackupCopySchedule"
|
||||
},
|
||||
{
|
||||
"name": "tiers",
|
||||
"baseName": "tiers",
|
||||
"type": "Array<DuskyObjectModelsBackupTier>"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsBackupSpec.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
import { DuskyObjectModelsRetentionSpec } from './duskyObjectModelsRetentionSpec';
|
||||
import { DuskyObjectModelsStorageSpec } from './duskyObjectModelsStorageSpec';
|
||||
|
||||
export class DuskyObjectModelsBackupTier {
|
||||
'retention'?: DuskyObjectModelsRetentionSpec;
|
||||
'storage'?: DuskyObjectModelsStorageSpec;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "retention",
|
||||
"baseName": "retention",
|
||||
"type": "DuskyObjectModelsRetentionSpec"
|
||||
},
|
||||
{
|
||||
"name": "storage",
|
||||
"baseName": "storage",
|
||||
"type": "DuskyObjectModelsStorageSpec"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsBackupTier.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
export class DuskyObjectModelsDatabase {
|
||||
'name'?: string;
|
||||
'owner'?: string;
|
||||
'sharded'?: boolean | null;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "name",
|
||||
"baseName": "name",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "owner",
|
||||
"baseName": "owner",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "sharded",
|
||||
"baseName": "sharded",
|
||||
"type": "boolean"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsDatabase.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
import { DuskyObjectModelsDatabaseServiceArcPayload } from './duskyObjectModelsDatabaseServiceArcPayload';
|
||||
import { DuskyObjectModelsDatabaseServiceSpec } from './duskyObjectModelsDatabaseServiceSpec';
|
||||
import { DuskyObjectModelsDatabaseServiceStatus } from './duskyObjectModelsDatabaseServiceStatus';
|
||||
import { V1ObjectMeta } from './v1ObjectMeta';
|
||||
|
||||
export class DuskyObjectModelsDatabaseService {
|
||||
'apiVersion'?: string;
|
||||
'kind'?: string;
|
||||
'metadata'?: V1ObjectMeta;
|
||||
'spec'?: DuskyObjectModelsDatabaseServiceSpec;
|
||||
'status'?: DuskyObjectModelsDatabaseServiceStatus;
|
||||
'arc'?: DuskyObjectModelsDatabaseServiceArcPayload;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "apiVersion",
|
||||
"baseName": "apiVersion",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "kind",
|
||||
"baseName": "kind",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "metadata",
|
||||
"baseName": "metadata",
|
||||
"type": "V1ObjectMeta"
|
||||
},
|
||||
{
|
||||
"name": "spec",
|
||||
"baseName": "spec",
|
||||
"type": "DuskyObjectModelsDatabaseServiceSpec"
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"baseName": "status",
|
||||
"type": "DuskyObjectModelsDatabaseServiceStatus"
|
||||
},
|
||||
{
|
||||
"name": "arc",
|
||||
"baseName": "arc",
|
||||
"type": "DuskyObjectModelsDatabaseServiceArcPayload"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsDatabaseService.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
export class DuskyObjectModelsDatabaseServiceArcPayload {
|
||||
'servicePassword'?: string;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "servicePassword",
|
||||
"baseName": "servicePassword",
|
||||
"type": "string"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsDatabaseServiceArcPayload.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
/**
|
||||
* Dusky API
|
||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
||||
*
|
||||
* The version of the OpenAPI document: v1
|
||||
*
|
||||
*
|
||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
||||
* https://openapi-generator.tech
|
||||
* Do not edit the class manually.
|
||||
*/
|
||||
|
||||
|
||||
export class DuskyObjectModelsDatabaseServiceCondition {
|
||||
'type'?: string;
|
||||
'status'?: string;
|
||||
'lastTransitionTime'?: Date | null;
|
||||
'reason'?: string;
|
||||
'message'?: string;
|
||||
|
||||
static discriminator: string | undefined = undefined;
|
||||
|
||||
static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
|
||||
{
|
||||
"name": "type",
|
||||
"baseName": "type",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"baseName": "status",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "lastTransitionTime",
|
||||
"baseName": "lastTransitionTime",
|
||||
"type": "Date"
|
||||
},
|
||||
{
|
||||
"name": "reason",
|
||||
"baseName": "reason",
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"name": "message",
|
||||
"baseName": "message",
|
||||
"type": "string"
|
||||
} ];
|
||||
|
||||
static getAttributeTypeMap() {
|
||||
return DuskyObjectModelsDatabaseServiceCondition.attributeTypeMap;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,50 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsDatabaseService } from './duskyObjectModelsDatabaseService';
import { V1ListMeta } from './v1ListMeta';

export class DuskyObjectModelsDatabaseServiceList {
    'apiVersion'?: string;
    'kind'?: string;
    'metadata'?: V1ListMeta;
    'items'?: Array<DuskyObjectModelsDatabaseService>;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "apiVersion",
            "baseName": "apiVersion",
            "type": "string"
        },
        {
            "name": "kind",
            "baseName": "kind",
            "type": "string"
        },
        {
            "name": "metadata",
            "baseName": "metadata",
            "type": "V1ListMeta"
        },
        {
            "name": "items",
            "baseName": "items",
            "type": "Array<DuskyObjectModelsDatabaseService>"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsDatabaseServiceList.attributeTypeMap;
    }
}

@@ -1,87 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsBackupSpec } from './duskyObjectModelsBackupSpec';
import { DuskyObjectModelsDockerSpec } from './duskyObjectModelsDockerSpec';
import { DuskyObjectModelsEngineSpec } from './duskyObjectModelsEngineSpec';
import { DuskyObjectModelsMonitoringSpec } from './duskyObjectModelsMonitoringSpec';
import { DuskyObjectModelsScaleSpec } from './duskyObjectModelsScaleSpec';
import { DuskyObjectModelsSchedulingSpec } from './duskyObjectModelsSchedulingSpec';
import { DuskyObjectModelsSecuritySpec } from './duskyObjectModelsSecuritySpec';
import { DuskyObjectModelsServiceSpec } from './duskyObjectModelsServiceSpec';
import { DuskyObjectModelsStorageSpec } from './duskyObjectModelsStorageSpec';

export class DuskyObjectModelsDatabaseServiceSpec {
    'backups'?: DuskyObjectModelsBackupSpec;
    'docker'?: DuskyObjectModelsDockerSpec;
    'engine'?: DuskyObjectModelsEngineSpec;
    'monitoring'?: DuskyObjectModelsMonitoringSpec;
    'scale'?: DuskyObjectModelsScaleSpec;
    'scheduling'?: DuskyObjectModelsSchedulingSpec;
    'security'?: DuskyObjectModelsSecuritySpec;
    'service'?: DuskyObjectModelsServiceSpec;
    'storage'?: DuskyObjectModelsStorageSpec;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "backups",
            "baseName": "backups",
            "type": "DuskyObjectModelsBackupSpec"
        },
        {
            "name": "docker",
            "baseName": "docker",
            "type": "DuskyObjectModelsDockerSpec"
        },
        {
            "name": "engine",
            "baseName": "engine",
            "type": "DuskyObjectModelsEngineSpec"
        },
        {
            "name": "monitoring",
            "baseName": "monitoring",
            "type": "DuskyObjectModelsMonitoringSpec"
        },
        {
            "name": "scale",
            "baseName": "scale",
            "type": "DuskyObjectModelsScaleSpec"
        },
        {
            "name": "scheduling",
            "baseName": "scheduling",
            "type": "DuskyObjectModelsSchedulingSpec"
        },
        {
            "name": "security",
            "baseName": "security",
            "type": "DuskyObjectModelsSecuritySpec"
        },
        {
            "name": "service",
            "baseName": "service",
            "type": "DuskyObjectModelsServiceSpec"
        },
        {
            "name": "storage",
            "baseName": "storage",
            "type": "DuskyObjectModelsStorageSpec"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsDatabaseServiceSpec.attributeTypeMap;
    }
}

@@ -1,97 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsDatabaseServiceCondition } from './duskyObjectModelsDatabaseServiceCondition';

export class DuskyObjectModelsDatabaseServiceStatus {
    'state'?: string;
    'appliedGeneration'?: number | null;
    'conditions'?: Array<DuskyObjectModelsDatabaseServiceCondition>;
    'internalIP'?: string;
    'internalPort'?: number | null;
    'externalIP'?: string;
    'externalPort'?: number | null;
    'podsFailed'?: number;
    'podsPending'?: number;
    'podsRunning'?: number;
    'podsUnknown'?: number;
    'restartRequired'?: boolean;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "state",
            "baseName": "state",
            "type": "string"
        },
        {
            "name": "appliedGeneration",
            "baseName": "appliedGeneration",
            "type": "number"
        },
        {
            "name": "conditions",
            "baseName": "conditions",
            "type": "Array<DuskyObjectModelsDatabaseServiceCondition>"
        },
        {
            "name": "internalIP",
            "baseName": "internalIP",
            "type": "string"
        },
        {
            "name": "internalPort",
            "baseName": "internalPort",
            "type": "number"
        },
        {
            "name": "externalIP",
            "baseName": "externalIP",
            "type": "string"
        },
        {
            "name": "externalPort",
            "baseName": "externalPort",
            "type": "number"
        },
        {
            "name": "podsFailed",
            "baseName": "podsFailed",
            "type": "number"
        },
        {
            "name": "podsPending",
            "baseName": "podsPending",
            "type": "number"
        },
        {
            "name": "podsRunning",
            "baseName": "podsRunning",
            "type": "number"
        },
        {
            "name": "podsUnknown",
            "baseName": "podsUnknown",
            "type": "number"
        },
        {
            "name": "restartRequired",
            "baseName": "restartRequired",
            "type": "boolean"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsDatabaseServiceStatus.attributeTypeMap;
    }
}

@@ -1,54 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsDatabaseServiceVolumeStatus {
    'id'?: string;
    'count'?: number;
    'totalSize'?: number;
    'storageClass'?: string;
    'state'?: string;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "id",
            "baseName": "id",
            "type": "string"
        },
        {
            "name": "count",
            "baseName": "count",
            "type": "number"
        },
        {
            "name": "totalSize",
            "baseName": "totalSize",
            "type": "number"
        },
        {
            "name": "storageClass",
            "baseName": "storageClass",
            "type": "string"
        },
        {
            "name": "state",
            "baseName": "state",
            "type": "string"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsDatabaseServiceVolumeStatus.attributeTypeMap;
    }
}

@@ -1,61 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsDockerSpec {
    'registry'?: string;
    'repository'?: string;
    'imagePullPolicy'?: DuskyObjectModelsDockerSpec.ImagePullPolicyEnum;
    'imagePullSecret'?: string;
    'imageTagSuffix'?: string;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "registry",
            "baseName": "registry",
            "type": "string"
        },
        {
            "name": "repository",
            "baseName": "repository",
            "type": "string"
        },
        {
            "name": "imagePullPolicy",
            "baseName": "imagePullPolicy",
            "type": "DuskyObjectModelsDockerSpec.ImagePullPolicyEnum"
        },
        {
            "name": "imagePullSecret",
            "baseName": "imagePullSecret",
            "type": "string"
        },
        {
            "name": "imageTagSuffix",
            "baseName": "imageTagSuffix",
            "type": "string"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsDockerSpec.attributeTypeMap;
    }
}

export namespace DuskyObjectModelsDockerSpec {
    export enum ImagePullPolicyEnum {
        IfNotPresent = <any> 'IfNotPresent',
        Always = <any> 'Always',
        Never = <any> 'Never'
    }
}
@@ -1,57 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsDuskyValidationMessage {
    'type'?: DuskyObjectModelsDuskyValidationMessage.TypeEnum;
    'code'?: DuskyObjectModelsDuskyValidationMessage.CodeEnum;
    'message'?: string;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "type",
            "baseName": "type",
            "type": "DuskyObjectModelsDuskyValidationMessage.TypeEnum"
        },
        {
            "name": "code",
            "baseName": "code",
            "type": "DuskyObjectModelsDuskyValidationMessage.CodeEnum"
        },
        {
            "name": "message",
            "baseName": "message",
            "type": "string"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsDuskyValidationMessage.attributeTypeMap;
    }
}

export namespace DuskyObjectModelsDuskyValidationMessage {
    export enum TypeEnum {
        Info = <any> 'Info',
        Warning = <any> 'Warning',
        Fail = <any> 'Fail'
    }
    export enum CodeEnum {
        InvalidInput = <any> 'InvalidInput',
        ResourceExists = <any> 'ResourceExists',
        ResourceNotFound = <any> 'ResourceNotFound',
        ResourceNotRunning = <any> 'ResourceNotRunning',
        AvailableResources = <any> 'AvailableResources',
        InsufficientResources = <any> 'InsufficientResources'
    }
}
@@ -1,31 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsDuskyValidationMessage } from './duskyObjectModelsDuskyValidationMessage';

export class DuskyObjectModelsDuskyValidationResult {
    'messages'?: Array<DuskyObjectModelsDuskyValidationMessage>;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "messages",
            "baseName": "messages",
            "type": "Array<DuskyObjectModelsDuskyValidationMessage>"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsDuskyValidationResult.attributeTypeMap;
    }
}

@@ -1,36 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsEngineSettings {
    '_default'?: { [key: string]: string; };
    'roles'?: { [key: string]: { [key: string]: string; }; };

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "_default",
            "baseName": "default",
            "type": "{ [key: string]: string; }"
        },
        {
            "name": "roles",
            "baseName": "roles",
            "type": "{ [key: string]: { [key: string]: string; }; }"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsEngineSettings.attributeTypeMap;
    }
}

@@ -1,50 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsEngineSettings } from './duskyObjectModelsEngineSettings';
import { DuskyObjectModelsPluginSpec } from './duskyObjectModelsPluginSpec';

export class DuskyObjectModelsEngineSpec {
    'type'?: string;
    'version'?: number | null;
    'settings'?: DuskyObjectModelsEngineSettings;
    'plugins'?: Array<DuskyObjectModelsPluginSpec>;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "type",
            "baseName": "type",
            "type": "string"
        },
        {
            "name": "version",
            "baseName": "version",
            "type": "number"
        },
        {
            "name": "settings",
            "baseName": "settings",
            "type": "DuskyObjectModelsEngineSettings"
        },
        {
            "name": "plugins",
            "baseName": "plugins",
            "type": "Array<DuskyObjectModelsPluginSpec>"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsEngineSpec.attributeTypeMap;
    }
}

@@ -1,49 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsErrorDetails } from './duskyObjectModelsErrorDetails';

export class DuskyObjectModelsError {
    'reason'?: string;
    'message'?: string;
    'details'?: DuskyObjectModelsErrorDetails;
    'code'?: number | null;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "reason",
            "baseName": "reason",
            "type": "string"
        },
        {
            "name": "message",
            "baseName": "message",
            "type": "string"
        },
        {
            "name": "details",
            "baseName": "details",
            "type": "DuskyObjectModelsErrorDetails"
        },
        {
            "name": "code",
            "baseName": "code",
            "type": "number"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsError.attributeTypeMap;
    }
}

@@ -1,42 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsErrorDetails {
    'reason'?: string;
    'message'?: string;
    'details'?: DuskyObjectModelsErrorDetails;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "reason",
            "baseName": "reason",
            "type": "string"
        },
        {
            "name": "message",
            "baseName": "message",
            "type": "string"
        },
        {
            "name": "details",
            "baseName": "details",
            "type": "DuskyObjectModelsErrorDetails"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsErrorDetails.attributeTypeMap;
    }
}

@@ -1,31 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsTINASpec } from './duskyObjectModelsTINASpec';

export class DuskyObjectModelsMonitoringSpec {
    'tina'?: DuskyObjectModelsTINASpec;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "tina",
            "baseName": "tina",
            "type": "DuskyObjectModelsTINASpec"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsMonitoringSpec.attributeTypeMap;
    }
}

@@ -1,31 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsReplicaStatus } from './duskyObjectModelsReplicaStatus';

export class DuskyObjectModelsOperatorStatus {
    'statuses'?: { [key: string]: DuskyObjectModelsReplicaStatus; };

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "statuses",
            "baseName": "statuses",
            "type": "{ [key: string]: DuskyObjectModelsReplicaStatus; }"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsOperatorStatus.attributeTypeMap;
    }
}

@@ -1,30 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsPluginSpec {
    'name'?: string;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "name",
            "baseName": "name",
            "type": "string"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsPluginSpec.attributeTypeMap;
    }
}

@@ -1,36 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsReplicaStatus {
    'replicas'?: number;
    'readyReplicas'?: number;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "replicas",
            "baseName": "replicas",
            "type": "number"
        },
        {
            "name": "readyReplicas",
            "baseName": "readyReplicas",
            "type": "number"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsReplicaStatus.attributeTypeMap;
    }
}

@@ -1,67 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsError } from './duskyObjectModelsError';

export class DuskyObjectModelsRestoreStatus {
    'backupId'?: string;
    'endTime'?: Date;
    'error'?: DuskyObjectModelsError;
    'fromServer'?: string;
    'restoreTime'?: Date;
    'startTime'?: Date;
    'state'?: string;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "backupId",
            "baseName": "backupId",
            "type": "string"
        },
        {
            "name": "endTime",
            "baseName": "endTime",
            "type": "Date"
        },
        {
            "name": "error",
            "baseName": "error",
            "type": "DuskyObjectModelsError"
        },
        {
            "name": "fromServer",
            "baseName": "fromServer",
            "type": "string"
        },
        {
            "name": "restoreTime",
            "baseName": "restoreTime",
            "type": "Date"
        },
        {
            "name": "startTime",
            "baseName": "startTime",
            "type": "Date"
        },
        {
            "name": "state",
            "baseName": "state",
            "type": "string"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsRestoreStatus.attributeTypeMap;
    }
}

@@ -1,36 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsRetentionSpec {
    'maximums'?: Array<string>;
    'minimums'?: Array<string>;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "maximums",
            "baseName": "maximums",
            "type": "Array<string>"
        },
        {
            "name": "minimums",
            "baseName": "minimums",
            "type": "Array<string>"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsRetentionSpec.attributeTypeMap;
    }
}

@@ -1,30 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsRole {
    'name'?: string;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "name",
            "baseName": "name",
            "type": "string"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsRole.attributeTypeMap;
    }
}

@@ -1,36 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsScaleSpec {
    'replicas'?: number | null;
    'shards'?: number | null;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "replicas",
            "baseName": "replicas",
            "type": "number"
        },
        {
            "name": "shards",
            "baseName": "shards",
            "type": "number"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsScaleSpec.attributeTypeMap;
    }
}

@@ -1,44 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { V1Affinity } from './v1Affinity';
import { V1ResourceRequirements } from './v1ResourceRequirements';

export class DuskyObjectModelsSchedulingOptions {
    'affinity'?: V1Affinity;
    'nodeSelector'?: { [key: string]: string; };
    'resources'?: V1ResourceRequirements;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "affinity",
            "baseName": "affinity",
            "type": "V1Affinity"
        },
        {
            "name": "nodeSelector",
            "baseName": "nodeSelector",
            "type": "{ [key: string]: string; }"
        },
        {
            "name": "resources",
            "baseName": "resources",
            "type": "V1ResourceRequirements"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsSchedulingOptions.attributeTypeMap;
    }
}

@@ -1,43 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { DuskyObjectModelsSchedulingOptions } from './duskyObjectModelsSchedulingOptions';

export class DuskyObjectModelsSchedulingSpec {
    '_default'?: DuskyObjectModelsSchedulingOptions;
    'roles'?: { [key: string]: DuskyObjectModelsSchedulingOptions; };
    'availabilityZones'?: { [key: string]: DuskyObjectModelsSchedulingOptions; };

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "_default",
            "baseName": "default",
            "type": "DuskyObjectModelsSchedulingOptions"
        },
        {
            "name": "roles",
            "baseName": "roles",
            "type": "{ [key: string]: DuskyObjectModelsSchedulingOptions; }"
        },
        {
            "name": "availabilityZones",
            "baseName": "availability-zones",
            "type": "{ [key: string]: DuskyObjectModelsSchedulingOptions; }"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsSchedulingSpec.attributeTypeMap;
    }
}

@@ -1,31 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

import { V1PodSecurityContext } from './v1PodSecurityContext';

export class DuskyObjectModelsSecuritySpec {
    'context'?: V1PodSecurityContext;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "context",
            "baseName": "context",
            "type": "V1PodSecurityContext"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsSecuritySpec.attributeTypeMap;
    }
}

@@ -1,42 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsServiceSpec {
    'port'?: number | null;
    'type'?: string;
    'externalIPs'?: Array<string>;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "port",
            "baseName": "port",
            "type": "number"
        },
        {
            "name": "type",
            "baseName": "type",
            "type": "string"
        },
        {
            "name": "externalIPs",
            "baseName": "externalIPs",
            "type": "Array<string>"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsServiceSpec.attributeTypeMap;
    }
}

@@ -1,48 +0,0 @@
/**
 * Dusky API
 * No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
 *
 * The version of the OpenAPI document: v1
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

export class DuskyObjectModelsStorageSpec {
    'storageClassName'?: string;
    'volumeClaimName'?: string;
    'volumeSize'?: string;
    'matchLabels'?: { [key: string]: string; };

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: Array<{name: string, baseName: string, type: string}> = [
        {
            "name": "storageClassName",
            "baseName": "storageClassName",
            "type": "string"
        },
        {
            "name": "volumeClaimName",
            "baseName": "volumeClaimName",
            "type": "string"
        },
        {
            "name": "volumeSize",
            "baseName": "volumeSize",
            "type": "string"
        },
        {
            "name": "matchLabels",
            "baseName": "matchLabels",
            "type": "{ [key: string]: string; }"
        } ];

    static getAttributeTypeMap() {
        return DuskyObjectModelsStorageSpec.attributeTypeMap;
    }
}

Some files were not shown because too many files have changed in this diff.
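All of the deleted models above follow the same generated pattern: optional instance properties plus a static attributeTypeMap that records each property's TypeScript name, its wire name (baseName), and its declared type, with getAttributeTypeMap() exposing the map to the client's serializer. The following is a minimal, self-contained TypeScript sketch of how such a map can drive serialization; the local PluginSpec stand-in and the toWireFormat helper are illustrative assumptions, not part of the generated Dusky client.

// Minimal sketch (assumption): a local stand-in mirroring the generated model shape.
interface AttributeMapping { name: string; baseName: string; type: string; }

class PluginSpec {
    'name'?: string;

    static discriminator: string | undefined = undefined;

    static attributeTypeMap: AttributeMapping[] = [
        { "name": "name", "baseName": "name", "type": "string" }
    ];

    static getAttributeTypeMap(): AttributeMapping[] {
        return PluginSpec.attributeTypeMap;
    }
}

// Hypothetical helper: rename instance properties to their wire names using the
// attributeTypeMap, the way a generated client's serializer typically would.
function toWireFormat(instance: PluginSpec): Record<string, unknown> {
    const source = instance as unknown as Record<string, unknown>;
    const payload: Record<string, unknown> = {};
    for (const attr of PluginSpec.getAttributeTypeMap()) {
        if (source[attr.name] !== undefined) {
            payload[attr.baseName] = source[attr.name];
        }
    }
    return payload;
}

const spec = new PluginSpec();
spec.name = 'backup-plugin';
console.log(JSON.stringify(toWireFormat(spec))); // {"name":"backup-plugin"}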