Mirror of https://github.com/ckaczor/azuredatastudio.git (synced 2026-02-17 11:01:37 -05:00)
Compare commits
499 Commits
Commit SHA1s in this comparison:

```
4d4917d328 edea311757 e7eacc32c0 12f50cca8d 88a4dba695 634ea0ab6a cd0b5cbc7a 0b7de6608a 8c6bd8c857 def5775e00
91da9aea98 1c898402f8 54210cf479 cbcea87a82 2d50d2c5d1 7448c6c32c 3196cf5be0 666726a5fa 818a3d204e d45758b4f4
1eda5eb33a 6ac5b7c8a5 397354ebc3 2a7b90fd70 1554e51932 d060f1b9a0 c8632c255a 2ac03b9ef4 a0d89449cc b03a914934
d3bcb942f5 84822b23ac 40ca82c63d f4a6b42b3a 7ad631d4a5 4b7baa652f 148e802f4a 7bb4d00073 3b20e8a61c 6e0a4f27de
21ddf30a7b 2ade45858e e0b1a3460d f44c714cf2 f72e12fe32 0b6fb504dc 145b2491df aa30b52d03 6edcbbb738 815c61315c
172a044ba7 2a81a0a70f 749989cd0b 8d42182db8 bb2a1db6e8 c579ecb111 175d46d508 870ff39527 ddd0b8b4bc c7cca5afea
e63e4f0901 ddc8c00090 34170e7741 f5e4b32d01 28fef53731 438bc67072 472c9decfa 6cd2d6c942 39e1181c5d c898b50b94
271fe62344 c18a54bc1d b57bf53b67 c699179e15 690937443c 698b79f0f3 798af5fc2d af55dcfb42 76781d6cf4 99e3da5b48
6b657259a5 cbe2ba0901 b3d99117ca 32ac586431 bd4676ac8c 536628603e ea7fe08b98 6c920f6d54 a2f7136728 a082c1e478
32a6385fef 468119caa4 abdf575885 07df54ee61 850422164c 865d49c2fb b17ce7a531 d64062dfe0 1b0f3af094 fa608f9f80
b32e5f8f25 c37f178e71 3be00b1635 b397150264 8c6a966bb9 b83da2dfa8 b4dd0442c5 d4e8053797 8bbcfff119 d7a6b55f82
3dd74971d8 e076062d4f fbe8e9d9f3 1c9322c0e8 0d49744061 7cd4964f35 689c7ab27e 8cf5e4c9fd 9d766198b5 295aa99e05
3f76d343a5 fa9a38d74a 8b73391845 5e00dcb78c c8db2b60f3 aaa8831a7d e83d3ba57f 8b40e8e4bf 712c6ae5d8 bb35276652
335c6bf544 054583e0de cd6fa08543 a2265f8ccd e4390db779 fe546e3791 415a20f9f7 6c37ac56b7 27fbad5884 bc452ec9be
9225d6d6aa 5174f3b2f6 8877d74034 d1f26844ef fd8f88db4b bfb2c20e0f 6b2c409cff 4af67c9734 06a4b0d1a2 3677a69c76
e51f16fa8d a0ee8b00fb eeb7da4ace b68d504f68 3ec1f7cc2b 7bdb7c328a b175c97dfe f10ac10f6d 64510506e8 e6c265b254
342ff47e51 ba80000e27 f792684763 036faeb06d 04117b2333 7559d8463f 351516f08d b2e06fd440 338beaff29 cb1c8503b0
d7767c7d91 c6f72e6504 da6f800f11 2f571d868b 3015845093 341f7aa7ad 8717a03466 3b18f9f8ec 69527f91b0 17073f9d2a
4f96ac46be 76625012dd dfb40e0159 82d5fe3821 f5fc5c648e 680dc1b5da ac476ba973 7857e8aeb9 d450588e39 e31d563f61
7819d25c95 8c956cdb79 e15ad17967 66da2a46c5 10f6fe2d09 dd77c79090 9fcd85e640 d1b8c15e11 e679d70a4b 31817c5494
9d776863a1 1d4398388c 281592fa97 dccccd0110 5c474d8614 429d8fe584 86357b45b0 42e16b1752 e2c9d3899b 4587fd06c3
f9d34cb18b f97ae9e570 eec6f64d62 20ed569a71 79800902db 1e3c9b722e c2bd11fa9e 791dee1457 2db51ca243 2bc2f7f520
7222c698aa 1c6d7866e8 c810c5a0bb ff45bdd072 3ad39bd0d3 cb30dd1893 bf9fd5a3b8 4c4d2d4463 2d182fcd03 c7ab69d46d
89b935e2df 3ef2650e69 dbb30110ac ff8e451af9 c97f75283b 6550c032ee dfb1d5411e 98774527bc de7cc8ea53 a427606050
656d727854 4184c28ce7 ab7aaf8d2f 09d559e7e0 94b34350a3 bed70ebd09 660c1d6f21 f783a26a56 fcec690546 c6b3b797c5
1e916e93ad 81122538d2 c4f649a849 eb36a275a2 873668a7cf 72f7e8de52 a1c8d4d34a f96a96a60c 2801e59edc 39b6cc193f
49983a6f05 767465edbf f6949d834b f4c7ab29f0 36f758dfca 545a5504e0 b460b7834c c7e4cf7ca4 607447365d f72453fc59
a88669677f 52e7bcdf09 581774d413 34079d1612 08219b2d8e ee0b87544b 729378b2d7 7bc26cc493 a0c03784f2 35e79f7173
9fdb5037bc 3251b26317 50ec75ec57 1c279675c8 10d3a6b2ba bc3527d310 108891ba2e f61ffae15c 82726a9119 53f00bee12
0f6bb683d6 ef8e86a78d daf40393d9 c04823e2d8 846ed9cd26 653b442b60 9389a92896 d9ca879533 45cbaca31f ba85d370d6
5a49b99b8a 0be752b64c 1935ce1adb 2820fb4f15 b910bf2f33 3e0135b6b3 5fb7c879ed e61cc474a3 bd56c49538 18f87ab03c
27b69b36f7 56d2a66ac5 acc58a0d1c 280a9d20f9 830cef06db 23e2a5dd12 97cd9dba9f adc3fde2e4 c1267a9387 825663fd77
288288df03 1dea5f8f7b f633e07ed1 726f0cef0e 253ed55412 c679d5e1f0 362605cea0 2af8982640 753d785076 6ff1e3866b
58d02b76db 572310b906 6dd7c7d0f0 07d4579c19 9652007266 1df74e8c4a b574f2aa32 8bf1859d36 253f7774de 9b7a1d7a26
4965f3de1a 2da0fafcd5 39e43d2401 02afd2dc39 54b24d935c 8e30b9a6e3 c9ec18670c fac642de1e 7bfea07b9b ab85028a94
1cd94518a8 0583e1db8a 5681d6b9e3 b3578d058f 82648fab3e 3c4df5332e b8de69dfac fd5acf7ab1 5de1c10dd1 a91b965a33
a2552c1cc1 253aa78650 2376e7b384 cf9754f627 44416abe6e 5718e6b471 c79cfd709a 34a6200a47 4ec5991a13 5396ed855c
037d638927 62947a3e8a 56c989ac7b 7e74465fb1 94d0e1972e e1a9ed0d1e 3220f8a0b4 28c7188984 49de1f80cf 9780eebb12
bf9646ba98 1ea33d83bf 23e1141484 96ea3d8273 2673eb2c1d 47532f1287 dd22b195bd f47c5dcc75 807a4ae8c4 8bc7079d78
4e07685588 e1235a7346 4a089131fc f8eb203643 c26963e848 cf8283a2b0 4f433772af 95752a7de5 3fd92adf78 3bc4178ea8
0f9b1a7d0c 2dd9aafb83 cb97072ae2 a91bf1cecc 5f4546488c fe3d1ff48e 41ace44f32 50dbe65e1d 1c21c2956d 21d9485ca7
db902beb24 f9e9cc76ea 92a8147c8d 1054164533 9e29c7ab19 e892723c58 3f77b371b3 5bc817f25b b110e4dea1 384f593c80
ca4663001b 2feaca5537 82749989e6 ffbb1b3917 e8624f2de7 117ecfebd1 63adfd4d38 b05cbe5356 6ae0fc9aef 7c57a82589
087f4a260e e06980a664 bf49788296 d00f94f51c 026f59285a 54ed8bb2a8 4b37ca7d53 1355431a36 949f40d07e de6089609a
67f317ec93 81b9b98250 54b5390d03 2162e2f50c c50067b6d2 19566e0d9a 89d5c5febc cac14ff181 eea35d4920 a9f78694ee
36d78242f7 1f93992736 068ba5b4f4 2aa00eba80 cac686b9e6 ee523fb512 a0886b9152 09cb0764d7 77f28083e3 da8963d1e5
ba44a2f02e 945e04ed92 1ff815fe5a 451bbe890b fca8b85a72 be1e0b3c8d 0cd242cd0c 8b9103208d c4859f665b f62020e1ec
58252bcf97 ed65a5124e 9a32f1a816 d793910306 dcc8ef54b9 78de48391d ffb81d88fd 7e76f8cb20 69f5bc1725 8aee87e211
4dd04cb250 eaaaae0a83 47e86e6133 19519a6d7c 4cc9cbb0c5 233a1aefce 14b534eb64 f79ff99d0b 9cf80113fc 92ed830564
caeb33248e 0be5f67621 908a15d6a8 725e1b2ee3 95b76f08f2 e75b3d69f6 8d76985276 23c16ebfb3 7a524d7a35
```
```
@@ -73,6 +73,7 @@ RUN apt-get update \
libnss3 \
libxss1 \
libasound2 \
libgbm1 \
xfonts-base \
xfonts-terminus \
fonts-noto \
```

`.github/CODEOWNERS` (vendored, new file, 11 lines)

```
@@ -0,0 +1,11 @@
# Lines starting with '#' are comments.
# Each line is a file pattern followed by one or more owners.
# Syntax can be found here: https://docs.github.com/free-pro-team@latest/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax

/src/sql/*.d.ts @alanrenmsft @Charles-Gagnon @ranasaria
/extensions/resource-deployment/ @ranasaria
/extensions/arc/ @ranasaria
/extensions/azdata/ @ranasaria
/extensions/dacpac/ @kisantia
/extensions/schema-compare/ @kisantia
/extensions/sql-database-projects/ @Benjin @kisantia
```

`.github/subscribers.json` (vendored, new file, 7 lines)

```
@@ -0,0 +1,7 @@
{
"label-to-subscribe-to": [
"list of usernames to subscribe",
"such as:",
"JacksonKearl"
]
}
```

`.github/workflows/ci.yml` (vendored, 15 changed lines)

```
@@ -31,7 +31,10 @@ jobs:
with:
node-version: 10
# TODO: cache node modules
- run: yarn --frozen-lockfile
# Increase timeout to get around latency issues when fetching certain packages
- run: |
yarn config set network-timeout 300000
yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
@@ -79,7 +82,10 @@ jobs:
- uses: actions/setup-python@v1
with:
python-version: '2.x'
- run: yarn --frozen-lockfile
# Increase timeout to get around latency issues when fetching certain packages
- run: |
yarn config set network-timeout 300000
yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron
name: Download Electron
@@ -112,7 +118,10 @@ jobs:
- uses: actions/setup-node@v1
with:
node-version: 10
- run: yarn --frozen-lockfile
# Increase timeout to get around latency issues when fetching certain packages
- run: |
yarn config set network-timeout 300000
yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
```

`.github/workflows/deep-classifier-runner.yml` (vendored, new file, 50 lines)

```
@@ -0,0 +1,50 @@
name: "Deep Classifier: Runner"
on:
schedule:
- cron: 0 * * * *
repository_dispatch:
types: [trigger-deep-classifier-runner]

jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v35
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Additional Dependencies
# Pulls in a bunch of other packages that arent needed for the rest of the actions
run: npm install @azure/storage-blob@12.1.1
- name: "Run Classifier: Scraper"
uses: ./actions/classifier-deep/apply/fetch-sources
with:
# slightly overlapping to protect against issues slipping through the cracks if a run is delayed
from: 80
until: 5
configPath: classifier
blobContainerName: vscode-issue-classifier
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install --upgrade numpy scipy scikit-learn joblib nltk simpletransformers torch torchvision
- name: "Run Classifier: Generator"
run: python ./actions/classifier-deep/apply/generate-labels/main.py
- name: "Run Classifier: Labeler"
uses: ./actions/classifier-deep/apply/apply-labels
with:
configPath: classifier
allowLabels: "needs more info|new release"
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
```

`.github/workflows/deep-classifier-scraper.yml` (vendored, new file, 27 lines)

```
@@ -0,0 +1,27 @@
name: "Deep Classifier: Scraper"
on:
repository_dispatch:
types: [trigger-deep-classifier-scraper]

jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
ref: v35
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Additional Dependencies
# Pulls in a bunch of other packages that arent needed for the rest of the actions
run: npm install @azure/storage-blob@12.1.1
- name: "Run Classifier: Scraper"
uses: ./actions/classifier-deep/train/fetch-issues
with:
blobContainerName: vscode-issue-classifier
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
token: ${{secrets.ISSUE_SCRAPER_TOKEN}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
```

`.github/workflows/latest-release-monitor.yml` (vendored, new file, 27 lines)

```
@@ -0,0 +1,27 @@
name: Latest Release Monitor
on:
schedule:
- cron: 0/5 * * * *
repository_dispatch:
types: [trigger-latest-release-monitor]

jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: 'microsoft/vscode-github-triage-actions'
path: ./actions
ref: v35
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Storage Module
run: npm install @azure/storage-blob@12.1.1
- name: Run Latest Release Monitor
uses: ./actions/latest-release-monitor
with:
storageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
```

`.vscode/notebooks/api.github-issues` (vendored, 2 changed lines)

```
@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"August 2020\"",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"September 2020\"",
"editable": true
},
{
```

`.vscode/notebooks/my-work.github-issues` (vendored, 2 changed lines)

```
@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks\n\n// current milestone name\n$milestone=milestone:\"August 2020\"",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks\n\n// current milestone name\n$milestone=milestone:\"September 2020\"",
"editable": true
},
{
```

`.vscode/notebooks/verification.github-issues` (vendored, 2 changed lines)

```
@@ -14,7 +14,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"July 2020\"",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"September 2020\"",
"editable": true
},
{
```

`.vscode/searches/TrustedTypes.code-search` (vendored, new file, 194 lines)

```
@@ -0,0 +1,194 @@
# Query: .innerHTML =
# Flags: CaseSensitive WordMatch
# Including: src/vs/**/*.{t,j}s
# Excluding: *.test.ts
# ContextLines: 3

22 results - 14 files

src/vs/base/browser/markdownRenderer.ts:
161 const strValue = values[0];
162 const span = element.querySelector(`div[data-code="${id}"]`);
163 if (span) {
164: span.innerHTML = strValue;
165 }
166 }).catch(err => {
167 // ignore

243 return true;
244 }
245
246: element.innerHTML = insane(renderedMarkdown, {
247 allowedSchemes,
248 // allowedTags should included everything that markdown renders to.
249 // Since we have our own sanitize function for marked, it's possible we missed some tag so let insane make sure.

src/vs/base/browser/ui/contextview/contextview.ts:
157 this.shadowRootHostElement = DOM.$('.shadow-root-host');
158 this.container.appendChild(this.shadowRootHostElement);
159 this.shadowRoot = this.shadowRootHostElement.attachShadow({ mode: 'open' });
160: this.shadowRoot.innerHTML = `
161 <style>
162 ${SHADOW_ROOT_CSS}
163 </style>

src/vs/code/electron-sandbox/issue/issueReporterMain.ts:
57 const platformClass = platform.isWindows ? 'windows' : platform.isLinux ? 'linux' : 'mac';
58 addClass(document.body, platformClass); // used by our fonts
59
60: document.body.innerHTML = BaseHtml();
61 const issueReporter = new IssueReporter(configuration);
62 issueReporter.render();
63 document.body.style.display = 'block';

src/vs/code/electron-sandbox/processExplorer/processExplorerMain.ts:
320 content.push(`.highest { color: ${styles.highlightForeground}; }`);
321 }
322
323: styleTag.innerHTML = content.join('\n');
324 if (document.head) {
325 document.head.appendChild(styleTag);
326 }

src/vs/editor/browser/view/domLineBreaksComputer.ts:
107 allCharOffsets[i] = tmp[0];
108 allVisibleColumns[i] = tmp[1];
109 }
110: containerDomNode.innerHTML = sb.build();
111
112 containerDomNode.style.position = 'absolute';
113 containerDomNode.style.top = '10000';

src/vs/editor/browser/view/viewLayer.ts:
507 private _finishRenderingNewLines(ctx: IRendererContext<T>, domNodeIsEmpty: boolean, newLinesHTML: string, wasNew: boolean[]): void {
508 const lastChild = <HTMLElement>this.domNode.lastChild;
509 if (domNodeIsEmpty || !lastChild) {
510: this.domNode.innerHTML = newLinesHTML;
511 } else {
512 lastChild.insertAdjacentHTML('afterend', newLinesHTML);
513 }

525 private _finishRenderingInvalidLines(ctx: IRendererContext<T>, invalidLinesHTML: string, wasInvalid: boolean[]): void {
526 const hugeDomNode = document.createElement('div');
527
528: hugeDomNode.innerHTML = invalidLinesHTML;
529
530 for (let i = 0; i < ctx.linesLength; i++) {
531 const line = ctx.lines[i];

src/vs/editor/browser/widget/diffEditorWidget.ts:
2157
2158 let domNode = document.createElement('div');
2159 domNode.className = `view-lines line-delete ${MOUSE_CURSOR_TEXT_CSS_CLASS_NAME}`;
2160: domNode.innerHTML = sb.build();
2161 Configuration.applyFontInfoSlow(domNode, fontInfo);
2162
2163 let marginDomNode = document.createElement('div');
2164 marginDomNode.className = 'inline-deleted-margin-view-zone';
2165: marginDomNode.innerHTML = marginHTML.join('');
2166 Configuration.applyFontInfoSlow(marginDomNode, fontInfo);
2167
2168 return {

src/vs/editor/standalone/browser/colorizer.ts:
40 let text = domNode.firstChild ? domNode.firstChild.nodeValue : '';
41 domNode.className += ' ' + theme;
42 let render = (str: string) => {
43: domNode.innerHTML = str;
44 };
45 return this.colorize(modeService, text || '', mimeType, options).then(render, (err) => console.error(err));
46 }

src/vs/editor/standalone/browser/standaloneThemeServiceImpl.ts:
212 if (!this._globalStyleElement) {
213 this._globalStyleElement = dom.createStyleSheet();
214 this._globalStyleElement.className = 'monaco-colors';
215: this._globalStyleElement.innerHTML = this._css;
216 this._styleElements.push(this._globalStyleElement);
217 }
218 return Disposable.None;

221 private _registerShadowDomContainer(domNode: HTMLElement): IDisposable {
222 const styleElement = dom.createStyleSheet(domNode);
223 styleElement.className = 'monaco-colors';
224: styleElement.innerHTML = this._css;
225 this._styleElements.push(styleElement);
226 return {
227 dispose: () => {

291 ruleCollector.addRule(generateTokensCSSForColorMap(colorMap));
292
293 this._css = cssRules.join('\n');
294: this._styleElements.forEach(styleElement => styleElement.innerHTML = this._css);
295
296 TokenizationRegistry.setColorMap(colorMap);
297 this._onColorThemeChange.fire(theme);

src/vs/editor/test/browser/controller/imeTester.ts:
55 let content = this._model.getModelLineContent(i);
56 r += content + '<br/>';
57 }
58: output.innerHTML = r;
59 }
60 }
61

69 let title = document.createElement('div');
70 title.className = 'title';
71
72: title.innerHTML = description + '. Type <strong>' + inputStr + '</strong>';
73 container.appendChild(title);
74
75 let startBtn = document.createElement('button');

src/vs/workbench/contrib/notebook/browser/view/renderers/cellRenderer.ts:
454
455 private getMarkdownDragImage(templateData: MarkdownCellRenderTemplate): HTMLElement {
456 const dragImageContainer = DOM.$('.cell-drag-image.monaco-list-row.focused.markdown-cell-row');
457: dragImageContainer.innerHTML = templateData.container.outerHTML;
458
459 // Remove all rendered content nodes after the
460 const markdownContent = dragImageContainer.querySelector('.cell.markdown')!;

611 return null;
612 }
613
614: editorContainer.innerHTML = richEditorText;
615
616 return dragImageContainer;
617 }

src/vs/workbench/contrib/notebook/browser/view/renderers/webviewPreloads.ts:
375 addMouseoverListeners(outputNode, outputId);
376 const content = data.content;
377 if (content.type === RenderOutputType.Html) {
378: outputNode.innerHTML = content.htmlContent;
379 cellOutputContainer.appendChild(outputNode);
380 domEval(outputNode);
381 } else {

src/vs/workbench/contrib/webview/browser/pre/main.js:
386 // apply default styles
387 const defaultStyles = newDocument.createElement('style');
388 defaultStyles.id = '_defaultStyles';
389: defaultStyles.innerHTML = defaultCssRules;
390 newDocument.head.prepend(defaultStyles);
391
392 applyStyles(newDocument, newDocument.body);

src/vs/workbench/contrib/welcome/walkThrough/browser/walkThroughPart.ts:
281
282 const content = model.main.textEditorModel.getValue(EndOfLinePreference.LF);
283 if (!strings.endsWith(input.resource.path, '.md')) {
284: this.content.innerHTML = content;
285 this.updateSizeClasses();
286 this.decorateContent();
287 this.contentDisposables.push(this.keybindingService.onDidUpdateKeybindings(() => this.decorateContent()));

303 const innerContent = document.createElement('div');
304 innerContent.classList.add('walkThroughContent'); // only for markdown files
305 const markdown = this.expandMacros(content);
306: innerContent.innerHTML = marked(markdown, { renderer });
307 this.content.appendChild(innerContent);
308
309 model.snippets.forEach((snippet, i) => {
```

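The query above tracks every direct `.innerHTML =` assignment under `src/vs`. As a rough illustration of the kind of rewrite such an audit usually feeds (illustrative only, `setPlainText` is a hypothetical helper, not code from this repository):

```typescript
// Hypothetical helper: assign plain text without going through the HTML parser,
// which is the direction a ".innerHTML =" audit generally pushes.
function setPlainText(el: HTMLElement, value: string): void {
	el.textContent = value; // treated as text, never parsed as markup
}

// Values that must remain markup would instead be sanitized first; the
// markdownRenderer.ts hit above already routes rendered markdown through insane().
```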
`.vscode/searches/es6.code-search` (vendored, 48 changed lines)

```
@@ -2,43 +2,31 @@
# Flags: CaseSensitive WordMatch
# ContextLines: 2

14 results - 4 files
12 results - 4 files

src/vs/base/browser/dom.ts:
81 };
82
83: /** @deprecated ES6 - use classList*/
84 export const hasClass: (node: HTMLElement | SVGElement, className: string) => boolean = _classList.hasClass.bind(_classList);
83 };
84
85: /** @deprecated ES6 - use classList*/
86 export const addClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.addClass.bind(_classList);
86 export const hasClass: (node: HTMLElement | SVGElement, className: string) => boolean = _classList.hasClass.bind(_classList);
87: /** @deprecated ES6 - use classList*/
88 export const addClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.addClasses.bind(_classList);
88 export const addClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.addClass.bind(_classList);
89: /** @deprecated ES6 - use classList*/
90 export const removeClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.removeClass.bind(_classList);
90 export const addClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.addClasses.bind(_classList);
91: /** @deprecated ES6 - use classList*/
92 export const removeClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.removeClasses.bind(_classList);
92 export const removeClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.removeClass.bind(_classList);
93: /** @deprecated ES6 - use classList*/
94 export const toggleClass: (node: HTMLElement | SVGElement, className: string, shouldHaveIt?: boolean) => void = _classList.toggleClass.bind(_classList);
95
94 export const removeClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.removeClasses.bind(_classList);
95: /** @deprecated ES6 - use classList*/
96 export const toggleClass: (node: HTMLElement | SVGElement, className: string, shouldHaveIt?: boolean) => void = _classList.toggleClass.bind(_classList);
97

src/vs/base/common/arrays.ts:
401
402 /**
403: * @deprecated ES6: use `Array.findIndex`
403: * @deprecated ES6: use `Array.find`
404 */
405 export function firstIndex<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean): number {

417
418 /**
419: * @deprecated ES6: use `Array.find`
420 */
421 export function first<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean, notFoundValue: T): T;

568
569 /**
570: * @deprecated ES6: use `Array.find`
571 */
572 export function find<T>(arr: ArrayLike<T>, predicate: (value: T, index: number, arr: ArrayLike<T>) => any): T | undefined {
405 export function first<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean, notFoundValue: T): T;

src/vs/base/common/objects.ts:
115
@@ -66,8 +54,8 @@ src/vs/base/common/strings.ts:
170 */
171 export function endsWith(haystack: string, needle: string): boolean {

861
862 /**
863: * @deprecated ES6
864 */
865 export function repeat(s: string, count: number): string {
857
858 /**
859: * @deprecated ES6
860 */
861 export function repeat(s: string, count: number): string {
```

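The deprecation comments in `dom.ts` point at `classList` as the replacement. A minimal sketch of that substitution (illustrative only, not taken from the repository):

```typescript
// Before: the deprecated helpers from vs/base/browser/dom
//   addClass(node, 'active'); removeClass(node, 'active'); toggleClass(node, 'collapsed', true);

// After: the standard classList API the comments recommend.
declare const node: HTMLElement; // some element already in hand
node.classList.add('active');
node.classList.remove('active');
node.classList.toggle('collapsed', true); // second argument forces the state
```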
`.vscode/searches/ts36031.code-search` (vendored, 58 changed lines)

```
@@ -2,18 +2,52 @@
# Flags: RegExp
# ContextLines: 2

2 results - 2 files
8 results - 4 files

src/vs/base/browser/ui/tree/asyncDataTree.ts:
243 } : () => 'treeitem',
244 isChecked: options.accessibilityProvider!.isChecked ? (e) => {
245: return !!(options.accessibilityProvider?.isChecked!(e.element as T));
246 } : undefined,
247 getAriaLabel(e) {
241 } : () => 'treeitem',
242 isChecked: options.accessibilityProvider!.isChecked ? (e) => {
243: return !!(options.accessibilityProvider?.isChecked!(e.element as T));
244 } : undefined,
245 getAriaLabel(e) {

src/vs/workbench/contrib/debug/browser/debugConfigurationManager.ts:
254
255 return debugDynamicExtensions.map(e => {
256: const type = e.contributes?.debuggers![0].type!;
257 return {
258 label: this.getDebuggerLabel(type)!,
src/vs/platform/list/browser/listService.ts:
463
464 if (typeof options?.openOnSingleClick !== 'boolean' && options?.configurationService) {
465: this.openOnSingleClick = options?.configurationService!.getValue(openModeSettingKey) !== 'doubleClick';
466 this._register(options?.configurationService.onDidChangeConfiguration(() => {
467: this.openOnSingleClick = options?.configurationService!.getValue(openModeSettingKey) !== 'doubleClick';
468 }));
469 } else {

src/vs/workbench/contrib/notebook/browser/notebookEditorWidget.ts:
1526
1527 await this._ensureActiveKernel();
1528: await this._activeKernel?.cancelNotebookCell!(this._notebookViewModel!.uri, undefined);
1529 }
1530

1535
1536 await this._ensureActiveKernel();
1537: await this._activeKernel?.executeNotebookCell!(this._notebookViewModel!.uri, undefined);
1538 }
1539

1553
1554 await this._ensureActiveKernel();
1555: await this._activeKernel?.cancelNotebookCell!(this._notebookViewModel!.uri, cell.handle);
1556 }
1557

1567
1568 await this._ensureActiveKernel();
1569: await this._activeKernel?.executeNotebookCell!(this._notebookViewModel!.uri, cell.handle);
1570 }
1571

src/vs/workbench/contrib/webview/electron-browser/iframeWebviewElement.ts:
89 .then(() => this._resourceRequestManager.ensureReady())
90 .then(() => {
91: this.element?.contentWindow!.postMessage({ channel, args: data }, '*');
92 });
93 }
```

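Every hit above combines optional chaining with a non-null assertion on the member being called, which is the pattern this search exists to flag. A small sketch of the shape and a more explicit alternative (hypothetical names, not code from the repository):

```typescript
// Pattern the search results match, as it appears in the hits above:
//   options.accessibilityProvider?.isChecked!(e.element as T)
// i.e. an optional chain followed by a non-null assertion on the call target.

interface Provider { isChecked?: (e: unknown) => boolean }

// Equivalent without the assertion: guard explicitly before calling.
function isCheckedSafely(provider: Provider | undefined, e: unknown): boolean {
	return provider?.isChecked ? provider.isChecked(e) : false;
}
```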
`.yarnrc` (2 changed lines)

```
@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "9.2.1"
target "9.3.0"
runtime "electron"
```

`CHANGELOG.md` (61 changed lines)

```
@@ -1,5 +1,66 @@
# Change Log

## Version 1.24.0
* Release date: November 12, 2020
* Release status: General Availability
* SQL Project improvements
* Notebook improvements, including in WYSIWYG editor enhancements
* Azure Arc improvements
* Azure SQL Deployment UX improvements
* Azure Browse Connections Preview
* Bug Fixes

## Version 1.23.0
* Release date: October 14, 2020
* Release status: General Availability
* Added deployments of Azure SQL DB and VM
* Added PowerShell kernel results streaming support
* Added improvements to SQL Database Projects extension
* Bug Fixes
* Extension Updates:
  * SQL Server Import
  * Machine Learning
  * Schema Compare
  * Kusto
  * SQL Assessment
  * SQL Database Projects
  * Azure Arc
  * azdata

## Version 1.22.1
* Release date: September 30, 2020
* Release status: General Availability
* Fix bug #12615 Active connection filter doesn't untoggle | [#12615](https://github.com/microsoft/azuredatastudio/issues/12615)
* Fix bug #12572 Edit Data grid doesn't escape special characters | [#12572](https://github.com/microsoft/azuredatastudio/issues/12572)
* Fix bug #12570 Dashboard Explorer table doesn't escape special characters | [#12570](https://github.com/microsoft/azuredatastudio/issues/12570)
* Fix bug #12582 Delete row on Edit Data fails | [#12582](https://github.com/microsoft/azuredatastudio/issues/12582)
* Fix bug #12646 SQL Notebooks: Cells being treated isolated | [#12646](https://github.com/microsoft/azuredatastudio/issues/12646)

## Version 1.22.0
* Release date: September 22, 2020
* Release status: General Availability
* New Notebook Features
  * Supports brand new text cell editing experience based on rich text formatting and seamless conversion to markdown, also known as WYSIWYG toolbar (What You See Is What You Get)
  * Supports Kusto kernel
  * Supports pinning of notebooks
  * Added support for new version of Jupyter Books
  * Improved Jupyter Shortcuts
  * Introduced perf loading improvements
* Added Azure Arc extension - Users can try out Azure Arc public preview through Azure Data Studio. This includes:
  * Deploy data controller
  * Deploy Postgres
  * Deploy Managed Instance for Azure Arc
  * Connect to data controller
  * Access data service dashboards
  * Azure Arc Jupyter Book
* Added new deployment options
  * Azure SQL Database Edge
  * (Edge will require Azure SQL Edge Deployment Extension)
* Added SQL Database Projects extension - The SQL Database Projects extension brings project-based database development to Azure Data Studio. In this preview release, SQL projects can be created and published from Azure Data Studio.
* Added Kusto (KQL) extension - Brings native Kusto experiences in Azure Data Studio for data exploration and data analytics against massive amount of real-time streaming data stored in Azure Data Explorer. This preview release supports connecting and browsing Azure Data Explorer clusters, writing KQL queries as well as authoring notebooks with Kusto kernel.
* SQL Server Import extension GA - Announcing the GA of the SQL Server Import extension, features no longer in preview. This extension facilitates importing csv/txt files. Learn more about the extension in [this article](sql-server-import-extension.md).
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22September+2020+Release%22+is%3Aclosed).

## Version 1.21.0
* Release date: August 12, 2020
* Release status: General Availability
```

`README.md` (18 changed lines)

```
@@ -19,7 +19,7 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
| [Linux DEB][linux-deb] |

Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
Go to our [download page](https://aka.ms/getazuredatastudio) for more specific instructions.

## Try out the latest insiders build from `main`:
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
@@ -29,6 +29,8 @@ Go to our [download page](https://aka.ms/azuredatastudio) for more specific inst
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)

See the [change log](https://github.com/Microsoft/azuredatastudio/blob/main/CHANGELOG.md) for additional details of what's in this release.
Go to our [download page](https://aka.ms/getazuredatastudio) for more specific instructions.

## **Feature Highlights**

@@ -129,10 +131,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.

Licensed under the [Source EULA](LICENSE.txt).

[win-user]: https://go.microsoft.com/fwlink/?linkid=2138608
[win-system]: https://go.microsoft.com/fwlink/?linkid=2138704
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2138705
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2138609
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2138706
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2138507
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2138508
[win-user]: https://go.microsoft.com/fwlink/?linkid=2148607
[win-system]: https://go.microsoft.com/fwlink/?linkid=2148907
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2148908
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2148710
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2148708
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2148709
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2148806
```

`build/.gitattributes` (vendored, new file, 1 line)

```
@@ -0,0 +1 @@
* text eol=lf
```

```
@@ -15,7 +15,7 @@
"keywords": [],
"author": "",
"dependencies": {
"@actions/core": "^1.2.3",
"@actions/core": "^1.2.6",
"@actions/github": "^2.1.1",
"axios": "^0.19.2",
"ts-node": "^8.6.2",

@@ -2,10 +2,10 @@
# yarn lockfile v1

"@actions/core@^1.2.3":
version "1.2.3"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.3.tgz#e844b4fa0820e206075445079130868f95bfca95"
integrity sha512-Wp4xnyokakM45Uuj4WLUxdsa8fJjKVl1fDTsPbTEcTcuu0Nb26IPQbOtjmnfaCPGcaoPOOqId8H9NapZ8gii4w==
"@actions/core@^1.2.6":
version "1.2.6"
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.6.tgz#a78d49f41a4def18e88ce47c2cac615d5694bf09"
integrity sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==

"@actions/github@^2.1.1":
version "2.1.1"

@@ -53,7 +53,7 @@ async function uploadBlob(blobService: azure.BlobService, quality: string, blobN
}
};

await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
}

function getEnv(name: string): string {

@@ -17,7 +17,7 @@ const fileNames = [
];

async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
await new Promise<void>((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}

async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
@@ -33,7 +33,7 @@ async function uploadBlob(blobService: azure.BlobService, container: string, blo
}
};

await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}

async function publish(commit: string, files: readonly string[]): Promise<void> {
```

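The only change in these TypeScript hunks is typing the wrapped promises as `Promise<void>`. A minimal standalone sketch of the same adjustment (the callback-style `createThing` below is a hypothetical stand-in for the `blobService.*` calls above):

```typescript
// Hypothetical callback-style API standing in for the Azure blob calls above.
declare function createThing(name: string, done: (err?: Error) => void): void;

// Without a type argument, `new Promise((c, e) => ...)` is inferred as
// Promise<unknown>, and on newer TypeScript versions a bare c() call is then
// rejected because resolve expects a value. Typing the promise as void keeps
// the argument-less c() call well-typed:
async function ensureThing(name: string): Promise<void> {
	await new Promise<void>((c, e) => createThing(name, err => (err ? e(err) : c())));
}
```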
```
@@ -43,6 +43,7 @@ function createDefaultConfig(quality: string): Config {
}

function getConfig(quality: string): Promise<Config> {
console.log(`Getting config for quality ${quality}`);
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
@@ -52,13 +53,13 @@ function getConfig(quality: string): Promise<Config> {
]
};

return new Promise<Config>((c, e) => {
return retry(() => new Promise<Config>((c, e) => {
client.queryDocuments(collection, query, { enableCrossPartitionQuery: true }).toArray((err, results) => {
if (err && err.code !== 409) { return e(err); }

c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
});
});
}));
}

interface Asset {
@@ -86,6 +87,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
updateTries++;

return new Promise<void>((c, e) => {
console.log(`Querying existing documents to update...`);
client.queryDocuments(collection, updateQuery, { enableCrossPartitionQuery: true }).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
@@ -101,6 +103,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
release.updates[platform] = type;
}

console.log(`Replacing existing document with updated version`);
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(update()); }
if (err) { return e(err); }
@@ -112,7 +115,8 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
});
}

return new Promise<void>((c, e) => {
return retry(() => new Promise<void>((c, e) => {
console.log(`Attempting to create document`);
client.createDocument(collection, release, err => {
if (err && err.code === 409) { return c(update()); }
if (err) { return e(err); }
@@ -120,7 +124,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
console.log('Build successfully published.');
c();
});
});
}));
}

async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
@@ -188,7 +192,6 @@ async function publish(commit: string, quality: string, platform: string, type:
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}

console.log('Uploading blobs to Azure storage...');

await uploadBlob(blobService, quality, blobName, file);
@@ -247,6 +250,22 @@ async function publish(commit: string, quality: string, platform: string, type:
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
}

const RETRY_TIMES = 10;
async function retry<T>(fn: () => Promise<T>): Promise<T> {
for (let run = 1; run <= RETRY_TIMES; run++) {
try {
return await fn();
} catch (err) {
if (!/ECONNRESET/.test(err.message)) {
throw err;
}
console.log(`Caught error ${err} - ${run}/${RETRY_TIMES}`);
}
}

throw new Error('Retried too many times');
}

function main(): void {
const commit = process.env['BUILD_SOURCEVERSION'];
```

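The new `retry` helper above re-runs an operation up to `RETRY_TIMES` times, but only when the failure looks like a transient `ECONNRESET`. A short usage sketch with a placeholder operation (not code from the repository; in the diff the wrapped work is the Cosmos DB query/create inside `getConfig` and `createOrUpdate`):

```typescript
// Signature of the helper added in the diff above.
declare function retry<T>(fn: () => Promise<T>): Promise<T>;

// Placeholder async operation standing in for the document query.
declare function queryOnce(): Promise<string>;

async function queryWithRetry(): Promise<string> {
	// ECONNRESET failures are retried up to RETRY_TIMES; other errors rethrow immediately.
	return retry(() => queryOnce());
}
```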
```
@@ -87,10 +87,6 @@ steps:
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-darwin-min-ci
displayName: Build

- script: |

@@ -96,8 +96,6 @@ steps:
set -e
yarn gulp package-rebuild-extensions
yarn gulp vscode-darwin-min-ci
yarn gulp vscode-reh-darwin-min-ci
yarn gulp vscode-reh-web-darwin-min-ci
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
@@ -125,19 +123,19 @@ steps:
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log"
displayName: Run smoke tests (Electron)
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
set -e
node ./node_modules/playwright/install.js
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-web-darwin" \
yarn smoketest --web --headless --screenshots "$(build.artifactstagingdirectory)/smokeshots"
displayName: Run smoke tests (Browser)
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
# - script: |
#   set -e
#   node ./node_modules/playwright/install.js
#   VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-web-darwin" \
#   yarn smoketest --web --headless --screenshots "$(build.artifactstagingdirectory)/smokeshots"
# displayName: Run smoke tests (Browser)
# continueOnError: true
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
set -e
```

```
@@ -31,10 +31,10 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"

git checkout origin/electron-x.y.z
git checkout origin/electron-11.x.y
git merge origin/master

# Push master branch into exploration branch
git push origin HEAD:electron-x.y.z
git push origin HEAD:electron-11.x.y

displayName: Sync & Merge Exploration
```

```
@@ -52,21 +52,25 @@ steps:
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro

- script: |
echo -n $VSCODE_ARCH > .build/arch
displayName: Prepare arch cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'

- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
CHILD_CONCURRENCY=1 npm_config_arch=$(NPM_ARCH) yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
@@ -85,64 +89,64 @@ steps:
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-x64-min-ci
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-x64-min-ci
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-x64-min-ci
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
displayName: Build

- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-x64" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- task: PublishPipelineArtifact@0
inputs:
artifactName: crash-dump-linux
artifactName: 'crash-dump-linux-$(VSCODE_ARCH)'
targetPath: .build/crashes
displayName: 'Publish Crash Reports'
continueOnError: true
@@ -157,15 +161,26 @@ steps:

- script: |
set -e
yarn gulp "vscode-linux-x64-build-deb"
yarn gulp "vscode-linux-x64-build-rpm"
yarn gulp "vscode-linux-x64-prepare-snap"
displayName: Build packages
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
displayName: Build deb, rpm packages

- script: |
set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
displayName: Prepare snap package
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))

# needed for code signing
- task: UseDotNet@2
displayName: 'Install .NET Core SDK 2.x'
inputs:
version: 2.x

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '.build/linux/rpm/x86_64'
FolderPath: '.build/linux/rpm'
Pattern: '*.rpm'
signConfigType: inlineSignParams
inlineOperation: |
@@ -186,14 +201,16 @@ steps:
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/linux/publish.sh
displayName: Publish

- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-x64
artifactName: 'snap-$(VSCODE_ARCH)'
targetPath: .build/linux/snap-tarball
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
```

@@ -4,11 +4,10 @@ REPO="$(pwd)"
ROOT="$REPO/.."

# Publish tarball
PLATFORM_LINUX="linux-x64"
PLATFORM_LINUX="linux-$VSCODE_ARCH"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$VSCODE_ARCH-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$VSCODE_ARCH-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"

rm -rf $ROOT/code-*.tar.*
@@ -28,24 +27,36 @@ rm -rf $ROOT/vscode-server-*.tar.*
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"

# Publish DEB
PLATFORM_DEB="linux-deb-x64"
DEB_ARCH="amd64"
case $VSCODE_ARCH in
x64) DEB_ARCH="amd64" ;;
*) DEB_ARCH="$VSCODE_ARCH" ;;
esac

PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"

node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"

# Publish RPM
PLATFORM_RPM="linux-rpm-x64"
RPM_ARCH="x86_64"
case $VSCODE_ARCH in
x64) RPM_ARCH="x86_64" ;;
armhf) RPM_ARCH="armv7hl" ;;
arm64) RPM_ARCH="aarch64" ;;
*) RPM_ARCH="$VSCODE_ARCH" ;;
esac

PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"

node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"

# Publish Snap
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
if [ "$VSCODE_ARCH" == "x64" ]; then
# Publish Snap
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
fi

@@ -91,8 +91,7 @@ steps:
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp vscode-linux-x64-min-ci
|
||||
yarn gulp vscode-reh-linux-x64-min-ci
|
||||
yarn gulp vscode-reh-web-linux-x64-min-ci
|
||||
yarn gulp vscode-web-min-ci
|
||||
displayName: Build
|
||||
env:
|
||||
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||
@@ -134,7 +133,8 @@ steps:
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||
export INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||
export NO_CLEANUP=1
|
||||
DISPLAY=:10 node ./scripts/test-extensions-unit.js ${{ extension }}
|
||||
displayName: 'Run ${{ extension }} Stable Extension Unit Tests'
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
@@ -149,6 +149,15 @@ steps:
|
||||
continueOnError: true
|
||||
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
mkdir -p $(Build.ArtifactStagingDirectory)/logs/linux-x64
|
||||
cd /tmp
|
||||
tar -czvf $(Build.ArtifactStagingDirectory)/logs/linux-x64/logs-linux-x64.tar.gz adsuser*
|
||||
displayName: Archive Logs
|
||||
continueOnError: true
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp vscode-linux-x64-build-deb
|
||||
@@ -221,6 +230,7 @@ steps:
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: 'Publish Artifact: drop'
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: 'Component Detection'
|
||||
|
||||
@@ -13,6 +13,12 @@ resources:
|
||||
- container: vscode-x64
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
|
||||
endpoint: VSCodeHub
|
||||
- container: vscode-arm64
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
|
||||
endpoint: VSCodeHub
|
||||
- container: vscode-armhf
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf
|
||||
endpoint: VSCodeHub
|
||||
- container: snapcraft
|
||||
image: snapcore/snapcraft:stable
|
||||
|
||||
@@ -64,6 +70,9 @@ stages:
|
||||
- job: Linux
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
||||
container: vscode-x64
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
NPM_ARCH: x64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
@@ -72,22 +81,28 @@ stages:
|
||||
- Linux
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
||||
container: snapcraft
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- job: LinuxArmhf
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
|
||||
container: vscode-armhf
|
||||
variables:
|
||||
VSCODE_ARCH: armhf
|
||||
NPM_ARCH: armv7l
|
||||
steps:
|
||||
- template: linux/product-build-linux-multiarch.yml
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- job: LinuxArm64
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
|
||||
container: vscode-arm64
|
||||
variables:
|
||||
VSCODE_ARCH: arm64
|
||||
NPM_ARCH: arm64
|
||||
steps:
|
||||
- template: linux/product-build-linux-multiarch.yml
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- job: LinuxAlpine
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
|
||||
|
||||
@@ -52,9 +52,13 @@ steps:
|
||||
displayName: Merge distro
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
- script: |
|
||||
echo -n $VSCODE_ARCH > .build/arch
|
||||
displayName: Prepare arch cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
vstsFeed: 'npm-vscode'
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
@@ -67,7 +71,7 @@ steps:
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
vstsFeed: 'npm-vscode'
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||
@@ -112,8 +116,8 @@ steps:
|
||||
yarn gulp compile-build
|
||||
yarn gulp compile-extensions-build
|
||||
yarn gulp minify-vscode
|
||||
yarn gulp minify-vscode-reh
|
||||
yarn gulp minify-vscode-reh-web
|
||||
yarn gulp vscode-reh-linux-x64-min
|
||||
yarn gulp vscode-reh-web-linux-x64-min
|
||||
displayName: Compile
|
||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ jobs:
|
||||
- template: sql-product-compile.yml
|
||||
|
||||
- job: macOS
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
dependsOn:
|
||||
@@ -27,7 +27,7 @@ jobs:
|
||||
timeoutInMinutes: 180
|
||||
|
||||
- job: macOS_Signing
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), eq(variables['signed'], true))
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), eq(variables['signed'], true), ne(variables['VSCODE_QUALITY'], 'saw'))
|
||||
pool:
|
||||
vmImage: macOS-latest
|
||||
dependsOn:
|
||||
@@ -50,7 +50,7 @@ jobs:
|
||||
timeoutInMinutes: 70
|
||||
|
||||
- job: LinuxWeb
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
container: linux-x64
|
||||
@@ -61,15 +61,15 @@ jobs:
|
||||
steps:
|
||||
- template: web/sql-product-build-web.yml
|
||||
|
||||
- job: Docker
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
container: linux-x64
|
||||
dependsOn:
|
||||
- Linux
|
||||
steps:
|
||||
- template: docker/sql-product-build-docker.yml
|
||||
# - job: Docker
|
||||
# condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
|
||||
# pool:
|
||||
# vmImage: 'Ubuntu-16.04'
|
||||
# container: linux-x64
|
||||
# dependsOn:
|
||||
# - Linux
|
||||
# steps:
|
||||
# - template: docker/sql-product-build-docker.yml
|
||||
|
||||
- job: Windows
|
||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
|
||||
@@ -98,7 +98,7 @@ jobs:
|
||||
dependsOn:
|
||||
- macOS
|
||||
- Linux
|
||||
- Docker
|
||||
# - Docker
|
||||
- Windows
|
||||
- Windows_Test
|
||||
- LinuxWeb
|
||||
|
||||
@@ -96,8 +96,8 @@ steps:
|
||||
yarn gulp compile-build
|
||||
yarn gulp compile-extensions-build
|
||||
yarn gulp minify-vscode
|
||||
yarn gulp minify-vscode-reh
|
||||
yarn gulp minify-vscode-reh-web
|
||||
yarn gulp vscode-reh-linux-x64-min
|
||||
yarn gulp vscode-reh-web-linux-x64-min
|
||||
displayName: Compile
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
|
||||
@@ -12,9 +12,9 @@ $ServerZipLocation = "$Repo\.build\win32-$Arch\server"
|
||||
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"
|
||||
|
||||
# Create server archive
|
||||
New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
|
||||
# New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
|
||||
$global:LASTEXITCODE = 0
|
||||
exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
|
||||
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
|
||||
# exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
|
||||
# exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
|
||||
|
||||
exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
|
||||
|
||||
@@ -95,8 +95,8 @@ steps:
|
||||
$ErrorActionPreference = "Stop"
|
||||
exec { yarn gulp "package-rebuild-extensions" }
|
||||
exec { yarn gulp "vscode-win32-x64-min-ci" }
|
||||
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
|
||||
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
|
||||
exec { yarn gulp "vscode-reh-win32-x64-min" }
|
||||
exec { yarn gulp "vscode-reh-web-win32-x64-min" }
|
||||
exec { yarn gulp "vscode-win32-x64-code-helper" }
|
||||
exec { yarn gulp "vscode-win32-x64-inno-updater" }
|
||||
displayName: Build
|
||||
@@ -131,7 +131,7 @@ steps:
|
||||
$AppRoot = "$(agent.builddirectory)\azuredatastudio-win32-x64"
|
||||
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
||||
$AppNameShort = $AppProductJson.nameShort
|
||||
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\azuredatastudio-reh-win32-x64"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
||||
# exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\azuredatastudio-reh-win32-x64"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
||||
displayName: Run integration tests (Electron)
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
|
||||
@@ -104,6 +104,7 @@ const indentationFilter = [
|
||||
'!extensions/admin-tool-ext-win/ssmsmin/**',
|
||||
'!extensions/resource-deployment/notebooks/**',
|
||||
'!extensions/mssql/notebooks/**',
|
||||
'!extensions/azurehybridtoolkit/notebooks/**',
|
||||
'!extensions/integration-tests/testData/**',
|
||||
'!extensions/arc/src/controller/generated/**',
|
||||
'!extensions/sql-database-projects/resources/templates/*.xml',
|
||||
@@ -178,7 +179,9 @@ const copyrightFilter = [
|
||||
'!extensions/mssql/src/prompts/**',
|
||||
'!extensions/kusto/src/prompts/**',
|
||||
'!extensions/notebook/resources/jupyter_config/**',
|
||||
'!extensions/azurehybridtoolkit/notebooks/**',
|
||||
'!extensions/query-history/images/**',
|
||||
'!extensions/sql/build/update-grammar.js',
|
||||
'!**/*.gif',
|
||||
'!**/*.xlf',
|
||||
'!**/*.dacpac',
|
||||
|
||||
@@ -261,7 +261,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
||||
.pipe(fileLengthFilter.restore)
|
||||
.pipe(util.skipDirectories())
|
||||
.pipe(util.fixWin32DirectoryPermissions())
|
||||
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
|
||||
.pipe(electron(_.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, ffmpegChromium: true })))
|
||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
|
||||
|
||||
if (platform === 'linux') {
|
||||
@@ -345,7 +345,7 @@ const BUILD_TARGETS = [
|
||||
{ platform: 'darwin', arch: null, opts: { stats: true } },
|
||||
{ platform: 'linux', arch: 'ia32' },
|
||||
{ platform: 'linux', arch: 'x64' },
|
||||
{ platform: 'linux', arch: 'arm' },
|
||||
{ platform: 'linux', arch: 'armhf' },
|
||||
{ platform: 'linux', arch: 'arm64' },
|
||||
];
|
||||
BUILD_TARGETS.forEach(buildTarget => {
|
||||
|
||||
@@ -23,7 +23,7 @@ const commit = util.getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);

function getDebPackageArch(arch) {
return { x64: 'amd64', arm: 'armhf', arm64: 'arm64' }[arch];
return { x64: 'amd64', armhf: 'armhf', arm64: 'arm64' }[arch];
}

function prepareDebPackage(arch) {
@@ -53,6 +53,11 @@ function prepareDebPackage(arch) {
|
||||
.pipe(replace('@@LICENSE@@', product.licenseName))
|
||||
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
||||
|
||||
const workspaceMime = gulp.src('resources/linux/code-workspace.xml', { base: '.' })
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(rename('usr/share/mime/packages/' + product.applicationName + '-workspace.xml'));
|
||||
|
||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
||||
|
||||
@@ -96,7 +101,7 @@ function prepareDebPackage(arch) {
|
||||
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
||||
.pipe(rename('DEBIAN/postinst'));
|
||||
|
||||
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, bash_completion, zsh_completion, code);
|
||||
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, code);
|
||||
|
||||
return all.pipe(vfs.dest(destination));
|
||||
};
|
||||
@@ -116,7 +121,7 @@ function getRpmBuildPath(rpmArch) {
|
||||
}
|
||||
|
||||
function getRpmPackageArch(arch) {
|
||||
return { x64: 'x86_64', arm: 'armhf', arm64: 'arm64' }[arch];
|
||||
return { x64: 'x86_64', armhf: 'armv7hl', arm64: 'aarch64' }[arch];
|
||||
}
|
||||
|
||||
function prepareRpmPackage(arch) {
|
||||
@@ -145,6 +150,11 @@ function prepareRpmPackage(arch) {
|
||||
.pipe(replace('@@LICENSE@@', product.licenseName))
|
||||
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
||||
|
||||
const workspaceMime = gulp.src('resources/linux/code-workspace.xml', { base: '.' })
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(rename('BUILD/usr/share/mime/packages/' + product.applicationName + '-workspace.xml'));
|
||||
|
||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
||||
|
||||
@@ -175,7 +185,7 @@ function prepareRpmPackage(arch) {
|
||||
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
|
||||
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
|
||||
|
||||
const all = es.merge(code, desktops, appdata, icon, bash_completion, zsh_completion, spec, specIcon);
|
||||
const all = es.merge(code, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, spec, specIcon);
|
||||
|
||||
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
|
||||
};
|
||||
@@ -249,33 +259,23 @@ function buildSnapPackage(arch) {
|
||||
|
||||
const BUILD_TARGETS = [
|
||||
{ arch: 'x64' },
|
||||
{ arch: 'arm' },
|
||||
{ arch: 'armhf' },
|
||||
{ arch: 'arm64' },
|
||||
];
|
||||
|
||||
BUILD_TARGETS.forEach((buildTarget) => {
|
||||
const arch = buildTarget.arch;
|
||||
BUILD_TARGETS.forEach(({ arch }) => {
|
||||
const debArch = getDebPackageArch(arch);
|
||||
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
|
||||
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
|
||||
gulp.task(buildDebTask);
|
||||
|
||||
{
|
||||
const debArch = getDebPackageArch(arch);
|
||||
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
|
||||
// gulp.task(prepareDebTask);
|
||||
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
|
||||
gulp.task(buildDebTask);
|
||||
}
|
||||
const rpmArch = getRpmPackageArch(arch);
|
||||
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
|
||||
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
|
||||
gulp.task(buildRpmTask);
|
||||
|
||||
{
|
||||
const rpmArch = getRpmPackageArch(arch);
|
||||
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
|
||||
// gulp.task(prepareRpmTask);
|
||||
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
|
||||
gulp.task(buildRpmTask);
|
||||
}
|
||||
|
||||
{
|
||||
const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
|
||||
gulp.task(prepareSnapTask);
|
||||
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
|
||||
gulp.task(buildSnapTask);
|
||||
}
|
||||
const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
|
||||
gulp.task(prepareSnapTask);
|
||||
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
|
||||
gulp.task(buildSnapTask);
|
||||
});
|
||||
|
||||
@@ -55,7 +55,7 @@ function getElectron(arch) {
|
||||
return () => {
|
||||
const electronOpts = _.extend({}, exports.config, {
|
||||
platform: process.platform,
|
||||
arch,
|
||||
arch: arch === 'armhf' ? 'arm' : arch,
|
||||
ffmpegChromium: true,
|
||||
keepDefaultApp: true
|
||||
});
|
||||
|
||||
@@ -61,7 +61,7 @@ function getElectron(arch: string): () => NodeJS.ReadWriteStream {
|
||||
return () => {
|
||||
const electronOpts = _.extend({}, config, {
|
||||
platform: process.platform,
|
||||
arch,
|
||||
arch: arch === 'armhf' ? 'arm' : arch,
|
||||
ffmpegChromium: true,
|
||||
keepDefaultApp: true
|
||||
});
|
||||
|
||||
@@ -207,25 +207,25 @@ const externalExtensions = [
|
||||
// they get packaged separately. Adding an extension name here will make the build create
// a separate vsix package for the extension, and the extension will be excluded from the main package.
// Any extension not included here will be installed by default.
|
||||
'admin-pack',
|
||||
'admin-tool-ext-win',
|
||||
'agent',
|
||||
'arc',
|
||||
'asde-deployment',
|
||||
'azdata',
|
||||
'import',
|
||||
'profiler',
|
||||
'admin-pack',
|
||||
'dacpac',
|
||||
'schema-compare',
|
||||
'azurehybridtoolkit',
|
||||
'cms',
|
||||
'query-history',
|
||||
'dacpac',
|
||||
'import',
|
||||
'kusto',
|
||||
'liveshare',
|
||||
'sql-database-projects',
|
||||
'machine-learning',
|
||||
'profiler',
|
||||
'query-history',
|
||||
'schema-compare',
|
||||
'sql-assessment',
|
||||
'asde-deployment',
|
||||
'sql-database-projects',
|
||||
'sql-migration',
|
||||
'data-workspace'
|
||||
];
|
||||
// extensions that require a rebuild since they have native parts
|
||||
const rebuildExtensions = [
|
||||
|
||||
@@ -241,25 +241,25 @@ const externalExtensions = [
|
||||
// they get packaged separately. Adding an extension name here will make the build create
// a separate vsix package for the extension, and the extension will be excluded from the main package.
// Any extension not included here will be installed by default.
|
||||
'admin-pack',
|
||||
'admin-tool-ext-win',
|
||||
'agent',
|
||||
'arc',
|
||||
'asde-deployment',
|
||||
'azdata',
|
||||
'import',
|
||||
'profiler',
|
||||
'admin-pack',
|
||||
'dacpac',
|
||||
'schema-compare',
|
||||
'azurehybridtoolkit',
|
||||
'cms',
|
||||
'query-history',
|
||||
'dacpac',
|
||||
'import',
|
||||
'kusto',
|
||||
'liveshare',
|
||||
'sql-database-projects',
|
||||
'machine-learning',
|
||||
'profiler',
|
||||
'query-history',
|
||||
'schema-compare',
|
||||
'sql-assessment',
|
||||
'asde-deployment',
|
||||
'sql-database-projects',
|
||||
'sql-migration',
|
||||
'data-workspace'
|
||||
];
|
||||
|
||||
// extensions that require a rebuild since they have native parts
|
||||
|
||||
@@ -206,6 +206,10 @@
|
||||
"name": "vs/workbench/contrib/webview",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/webviewPanel",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/contrib/customEditor",
|
||||
"project": "vscode-workbench"
|
||||
|
||||
@@ -1004,7 +1004,7 @@ function createResource(project: string, slug: string, xlfFile: File, apiHostnam
|
||||
* https://dev.befoolish.co/tx-docs/public/projects/updating-content#what-happens-when-you-update-files
|
||||
*/
|
||||
function updateResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: string): Promise<any> {
|
||||
return new Promise((resolve, reject) => {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const data = JSON.stringify({ content: xlfFile.contents.toString() });
|
||||
const options = {
|
||||
hostname: apiHostname,
|
||||
|
||||
@@ -53,6 +53,13 @@ const CORE_TYPES = [
|
||||
'trimLeft',
|
||||
'trimRight'
|
||||
];
|
||||
// Types that are defined in a common layer but are known to be only
|
||||
// available in native environments should not be allowed in browser
|
||||
const NATIVE_TYPES = [
|
||||
'NativeParsedArgs',
|
||||
'INativeEnvironmentService',
|
||||
'INativeWindowConfiguration'
|
||||
];
|
||||
const RULES = [
|
||||
// Tests: skip
|
||||
{
|
||||
@@ -68,6 +75,37 @@ const RULES = [
|
||||
'MessageEvent',
|
||||
'data'
|
||||
],
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common: vs/platform/environment/common/argv.ts
|
||||
{
|
||||
target: '**/{vs,sql}/platform/environment/common/argv.ts',
|
||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common: vs/platform/environment/common/environment.ts
|
||||
{
|
||||
target: '**/{vs,sql}/platform/environment/common/environment.ts',
|
||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
// Common: vs/platform/windows/common/windows.ts
|
||||
{
|
||||
target: '**/{vs,sql}/platform/windows/common/windows.ts',
|
||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
@@ -81,6 +119,7 @@ const RULES = [
|
||||
// Safe access to global
|
||||
'global'
|
||||
],
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
@@ -90,6 +129,7 @@ const RULES = [
|
||||
{
|
||||
target: '**/{vs,sql}/**/common/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts',
|
||||
'@types/node' // no node.js
|
||||
@@ -99,6 +139,7 @@ const RULES = [
|
||||
{
|
||||
target: '**/{vs,sql}/**/browser/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
@@ -107,6 +148,7 @@ const RULES = [
|
||||
{
|
||||
target: '**/src/{vs,sql}/editor/contrib/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
@@ -132,7 +174,7 @@ const RULES = [
|
||||
},
|
||||
// Electron (sandbox)
|
||||
{
|
||||
target: '**/vs/**/electron-sandbox/**',
|
||||
target: '**/{vs,sql}/**/electron-sandbox/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
@@ -162,7 +204,7 @@ let hasErrors = false;
|
||||
function checkFile(program, sourceFile, rule) {
|
||||
checkNode(sourceFile);
|
||||
function checkNode(node) {
|
||||
var _a;
|
||||
var _a, _b;
|
||||
if (node.kind !== ts.SyntaxKind.Identifier) {
|
||||
return ts.forEachChild(node, checkNode); // recurse down
|
||||
}
|
||||
@@ -170,6 +212,12 @@ function checkFile(program, sourceFile, rule) {
|
||||
if ((_a = rule.allowedTypes) === null || _a === void 0 ? void 0 : _a.some(allowed => allowed === text)) {
|
||||
return; // override
|
||||
}
|
||||
if ((_b = rule.disallowedTypes) === null || _b === void 0 ? void 0 : _b.some(disallowed => disallowed === text)) {
|
||||
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
|
||||
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
|
||||
hasErrors = true;
|
||||
return;
|
||||
}
|
||||
const checker = program.getTypeChecker();
|
||||
const symbol = checker.getSymbolAtLocation(node);
|
||||
if (symbol) {
|
||||
|
||||
@@ -55,6 +55,14 @@ const CORE_TYPES = [
|
||||
'trimRight'
|
||||
];
|
||||
|
||||
// Types that are defined in a common layer but are known to be only
|
||||
// available in native environments should not be allowed in browser
|
||||
const NATIVE_TYPES = [
|
||||
'NativeParsedArgs',
|
||||
'INativeEnvironmentService',
|
||||
'INativeWindowConfiguration'
|
||||
];
|
||||
|
||||
const RULES = [
|
||||
|
||||
// Tests: skip
|
||||
@@ -73,6 +81,40 @@ const RULES = [
|
||||
'MessageEvent',
|
||||
'data'
|
||||
],
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts', // no DOM
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
|
||||
// Common: vs/platform/environment/common/argv.ts
|
||||
{
|
||||
target: '**/{vs,sql}/platform/environment/common/argv.ts',
|
||||
disallowedTypes: [/* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts', // no DOM
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
|
||||
// Common: vs/platform/environment/common/environment.ts
|
||||
{
|
||||
target: '**/{vs,sql}/platform/environment/common/environment.ts',
|
||||
disallowedTypes: [/* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts', // no DOM
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
},
|
||||
|
||||
// Common: vs/platform/windows/common/windows.ts
|
||||
{
|
||||
target: '**/{vs,sql}/platform/windows/common/windows.ts',
|
||||
disallowedTypes: [/* Ignore native types that are defined from here */],
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts', // no DOM
|
||||
'@types/node' // no node.js
|
||||
@@ -88,6 +130,7 @@ const RULES = [
|
||||
// Safe access to global
|
||||
'global'
|
||||
],
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts', // no DOM
|
||||
'@types/node' // no node.js
|
||||
@@ -98,6 +141,7 @@ const RULES = [
|
||||
{
|
||||
target: '**/{vs,sql}/**/common/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'lib.dom.d.ts', // no DOM
|
||||
'@types/node' // no node.js
|
||||
@@ -108,6 +152,7 @@ const RULES = [
|
||||
{
|
||||
target: '**/{vs,sql}/**/browser/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
@@ -117,6 +162,7 @@ const RULES = [
|
||||
{
|
||||
target: '**/src/{vs,sql}/editor/contrib/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedTypes: NATIVE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
]
|
||||
@@ -145,7 +191,7 @@ const RULES = [
|
||||
|
||||
// Electron (sandbox)
|
||||
{
|
||||
target: '**/vs/**/electron-sandbox/**',
|
||||
target: '**/{vs,sql}/**/electron-sandbox/**',
|
||||
allowedTypes: CORE_TYPES,
|
||||
disallowedDefinitions: [
|
||||
'@types/node' // no node.js
|
||||
@@ -181,6 +227,7 @@ interface IRule {
|
||||
skip?: boolean;
|
||||
allowedTypes?: string[];
|
||||
disallowedDefinitions?: string[];
|
||||
disallowedTypes?: string[];
|
||||
}
|
||||
|
||||
let hasErrors = false;
|
||||
@@ -199,6 +246,14 @@ function checkFile(program: ts.Program, sourceFile: ts.SourceFile, rule: IRule)
|
||||
return; // override
|
||||
}
|
||||
|
||||
if (rule.disallowedTypes?.some(disallowed => disallowed === text)) {
|
||||
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
|
||||
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
|
||||
|
||||
hasErrors = true;
|
||||
return;
|
||||
}
|
||||
|
||||
const checker = program.getTypeChecker();
|
||||
const symbol = checker.getSymbolAtLocation(node);
|
||||
if (symbol) {
|
||||
|
||||
@@ -15,7 +15,7 @@ const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
|
||||
const rootDir = path.resolve(__dirname, '..', '..');
|
||||
|
||||
function runProcess(command: string, args: ReadonlyArray<string> = []) {
|
||||
return new Promise((resolve, reject) => {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const child = spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
|
||||
child.on('exit', err => !err ? resolve() : process.exit(err ?? 1));
|
||||
child.on('error', reject);
|
||||
|
||||
@@ -60,12 +60,12 @@
"git": {
"name": "electron",
"repositoryUrl": "https://github.com/electron/electron",
"commitHash": "03c7a54dc534ce1867d4393b9b1a6989d4a7e005"
"commitHash": "fb03807cd21915ddc3aa2521ba4f5ba14597bd7e"
}
},
"isOnlyProductionDependency": true,
"license": "MIT",
"version": "9.2.1"
"version": "9.3.0"
},
{
"component": {

@@ -2,7 +2,7 @@
"name": "agent",
"displayName": "SQL Server Agent",
"description": "Manage and troubleshoot SQL Server Agent jobs",
"version": "0.48.0",
"version": "0.49.0",
"publisher": "Microsoft",
"preview": true,
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/main/LICENSE.txt",

@@ -2,7 +2,7 @@

Welcome to Microsoft Azure Arc Extension for Azure Data Studio!

**This extension is only applicable to customers in the Azure Arc data services private preview.**
**This extension is only applicable to customers in the Azure Arc data services public preview.**

## Overview

3
extensions/arc/images/discard.svg
Normal file
@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.7 7.9L15.8 15L15 15.8L7.9 8.7L0.8 15.8L0 15L7.1 7.9L0 0.8L0.8 0L7.9 7.1L15 0L15.8 0.8L8.7 7.9Z" fill="#0078D4"/>
</svg>

3
extensions/arc/images/information.svg
Normal file
@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 2048 2048" width="16" height="16">
<path d="M960 1920q-133 0-255-34t-230-96-194-150-150-195-97-229T0 960q0-133 34-255t96-230 150-194 195-150 229-97T960 0q133 0 255 34t230 96 194 150 150 195 97 229 34 256q0 133-34 255t-96 230-150 194-195 150-229 97-256 34zm0-1792q-115 0-221 30t-198 84-169 130-130 168-84 199-30 221q0 114 30 220t84 199 130 169 168 130 199 84 221 30q114 0 220-30t199-84 169-130 130-168 84-199 30-221q0-114-30-220t-84-199-130-169-168-130-199-84-221-30zm-64 640h128v640H896V768zm0-256h128v128H896V512z" />
</svg>

3
extensions/arc/images/save.svg
Normal file
@@ -0,0 +1,3 @@
<svg width="16" height="14" viewBox="0 0 16 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M14 0H14.4L14.7 0.2L14.9 0.5C14.9524 0.570883 14.9885 0.652432 15.0058 0.738849C15.023 0.825265 15.0211 0.914429 15 1V14H2.8L0.999997 12.2V1C0.985033 0.85904 1.02046 0.717335 1.1 0.6L1.3 0.3L1.6 0.1H14V0ZM14 1H13V7H3V1H2V11.8L3.2 13H4V9H11V13H14V1ZM4 6H12V1H4V6ZM10 10H5V13H6V11H7V13H10V10Z" fill="#0078D4"/>
</svg>

@@ -8,5 +8,5 @@
not_numbered: true
expand_sections: true
sections:
- title: TSG100 - The Azure Arc Postgres troubleshooter
- title: TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter
url: postgres/tsg100-troubleshoot-postgres

@@ -3,5 +3,5 @@
- This chapter contains notebooks for troubleshooting Postgres on Azure Arc

## Notebooks in this Chapter
- [TSG100 - The Azure Arc Postgres troubleshooter](tsg100-troubleshoot-postgres.ipynb)
- [TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter](tsg100-troubleshoot-postgres.ipynb)


@@ -3,5 +3,5 @@
not_numbered: true
expand_sections: true
sections:
- title: TSG100 - The Azure Arc Postgres troubleshooter
- title: TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter
url: postgres/tsg100-troubleshoot-postgres

@@ -4,13 +4,14 @@
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"TSG100 - The Azure Arc Postgres troubleshooter\n",
|
||||
"==============================================\n",
|
||||
"TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter\n",
|
||||
"===================================================================\n",
|
||||
"\n",
|
||||
"Description\n",
|
||||
"-----------\n",
|
||||
"\n",
|
||||
"Follow these steps to troubleshoot an Azure Arc Postgres Server.\n",
|
||||
"Follow these steps to troubleshoot an Azure Arc enabled PostgreSQL\n",
|
||||
"Hyperscale Server.\n",
|
||||
"\n",
|
||||
"Steps\n",
|
||||
"-----\n",
|
||||
@@ -34,6 +35,7 @@
|
||||
"# the user will be prompted to select a server.\n",
|
||||
"namespace = os.environ.get('POSTGRES_SERVER_NAMESPACE')\n",
|
||||
"name = os.environ.get('POSTGRES_SERVER_NAME')\n",
|
||||
"version = os.environ.get('POSTGRES_SERVER_VERSION')\n",
|
||||
"\n",
|
||||
"tail_lines = 50"
|
||||
]
|
||||
@@ -143,7 +145,7 @@
|
||||
" if cmd.startswith(\"kubectl \") and \"AZDATA_OPENSHIFT\" in os.environ:\n",
|
||||
" cmd_actual[0] = cmd_actual[0].replace(\"kubectl\", \"oc\")\n",
|
||||
"\n",
|
||||
" # To aid supportabilty, determine which binary file will actually be executed on the machine\n",
|
||||
" # To aid supportability, determine which binary file will actually be executed on the machine\n",
|
||||
" #\n",
|
||||
" which_binary = None\n",
|
||||
"\n",
|
||||
@@ -400,11 +402,11 @@
|
||||
"import math\n",
|
||||
"\n",
|
||||
"# If a server was provided, get it\n",
|
||||
"if namespace and name:\n",
|
||||
" server = json.loads(run(f'kubectl get dbs -n {namespace} {name} -o json', return_output=True))\n",
|
||||
"if namespace and name and version:\n",
|
||||
" server = json.loads(run(f'kubectl get postgresql-{version} -n {namespace} {name} -o json', return_output=True))\n",
|
||||
"else:\n",
|
||||
" # Otherwise prompt the user to select a server\n",
|
||||
" servers = json.loads(run(f'kubectl get dbs --all-namespaces -o json', return_output=True))['items']\n",
|
||||
" servers = json.loads(run(f'kubectl get postgresqls --all-namespaces -o json', return_output=True))['items']\n",
|
||||
" if not servers:\n",
|
||||
" raise Exception('No Postgres servers found')\n",
|
||||
"\n",
|
||||
@@ -425,6 +427,7 @@
|
||||
" server = servers[i-1]\n",
|
||||
" namespace = server['metadata']['namespace']\n",
|
||||
" name = server['metadata']['name']\n",
|
||||
" version = server['kind'][len('postgresql-'):]\n",
|
||||
" break\n",
|
||||
"\n",
|
||||
"display(Markdown(f'#### Got server {namespace}.{name}'))"
|
||||
@@ -446,10 +449,10 @@
|
||||
"uid = server['metadata']['uid']\n",
|
||||
"\n",
|
||||
"display(Markdown(f'#### Server summary'))\n",
|
||||
"run(f'kubectl get dbs -n {namespace} {name}')\n",
|
||||
"run(f'kubectl get postgresql-{version} -n {namespace} {name}')\n",
|
||||
"\n",
|
||||
"display(Markdown(f'#### Resource summary'))\n",
|
||||
"run(f'kubectl get pods,pvc,svc,ep -n {namespace} -l dusky.microsoft.com/serviceId={uid}')"
|
||||
"run(f'kubectl get sts,pods,pvc,svc,ep -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid}')"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -466,7 +469,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"display(Markdown(f'#### Troubleshooting server {namespace}.{name}'))\n",
|
||||
"run(f'kubectl describe dbs -n {namespace} {name}')"
|
||||
"run(f'kubectl describe postgresql-{version} -n {namespace} {name}')"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -482,7 +485,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"pods = json.loads(run(f'kubectl get pods -n {namespace} -l dusky.microsoft.com/serviceId={uid} -o json', return_output=True))['items']\n",
|
||||
"pods = json.loads(run(f'kubectl get pods -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid} -o json', return_output=True))['items']\n",
|
||||
"\n",
|
||||
"# Summarize and describe each pod\n",
|
||||
"for pod in pods:\n",
|
||||
@@ -529,8 +532,7 @@
|
||||
" con_restarts = con_status.get('restartCount', 0)\n",
|
||||
"\n",
|
||||
" display(Markdown(f'#### Troubleshooting container {namespace}.{pod_name}/{con_name} ({i+1}/{len(cons)})\\n'\n",
|
||||
" f'#### {\"S\" if con_started else \"Not s\"}tarted and '\n",
|
||||
" f'{\"\" if con_ready else \"not \"}ready with {con_restarts} restarts'))\n",
|
||||
" f'#### {\"R\" if con_ready else \"Not r\"}eady with {con_restarts} restarts'))\n",
|
||||
"\n",
|
||||
" run(f'kubectl logs -n {namespace} {pod_name} {con_name} --tail {tail_lines}')\n",
|
||||
"\n",
|
||||
@@ -554,7 +556,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"display(Markdown(f'#### Troubleshooting PersistentVolumeClaims'))\n",
|
||||
"run(f'kubectl describe pvc -n {namespace} -l dusky.microsoft.com/serviceId={uid}')"
|
||||
"run(f'kubectl describe pvc -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid}')"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -47,7 +47,7 @@
|
||||
"|Tools|Description|Installation|\n",
|
||||
"|---|---|---|\n",
|
||||
"|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n",
|
||||
"|azdata | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://github.com/microsoft/Azure-data-services-on-Azure-Arc/blob/master/scenarios/001-install-client-tools.md) |"
|
||||
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "714582b9-10ee-409e-ab12-15a4825c9471"
|
||||
@@ -65,13 +65,7 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"import pandas,sys,os,json,html,getpass,time, tempfile\n",
|
||||
"pandas_version = pandas.__version__.split('.')\n",
|
||||
"pandas_major = int(pandas_version[0])\n",
|
||||
"pandas_minor = int(pandas_version[1])\n",
|
||||
"pandas_patch = int(pandas_version[2])\n",
|
||||
"if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\n",
|
||||
" sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\n",
|
||||
"import sys,os,json,html,getpass,time, tempfile\n",
|
||||
"def run_command(command):\n",
|
||||
" print(\"Executing: \" + command)\n",
|
||||
" !{command}\n",
|
||||
@@ -90,7 +84,7 @@
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Set variables**\n",
|
||||
"Generated by Azure Data Studio using the values collected in the Azure Arc Data controller create wizard"
|
||||
"Generated by Azure Data Studio using the values collected in the 'Create Azure Arc data controller' wizard."
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "4b266b2d-bd1b-4565-92c9-3fc146cdce6d"
|
||||
@@ -129,18 +123,22 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"if \"AZDATA_NB_VAR_ARC_DOCKER_PASSWORD\" in os.environ:\n",
|
||||
" arc_docker_password = os.environ[\"AZDATA_NB_VAR_ARC_DOCKER_PASSWORD\"]\n",
|
||||
"if \"AZDATA_NB_VAR_ARC_ADMIN_PASSWORD\" in os.environ:\n",
|
||||
" arc_admin_password = os.environ[\"AZDATA_NB_VAR_ARC_ADMIN_PASSWORD\"]\n",
|
||||
"else:\n",
|
||||
" if arc_admin_password == \"\":\n",
|
||||
" arc_admin_password = getpass.getpass(prompt = 'Azure Arc Data controller password')\n",
|
||||
" arc_admin_password = getpass.getpass(prompt = 'Azure Arc Data Controller password')\n",
|
||||
" if arc_admin_password == \"\":\n",
|
||||
" sys.exit(f'Password is required.')\n",
|
||||
" confirm_password = getpass.getpass(prompt = 'Confirm password')\n",
|
||||
" if arc_admin_password != confirm_password:\n",
|
||||
" sys.exit(f'Passwords do not match.')"
|
||||
" sys.exit(f'Passwords do not match.')\n",
|
||||
"\n",
|
||||
"os.environ[\"SPN_CLIENT_ID\"] = sp_client_id\n",
|
||||
"os.environ[\"SPN_TENANT_ID\"] = sp_tenant_id\n",
|
||||
"if \"AZDATA_NB_VAR_SP_CLIENT_SECRET\" in os.environ:\n",
|
||||
" os.environ[\"SPN_CLIENT_SECRET\"] = os.environ[\"AZDATA_NB_VAR_SP_CLIENT_SECRET\"]\n",
|
||||
"os.environ[\"SPN_AUTHORITY\"] = \"https://login.microsoftonline.com\""
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "e7e10828-6cae-45af-8c2f-1484b6d4f9ac",
|
||||
@@ -175,7 +173,7 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Create Azure Arc Data controller**"
|
||||
"### **Create Azure Arc Data Controller**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "efe78cd3-ed73-4c9b-b586-fdd6c07dd37f"
|
||||
@@ -184,16 +182,14 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"print (f'Creating Azure Arc controller: {arc_data_controller_name} using configuration {arc_cluster_context}')\n",
|
||||
"print (f'Creating Azure Arc Data Controller: {arc_data_controller_name} using configuration {arc_cluster_context}')\n",
|
||||
"os.environ[\"ACCEPT_EULA\"] = 'yes'\n",
|
||||
"os.environ[\"AZDATA_USERNAME\"] = arc_admin_username\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = arc_admin_password\n",
|
||||
"os.environ[\"DOCKER_USERNAME\"] = arc_docker_username\n",
|
||||
"os.environ[\"DOCKER_PASSWORD\"] = arc_docker_password\n",
|
||||
"if os.name == 'nt':\n",
|
||||
" print(f'If you don\\'t see output produced by azdata, you can run the following command in a terminal window to check the deployment status:\\n\\t {os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -n {arc_data_controller_namespace}')\n",
|
||||
"run_command(f'azdata arc dc create --connectivity-mode {arc_data_controller_connectivity_mode} -n {arc_data_controller_name} -ns {arc_data_controller_namespace} -s {arc_subscription} -g {arc_resource_group} -l {arc_data_controller_location} -sc {arc_data_controller_storage_class} --profile-name {arc_profile}')\n",
|
||||
"print(f'Azure Arc Data controller cluster: {arc_data_controller_name} created.') "
|
||||
"print(f'Azure Arc Data Controller: {arc_data_controller_name} created.') "
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "373947a1-90b9-49ee-86f4-17a4c7d4ca76",
|
||||
@@ -205,7 +201,7 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Setting context to created Azure Arc Data controller**"
|
||||
"### **Setting context to created Azure Arc Data Controller**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "a3ddc701-811d-4058-b3fb-b7295fcf50ae"
|
||||
@@ -214,7 +210,7 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"# Setting context to data controller.\n",
|
||||
"# Setting context to Data Controller.\n",
|
||||
"#\n",
|
||||
"run_command(f'kubectl config set-context --current --namespace {arc_data_controller_namespace}')"
|
||||
],
|
||||
@@ -227,7 +223,7 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Login to the data controller.**\n"
|
||||
"### **Login to the Data Controller.**\n"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "9376b2ab-0edf-478f-9e3c-5ff46ae3501a"
|
||||
@@ -236,9 +232,9 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"# Login to the data controller.\n",
|
||||
"# Login to the Data Controller.\n",
|
||||
"#\n",
|
||||
"run_command(f'azdata login -n {arc_data_controller_namespace}')"
|
||||
"run_command(f'azdata login --namespace {arc_data_controller_namespace}')"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "9aed0c5a-2c8a-4ad7-becb-60281923a196"
|
||||
@@ -247,4 +243,4 @@
|
||||
"execution_count": null
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -25,12 +25,12 @@
|
||||
"source": [
|
||||
"\n",
|
||||
" \n",
|
||||
"## Deploy a PostgreSQL server group on an existing Azure Arc data cluster\n",
|
||||
"## Create a PostgreSQL Hyperscale - Azure Arc on an existing Azure Arc Data Controller\n",
|
||||
" \n",
|
||||
"This notebook walks through the process of deploying a PostgreSQL server group on an existing Azure Arc data cluster.\n",
|
||||
"This notebook walks through the process of creating a PostgreSQL Hyperscale - Azure Arc on an existing Azure Arc Data Controller.\n",
|
||||
" \n",
|
||||
"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
|
||||
"* Make sure you have the target Azure Arc data cluster already created.\n",
|
||||
"* Make sure you have the target Azure Arc Data Controller already created.\n",
|
||||
"\n",
|
||||
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run All\" button to run the notebook</font></span>"
|
||||
],
|
||||
@@ -41,7 +41,21 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Check prerequisites**"
|
||||
"### **Prerequisites** \n",
|
||||
"Ensure the following tools are installed and added to PATH before proceeding.\n",
|
||||
" \n",
|
||||
"|Tools|Description|Installation|\n",
|
||||
"|---|---|---|\n",
|
||||
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "20fe3985-a01e-461c-bce0-235f7606cc3c"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Setup and Check Prerequisites**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "68531b91-ddce-47d7-a1d8-2ddc3d17f3e7"
|
||||
@@ -75,80 +89,20 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"#### **Ensure Postgres Server Group name and password exist**"
|
||||
"### **Set variables**\n",
|
||||
"\n",
|
||||
"#### \n",
|
||||
"\n",
|
||||
"Generated by Azure Data Studio using the values collected in the 'Deploy PostgreSQL Hyperscale - Azure Arc instance' wizard"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"# Required Values\n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" server_group_name = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME was not defined. Exiting\\n')\n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" postgres_password = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD was not defined. Exiting\\n') \n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" postgres_storage_class_data = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_DATA was not defined. Exiting\\n') \n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" postgres_storage_class_logs = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_LOGS was not defined. Exiting\\n') \n",
|
||||
"env_var = \"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" postgres_storage_class_backups = os.environ[\"AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_POSTGRES_STORAGE_CLASS_BACKUPS was not defined. Exiting\\n') \n",
|
||||
""
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"#### **Get optional parameters for the PostgreSQL server group**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"server_group_workers = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS\"]\n",
|
||||
"server_group_port = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PORT\")\n",
|
||||
"server_group_cores_request = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_REQUEST\")\n",
|
||||
"server_group_cores_limit = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CORES_LIMIT\")\n",
|
||||
"server_group_memory_request = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_REQUEST\")\n",
|
||||
"server_group_memory_limit = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_LIMIT\")"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Installing PostgreSQL server group**"
|
||||
"### **Creating the PostgreSQL Hyperscale - Azure Arc instance**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
|
||||
@@ -157,17 +111,37 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"print (f'Creating a PostgreSQL server group on Azure Arc')\n",
|
||||
"# Login to the data controller.\n",
|
||||
"#\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
|
||||
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
|
||||
"out=run_command()"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "71366399-5963-4e24-b2f2-6bb5bffba4ec"
|
||||
},
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"print (f'Creating the PostgreSQL Hyperscale - Azure Arc instance')\n",
|
||||
"\n",
|
||||
"workers_option = f' -w {server_group_workers}' if server_group_workers else \"\"\n",
|
||||
"port_option = f' --port \"{server_group_port}\"' if server_group_port else \"\"\n",
|
||||
"cores_request_option = f' -cr \"{server_group_cores_request}\"' if server_group_cores_request else \"\"\n",
|
||||
"cores_limit_option = f' -cl \"{server_group_cores_limit}\"' if server_group_cores_limit else \"\"\n",
|
||||
"memory_request_option = f' -mr \"{server_group_memory_request}Mi\"' if server_group_memory_request else \"\"\n",
|
||||
"memory_limit_option = f' -ml \"{server_group_memory_limit}Mi\"' if server_group_memory_limit else \"\"\n",
|
||||
"workers_option = f' -w {postgres_server_group_workers}' if postgres_server_group_workers else \"\"\n",
|
||||
"port_option = f' --port \"{postgres_server_group_port}\"' if postgres_server_group_port else \"\"\n",
|
||||
"engine_version_option = f' -ev {postgres_server_group_engine_version}' if postgres_server_group_engine_version else \"\"\n",
|
||||
"extensions_option = f' --extensions \"{postgres_server_group_extensions}\"' if postgres_server_group_extensions else \"\"\n",
|
||||
"volume_size_data_option = f' -vsd {postgres_server_group_volume_size_data}Gi' if postgres_server_group_volume_size_data else \"\"\n",
|
||||
"volume_size_logs_option = f' -vsl {postgres_server_group_volume_size_logs}Gi' if postgres_server_group_volume_size_logs else \"\"\n",
|
||||
"volume_size_backups_option = f' -vsb {postgres_server_group_volume_size_backups}Gi' if postgres_server_group_volume_size_backups else \"\"\n",
|
||||
"cores_request_option = f' -cr \"{postgres_server_group_cores_request}\"' if postgres_server_group_cores_request else \"\"\n",
|
||||
"cores_limit_option = f' -cl \"{postgres_server_group_cores_limit}\"' if postgres_server_group_cores_limit else \"\"\n",
|
||||
"memory_request_option = f' -mr \"{postgres_server_group_memory_request}Gi\"' if postgres_server_group_memory_request else \"\"\n",
|
||||
"memory_limit_option = f' -ml \"{postgres_server_group_memory_limit}Gi\"' if postgres_server_group_memory_limit else \"\"\n",
|
||||
"\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = postgres_password\n",
|
||||
"cmd = f'azdata arc postgres server create -n {server_group_name} -scd {postgres_storage_class_data} -scl {postgres_storage_class_logs} -scb {postgres_storage_class_backups}{workers_option}{port_option}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\"]\n",
|
||||
"cmd = f'azdata arc postgres server create -n {postgres_server_group_name} -scd {postgres_storage_class_data} -scl {postgres_storage_class_logs} -scb {postgres_storage_class_backups}{workers_option}{port_option}{engine_version_option}{extensions_option}{volume_size_data_option}{volume_size_logs_option}{volume_size_backups_option}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
|
||||
"out=run_command()"
|
||||
],
|
||||
"metadata": {
|
||||
@@ -177,4 +151,4 @@
|
||||
"execution_count": null
|
||||
}
|
||||
]
|
||||
}
|
||||
}
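The cell above appends a CLI flag only when the corresponding optional value was provided, so unset wizard fields simply fall back to the azdata defaults. A minimal sketch of the same optional-flag pattern, written in TypeScript for illustration (the helper name and the server group name are placeholders, not part of the notebook):

// Emit a flag only when a value is present; otherwise contribute nothing to the command line.
function optionalFlag(flag: string, value: string | undefined, suffix: string = ''): string {
	return value ? ` ${flag} "${value}${suffix}"` : '';
}

// Hypothetical values collected from the wizard's environment variables.
const workers = process.env['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS'];
const memoryRequest = process.env['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_REQUEST'];

const cmd = 'azdata arc postgres server create -n my-server-group'
	+ optionalFlag('-w', workers)
	+ optionalFlag('-mr', memoryRequest, 'Gi');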
|
||||
@@ -25,12 +25,12 @@
|
||||
"source": [
|
||||
"\n",
|
||||
" \n",
|
||||
"## Deploy Azure SQL managed instance on an existing Azure Arc data cluster\n",
|
||||
"## Create SQL managed instance - Azure Arc on an existing Azure Arc Data Controller\n",
|
||||
" \n",
|
||||
"This notebook walks through the process of deploying a <a href=\"https://docs.microsoft.com/azure/sql-database/sql-database-managed-instance\">Azure SQL managed instance</a> on an existing Azure Arc data cluster.\n",
|
||||
"This notebook walks through the process of creating a <a href=\"https://docs.microsoft.com/azure/sql-database/sql-database-managed-instance\">SQL managed instance - Azure Arc</a> on an existing Azure Arc Data Controller.\n",
|
||||
" \n",
|
||||
"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
|
||||
"* Make sure you have the target Azure Arc data cluster already created.\n",
|
||||
"* Make sure you have the target Azure Arc Data Controller already created.\n",
|
||||
"\n",
|
||||
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run All\" button to run the notebook</font></span>"
|
||||
],
|
||||
@@ -41,7 +41,21 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Check prerequisites**"
|
||||
"### **Prerequisites** \n",
|
||||
"Ensure the following tools are installed and added to PATH before proceeding.\n",
|
||||
" \n",
|
||||
"|Tools|Description|Installation|\n",
|
||||
"|---|---|---|\n",
|
||||
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "d1c8258e-9efd-4380-a48c-cd675423ed2f"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Setup and Check Prerequisites**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "68531b91-ddce-47d7-a1d8-2ddc3d17f3e7"
|
||||
@@ -75,49 +89,20 @@
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"#### **Ensure SQL instance name, username and password exist**"
|
||||
"### **Set variables**\n",
|
||||
"\n",
|
||||
"#### \n",
|
||||
"\n",
|
||||
"Generated by Azure Data Studio using the values collected in the 'Deploy Azure SQL managed instance - Azure Arc' wizard"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
||||
}
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"# Required Values\n",
|
||||
"env_var = \"AZDATA_NB_VAR_SQL_INSTANCE_NAME\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" mssql_instance_name = os.environ[\"AZDATA_NB_VAR_SQL_INSTANCE_NAME\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_INSTANCE_NAME was not defined. Exiting\\n')\n",
|
||||
"env_var = \"AZDATA_NB_VAR_SQL_PASSWORD\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" mssql_password = os.environ[\"AZDATA_NB_VAR_SQL_PASSWORD\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_PASSWORD was not defined. Exiting\\n') \n",
|
||||
"env_var = \"AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" mssql_storage_class_data = os.environ[\"AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA was not defined. Exiting\\n') \n",
|
||||
"env_var = \"AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS\" in os.environ\n",
|
||||
"if env_var:\n",
|
||||
" mssql_storage_class_logs = os.environ[\"AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS\"]\n",
|
||||
"else:\n",
|
||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_STORAGE_CLASS_LOGS was not defined. Exiting\\n') \n",
|
||||
""
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"source": [
|
||||
"### **Installing Managed SQL Instance**"
|
||||
"### **Creating the SQL managed instance - Azure Arc instance**"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
|
||||
@@ -126,10 +111,31 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"print (f'Creating Managed SQL Server instance on Azure Arc')\n",
|
||||
"# Login to the data controller.\n",
|
||||
"#\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
|
||||
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
|
||||
"out=run_command()"
|
||||
],
|
||||
"metadata": {
|
||||
"azdata_cell_guid": "1437c536-17e8-4a7f-80c1-aa43ad02686c"
|
||||
},
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"source": [
|
||||
"print (f'Creating the SQL managed instance - Azure Arc instance')\n",
|
||||
"\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = mssql_password\n",
|
||||
"cmd = f'azdata arc sql mi create -n {mssql_instance_name} -scd {mssql_storage_class_data} -scl {mssql_storage_class_logs}'\n",
|
||||
"cores_request_option = f' -cr \"{sql_cores_request}\"' if sql_cores_request else \"\"\n",
|
||||
"cores_limit_option = f' -cl \"{sql_cores_limit}\"' if sql_cores_limit else \"\"\n",
|
||||
"memory_request_option = f' -mr \"{sql_memory_request}Gi\"' if sql_memory_request else \"\"\n",
|
||||
"memory_limit_option = f' -ml \"{sql_memory_limit}Gi\"' if sql_memory_limit else \"\"\n",
|
||||
"\n",
|
||||
"os.environ[\"AZDATA_USERNAME\"] = sql_username\n",
|
||||
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_SQL_PASSWORD\"]\n",
|
||||
"cmd = f'azdata arc sql mi create -n {sql_instance_name} -scd {sql_storage_class_data} -scl {sql_storage_class_logs}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
|
||||
"out=run_command()"
|
||||
],
|
||||
"metadata": {
|
||||
@@ -139,4 +145,4 @@
|
||||
"execution_count": null
|
||||
}
|
||||
]
|
||||
}
|
||||
}
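The cell above reads each required setting with a read-or-exit guard so the notebook fails fast when the wizard did not export a variable. The same guard, sketched in TypeScript for illustration:

// Return a required environment variable or exit with a clear message, mirroring the sys.exit calls above.
function requireEnv(name: string): string {
	const value = process.env[name];
	if (!value) {
		console.error(`environment variable: ${name} was not defined. Exiting`);
		process.exit(1);
	}
	return value;
}

const sqlInstanceName = requireEnv('AZDATA_NB_VAR_SQL_INSTANCE_NAME');
const sqlStorageClassData = requireEnv('AZDATA_NB_VAR_SQL_STORAGE_CLASS_DATA');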
|
||||
File diff suppressed because it is too large
@@ -12,75 +12,87 @@
|
||||
"command.editConnection.title": "Edit Connection",
|
||||
"arc.openDashboard": "Manage",
|
||||
|
||||
"resource.type.azure.arc.display.name": "Azure Arc data controller",
|
||||
"resource.type.azure.arc.display.name": "Azure Arc data controller (preview)",
|
||||
"resource.type.azure.arc.description": "Creates an Azure Arc data controller",
|
||||
|
||||
"arc.control.plane.new.wizard.title": "Create Azure Arc data controller",
|
||||
"arc.control.plane.cluster.environment.title": "What is your target existing Kubernetes cluster environment?",
|
||||
"arc.control.plane.select.cluster.title": "Select from existing Kubernetes clusters",
|
||||
"arc.control.plane.kube.cluster.context": "Cluster context",
|
||||
"arc.control.plane.container.registry.title": "Container registry details",
|
||||
"arc.control.plane.container.registry.name": "Container registry login",
|
||||
"arc.control.plane.container.registry.password": "Container registry password",
|
||||
"arc.control.plane.cluster.config.profile.title": "Choose the config profile",
|
||||
"arc.control.plane.cluster.config.profile": "Config profile",
|
||||
"arc.control.plane.data.controller.create.title": "Provide details to create Azure Arc data controller",
|
||||
"arc.control.plane.project.details.title": "Project details",
|
||||
"arc.control.plane.project.details.description": "Select the subscription to manage deployed resources and costs. Use resource groups like folders to organize and manage all your resources.",
|
||||
"arc.control.plane.data.controller.details.title": "Data controller details",
|
||||
"arc.control.plane.data.controller.details.description": "Provide an Azure region and a name for your Azure Arc data controller. This name will be used to identify your Arc location for remote management and monitoring.",
|
||||
"arc.control.plane.arc.data.controller.connectivity.mode": "Data controller connectivity mode",
|
||||
"arc.control.plane.arc.data.controller.namespace": "Data controller namespace",
|
||||
"arc.control.plane.arc.data.controller.namespace.validation.description": "Data controller namespace (lower case letters, digits and - only)",
|
||||
"arc.control.plane.arc.data.controller.name": "Data controller name",
|
||||
"arc.control.plane.arc.data.controller.name.validation.description": "Data controller name (lower case letters, digits and - only)",
|
||||
"arc.control.plane.arc.data.controller.location": "Location",
|
||||
"arc.control.plane.admin.account.title": "Administrator account",
|
||||
"arc.control.plane.admin.account.name": "Data controller login",
|
||||
"arc.control.plane.admin.account.password": "Password",
|
||||
"arc.control.plane.admin.account.confirm.password": "Confirm password",
|
||||
"arc.control.plane.data.controller.create.summary.title": "Review your configuration",
|
||||
"arc.control.plane.summary.arc.data.controller": "Azure Arc data controller",
|
||||
"arc.control.plane.summary.estimated.cost.per.month": "Estimated cost per month",
|
||||
"arc.control.plane.summary.arc.by.microsoft" : "by Microsoft",
|
||||
"arc.control.plane.summary.free" : "Free",
|
||||
"arc.control.plane.summary.arc.terms.of.use" : "Terms of use",
|
||||
"arc.control.plane.summary.arc.terms.separator" : "|",
|
||||
"arc.control.plane.summary.arc.terms.privacy.policy" : "Privacy policy",
|
||||
"arc.control.plane.summary.terms" : "Terms",
|
||||
"arc.control.plane.summary.terms.description": "By clicking 'Script to notebook', I (a) agree to the legal terms and privacy statement(s) associated with the Marketplace offering(s) listed above; (b) authorize Microsoft to bill my current payment method for the fees associated with the offering(s), with the same billing frequency as my Azure subscription; and (c) agree that Microsoft may share my contact, usage and transactional information with the provider(s) of the offering(s) for support, billing and other transactional activities. Microsoft does not provide rights for third-party offerings. For additional details see {0}.",
|
||||
"arc.control.plane.summary.terms.link.text": "Azure Marketplace Terms",
|
||||
"arc.control.plane.summary.kubernetes": "Kubernetes",
|
||||
"arc.control.plane.summary.kube.config.file.path": "Kube config file path",
|
||||
"arc.control.plane.summary.cluster.context": "Cluster context",
|
||||
"arc.control.plane.summary.profile": "Config profile",
|
||||
"arc.control.plane.summary.username": "Username",
|
||||
"arc.control.plane.summary.docker.username": "Docker username",
|
||||
"arc.control.plane.summary.azure": "Azure",
|
||||
"arc.control.plane.summary.subscription": "Subscription",
|
||||
"arc.control.plane.summary.resource.group": "Resource group",
|
||||
"arc.control.plane.summary.data.controller.connectivity.mode": "Data controller connectivity mode",
|
||||
"arc.control.plane.summary.data.controller.name": "Data controller name",
|
||||
"arc.control.plane.summary.data.controller.namespace": "Data controller namespace",
|
||||
"arc.control.plane.summary.location": "Location",
|
||||
"arc.control.plane.arc.data.controller.agreement": "I accept {0} and {1}.",
|
||||
"arc.data.controller.new.wizard.title": "Create Azure Arc data controller",
|
||||
"arc.data.controller.cluster.environment.title": "What is your target existing Kubernetes cluster environment?",
|
||||
"arc.data.controller.select.cluster.title": "Select from existing Kubernetes clusters",
|
||||
"arc.data.controller.kube.cluster.context": "Cluster context",
|
||||
"arc.data.controller.cluster.config.profile.title": "Choose the config profile",
|
||||
"arc.data.controller.cluster.config.profile": "Config profile",
|
||||
"arc.data.controller.create.azureconfig.title": "Azure and Connectivity Configuration",
|
||||
"arc.data.controller.connectivitymode.description": "Select the connectivity mode for the controller.",
|
||||
"arc.data.controller.create.controllerconfig.title": "Controller Configuration",
|
||||
"arc.data.controller.project.details.title": "Azure details",
|
||||
"arc.data.controller.project.details.description": "Select the subscription to manage deployed resources and costs. Use resource groups like folders to organize and manage all your resources.",
|
||||
"arc.data.controller.details.title": "Data controller details",
|
||||
"arc.data.controller.details.description": "Provide a namespace, name and storage class for your Azure Arc data controller. This name will be used to identify your Arc instance for remote management and monitoring.",
|
||||
"arc.data.controller.namespace": "Data controller namespace",
|
||||
"arc.data.controller.namespace.validation.description": "Namespace must consist of lower case alphanumeric characters or '-', start/end with an alphanumeric character, and be 63 characters or fewer in length.",
|
||||
"arc.data.controller.name": "Data controller name",
|
||||
"arc.data.controller.name.validation.description": "Name must consist of lower case alphanumeric characters, '-' or '.', start/end with an alphanumeric character and be 253 characters or less in length.",
|
||||
"arc.data.controller.location": "Location",
|
||||
"arc.data.controller.admin.account.title": "Administrator account",
|
||||
"arc.data.controller.admin.account.name": "Data controller login",
|
||||
"arc.data.controller.admin.account.password": "Password",
|
||||
"arc.data.controller.admin.account.confirm.password": "Confirm password",
|
||||
"arc.data.controller.connectivitymode": "Connectivity Mode",
|
||||
"arc.data.controller.direct": "Direct",
|
||||
"arc.data.controller.indirect": "Indirect",
|
||||
"arc.data.controller.serviceprincipal.description": "When deploying a controller in direct connected mode a Service Principal is required for uploading metrics to Azure. {0} about how to create this Service Principal and assign it the correct roles.",
|
||||
"arc.data.controller.spclientid": "Service Principal Client ID",
|
||||
"arc.data.controller.spclientid.description": "The Application (client) ID of the created Service Principal",
|
||||
"arc.data.controller.spclientid.validation.description": "The client ID must be a GUID in the format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
|
||||
"arc.data.controller.spclientsecret": "Service Principal Client Secret",
|
||||
"arc.data.controller.spclientsecret.description": "The password generated during creation of the Service Principal",
|
||||
"arc.data.controller.sptenantid": "Service Principal Tenant ID",
|
||||
"arc.data.controller.sptenantid.description": "The Tenant ID of the Service Principal. This must be the same as the Tenant ID of the subscription selected to create this controller for.",
|
||||
"arc.data.controller.sptenantid.validation.description": "The tenant ID must be a GUID in the format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
|
||||
"arc.data.controller.create.summary.title": "Review your configuration",
|
||||
"arc.data.controller.summary.arc.data.controller": "Azure Arc data controller",
|
||||
"arc.data.controller.summary.estimated.cost.per.month": "Estimated cost per month",
|
||||
"arc.data.controller.summary.arc.by.microsoft" : "by Microsoft",
|
||||
"arc.data.controller.summary.free" : "Free",
|
||||
"arc.data.controller.summary.arc.terms.of.use" : "Terms of use",
|
||||
"arc.data.controller.summary.arc.terms.separator" : "|",
|
||||
"arc.data.controller.summary.arc.terms.privacy.policy" : "Privacy policy",
|
||||
"arc.data.controller.summary.terms" : "Terms",
|
||||
"arc.data.controller.summary.terms.description": "By clicking 'Script to notebook', I (a) agree to the legal terms and privacy statement(s) associated with the Marketplace offering(s) listed above; (b) authorize Microsoft to bill my current payment method for the fees associated with the offering(s), with the same billing frequency as my Azure subscription; and (c) agree that Microsoft may share my contact, usage and transactional information with the provider(s) of the offering(s) for support, billing and other transactional activities. Microsoft does not provide rights for third-party offerings. For additional details see {0}.",
|
||||
"arc.data.controller.summary.terms.link.text": "Azure Marketplace Terms",
|
||||
"arc.data.controller.summary.kubernetes": "Kubernetes",
|
||||
"arc.data.controller.summary.kube.config.file.path": "Kube config file path",
|
||||
"arc.data.controller.summary.cluster.context": "Cluster context",
|
||||
"arc.data.controller.summary.profile": "Config profile",
|
||||
"arc.data.controller.summary.username": "Username",
|
||||
"arc.data.controller.summary.azure": "Azure",
|
||||
"arc.data.controller.summary.subscription": "Subscription",
|
||||
"arc.data.controller.summary.resource.group": "Resource group",
|
||||
"arc.data.controller.summary.data.controller.name": "Data controller name",
|
||||
"arc.data.controller.summary.data.controller.namespace": "Data controller namespace",
|
||||
"arc.data.controller.summary.controller": "Controller",
|
||||
"arc.data.controller.summary.location": "Location",
|
||||
"arc.data.controller.agreement": "I accept {0} and {1}.",
|
||||
"arc.data.controller.readmore": "Read more",
|
||||
"microsoft.agreement.privacy.statement":"Microsoft Privacy Statement",
|
||||
"arc.agreement.azdata.eula":"azdata license terms",
|
||||
"deploy.arc.control.plane.action":"Script to notebook",
|
||||
|
||||
"deploy.script.action":"Script to notebook",
|
||||
"deploy.done.action":"Deploy",
|
||||
|
||||
"resource.type.arc.sql.display.name": "Azure SQL managed instance - Azure Arc (preview)",
|
||||
"resource.type.arc.postgres.display.name": "PostgreSQL server groups - Azure Arc (preview)",
|
||||
"resource.type.arc.postgres.display.name": "PostgreSQL Hyperscale server groups - Azure Arc (preview)",
|
||||
"resource.type.arc.sql.description": "Managed SQL Instance service for app developers in a customer-managed environment",
|
||||
"resource.type.arc.postgres.description": "Deploy PostgreSQL server groups into an Azure Arc environment",
|
||||
"resource.type.picker.display.name": "Resource Type",
|
||||
"sql.managed.instance.display.name": "Azure SQL managed instance - Azure Arc",
|
||||
"postgres.server.group.display.name": "PostgreSQL server groups - Azure Arc",
|
||||
"arc.sql.new.dialog.title": "Deploy Azure SQL managed instance - Azure Arc (preview)",
|
||||
"arc.sql.settings.section.title": "SQL Connection information",
|
||||
"resource.type.arc.postgres.description": "Deploy PostgreSQL Hyperscale server groups into an Azure Arc environment",
|
||||
"arc.controller": "Target Azure Arc Controller",
|
||||
|
||||
|
||||
"arc.sql.wizard.title": "Deploy Azure SQL managed instance - Azure Arc (preview)",
|
||||
"arc.sql.wizard.page1.title": "Provide Azure SQL managed instance parameters",
|
||||
"arc.sql.connection.settings.section.title": "SQL Connection information",
|
||||
"arc.sql.instance.settings.section.title": "SQL Instance settings",
|
||||
"arc.azure.section.title": "Azure information",
|
||||
"arc.sql.instance.name": "Instance name (lower case letters and digits only)",
|
||||
"arc.sql.instance.name": "Instance name",
|
||||
"arc.sql.username": "Username",
|
||||
"arc.sql.invalid.username": "sa username is disabled, please choose another username",
|
||||
"arc.sql.invalid.instance.name": "Instance name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 13 characters or fewer in length.",
|
||||
"arc.storage-class.dc.label": "Storage Class",
|
||||
"arc.sql.storage-class.dc.description": "The storage class to be used for all data and logs persistent volumes for all data controller pods that require them.",
|
||||
"arc.storage-class.data.label": "Storage Class (Data)",
|
||||
@@ -90,6 +102,14 @@
|
||||
"arc.sql.storage-class.logs.description": "The storage class to be used for logs (/var/log)",
|
||||
"arc.postgres.storage-class.logs.description": "The storage class to be used for logs persistent volumes",
|
||||
"arc.storage-class.backups.label": "Storage Class (Backups)",
|
||||
"arc.cores-limit.label": "Cores Limit",
|
||||
"arc.sql.cores-limit.description": "The cores limit of the managed instance as an integer.",
|
||||
"arc.cores-request.label": "Cores Request",
|
||||
"arc.sql.cores-request.description": "The request for cores of the managed instance as an integer.",
|
||||
"arc.memory-limit.label": "Memory Limit",
|
||||
"arc.sql.memory-limit.description": "The limit of the capacity of the managed instance as an integer.",
|
||||
"arc.memory-request.label": "Memory Request",
|
||||
"arc.sql.memory-request.description": "The request for the capacity of the managed instance as an integer amount of memory in GBs.",
|
||||
"arc.postgres.storage-class.backups.description": "The storage class to be used for backup persistent volumes",
|
||||
"arc.password": "Password",
|
||||
"arc.confirm.password": "Confirm password",
|
||||
@@ -97,19 +117,39 @@
|
||||
"arc.azure.subscription": "Azure subscription",
|
||||
"arc.azure.resource.group": "Azure resource group",
|
||||
"arc.azure.location": "Azure location",
|
||||
"arc.postgres.new.dialog.title": "Deploy a PostgreSQL server group on Azure Arc (preview)",
|
||||
"arc.postgres.settings.section.title": "PostgreSQL server group settings",
|
||||
"arc.postgres.settings.resource.title": "PostgreSQL server group resource settings",
|
||||
"arc.postgres.wizard.title": "Deploy an Azure Arc enabled PostgreSQL Hyperscale server group (Preview)",
|
||||
"arc.postgres.wizard.page1.title": "Provide Azure enabled PostgreSQL Hyperscale server group parameters",
|
||||
"arc.postgres.settings.section.title": "General settings",
|
||||
"arc.postgres.settings.resource.title": "Resource settings",
|
||||
"arc.postgres.settings.storage.title": "Storage settings",
|
||||
"arc.postgres.server.group.name": "Server group name",
|
||||
"arc.postgres.server.group.name.validation.description": "Server group name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 10 characters or fewer in length.",
|
||||
"arc.postgres.server.group.workers": "Number of workers",
|
||||
"arc.postgres.server.group.name.validation.description": "Server group name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 12 characters or fewer in length.",
|
||||
"arc.postgres.server.group.workers.label": "Number of workers",
|
||||
"arc.postgres.server.group.workers.description": "The number of worker nodes to provision in a sharded cluster, or zero (the default) for single-node Postgres.",
|
||||
"arc.postgres.server.group.port": "Port",
|
||||
"arc.postgres.server.group.cores.request": "Min CPU cores (per node) to reserve",
|
||||
"arc.postgres.server.group.cores.limit": "Max CPU cores (per node) to allow",
|
||||
"arc.postgres.server.group.memory.request": "Min memory MB (per node) to reserve",
|
||||
"arc.postgres.server.group.memory.limit": "Max memory MB (per node) to allow",
|
||||
"arc.agreement": "I accept {0}, {1} and {2}.",
|
||||
"arc.agreement.sql.terms.conditions":"Azure SQL managed instance - Azure Arc terms and conditions",
|
||||
"arc.agreement.postgres.terms.conditions":"PostgreSQL server groups - Azure Arc terms and conditions",
|
||||
"arc.deploy.action":"Deploy"
|
||||
"arc.postgres.server.group.engine.version": "Engine Version",
|
||||
"arc.postgres.server.group.extensions.label": "Extensions",
|
||||
"arc.postgres.server.group.extensions.description": "A comma-separated list of the Postgres extensions that should be loaded on startup. Please refer to the postgres documentation for supported values.",
|
||||
"arc.postgres.server.group.volume.size.data.label": "Volume Size GB (Data)",
|
||||
"arc.postgres.server.group.volume.size.data.description": "The size of the storage volume to be used for data in GB.",
|
||||
"arc.postgres.server.group.volume.size.logs.label": "Volume Size GB (Logs)",
|
||||
"arc.postgres.server.group.volume.size.logs.description": "The size of the storage volume to be used for logs in GB.",
|
||||
"arc.postgres.server.group.volume.size.backups.label": "Volume Size GB (Backups)",
|
||||
"arc.postgres.server.group.volume.size.backups.description": "The size of the storage volume to be used for backups in GB.",
|
||||
"arc.postgres.server.group.cores.request.label": "CPU request (cores per node)",
|
||||
"arc.postgres.server.group.cores.request.description": "The minimum number of CPU cores that must be available per node to schedule the service. Fractional cores are supported.",
|
||||
"arc.postgres.server.group.cores.limit.label": "CPU limit (cores per node)",
|
||||
"arc.postgres.server.group.cores.limit.description": "The maximum number of CPU cores for the Postgres instance that can be used per node. Fractional cores are supported.",
|
||||
"arc.postgres.server.group.memory.request.label": "Memory request (GB per node)",
|
||||
"arc.postgres.server.group.memory.request.description": "The memory request of the Postgres instance per node in GB.",
|
||||
"arc.postgres.server.group.memory.limit.label": "Memory limit (GB per node)",
|
||||
"arc.postgres.server.group.memory.limit.description": "The memory limit of the Postgres instance per node in GB.",
|
||||
"arc.agreement": "I accept {0} and {1}.",
|
||||
"arc.agreement.sql.terms.conditions": "Azure SQL managed instance - Azure Arc terms and conditions",
|
||||
"arc.agreement.postgres.terms.conditions": "Azure Arc enabled PostgreSQL Hyperscale terms and conditions",
|
||||
"should.be.integer": "Value must be an integer",
|
||||
"requested.cores.less.than.or.equal.to.cores.limit": "Requested cores must be less than or equal to cores limit",
|
||||
"cores.limit.greater.than.or.equal.to.requested.cores": "Cores limit must be greater than or equal to requested cores",
|
||||
"requested.memory.less.than.or.equal.to.memory.limit": "Requested memory must be less than or equal to memory limit",
|
||||
"memory.limit.greater.than.or.equal.to.requested.memory": "Memory limit must be greater than or equal to requested memory"
|
||||
}
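The last few strings back a simple numeric check in the deployment wizard: values must be integers and a request may never exceed its limit. A hedged TypeScript sketch of the rule those messages describe (the function is illustrative, not part of this change):

// Returns an error message when the pair violates the rule the strings above describe, undefined when valid.
function validateRequestAndLimit(request: number, limit: number): string | undefined {
	if (!Number.isInteger(request) || !Number.isInteger(limit)) {
		return 'Value must be an integer';
	}
	if (request > limit) {
		return 'Requested cores must be less than or equal to cores limit';
	}
	return undefined;
}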
41
extensions/arc/src/common/api.ts
Normal file
@@ -0,0 +1,41 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as arc from 'arc';
import { PasswordToControllerDialog } from '../ui/dialogs/connectControllerDialog';
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
import { ControllerTreeNode } from '../ui/tree/controllerTreeNode';
import { UserCancelledError } from './utils';

export function arcApi(treeDataProvider: AzureArcTreeDataProvider): arc.IExtension {
	return {
		getRegisteredDataControllers: () => getRegisteredDataControllers(treeDataProvider),
		getControllerPassword: (controllerInfo: arc.ControllerInfo) => getControllerPassword(treeDataProvider, controllerInfo),
		reacquireControllerPassword: (controllerInfo: arc.ControllerInfo) => reacquireControllerPassword(treeDataProvider, controllerInfo)
	};
}
export async function reacquireControllerPassword(treeDataProvider: AzureArcTreeDataProvider, controllerInfo: arc.ControllerInfo): Promise<string> {
	const dialog = new PasswordToControllerDialog(treeDataProvider);
	dialog.showDialog(controllerInfo);
	const model = await dialog.waitForClose();
	if (!model) {
		throw new UserCancelledError();
	}
	return model.password;
}

export async function getControllerPassword(treeDataProvider: AzureArcTreeDataProvider, controllerInfo: arc.ControllerInfo): Promise<string> {
	return await treeDataProvider.getPassword(controllerInfo);
}

export async function getRegisteredDataControllers(treeDataProvider: AzureArcTreeDataProvider): Promise<arc.DataController[]> {
	return (await treeDataProvider.getChildren())
		.filter(node => node instanceof ControllerTreeNode)
		.map(node => ({
			label: (node as ControllerTreeNode).model.label,
			info: (node as ControllerTreeNode).model.info
		}));
}
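A sketch of how another extension could consume this exported API to enumerate controllers and read a stored password. The extension identifier used here is a placeholder; the real one comes from the arc extension's package.json.

import * as vscode from 'vscode';
import * as arc from 'arc';

async function listControllers(): Promise<void> {
	// 'Microsoft.arc' is assumed; substitute the actual publisher.name identifier.
	const arcExtension = vscode.extensions.getExtension<arc.IExtension>('Microsoft.arc');
	const api = await arcExtension?.activate();
	if (!api) {
		return;
	}
	const controllers = await api.getRegisteredDataControllers();
	for (const controller of controllers) {
		const password = await api.getControllerPassword(controller.info);
		console.log(`${controller.label}: ${password ? 'password cached' : 'no password stored'}`);
	}
}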
83
extensions/arc/src/common/cacheManager.ts
Normal file
@@ -0,0 +1,83 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { Deferred } from './promise';

const enum Status {
	notStarted,
	inProgress,
	done
}

interface State<T> {
	entry?: T,
	error?: Error,
	status: Status,
	id: number,
	pendingOperation: Deferred<void>
}

/**
 * An implementation of Cache Manager which ensures that only one call to populate cache miss is pending at a given time.
 * All remaining calls for retrieval are awaited until the one in progress finishes and then all awaited calls are resolved with the value
 * from the cache.
 */
export class CacheManager<K, T> {
	private _cache = new Map<K, State<T>>();
	private _id = 0;

	public async getCacheEntry(key: K, retrieveEntry: (key: K) => Promise<T>): Promise<T> {
		const cacheHit: State<T> | undefined = this._cache.get(key);
		// each branch either throws or returns the password.
		if (cacheHit === undefined) {
			// populate a new state entry and add it to the cache
			const state: State<T> = {
				status: Status.notStarted,
				id: this._id++,
				pendingOperation: new Deferred<void>()
			};
			this._cache.set(key, state);
			// now that we have the state entry initialized, retry to fetch the cacheEntry
			let returnValue: T = await this.getCacheEntry(key, retrieveEntry);
			await state.pendingOperation;
			return returnValue!;
		} else {
			switch (cacheHit.status) {
				case Status.notStarted: {
					cacheHit.status = Status.inProgress;
					// retrieve and populate the missed cache hit.
					try {
						cacheHit.entry = await retrieveEntry(key);
					} catch (error) {
						cacheHit.error = error;
					} finally {
						cacheHit.status = Status.done;
						// we do not reject here even in error case because we do not want our awaits on pendingOperation to throw
						// We track our own error state and when all done we throw if an error had happened. This results
						// in the rejection of the promised returned by this method.
						cacheHit.pendingOperation.resolve();
					}
					return await this.getCacheEntry(key, retrieveEntry);
				}

				case Status.inProgress: {
					await cacheHit.pendingOperation;
					return await this.getCacheEntry(key, retrieveEntry);
				}

				case Status.done: {
					if (cacheHit.error !== undefined) {
						await cacheHit.pendingOperation;
						throw cacheHit.error;
					}
					else {
						await cacheHit.pendingOperation;
						return cacheHit.entry!;
					}
				}
			}
		}
	}
}
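A usage sketch for the single-flight cache above: concurrent lookups for the same key share one retrieval and later callers are served from the cache. The import path and lookup function are illustrative.

import { CacheManager } from './common/cacheManager';

// Stand-in for the real retrieval (for example, reading the credential store or prompting the user).
async function lookupPassword(controllerId: string): Promise<string> {
	return `password-for-${controllerId}`;
}

const passwordCache = new CacheManager<string, string>();

function fetchPassword(controllerId: string): Promise<string> {
	return passwordCache.getCacheEntry(controllerId, lookupPassword);
}

// Both calls resolve to the same value and lookupPassword runs only once.
Promise.all([fetchPassword('arc-dc'), fetchPassword('arc-dc')]).then(([first, second]) => console.log(first === second));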
39
extensions/arc/src/common/kubeUtils.ts
Normal file
@@ -0,0 +1,39 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as os from 'os';
import * as path from 'path';
import * as yamljs from 'yamljs';
import * as loc from '../localizedConstants';
import { throwUnless } from './utils';

export interface KubeClusterContext {
	name: string;
	isCurrentContext: boolean;
}

export function getKubeConfigClusterContexts(configFile: string): Promise<KubeClusterContext[]> {
	const config: any = yamljs.load(configFile);
	const rawContexts = <any[]>config['contexts'];
	throwUnless(rawContexts && rawContexts.length, loc.noContextFound(configFile));
	const currentContext = <string>config['current-context'];
	throwUnless(currentContext, loc.noCurrentContextFound(configFile));
	const contexts: KubeClusterContext[] = [];
	rawContexts.forEach(rawContext => {
		const name = <string>rawContext['name'];
		throwUnless(name, loc.noNameInContext(configFile));
		if (name) {
			contexts.push({
				name: name,
				isCurrentContext: name === currentContext
			});
		}
	});
	return Promise.resolve(contexts);
}

export function getDefaultKubeConfigPath(): string {
	return path.join(os.homedir(), '.kube', 'config');
}
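A usage sketch combining the two helpers above: resolve the context marked as current in the default kubeconfig (import path assumed).

import { getDefaultKubeConfigPath, getKubeConfigClusterContexts, KubeClusterContext } from './common/kubeUtils';

async function getCurrentClusterContext(): Promise<KubeClusterContext | undefined> {
	// Load all contexts from ~/.kube/config and pick the one flagged as current.
	const contexts = await getKubeConfigClusterContexts(getDefaultKubeConfigPath());
	return contexts.find(context => context.isCurrentContext);
}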
@@ -67,7 +67,7 @@ export function getResourceTypeIcon(resourceType: string | undefined): IconPath
|
||||
|
||||
/**
|
||||
* Returns the text to display for known connection modes
|
||||
* @param connectionMode The string repsenting the connection mode
|
||||
* @param connectionMode The string representing the connection mode
|
||||
*/
|
||||
export function getConnectionModeDisplayText(connectionMode: string | undefined): string {
|
||||
connectionMode = connectionMode ?? '';
|
||||
@@ -148,15 +148,15 @@ async function promptInputBox(title: string, options: vscode.InputBoxOptions): P
|
||||
}
|
||||
|
||||
/**
|
||||
* Opens an input box prompting the user to enter in the name of a resource to delete
|
||||
* @param name The name of the resource to delete
|
||||
* Opens an input box prompting the user to enter in the name of an instance to delete
|
||||
* @param name The name of the instance to delete
|
||||
* @returns Promise resolving to true if the user confirmed the name, false if the input box was closed for any other reason
|
||||
*/
|
||||
export async function promptForResourceDeletion(name: string): Promise<boolean> {
|
||||
const title = loc.resourceDeletionWarning(name);
|
||||
export async function promptForInstanceDeletion(name: string): Promise<boolean> {
|
||||
const title = loc.instanceDeletionWarning(name);
|
||||
const options: vscode.InputBoxOptions = {
|
||||
placeHolder: name,
|
||||
validateInput: input => input !== name ? loc.invalidResourceDeletionName(name) : ''
|
||||
validateInput: input => input !== name ? loc.invalidInstanceDeletionName(name) : ''
|
||||
};
|
||||
|
||||
return await promptInputBox(title, options) !== undefined;
|
||||
@@ -189,28 +189,15 @@ export async function promptAndConfirmPassword(validate: (input: string) => stri
|
||||
/**
|
||||
* Gets the message to display for a given error object that may be a variety of types.
|
||||
* @param error The error object
|
||||
* @param useMessageWithLink Whether to use the messageWithLink - if available
|
||||
*/
|
||||
export function getErrorMessage(error: any): string {
|
||||
export function getErrorMessage(error: any, useMessageWithLink: boolean = false): string {
|
||||
if (useMessageWithLink && error.messageWithLink) {
|
||||
return error.messageWithLink;
|
||||
}
|
||||
return error.message ?? error;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses an instance name from the controller. An instance name will either be just its name
|
||||
* e.g. myinstance or namespace_name e.g. mynamespace_my-instance.
|
||||
* @param instanceName The instance name in one of the formats described
|
||||
*/
|
||||
export function parseInstanceName(instanceName: string | undefined): string {
|
||||
instanceName = instanceName ?? '';
|
||||
const parts: string[] = instanceName.split('_');
|
||||
if (parts.length === 2) {
|
||||
instanceName = parts[1];
|
||||
}
|
||||
else if (parts.length > 2) {
|
||||
throw new Error(`Cannot parse resource '${instanceName}'. Acceptable formats are 'namespace_name' or 'name'.`);
|
||||
}
|
||||
return instanceName;
|
||||
}
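A few examples of the formats the parser above accepts:

parseInstanceName('my-instance');             // returns 'my-instance'
parseInstanceName('mynamespace_my-instance'); // returns 'my-instance'
parseInstanceName('a_b_c');                   // throws: more than one '_' is not an accepted format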
|
||||
|
||||
/**
|
||||
* Parses an address into its separate ip and port values. Address must be in the form <ip>:<port>
|
||||
* @param address The address to parse
|
||||
@@ -225,3 +212,88 @@ export function parseIpAndPort(address: string): { ip: string, port: string } {
|
||||
port: sections[1]
|
||||
};
|
||||
}
|
||||
|
||||
export function createCredentialId(controllerId: string, resourceType: string, instanceName: string): string {
|
||||
return `${controllerId}::${resourceType}::${instanceName}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates the gibibyte (GiB) conversion of a quantity that could currently be represented by a range
|
||||
* of SI suffixes (E, P, T, G, M, K, m) or their power-of-two equivalents (Ei, Pi, Ti, Gi, Mi, Ki)
|
||||
* @param value The string of a quantity to be converted
|
||||
* @returns String of GiB conversion
|
||||
*/
|
||||
export function convertToGibibyteString(value: string): string {
|
||||
if (!value) {
|
||||
throw new Error(`Value provided is not a valid Kubernetes resource quantity`);
|
||||
}
|
||||
|
||||
let base10ToBase2Multiplier;
|
||||
let floatValue = parseFloat(value);
|
||||
let splitValue = value.split(String(floatValue));
|
||||
let unit = splitValue[1];
|
||||
|
||||
if (unit === 'K') {
|
||||
base10ToBase2Multiplier = 1000 / 1024;
|
||||
floatValue = (floatValue * base10ToBase2Multiplier) / Math.pow(1024, 2);
|
||||
} else if (unit === 'M') {
|
||||
base10ToBase2Multiplier = Math.pow(1000, 2) / Math.pow(1024, 2);
|
||||
floatValue = (floatValue * base10ToBase2Multiplier) / 1024;
|
||||
} else if (unit === 'G') {
|
||||
base10ToBase2Multiplier = Math.pow(1000, 3) / Math.pow(1024, 3);
|
||||
floatValue = floatValue * base10ToBase2Multiplier;
|
||||
} else if (unit === 'T') {
|
||||
base10ToBase2Multiplier = Math.pow(1000, 4) / Math.pow(1024, 4);
|
||||
floatValue = (floatValue * base10ToBase2Multiplier) * 1024;
|
||||
} else if (unit === 'P') {
|
||||
base10ToBase2Multiplier = Math.pow(1000, 5) / Math.pow(1024, 5);
|
||||
floatValue = (floatValue * base10ToBase2Multiplier) * Math.pow(1024, 2);
|
||||
} else if (unit === 'E') {
|
||||
base10ToBase2Multiplier = Math.pow(1000, 6) / Math.pow(1024, 6);
|
||||
floatValue = (floatValue * base10ToBase2Multiplier) * Math.pow(1024, 3);
|
||||
} else if (unit === 'm') {
|
||||
floatValue = (floatValue / 1000) / Math.pow(1024, 3);
|
||||
} else if (unit === '') {
|
||||
floatValue = floatValue / Math.pow(1024, 3);
|
||||
} else if (unit === 'Ki') {
|
||||
floatValue = floatValue / Math.pow(1024, 2);
|
||||
} else if (unit === 'Mi') {
|
||||
floatValue = floatValue / 1024;
|
||||
} else if (unit === 'Gi') {
|
||||
floatValue = floatValue;
|
||||
} else if (unit === 'Ti') {
|
||||
floatValue = floatValue * 1024;
|
||||
} else if (unit === 'Pi') {
|
||||
floatValue = floatValue * Math.pow(1024, 2);
|
||||
} else if (unit === 'Ei') {
|
||||
floatValue = floatValue * Math.pow(1024, 3);
|
||||
} else {
|
||||
throw new Error(`${value} is not a valid Kubernetes resource quantity`);
|
||||
}
|
||||
|
||||
return String(floatValue);
|
||||
}
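A few worked conversions for the helper above, as a sanity check; expected return values are shown in the comments (decimal suffixes go through their byte value, binary suffixes convert directly).

convertToGibibyteString('1Gi');    // '1'
convertToGibibyteString('1024Mi'); // '1'
convertToGibibyteString('1Ti');    // '1024'
convertToGibibyteString('500M');   // roughly '0.46566' (500 * 10^6 bytes expressed in GiB)
convertToGibibyteString('2G');     // roughly '1.86264' (2 * 10^9 bytes expressed in GiB)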
|
||||
|
||||
/*
|
||||
* Throws an Error with given {@link message} unless {@link condition} is true.
|
||||
* This also tells the typescript compiler that the condition is 'truthy' in the remainder of the scope
|
||||
* where this function was called.
|
||||
*
|
||||
* @param condition
|
||||
* @param message
|
||||
*/
|
||||
export function throwUnless(condition: any, message?: string): asserts condition {
|
||||
if (!condition) {
|
||||
throw new Error(message);
|
||||
}
|
||||
}
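A short illustrative use of the assertion helper above; after the call the compiler treats the checked value as defined.

function getPort(address: { port?: number }): number {
	throwUnless(address.port, 'address has no port');
	return address.port; // narrowed from number | undefined to number by the assertion
}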
|
||||
|
||||
export async function tryExecuteAction<T>(action: () => T | PromiseLike<T>): Promise<{ result: T | undefined, error: any }> {
|
||||
let error: any, result: T | undefined;
|
||||
try {
|
||||
result = await action();
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
return { result, error };
|
||||
}
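And a sketch of tryExecuteAction in use: a fallible call becomes a { result, error } pair the caller can branch on without its own try/catch.

async function readConfigSafely(read: () => Promise<string>): Promise<string | undefined> {
	const { result, error } = await tryExecuteAction(read);
	if (error) {
		console.warn(`Read failed: ${error}`);
		return undefined;
	}
	return result;
}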
|
||||
|
||||
@@ -7,6 +7,11 @@ import * as vscode from 'vscode';
|
||||
|
||||
export const refreshActionId = 'arc.refresh';
|
||||
|
||||
export const credentialNamespace = 'arcCredentials';
|
||||
|
||||
export const controllerTroubleshootDocsUrl = 'https://aka.ms/arc-data-tsg';
|
||||
export const miaaTroubleshootDocsUrl = 'https://aka.ms/miaa-tsg';
|
||||
|
||||
export interface IconPath {
|
||||
dark: string;
|
||||
light: string;
|
||||
@@ -35,7 +40,10 @@ export class IconPathHelper {
|
||||
public static controller: IconPath;
|
||||
public static health: IconPath;
|
||||
public static success: IconPath;
|
||||
public static save: IconPath;
|
||||
public static discard: IconPath;
|
||||
public static fail: IconPath;
|
||||
public static information: IconPath;
|
||||
|
||||
public static setExtensionContext(context: vscode.ExtensionContext) {
|
||||
IconPathHelper.context = context;
|
||||
@@ -111,10 +119,22 @@ export class IconPathHelper {
|
||||
light: context.asAbsolutePath('images/success.svg'),
|
||||
dark: context.asAbsolutePath('images/success.svg'),
|
||||
};
|
||||
IconPathHelper.save = {
|
||||
light: context.asAbsolutePath('images/save.svg'),
|
||||
dark: context.asAbsolutePath('images/save.svg'),
|
||||
};
|
||||
IconPathHelper.discard = {
|
||||
light: context.asAbsolutePath('images/discard.svg'),
|
||||
dark: context.asAbsolutePath('images/discard.svg'),
|
||||
};
|
||||
IconPathHelper.fail = {
|
||||
light: context.asAbsolutePath('images/fail.svg'),
|
||||
dark: context.asAbsolutePath('images/fail.svg'),
|
||||
};
|
||||
IconPathHelper.information = {
|
||||
light: context.asAbsolutePath('images/information.svg'),
|
||||
dark: context.asAbsolutePath('images/information.svg'),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,9 +4,12 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as arc from 'arc';
|
||||
import * as rd from 'resource-deployment';
|
||||
import * as vscode from 'vscode';
|
||||
import { arcApi } from './common/api';
|
||||
import { IconPathHelper, refreshActionId } from './constants';
|
||||
import * as loc from './localizedConstants';
|
||||
import { ArcControllersOptionsSourceProvider } from './providers/arcControllersOptionsSourceProvider';
|
||||
import { ConnectToControllerDialog } from './ui/dialogs/connectControllerDialog';
|
||||
import { AzureArcTreeDataProvider } from './ui/tree/azureArcTreeDataProvider';
|
||||
import { ControllerTreeNode } from './ui/tree/controllerTreeNode';
|
||||
@@ -25,6 +28,14 @@ export async function activate(context: vscode.ExtensionContext): Promise<arc.IE
|
||||
});
|
||||
|
||||
vscode.commands.registerCommand('arc.connectToController', async () => {
|
||||
const nodes = await treeDataProvider.getChildren();
|
||||
if (nodes.length > 0) {
|
||||
const response = await vscode.window.showErrorMessage(loc.onlyOneControllerSupported, loc.yes, loc.no);
|
||||
if (response !== loc.yes) {
|
||||
return;
|
||||
}
|
||||
await treeDataProvider.removeController(nodes[0] as ControllerTreeNode);
|
||||
}
|
||||
const dialog = new ConnectToControllerDialog(treeDataProvider);
|
||||
dialog.showDialog();
|
||||
const model = await dialog.waitForClose();
|
||||
@@ -54,28 +65,12 @@ export async function activate(context: vscode.ExtensionContext): Promise<arc.IE
|
||||
}
|
||||
});
|
||||
|
||||
await checkArcDeploymentExtension();
|
||||
// register option sources
|
||||
const rdApi = <rd.IExtension>vscode.extensions.getExtension(rd.extension.name)?.exports;
|
||||
rdApi.registerOptionsSourceProvider(new ArcControllersOptionsSourceProvider(treeDataProvider));
|
||||
|
||||
return {
|
||||
getRegisteredDataControllers: async () => {
|
||||
return (await treeDataProvider.getChildren())
|
||||
.filter(node => node instanceof ControllerTreeNode)
|
||||
.map(node => (node as ControllerTreeNode).model.info);
|
||||
|
||||
}
|
||||
};
|
||||
return arcApi(treeDataProvider);
|
||||
}
|
||||
|
||||
export function deactivate(): void {
|
||||
}
|
||||
|
||||
async function checkArcDeploymentExtension(): Promise<void> {
|
||||
const version = vscode.extensions.getExtension('Microsoft.arcdeployment')?.packageJSON.version;
|
||||
if (version && version !== '0.3.2') {
|
||||
// If we have an older version of the deployment extension installed then uninstall it now since it's replaced
|
||||
// by this extension. (the latest version of the Arc Deployment extension will uninstall itself so don't do
|
||||
// anything here if that's already updated)
|
||||
await vscode.commands.executeCommand('workbench.extensions.uninstallExtension', 'Microsoft.arcdeployment');
|
||||
vscode.window.showInformationMessage(loc.arcDeploymentDeprecation);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,13 +8,13 @@ import { getErrorMessage } from './common/utils';
|
||||
const localize = nls.loadMessageBundle();
|
||||
|
||||
export const arcDeploymentDeprecation = localize('arc.arcDeploymentDeprecation', "The Arc Deployment extension has been replaced by the Arc extension and has been uninstalled.");
|
||||
export function arcControllerDashboard(name: string): string { return localize('arc.controllerDashboard', "Azure Arc Controller Dashboard (Preview) - {0}", name); }
|
||||
export function miaaDashboard(name: string): string { return localize('arc.miaaDashboard', "Managed Instance Dashboard (Preview) - {0}", name); }
|
||||
export function postgresDashboard(name: string): string { return localize('arc.postgresDashboard', "Postgres Dashboard (Preview) - {0}", name); }
|
||||
export function arcControllerDashboard(name: string): string { return localize('arc.controllerDashboard', "Azure Arc Data Controller Dashboard (Preview) - {0}", name); }
|
||||
export function miaaDashboard(name: string): string { return localize('arc.miaaDashboard', "SQL managed instance - Azure Arc Dashboard (Preview) - {0}", name); }
|
||||
export function postgresDashboard(name: string): string { return localize('arc.postgresDashboard', "PostgreSQL Hyperscale - Azure Arc Dashboard (Preview) - {0}", name); }
|
||||
|
||||
export const dataControllersType = localize('arc.dataControllersType', "Azure Arc Data Controller");
|
||||
export const pgSqlType = localize('arc.pgSqlType', "PostgreSQL Server group - Azure Arc");
|
||||
export const miaaType = localize('arc.miaaType', "SQL instance - Azure Arc");
|
||||
export const pgSqlType = localize('arc.pgSqlType', "PostgreSQL Hyperscale - Azure Arc");
|
||||
export const miaaType = localize('arc.miaaType', "SQL managed instance - Azure Arc");
|
||||
|
||||
export const overview = localize('arc.overview', "Overview");
|
||||
export const connectionStrings = localize('arc.connectionStrings', "Connection Strings");
|
||||
@@ -32,6 +32,8 @@ export const resourceHealth = localize('arc.resourceHealth', "Resource health");
|
||||
|
||||
export const newInstance = localize('arc.createNew', "New Instance");
|
||||
export const deleteText = localize('arc.delete', "Delete");
|
||||
export const saveText = localize('arc.save', "Save");
|
||||
export const discardText = localize('arc.discard', "Discard");
|
||||
export const resetPassword = localize('arc.resetPassword', "Reset Password");
|
||||
export const openInAzurePortal = localize('arc.openInAzurePortal', "Open in Azure Portal");
|
||||
export const resourceGroup = localize('arc.resourceGroup', "Resource Group");
|
||||
@@ -59,6 +61,10 @@ export const yes = localize('arc.yes', "Yes");
|
||||
export const no = localize('arc.no', "No");
|
||||
export const feedback = localize('arc.feedback', "Feedback");
|
||||
export const selectConnectionString = localize('arc.selectConnectionString', "Select from available client connection strings below.");
|
||||
export const addingWokerNodes = localize('arc.addingWokerNodes', "adding worker nodes");
|
||||
export const workerNodesDescription = localize('arc.workerNodesDescription', "Expand your server group and scale your database by adding worker nodes.");
|
||||
export const postgresConfigurationInformation = localize('arc.postgres.configurationInformation', "You can configure the number of CPU cores and storage size that will apply to both worker nodes and coordinator node. Each worker node will have the same configuration. Adjust the number of CPU cores and memory settings for your server group.");
|
||||
export const workerNodesInformation = localize('arc.workerNodeInformation', "In preview it is not possible to reduce the number of worker nodes. Please refer to documentation linked above for more information.");
|
||||
export const vCores = localize('arc.vCores', "vCores");
|
||||
export const ram = localize('arc.ram', "RAM");
|
||||
export const refresh = localize('arc.refresh', "Refresh");
|
||||
@@ -72,8 +78,12 @@ export const direct = localize('arc.direct', "Direct");
|
||||
export const indirect = localize('arc.indirect', "Indirect");
|
||||
export const loading = localize('arc.loading', "Loading...");
|
||||
export const refreshToEnterCredentials = localize('arc.refreshToEnterCredentials', "Refresh node to enter credentials");
|
||||
export const noInstancesAvailable = localize('arc.noInstancesAvailable', "No instances available");
|
||||
export const connectToController = localize('arc.connectToController', "Connect to Existing Controller");
|
||||
export function connectToSql(name: string): string { return localize('arc.connectToSql', "Connect to SQL managed instance - Azure Arc ({0})", name); }
|
||||
export const passwordToController = localize('arc.passwordToController', "Provide Password to Controller");
|
||||
export const controllerUrl = localize('arc.controllerUrl', "Controller URL");
|
||||
export const serverEndpoint = localize('arc.serverEndpoint', "Server Endpoint");
|
||||
export const controllerName = localize('arc.controllerName', "Name");
|
||||
export const defaultControllerName = localize('arc.defaultControllerName', "arc-dc");
|
||||
export const username = localize('arc.username', "Username");
|
||||
@@ -81,6 +91,7 @@ export const password = localize('arc.password', "Password");
|
||||
export const rememberPassword = localize('arc.rememberPassword', "Remember Password");
|
||||
export const connect = localize('arc.connect', "Connect");
|
||||
export const cancel = localize('arc.cancel', "Cancel");
|
||||
export const ok = localize('arc.ok', "Ok");
|
||||
export const notConfigured = localize('arc.notConfigured', "Not Configured");
|
||||
|
||||
// Database States - see https://docs.microsoft.com/sql/relational-databases/databases/database-states
|
||||
@@ -109,9 +120,28 @@ export const databaseName = localize('arc.databaseName', "Database name");
|
||||
export const enterNewPassword = localize('arc.enterNewPassword', "Enter a new password");
|
||||
export const confirmNewPassword = localize('arc.confirmNewPassword', "Confirm the new password");
|
||||
export const learnAboutPostgresClients = localize('arc.learnAboutPostgresClients', "Learn more about Azure PostgreSQL Hyperscale client interfaces");
|
||||
export const scalingCompute = localize('arc.scalingCompute', "scaling compute vCores and memory.");
|
||||
export const postgresComputeAndStorageDescriptionPartOne = localize('arc.postgresComputeAndStorageDescriptionPartOne', "You can scale your Azure Arc enabled");
|
||||
export const miaaComputeAndStorageDescriptionPartOne = localize('arc.miaaComputeAndStorageDescriptionPartOne', "You can scale your Azure SQL managed instance - Azure Arc by");
|
||||
export const postgresComputeAndStorageDescriptionPartTwo = localize('arc.postgres.computeAndStorageDescriptionPartTwo', "PostgreSQL Hyperscale server group by");
|
||||
export const computeAndStorageDescriptionPartThree = localize('arc.computeAndStorageDescriptionPartThree', "without downtime and by");
|
||||
export const computeAndStorageDescriptionPartFour = localize('arc.computeAndStorageDescriptionPartFour', "Before doing so, you need to ensure");
|
||||
export const computeAndStorageDescriptionPartFive = localize('arc.computeAndStorageDescriptionPartFive', "there are sufficient resources available");
|
||||
export const computeAndStorageDescriptionPartSix = localize('arc.computeAndStorageDescriptionPartSix', "in your Kubernetes cluster to honor this configuration.");
|
||||
export const node = localize('arc.node', "node");
|
||||
export const nodes = localize('arc.nodes', "nodes");
|
||||
export const workerNodes = localize('arc.workerNodes', "Worker Nodes");
|
||||
export const storagePerNode = localize('arc.storagePerNode', "storage per node");
|
||||
export const workerNodeCount = localize('arc.workerNodeCount', "Worker node count:");
|
||||
export const configurationPerNode = localize('arc.configurationPerNode', "Configuration (per node)");
|
||||
export const coresLimit = localize('arc.coresLimit', "CPU limit:");
|
||||
export const coresRequest = localize('arc.coresRequest', "CPU request:");
|
||||
export const memoryLimit = localize('arc.memoryLimit', "Memory limit (in GB):");
|
||||
export const memoryRequest = localize('arc.memoryRequest', "Memory request (in GB):");
|
||||
export const workerValidationErrorMessage = localize('arc.workerValidationErrorMessage', "The number of workers cannot be decreased.");
|
||||
export const coresValidationErrorMessage = localize('arc.coresValidationErrorMessage', "Valid CPU resource quantities are strictly positive.");
|
||||
export const memoryRequestValidationErrorMessage = localize('arc.memoryRequestValidationErrorMessage', "Memory request must be at least 0.25Gib");
|
||||
export const memoryLimitValidationErrorMessage = localize('arc.memoryLimitValidationErrorMessage', "Memory limit must be at least 0.25Gib");
|
||||
export const arcResources = localize('arc.arcResources', "Azure Arc Resources");
|
||||
export const enterANonEmptyPassword = localize('arc.enterANonEmptyPassword', "Enter a non empty password or press escape to exit.");
|
||||
export const thePasswordsDoNotMatch = localize('arc.thePasswordsDoNotMatch', "The passwords do not match. Confirm the password or press escape to exit.");
|
||||
@@ -121,9 +151,13 @@ export const condition = localize('arc.condition', "Condition");
|
||||
export const details = localize('arc.details', "Details");
|
||||
export const lastUpdated = localize('arc.lastUpdated', "Last updated");
|
||||
export const noExternalEndpoint = localize('arc.noExternalEndpoint', "No External Endpoint has been configured so this information isn't available.");
|
||||
export const podsReady = localize('arc.podsReady', "pods ready");
|
||||
|
||||
export function databaseCreated(name: string): string { return localize('arc.databaseCreated', "Database {0} created", name); }
|
||||
export function resourceDeleted(name: string): string { return localize('arc.resourceDeleted', "Resource '{0}' deleted", name); }
|
||||
export function deletingInstance(name: string): string { return localize('arc.deletingInstance', "Deleting instance '{0}'...", name); }
|
||||
export function updatingInstance(name: string): string { return localize('arc.updatingInstance', "Updating instance '{0}'...", name); }
|
||||
export function instanceDeleted(name: string): string { return localize('arc.instanceDeleted', "Instance '{0}' deleted", name); }
|
||||
export function instanceUpdated(name: string): string { return localize('arc.instanceUpdated', "Instance '{0}' updated", name); }
|
||||
export function copiedToClipboard(name: string): string { return localize('arc.copiedToClipboard', "{0} copied to clipboard", name); }
|
||||
export function clickTheTroubleshootButton(resourceType: string): string { return localize('arc.clickTheTroubleshootButton', "Click the troubleshoot button to open the Azure Arc {0} troubleshooting notebook.", resourceType); }
|
||||
export function numVCores(vCores: string | undefined): string {
|
||||
@@ -144,15 +178,30 @@ export const connectionRequired = localize('arc.connectionRequired', "A connecti
|
||||
export const couldNotFindControllerRegistration = localize('arc.couldNotFindControllerRegistration', "Could not find controller registration.");
|
||||
export function refreshFailed(error: any): string { return localize('arc.refreshFailed', "Refresh failed. {0}", getErrorMessage(error)); }
|
||||
export function openDashboardFailed(error: any): string { return localize('arc.openDashboardFailed', "Error opening dashboard. {0}", getErrorMessage(error)); }
|
||||
export function resourceDeletionFailed(name: string, error: any): string { return localize('arc.resourceDeletionFailed', "Failed to delete resource {0}. {1}", name, getErrorMessage(error)); }
|
||||
export function instanceDeletionFailed(name: string, error: any): string { return localize('arc.instanceDeletionFailed', "Failed to delete instance {0}. {1}", name, getErrorMessage(error)); }
|
||||
export function instanceUpdateFailed(name: string, error: any): string { return localize('arc.instanceUpdateFailed', "Failed to update instance {0}. {1}", name, getErrorMessage(error)); }
|
||||
export function pageDiscardFailed(error: any): string { return localize('arc.pageDiscardFailed', "Failed to discard user input. {0}", getErrorMessage(error)); }
|
||||
export function databaseCreationFailed(name: string, error: any): string { return localize('arc.databaseCreationFailed', "Failed to create database {0}. {1}", name, getErrorMessage(error)); }
|
||||
export function connectToControllerFailed(url: string, error: any): string { return localize('arc.connectToControllerFailed', "Could not connect to controller {0}. {1}", url, getErrorMessage(error)); }
|
||||
export function connectToSqlFailed(serverName: string, error: any): string { return localize('arc.connectToSqlFailed', "Could not connect to SQL managed instance - Azure Arc Instance {0}. {1}", serverName, getErrorMessage(error)); }
|
||||
export function fetchConfigFailed(name: string, error: any): string { return localize('arc.fetchConfigFailed', "An unexpected error occurred retrieving the config for '{0}'. {1}", name, getErrorMessage(error)); }
|
||||
export function fetchEndpointsFailed(name: string, error: any): string { return localize('arc.fetchEndpointsFailed', "An unexpected error occurred retrieving the endpoints for '{0}'. {1}", name, getErrorMessage(error)); }
|
||||
export function fetchRegistrationsFailed(name: string, error: any): string { return localize('arc.fetchRegistrationsFailed', "An unexpected error occurred retrieving the registrations for '{0}'. {1}", name, getErrorMessage(error)); }
|
||||
export function fetchDatabasesFailed(name: string, error: any): string { return localize('arc.fetchDatabasesFailed', "An unexpected error occurred retrieving the databases for '{0}'. {1}", name, getErrorMessage(error)); }
|
||||
export function resourceDeletionWarning(name: string): string { return localize('arc.resourceDeletionWarning', "Warning! Deleting a resource is permanent and cannot be undone. To delete the resource '{0}' type the name '{0}' below to proceed.", name); }
|
||||
export function invalidResourceDeletionName(name: string): string { return localize('arc.invalidResourceDeletionName', "The value '{0}' does not match the instance name. Try again or press escape to exit", name); }
|
||||
export function instanceDeletionWarning(name: string): string { return localize('arc.instanceDeletionWarning', "Warning! Deleting an instance is permanent and cannot be undone. To delete the instance '{0}' type the name '{0}' below to proceed.", name); }
|
||||
export function invalidInstanceDeletionName(name: string): string { return localize('arc.invalidInstanceDeletionName', "The value '{0}' does not match the instance name. Try again or press escape to exit", name); }
|
||||
export function couldNotFindAzureResource(name: string): string { return localize('arc.couldNotFindAzureResource', "Could not find Azure resource for {0}", name); }
|
||||
export function passwordResetFailed(error: any): string { return localize('arc.passwordResetFailed', "Failed to reset password. {0}", getErrorMessage(error)); }
|
||||
export function errorConnectingToController(error: any): string { return localize('arc.errorConnectingToController', "Error connecting to controller. {0}", getErrorMessage(error)); }
|
||||
export function errorConnectingToController(error: any): string { return localize('arc.errorConnectingToController', "Error connecting to controller. {0}", getErrorMessage(error, true)); }
|
||||
export function passwordAcquisitionFailed(error: any): string { return localize('arc.passwordAcquisitionFailed', "Failed to acquire password. {0}", getErrorMessage(error)); }
|
||||
export const invalidPassword = localize('arc.invalidPassword', "The password did not work, try again.");
|
||||
export function errorVerifyingPassword(error: any): string { return localize('arc.errorVerifyingPassword', "Error encountered while verifying password. {0}", getErrorMessage(error)); }
|
||||
export const onlyOneControllerSupported = localize('arc.onlyOneControllerSupported', "Only one controller connection is supported at this time. Do you wish to remove the existing connection and add a new one?");
|
||||
export const noControllersConnected = localize('noControllersConnected', "No Azure Arc controllers are currently connected. Please run the command: 'Connect to Existing Azure Arc Controller' and then try again");
|
||||
export const variableValueFetchForUnsupportedVariable = (variableName: string) => localize('getVariableValue.unknownVariableName', "Attempt to get variable value for unknown variable: {0}", variableName);
export const isPasswordFetchForUnsupportedVariable = (variableName: string) => localize('getIsPassword.unknownVariableName', "Attempt to get isPassword for unknown variable: {0}", variableName);
|
||||
export const noControllerInfoFound = (name: string) => localize('noControllerInfoFound', "Controller Info could not be found with name: {0}", name);
|
||||
export const noPasswordFound = (controllerName: string) => localize('noPasswordFound', "Password could not be retrieved for controller: {0} and user did not provide a password. Please retry later.", controllerName);
|
||||
export const noContextFound = (configFile: string) => localize('noContextFound', "No 'contexts' found in the config file: {0}", configFile);
|
||||
export const noCurrentContextFound = (configFile: string) => localize('noCurrentContextFound', "No context is marked as 'current-context' in the config file: {0}", configFile);
|
||||
export const noNameInContext = (configFile: string) => localize('noNameInContext', "No name field was found in a cluster context in the config file: {0}", configFile);
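For reference, these constants follow the vscode-nls pattern: the extra arguments passed to localize() fill the numbered placeholders in the message. A short illustrative usage follows; the message strings are the ones defined above, but the call sites themselves are hypothetical.

// Illustrative only - hypothetical call sites for the parameterized constants above.
import * as vscode from 'vscode';
import * as loc from '../localizedConstants';

// The '{0}'/'{1}' placeholders are replaced by the arguments forwarded to localize():
vscode.window.showInformationMessage(loc.databaseCreated('mydb'));                  // "Database mydb created"
vscode.window.showErrorMessage(loc.refreshFailed(new Error('request timed out')));  // e.g. "Refresh failed. request timed out"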
|
||||
@@ -6,7 +6,7 @@
|
||||
import { ControllerInfo, ResourceType } from 'arc';
|
||||
import * as azdataExt from 'azdata-ext';
|
||||
import * as vscode from 'vscode';
|
||||
import { parseInstanceName, UserCancelledError } from '../common/utils';
|
||||
import { UserCancelledError } from '../common/utils';
|
||||
import * as loc from '../localizedConstants';
|
||||
import { ConnectToControllerDialog } from '../ui/dialogs/connectControllerDialog';
|
||||
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
|
||||
@@ -20,7 +20,6 @@ export type Registration = {
|
||||
export class ControllerModel {
|
||||
private readonly _azdataApi: azdataExt.IExtension;
|
||||
private _endpoints: azdataExt.DcEndpointListResult[] = [];
|
||||
private _namespace: string = '';
|
||||
private _registrations: Registration[] = [];
|
||||
private _controllerConfig: azdataExt.DcConfigShowResult | undefined = undefined;
|
||||
|
||||
@@ -93,7 +92,7 @@ export class ControllerModel {
|
||||
}
|
||||
public async refresh(showErrors: boolean = true, promptReconnect: boolean = false): Promise<void> {
|
||||
await this.azdataLogin(promptReconnect);
|
||||
this._registrations = [];
|
||||
const newRegistrations: Registration[] = [];
|
||||
await Promise.all([
|
||||
this._azdataApi.azdata.arc.dc.config.show().then(result => {
|
||||
this._controllerConfig = result.result;
|
||||
@@ -125,7 +124,7 @@ export class ControllerModel {
|
||||
}),
|
||||
Promise.all([
|
||||
this._azdataApi.azdata.arc.postgres.server.list().then(result => {
|
||||
this._registrations.push(...result.result.map(r => {
|
||||
newRegistrations.push(...result.result.map(r => {
|
||||
return {
|
||||
instanceName: r.name,
|
||||
state: r.state,
|
||||
@@ -134,7 +133,7 @@ export class ControllerModel {
|
||||
}));
|
||||
}),
|
||||
this._azdataApi.azdata.arc.sql.mi.list().then(result => {
|
||||
this._registrations.push(...result.result.map(r => {
|
||||
newRegistrations.push(...result.result.map(r => {
|
||||
return {
|
||||
instanceName: r.name,
|
||||
state: r.state,
|
||||
@@ -143,6 +142,7 @@ export class ControllerModel {
|
||||
}));
|
||||
})
|
||||
]).then(() => {
|
||||
this._registrations = newRegistrations;
|
||||
this.registrationsLastUpdated = new Date();
|
||||
this._onRegistrationsUpdated.fire(this._registrations);
|
||||
})
|
||||
@@ -157,10 +157,6 @@ export class ControllerModel {
|
||||
return this._endpoints.find(e => e.name === name);
|
||||
}
|
||||
|
||||
public get namespace(): string {
|
||||
return this._namespace;
|
||||
}
|
||||
|
||||
public get registrations(): Registration[] {
|
||||
return this._registrations;
|
||||
}
|
||||
@@ -171,19 +167,10 @@ export class ControllerModel {
|
||||
|
||||
public getRegistration(type: ResourceType, name: string): Registration | undefined {
|
||||
return this._registrations.find(r => {
|
||||
return r.instanceType === type && parseInstanceName(r.instanceName) === name;
|
||||
return r.instanceType === type && r.instanceName === name;
|
||||
});
|
||||
}
|
||||
|
||||
public async deleteRegistration(_type: ResourceType, _name: string) {
|
||||
/* TODO chgagnon
|
||||
if (r && !r.isDeleted && r.customObjectName) {
|
||||
const r = this.getRegistration(type, name);
|
||||
await this._registrationRouter.apiV1RegistrationNsNameIsDeletedDelete(this._namespace, r.customObjectName, true);
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
||||
/**
|
||||
* Property to use as a display label for this controller
|
||||
*/
|
||||
|
||||
@@ -3,13 +3,15 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { ResourceInfo } from 'arc';
|
||||
import { MiaaResourceInfo } from 'arc';
|
||||
import * as azdata from 'azdata';
|
||||
import * as azdataExt from 'azdata-ext';
|
||||
import * as vscode from 'vscode';
|
||||
import { Deferred } from '../common/promise';
|
||||
import { UserCancelledError } from '../common/utils';
|
||||
import { createCredentialId, parseIpAndPort, UserCancelledError } from '../common/utils';
|
||||
import { credentialNamespace } from '../constants';
|
||||
import * as loc from '../localizedConstants';
|
||||
import { ConnectToSqlDialog } from '../ui/dialogs/connectSqlDialog';
|
||||
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
|
||||
import { ControllerModel, Registration } from './controllerModel';
|
||||
import { ResourceModel } from './resourceModel';
|
||||
@@ -35,8 +37,8 @@ export class MiaaModel extends ResourceModel {
|
||||
|
||||
private _refreshPromise: Deferred<void> | undefined = undefined;
|
||||
|
||||
constructor(private _controllerModel: ControllerModel, info: ResourceInfo, registration: Registration, private _treeDataProvider: AzureArcTreeDataProvider) {
|
||||
super(info, registration);
|
||||
constructor(private _controllerModel: ControllerModel, private _miaaInfo: MiaaResourceInfo, registration: Registration, private _treeDataProvider: AzureArcTreeDataProvider) {
|
||||
super(_miaaInfo, registration);
|
||||
this._azdataApi = <azdataExt.IExtension>vscode.extensions.getExtension(azdataExt.extension.name)?.exports;
|
||||
}
|
||||
|
||||
@@ -155,83 +157,58 @@ export class MiaaModel extends ResourceModel {
|
||||
if (this._connectionProfile) {
|
||||
return;
|
||||
}
|
||||
let connection: azdata.connection.ConnectionProfile | azdata.connection.Connection | undefined;
|
||||
|
||||
const ipAndPort = parseIpAndPort(this.config?.status.externalEndpoint || '');
|
||||
let connectionProfile: azdata.IConnectionProfile | undefined = {
|
||||
serverName: `${ipAndPort.ip},${ipAndPort.port}`,
|
||||
databaseName: '',
|
||||
authenticationType: 'SqlLogin',
|
||||
providerName: 'MSSQL',
|
||||
connectionName: '',
|
||||
userName: this._miaaInfo.userName || '',
|
||||
password: '',
|
||||
savePassword: true,
|
||||
groupFullName: undefined,
|
||||
saveProfile: true,
|
||||
id: '',
|
||||
groupId: undefined,
|
||||
options: {}
|
||||
};
|
||||
|
||||
// If we have the ID stored then try to retrieve the password from previous connections
|
||||
if (this.info.connectionId) {
|
||||
try {
|
||||
const connections = await azdata.connection.getConnections();
|
||||
const existingConnection = connections.find(conn => conn.connectionId === this.info.connectionId);
|
||||
if (existingConnection) {
|
||||
const credentials = await azdata.connection.getCredentials(this.info.connectionId);
|
||||
if (credentials) {
|
||||
existingConnection.options['password'] = credentials.password;
|
||||
connection = existingConnection;
|
||||
} else {
|
||||
// We need the password so prompt the user for it
|
||||
const connectionProfile: azdata.IConnectionProfile = {
|
||||
serverName: existingConnection.options['serverName'],
|
||||
databaseName: existingConnection.options['databaseName'],
|
||||
authenticationType: existingConnection.options['authenticationType'],
|
||||
providerName: 'MSSQL',
|
||||
connectionName: '',
|
||||
userName: existingConnection.options['user'],
|
||||
password: '',
|
||||
savePassword: false,
|
||||
groupFullName: undefined,
|
||||
saveProfile: true,
|
||||
id: '',
|
||||
groupId: undefined,
|
||||
options: existingConnection.options
|
||||
};
|
||||
connection = await azdata.connection.openConnectionDialog(['MSSQL'], connectionProfile);
|
||||
const credentialProvider = await azdata.credentials.getProvider(credentialNamespace);
|
||||
const credentials = await credentialProvider.readCredential(createCredentialId(this._controllerModel.info.id, this.info.resourceType, this.info.name));
|
||||
if (credentials.password) {
|
||||
// Try to connect to verify credentials are still valid
|
||||
connectionProfile.password = credentials.password;
|
||||
// If we don't have a username for some reason then just continue on and we'll prompt for the username below
|
||||
if (connectionProfile.userName) {
|
||||
const result = await azdata.connection.connect(connectionProfile, false, false);
|
||||
if (!result.connected) {
|
||||
vscode.window.showErrorMessage(loc.connectToSqlFailed(connectionProfile.serverName, result.errorMessage));
|
||||
const connectToSqlDialog = new ConnectToSqlDialog(this._controllerModel, this);
|
||||
connectToSqlDialog.showDialog(connectionProfile);
|
||||
connectionProfile = await connectToSqlDialog.waitForClose();
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// ignore - the connection may not necessarily exist anymore and in that case we'll just reprompt for a connection
|
||||
console.warn(`Unexpected error fetching password for MIAA instance ${err}`);
|
||||
// ignore - something happened fetching the password so just reprompt
|
||||
}
|
||||
}
|
||||
|
||||
if (!connection) {
|
||||
// We need the password so prompt the user for it
|
||||
const connectionProfile: azdata.IConnectionProfile = {
|
||||
// TODO chgagnon fill in external IP and port
|
||||
// serverName: (this.registration.externalIp && this.registration.externalPort) ? `${this.registration.externalIp},${this.registration.externalPort}` : '',
|
||||
serverName: '',
|
||||
databaseName: '',
|
||||
authenticationType: 'SqlLogin',
|
||||
providerName: 'MSSQL',
|
||||
connectionName: '',
|
||||
userName: 'sa',
|
||||
password: '',
|
||||
savePassword: true,
|
||||
groupFullName: undefined,
|
||||
saveProfile: true,
|
||||
id: '',
|
||||
groupId: undefined,
|
||||
options: {}
|
||||
};
|
||||
// Weren't able to load the existing connection so prompt user for new one
|
||||
connection = await azdata.connection.openConnectionDialog(['MSSQL'], connectionProfile);
|
||||
if (!connectionProfile?.userName || !connectionProfile?.password) {
|
||||
// Need to prompt user for password since we don't have one stored
|
||||
const connectToSqlDialog = new ConnectToSqlDialog(this._controllerModel, this);
|
||||
connectToSqlDialog.showDialog(connectionProfile);
|
||||
connectionProfile = await connectToSqlDialog.waitForClose();
|
||||
}
|
||||
|
||||
if (connection) {
|
||||
const profile = {
|
||||
// The option name might be different here based on where it came from
|
||||
serverName: connection.options['serverName'] || connection.options['server'],
|
||||
databaseName: connection.options['databaseName'] || connection.options['database'],
|
||||
authenticationType: connection.options['authenticationType'],
|
||||
providerName: 'MSSQL',
|
||||
connectionName: '',
|
||||
userName: connection.options['user'],
|
||||
password: connection.options['password'],
|
||||
savePassword: false,
|
||||
groupFullName: undefined,
|
||||
saveProfile: true,
|
||||
id: connection.connectionId,
|
||||
groupId: undefined,
|
||||
options: connection.options
|
||||
};
|
||||
this.updateConnectionProfile(profile);
|
||||
if (connectionProfile) {
|
||||
this.updateConnectionProfile(connectionProfile);
|
||||
} else {
|
||||
throw new UserCancelledError();
|
||||
}
|
||||
@@ -240,6 +217,7 @@ export class MiaaModel extends ResourceModel {
|
||||
private async updateConnectionProfile(connectionProfile: azdata.IConnectionProfile): Promise<void> {
|
||||
this._connectionProfile = connectionProfile;
|
||||
this.info.connectionId = connectionProfile.id;
|
||||
this._miaaInfo.userName = connectionProfile.userName;
|
||||
await this._treeDataProvider.saveControllers();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,278 +4,103 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { ResourceInfo } from 'arc';
|
||||
import * as azdataExt from 'azdata-ext';
|
||||
import * as vscode from 'vscode';
|
||||
import * as loc from '../localizedConstants';
|
||||
import { Registration } from './controllerModel';
|
||||
import { ControllerModel, Registration } from './controllerModel';
|
||||
import { ResourceModel } from './resourceModel';
|
||||
|
||||
export enum PodRole {
|
||||
Monitor,
|
||||
Router,
|
||||
Shard
|
||||
}
|
||||
|
||||
export interface V1Pod {
|
||||
'apiVersion'?: string;
|
||||
'kind'?: string;
|
||||
'metadata'?: any; // V1ObjectMeta;
|
||||
'spec'?: any; // V1PodSpec;
|
||||
'status'?: V1PodStatus;
|
||||
}
|
||||
|
||||
export interface V1PodStatus {
|
||||
'conditions'?: any[]; // Array<V1PodCondition>;
|
||||
'containerStatuses'?: Array<V1ContainerStatus>;
|
||||
'ephemeralContainerStatuses'?: any[]; // Array<V1ContainerStatus>;
|
||||
'hostIP'?: string;
|
||||
'initContainerStatuses'?: any[]; // Array<V1ContainerStatus>;
|
||||
'message'?: string;
|
||||
'nominatedNodeName'?: string;
|
||||
'phase'?: string;
|
||||
'podIP'?: string;
|
||||
'podIPs'?: any[]; // Array<V1PodIP>;
|
||||
'qosClass'?: string;
|
||||
'reason'?: string;
|
||||
'startTime'?: Date | null;
|
||||
}
|
||||
|
||||
export interface V1ContainerStatus {
|
||||
'containerID'?: string;
|
||||
'image'?: string;
|
||||
'imageID'?: string;
|
||||
'lastState'?: any; // V1ContainerState;
|
||||
'name'?: string;
|
||||
'ready'?: boolean;
|
||||
'restartCount'?: number;
|
||||
'started'?: boolean | null;
|
||||
'state'?: any; // V1ContainerState;
|
||||
}
|
||||
|
||||
export interface DuskyObjectModelsDatabaseService {
|
||||
'apiVersion'?: string;
|
||||
'kind'?: string;
|
||||
'metadata'?: any; // V1ObjectMeta;
|
||||
'spec'?: any; // DuskyObjectModelsDatabaseServiceSpec;
|
||||
'status'?: any; // DuskyObjectModelsDatabaseServiceStatus;
|
||||
'arc'?: any; // DuskyObjectModelsDatabaseServiceArcPayload;
|
||||
}
|
||||
|
||||
export interface V1Status {
|
||||
'apiVersion'?: string;
|
||||
'code'?: number | null;
|
||||
'details'?: any; // V1StatusDetails;
|
||||
'kind'?: string;
|
||||
'message'?: string;
|
||||
'metadata'?: any; // V1ListMeta;
|
||||
'reason'?: string;
|
||||
'status'?: string;
|
||||
'hasObject'?: boolean;
|
||||
}
|
||||
|
||||
export interface DuskyObjectModelsDatabase {
|
||||
'name'?: string;
|
||||
'owner'?: string;
|
||||
'sharded'?: boolean | null;
|
||||
}
|
||||
import { Deferred } from '../common/promise';
|
||||
import { parseIpAndPort } from '../common/utils';
|
||||
|
||||
export class PostgresModel extends ResourceModel {
|
||||
private _service?: DuskyObjectModelsDatabaseService;
|
||||
private _pods?: V1Pod[];
|
||||
private readonly _onServiceUpdated = new vscode.EventEmitter<DuskyObjectModelsDatabaseService>();
|
||||
private readonly _onPodsUpdated = new vscode.EventEmitter<V1Pod[]>();
|
||||
public onServiceUpdated = this._onServiceUpdated.event;
|
||||
public onPodsUpdated = this._onPodsUpdated.event;
|
||||
public serviceLastUpdated?: Date;
|
||||
public podsLastUpdated?: Date;
|
||||
private _config?: azdataExt.PostgresServerShowResult;
|
||||
private readonly _azdataApi: azdataExt.IExtension;
|
||||
|
||||
constructor(info: ResourceInfo, registration: Registration) {
|
||||
private readonly _onConfigUpdated = new vscode.EventEmitter<azdataExt.PostgresServerShowResult>();
|
||||
public onConfigUpdated = this._onConfigUpdated.event;
|
||||
public configLastUpdated?: Date;
|
||||
|
||||
private _refreshPromise?: Deferred<void>;
|
||||
|
||||
constructor(private _controllerModel: ControllerModel, info: ResourceInfo, registration: Registration) {
|
||||
super(info, registration);
|
||||
this._azdataApi = <azdataExt.IExtension>vscode.extensions.getExtension(azdataExt.extension.name)?.exports;
|
||||
}
|
||||
|
||||
/** Returns the service's Kubernetes namespace */
|
||||
public get namespace(): string | undefined {
|
||||
return ''; // TODO chgagnon return this.info.namespace;
|
||||
/** Returns the configuration of Postgres */
|
||||
public get config(): azdataExt.PostgresServerShowResult | undefined {
|
||||
return this._config;
|
||||
}
|
||||
|
||||
/** Returns the service's name */
|
||||
public get name(): string {
|
||||
return this.info.name;
|
||||
/** Returns the major version of Postgres */
|
||||
public get engineVersion(): string | undefined {
|
||||
const kind = this._config?.kind;
|
||||
return kind
|
||||
? kind.substring(kind.lastIndexOf('-') + 1)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
/** Returns the service's fully qualified name in the format namespace.name */
|
||||
public get fullName(): string {
|
||||
return `${this.namespace}.${this.name}`;
|
||||
/** Returns the IP address and port of Postgres */
|
||||
public get endpoint(): { ip: string, port: string } | undefined {
|
||||
return this._config?.status.externalEndpoint
|
||||
? parseIpAndPort(this._config.status.externalEndpoint)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
/** Returns the service's spec */
|
||||
public get service(): DuskyObjectModelsDatabaseService | undefined {
|
||||
return this._service;
|
||||
}
|
||||
/** Returns the scale configuration of Postgres e.g. '3 nodes, 1.5 vCores, 1Gi RAM, 2Gi storage per node' */
|
||||
public get scaleConfiguration(): string | undefined {
|
||||
if (!this._config) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/** Returns the service's pods */
|
||||
public get pods(): V1Pod[] | undefined {
|
||||
return this._pods;
|
||||
}
|
||||
const cpuLimit = this._config.spec.scheduling?.default?.resources?.limits?.cpu;
|
||||
const ramLimit = this._config.spec.scheduling?.default?.resources?.limits?.memory;
|
||||
const cpuRequest = this._config.spec.scheduling?.default?.resources?.requests?.cpu;
|
||||
const ramRequest = this._config.spec.scheduling?.default?.resources?.requests?.memory;
|
||||
const storage = this._config.spec.storage?.data?.size;
|
||||
|
||||
/** Refreshes the model */
|
||||
public async refresh() {
|
||||
await Promise.all([
|
||||
/* TODO enable
|
||||
this._databaseRouter.getDuskyDatabaseService(this.info.namespace || 'test', this.info.name).then(response => {
|
||||
this._service = response.body;
|
||||
this.serviceLastUpdated = new Date();
|
||||
this._onServiceUpdated.fire(this._service);
|
||||
}),
|
||||
this._databaseRouter.getDuskyPods(this.info.namespace || 'test', this.info.name).then(response => {
|
||||
this._pods = response.body;
|
||||
this.podsLastUpdated = new Date();
|
||||
this._onPodsUpdated.fire(this._pods!);
|
||||
})
|
||||
*/
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the service
|
||||
* @param func A function of modifications to apply to the service
|
||||
*/
|
||||
public async update(_func: (service: DuskyObjectModelsDatabaseService) => void): Promise<DuskyObjectModelsDatabaseService> {
|
||||
return <any>undefined;
|
||||
/*
|
||||
// Get the latest spec of the service in case it has changed
|
||||
const service = (await this._databaseRouter.getDuskyDatabaseService(this.info.namespace || 'test', this.info.name)).body;
|
||||
service.status = undefined; // can't update the status
|
||||
func(service);
|
||||
|
||||
return await this._databaseRouter.updateDuskyDatabaseService(this.namespace || 'test', this.name, service).then(r => {
|
||||
this._service = r.body;
|
||||
return this._service;
|
||||
});
|
||||
*/
|
||||
}
|
||||
|
||||
/** Deletes the service */
|
||||
public async delete(): Promise<V1Status> {
|
||||
return <any>undefined;
|
||||
// return (await this._databaseRouter.deleteDuskyDatabaseService(this.info.namespace || 'test', this.info.name)).body;
|
||||
}
|
||||
|
||||
/** Creates a SQL database in the service */
|
||||
public async createDatabase(_db: DuskyObjectModelsDatabase): Promise<DuskyObjectModelsDatabase> {
|
||||
return <any>undefined;
|
||||
// return (await this._databaseRouter.createDuskyDatabase(this.namespace || 'test', this.name, db)).body;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the IP address and port of the service, preferring external IP over
|
||||
* internal IP. If either field is not available it will be set to undefined.
|
||||
*/
|
||||
public get endpoint(): { ip?: string, port?: number } {
|
||||
const externalIp = this._service?.status?.externalIP;
|
||||
const internalIp = this._service?.status?.internalIP;
|
||||
const externalPort = this._service?.status?.externalPort;
|
||||
const internalPort = this._service?.status?.internalPort;
|
||||
|
||||
return externalIp ? { ip: externalIp, port: externalPort ?? undefined }
|
||||
: internalIp ? { ip: internalIp, port: internalPort ?? undefined }
|
||||
: { ip: undefined, port: undefined };
|
||||
}
|
||||
|
||||
/** Returns the service's configuration e.g. '3 nodes, 1.5 vCores, 1GiB RAM, 2GiB storage per node' */
|
||||
public get configuration(): string {
|
||||
|
||||
// TODO: Resource requests and limits can be configured per role. Figure out how
|
||||
// to display that in the UI. For now, only show the default configuration.
|
||||
const cpuLimit = this._service?.spec?.scheduling?._default?.resources?.limits?.['cpu'];
|
||||
const ramLimit = this._service?.spec?.scheduling?._default?.resources?.limits?.['memory'];
|
||||
const cpuRequest = this._service?.spec?.scheduling?._default?.resources?.requests?.['cpu'];
|
||||
const ramRequest = this._service?.spec?.scheduling?._default?.resources?.requests?.['memory'];
|
||||
const storage = this._service?.spec?.storage?.volumeSize;
|
||||
const nodes = this.pods?.length;
|
||||
// scale.shards was renamed to scale.workers. Check both for backwards compatibility.
|
||||
const scale = this._config.spec.scale;
|
||||
const nodes = (scale?.workers ?? scale?.shards ?? 0) + 1; // An extra node for the coordinator
|
||||
|
||||
let configuration: string[] = [];
|
||||
|
||||
if (nodes) {
|
||||
configuration.push(`${nodes} ${nodes > 1 ? loc.nodes : loc.node}`);
|
||||
}
|
||||
configuration.push(`${nodes} ${nodes > 1 ? loc.nodes : loc.node}`);
|
||||
|
||||
// Prefer limits if they're provided, otherwise use requests if they're provided
|
||||
if (cpuLimit || cpuRequest) {
|
||||
configuration.push(`${this.formatCores(cpuLimit ?? cpuRequest!)} ${loc.vCores}`);
|
||||
configuration.push(`${cpuLimit ?? cpuRequest!} ${loc.vCores}`);
|
||||
}
|
||||
|
||||
if (ramLimit || ramRequest) {
|
||||
configuration.push(`${this.formatMemory(ramLimit ?? ramRequest!)} ${loc.ram}`);
|
||||
configuration.push(`${ramLimit ?? ramRequest!} ${loc.ram}`);
|
||||
}
|
||||
|
||||
if (storage) {
|
||||
configuration.push(`${this.formatMemory(storage)} ${loc.storagePerNode}`);
|
||||
configuration.push(`${storage} ${loc.storagePerNode}`);
|
||||
}
|
||||
|
||||
return configuration.join(', ');
|
||||
}
|
||||
|
||||
/** Given a V1Pod, returns its PodRole or undefined if the role isn't known */
|
||||
public static getPodRole(pod: V1Pod): PodRole | undefined {
|
||||
const name = pod.metadata?.name;
|
||||
const role = name?.substring(name.lastIndexOf('-'))[1];
|
||||
switch (role) {
|
||||
case 'm': return PodRole.Monitor;
|
||||
case 'r': return PodRole.Router;
|
||||
case 's': return PodRole.Shard;
|
||||
default: return undefined;
|
||||
/** Refreshes the model */
|
||||
public async refresh() {
|
||||
// Only allow one refresh to be happening at a time
|
||||
if (this._refreshPromise) {
|
||||
return this._refreshPromise.promise;
|
||||
}
|
||||
}
|
||||
this._refreshPromise = new Deferred();
|
||||
|
||||
/** Given a PodRole, returns its localized name */
|
||||
public static getPodRoleName(role?: PodRole): string {
|
||||
switch (role) {
|
||||
case PodRole.Monitor: return loc.monitor;
|
||||
case PodRole.Router: return loc.coordinator;
|
||||
case PodRole.Shard: return loc.worker;
|
||||
default: return '';
|
||||
try {
|
||||
await this._controllerModel.azdataLogin();
|
||||
this._config = (await this._azdataApi.azdata.arc.postgres.server.show(this.info.name)).result;
|
||||
this.configLastUpdated = new Date();
|
||||
this._onConfigUpdated.fire(this._config);
|
||||
this._refreshPromise.resolve();
|
||||
} catch (err) {
|
||||
this._refreshPromise.reject(err);
|
||||
throw err;
|
||||
} finally {
|
||||
this._refreshPromise = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/** Given a V1Pod returns its status */
|
||||
public static getPodStatus(pod: V1Pod): string {
|
||||
const phase = pod.status?.phase;
|
||||
if (phase !== 'Running') {
|
||||
return phase ?? '';
|
||||
}
|
||||
|
||||
// Pods can be in the running phase while some
|
||||
// containers are crashing, so check those too.
|
||||
for (let c of pod.status?.containerStatuses?.filter(c => !c.ready) ?? []) {
|
||||
const wReason = c.state?.waiting?.reason;
|
||||
const tReason = c.state?.terminated?.reason;
|
||||
if (wReason) { return wReason; }
|
||||
if (tReason) { return tReason; }
|
||||
}
|
||||
|
||||
return loc.running;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts millicores to cores (600m -> 0.6 cores)
|
||||
* https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#meaning-of-cpu
|
||||
* @param cores The millicores to format e.g. 600m
|
||||
*/
|
||||
private formatCores(cores: string): number {
|
||||
return cores?.endsWith('m') ? +cores.slice(0, -1) / 1000 : +cores;
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats the memory to end with 'B' e.g:
|
||||
* 1 -> 1B
|
||||
* 1K -> 1KB, 1Ki -> 1KiB
|
||||
* 1M -> 1MB, 1Mi -> 1MiB
|
||||
* 1G -> 1GB, 1Gi -> 1GiB
|
||||
* 1T -> 1TB, 1Ti -> 1TiB
|
||||
* https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/#meaning-of-memory
|
||||
* @param memory The amount + unit of memory to format e.g. 1K
|
||||
*/
|
||||
private formatMemory(memory: string): string {
|
||||
return memory && !memory.endsWith('B') ? `${memory}B` : memory;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as arc from 'arc';
|
||||
import * as azdata from 'azdata';
|
||||
import * as rd from 'resource-deployment';
|
||||
import { getControllerPassword, getRegisteredDataControllers, reacquireControllerPassword } from '../common/api';
|
||||
import { CacheManager } from '../common/cacheManager';
|
||||
import { throwUnless } from '../common/utils';
|
||||
import * as loc from '../localizedConstants';
|
||||
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
|
||||
|
||||
/**
|
||||
* Class that provides options sources for an Arc Data Controller
|
||||
*/
|
||||
export class ArcControllersOptionsSourceProvider implements rd.IOptionsSourceProvider {
|
||||
private _cacheManager = new CacheManager<string, string>();
|
||||
readonly optionsSourceId = 'arc.controllers';
|
||||
constructor(private _treeProvider: AzureArcTreeDataProvider) { }
|
||||
|
||||
async getOptions(): Promise<string[] | azdata.CategoryValue[]> {
|
||||
const controllers = await getRegisteredDataControllers(this._treeProvider);
|
||||
throwUnless(controllers !== undefined && controllers.length !== 0, loc.noControllersConnected);
|
||||
return controllers.map(ci => {
|
||||
return ci.label;
|
||||
});
|
||||
}
|
||||
|
||||
private async retrieveVariable(key: string): Promise<string> {
|
||||
const [variableName, controllerLabel] = JSON.parse(key);
|
||||
const controller = (await getRegisteredDataControllers(this._treeProvider)).find(ci => ci.label === controllerLabel);
|
||||
throwUnless(controller !== undefined, loc.noControllerInfoFound(controllerLabel));
|
||||
switch (variableName) {
|
||||
case 'endpoint': return controller.info.url;
|
||||
case 'username': return controller.info.username;
|
||||
case 'password': return this.getPassword(controller);
|
||||
default: throw new Error(loc.variableValueFetchForUnsupportedVariable(variableName));
|
||||
}
|
||||
}
|
||||
|
||||
getVariableValue(variableName: string, controllerLabel: string): Promise<string> {
|
||||
// capture 'this' in an arrow function
|
||||
const retrieveVariable = (key: string) => this.retrieveVariable(key);
|
||||
return this._cacheManager.getCacheEntry(JSON.stringify([variableName, controllerLabel]), retrieveVariable);
|
||||
}
|
||||
|
||||
private async getPassword(controller: arc.DataController): Promise<string> {
|
||||
let password = await getControllerPassword(this._treeProvider, controller.info);
|
||||
if (!password) {
|
||||
password = await reacquireControllerPassword(this._treeProvider, controller.info);
|
||||
}
|
||||
throwUnless(password !== undefined, loc.noPasswordFound(controller.label));
|
||||
return password;
|
||||
}
|
||||
|
||||
getIsPassword(variableName: string): boolean {
|
||||
switch (variableName) {
|
||||
case 'endpoint': return false;
|
||||
case 'username': return false;
|
||||
case 'password': return true;
|
||||
default: throw new Error(loc.isPasswordFetchForUnsupportedVariable(variableName));
|
||||
}
|
||||
}
|
||||
}
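For context, `_cacheManager.getCacheEntry(key, retriever)` is used above as a key-based async memoizer, so each [variableName, controllerLabel] pair (and in particular the password acquisition) is resolved only once and then reused. Below is a minimal sketch of that pattern; it is an assumption about the behavior, not the actual implementation in extensions/arc/src/common/cacheManager.

// Hypothetical sketch of the memoization pattern used by getVariableValue above.
class CacheManagerSketch<K, V> {
	private _cache = new Map<K, V>();

	/** Returns the cached value for key, otherwise invokes retriever, caches its result and returns it. */
	async getCacheEntry(key: K, retriever: (key: K) => Promise<V>): Promise<V> {
		if (!this._cache.has(key)) {
			this._cache.set(key, await retriever(key));
		}
		return this._cache.get(key)!;
	}
}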
2
extensions/arc/src/test/.gitignore
vendored
@@ -1,2 +0,0 @@
|
||||
/env
|
||||
/__pycache__
|
||||
@@ -1,21 +0,0 @@
|
||||
# Tests for deploying Arc resources via Jupyter notebook
|
||||
|
||||
## Prerequisites
|
||||
- Python >= 3.6
|
||||
- Pip package manager
|
||||
- Azdata CLI installed and logged into an Arc controller
|
||||
|
||||
## Running the tests
|
||||
### 1. (Optional, recommended) Create and activate a Python virtual environment
|
||||
- `python -m venv env`
|
||||
- `source env/bin/activate` (Linux)
|
||||
- `env\Scripts\activate.bat` (Windows)
|
||||
|
||||
### 2. Upgrade pip
|
||||
- `pip install --upgrade pip`
|
||||
|
||||
### 3. Install the dependencies
|
||||
- `pip install -r requirements.txt`
|
||||
|
||||
### 4. Run the tests
|
||||
- `pytest`
|
||||
62
extensions/arc/src/test/common/kubeUtils.test.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import 'mocha';
|
||||
import * as path from 'path';
|
||||
import * as sinon from 'sinon';
|
||||
import * as yamljs from 'yamljs';
|
||||
import { getDefaultKubeConfigPath, getKubeConfigClusterContexts, KubeClusterContext } from '../../common/kubeUtils';
|
||||
import { tryExecuteAction } from '../../common/utils';
|
||||
|
||||
const kubeConfig =
|
||||
{
|
||||
'contexts': [
|
||||
{
|
||||
'context': {
|
||||
'cluster': 'docker-desktop',
|
||||
'user': 'docker-desktop'
|
||||
},
|
||||
'name': 'docker-for-desktop'
|
||||
},
|
||||
{
|
||||
'context': {
|
||||
'cluster': 'kubernetes',
|
||||
'user': 'kubernetes-admin'
|
||||
},
|
||||
'name': 'kubernetes-admin@kubernetes'
|
||||
}
|
||||
],
|
||||
'current-context': 'docker-for-desktop'
|
||||
};
|
||||
describe('KubeUtils', function (): void {
|
||||
const configFile = 'kubeConfig';
|
||||
|
||||
afterEach('KubeUtils cleanup', () => {
|
||||
sinon.restore();
|
||||
});
|
||||
|
||||
it('getDefaultKubeConfigPath', async () => {
|
||||
getDefaultKubeConfigPath().should.endWith(path.join('.kube', 'config'));
|
||||
});
|
||||
|
||||
describe('get Kube Config Cluster Contexts', () => {
|
||||
it('success', async () => {
|
||||
sinon.stub(yamljs, 'load').returns(<any>kubeConfig);
|
||||
const verifyContexts = (contexts: KubeClusterContext[], testName: string) => {
|
||||
contexts.length.should.equal(2, `test: ${testName} failed`);
|
||||
contexts[0].name.should.equal('docker-for-desktop', `test: ${testName} failed`);
|
||||
contexts[0].isCurrentContext.should.be.true(`test: ${testName} failed`);
|
||||
contexts[1].name.should.equal('kubernetes-admin@kubernetes', `test: ${testName} failed`);
|
||||
contexts[1].isCurrentContext.should.be.false(`test: ${testName} failed`);
|
||||
};
|
||||
verifyContexts(await getKubeConfigClusterContexts(configFile), 'getKubeConfigClusterContexts');
|
||||
});
|
||||
it('throws error when unable to load config file', async () => {
|
||||
const error = new Error('unknown error accessing file');
|
||||
sinon.stub(yamljs, 'load').throws(error); //erroring config file load
|
||||
((await tryExecuteAction(() => getKubeConfigClusterContexts(configFile))).error).should.equal(error, `test: getKubeConfigClusterContexts failed`);
|
||||
});
|
||||
});
|
||||
});
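The fixture and assertions above imply that getKubeConfigClusterContexts loads the kube config as YAML and maps each entry under 'contexts' to its name plus an isCurrentContext flag derived from 'current-context'. A minimal sketch under that assumption follows; the real implementation in common/kubeUtils presumably also raises the noContextFound/noCurrentContextFound errors defined in localizedConstants.

// Hypothetical sketch only - the real getKubeConfigClusterContexts lives in extensions/arc/src/common/kubeUtils.
import * as yamljs from 'yamljs';

interface KubeClusterContextSketch { name: string; isCurrentContext: boolean; }

async function getKubeConfigClusterContextsSketch(configFile: string): Promise<KubeClusterContextSketch[]> {
	const config = yamljs.load(configFile);             // throws if the file cannot be loaded, as the error test expects
	const currentContext = config['current-context'];   // 'docker-for-desktop' in the fixture above
	return (config['contexts'] ?? []).map((c: any) => ({
		name: c.name,
		isCurrentContext: c.name === currentContext
	}));
}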
@@ -7,7 +7,7 @@ import { ResourceType } from 'arc';
|
||||
import 'mocha';
|
||||
import * as should from 'should';
|
||||
import * as vscode from 'vscode';
|
||||
import { getAzurecoreApi, getConnectionModeDisplayText, getDatabaseStateDisplayText, getErrorMessage, getResourceTypeIcon, parseEndpoint, parseInstanceName, parseIpAndPort, promptAndConfirmPassword, promptForResourceDeletion, resourceTypeToDisplayName } from '../../common/utils';
|
||||
import { getAzurecoreApi, getConnectionModeDisplayText, getDatabaseStateDisplayText, getErrorMessage, getResourceTypeIcon, parseEndpoint, parseIpAndPort, promptAndConfirmPassword, promptForInstanceDeletion, resourceTypeToDisplayName, convertToGibibyteString } from '../../common/utils';
|
||||
import { ConnectionMode as ConnectionMode, IconPathHelper } from '../../constants';
|
||||
import * as loc from '../../localizedConstants';
|
||||
import { MockInputBox } from '../stubs';
|
||||
@@ -47,24 +47,6 @@ describe('parseEndpoint Method Tests', function (): void {
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseInstanceName Method Tests', () => {
|
||||
it('Should parse valid instanceName with namespace correctly', function (): void {
|
||||
should(parseInstanceName('mynamespace_myinstance')).equal('myinstance');
|
||||
});
|
||||
|
||||
it('Should parse valid instanceName without namespace correctly', function (): void {
|
||||
should(parseInstanceName('myinstance')).equal('myinstance');
|
||||
});
|
||||
|
||||
it('Should return empty string when undefined value passed in', function (): void {
|
||||
should(parseInstanceName(undefined)).equal('');
|
||||
});
|
||||
|
||||
it('Should return empty string when empty string value passed in', function (): void {
|
||||
should(parseInstanceName('')).equal('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAzurecoreApi Method Tests', function () {
|
||||
it('Should get azurecore API correctly', function (): void {
|
||||
should(getAzurecoreApi()).not.be.undefined();
|
||||
@@ -140,7 +122,7 @@ describe('promptForResourceDeletion Method Tests', function (): void {
|
||||
});
|
||||
|
||||
it('Resolves as true when value entered is correct', function (done): void {
|
||||
promptForResourceDeletion('myname').then((value: boolean) => {
|
||||
promptForInstanceDeletion('myname').then((value: boolean) => {
|
||||
value ? done() : done(new Error('Expected return value to be true'));
|
||||
});
|
||||
mockInputBox.value = 'myname';
|
||||
@@ -148,14 +130,14 @@ describe('promptForResourceDeletion Method Tests', function (): void {
|
||||
});
|
||||
|
||||
it('Resolves as false when input box is closed early', function (done): void {
|
||||
promptForResourceDeletion('myname').then((value: boolean) => {
|
||||
promptForInstanceDeletion('myname').then((value: boolean) => {
|
||||
!value ? done() : done(new Error('Expected return value to be false'));
|
||||
});
|
||||
mockInputBox.hide();
|
||||
});
|
||||
|
||||
it('Validation message is set when value entered is incorrect', async function (): Promise<void> {
|
||||
promptForResourceDeletion('myname');
|
||||
promptForInstanceDeletion('myname');
|
||||
mockInputBox.value = 'wrong value';
|
||||
await mockInputBox.triggerAccept();
|
||||
should(mockInputBox.validationMessage).not.be.equal('', 'Validation message should not be empty after incorrect value entered');
|
||||
@@ -260,22 +242,6 @@ describe('getErrorMessage Method Tests', function () {
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseInstanceName Method Tests', function () {
|
||||
it('2 part name', function (): void {
|
||||
const name = 'MyName';
|
||||
should(parseInstanceName(`MyNamespace_${name}`)).equal(name);
|
||||
});
|
||||
|
||||
it('1 part name', function (): void {
|
||||
const name = 'MyName';
|
||||
should(parseInstanceName(name)).equal(name);
|
||||
});
|
||||
|
||||
it('Invalid name', function (): void {
|
||||
should(() => parseInstanceName('Some_Invalid_Name')).throwError();
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseIpAndPort', function (): void {
|
||||
it('Valid address', function (): void {
|
||||
const ip = '127.0.0.1';
|
||||
@@ -288,3 +254,116 @@ describe('parseIpAndPort', function (): void {
|
||||
should(() => parseIpAndPort(ip)).throwError();
|
||||
});
|
||||
});
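These tests, together with the endpoint getters above that split status.externalEndpoint into an ip and a port, suggest parseIpAndPort expects an '<ip>:<port>' string and throws otherwise. A hedged sketch of that behavior follows; the ':' delimiter is an assumption, and the real helper lives in common/utils.

// Hypothetical sketch only; assumes the endpoint is formatted as '<ip>:<port>'.
function parseIpAndPortSketch(address: string): { ip: string, port: string } {
	const parts = address.split(':');
	if (parts.length !== 2 || !parts[0] || !parts[1]) {
		throw new Error(`Expected an address of the form '<ip>:<port>' but got '${address}'`);
	}
	return { ip: parts[0], port: parts[1] };
}

// parseIpAndPortSketch('127.0.0.1:31433') -> { ip: '127.0.0.1', port: '31433' }
// parseIpAndPortSketch('127.0.0.1')       -> throws, matching the invalid-address test above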
describe('convertToGibibyteString Method Tests', function () {
|
||||
const tolerance = 0.001;
|
||||
it('Value is in KB', function (): void {
|
||||
const value = '44000K';
|
||||
const conversion = 0.04097819;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in MB', function (): void {
|
||||
const value = '1100M';
|
||||
const conversion = 1.02445483;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in GB', function (): void {
|
||||
const value = '1G';
|
||||
const conversion = 0.931322575;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in TB', function (): void {
|
||||
const value = '1T';
|
||||
const conversion = 931.32257;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in PB', function (): void {
|
||||
const value = '0.1P';
|
||||
const conversion = 93132.25746;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in EB', function (): void {
|
||||
const value = '1E';
|
||||
const conversion = 931322574.6154;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in mB', function (): void {
|
||||
const value = '1073741824000m';
|
||||
const conversion = 1;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in B', function (): void {
|
||||
const value = '1073741824';
|
||||
const conversion = 1;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in KiB', function (): void {
|
||||
const value = '1048576Ki';
|
||||
const conversion = 1;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in MiB', function (): void {
|
||||
const value = '256Mi';
|
||||
const conversion = 0.25;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in GiB', function (): void {
|
||||
const value = '1000Gi';
|
||||
const conversion = 1000;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in TiB', function (): void {
|
||||
const value = '1Ti';
|
||||
const conversion = 1024;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in PiB', function (): void {
|
||||
const value = '1Pi';
|
||||
const conversion = 1048576;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is in EiB', function (): void {
|
||||
const value = '1Ei';
|
||||
const conversion = 1073741824;
|
||||
const check = Math.abs(conversion - parseFloat(convertToGibibyteString(value)));
|
||||
should(check).lessThanOrEqual(tolerance);
|
||||
});
|
||||
|
||||
it('Value is empty', function (): void {
|
||||
const value = '';
|
||||
const error = new Error(`Value provided is not a valid Kubernetes resource quantity`);
|
||||
should(() => convertToGibibyteString(value)).throwError(error);
|
||||
});
|
||||
|
||||
it('Value is not a valid Kubernetes resource quantity', function (): void {
|
||||
const value = '1J';
|
||||
const error = new Error(`${value} is not a valid Kubernetes resource quantity`);
|
||||
should(() => convertToGibibyteString(value)).throwError(error);
|
||||
});
|
||||
});
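Taken together, the expected values above describe a conversion from any Kubernetes resource quantity (millibytes, plain bytes, decimal K/M/G/T/P/E, or binary Ki/Mi/Gi/Ti/Pi/Ei suffixes) into gibibytes. The following is a minimal sketch consistent with these tests, not the actual convertToGibibyteString in common/utils.

// Hypothetical sketch only - the real convertToGibibyteString lives in extensions/arc/src/common/utils.
const GIB = 1024 ** 3;
const multipliers: { [suffix: string]: number } = {
	'm': 1e-3, '': 1,                                                             // millibytes and plain bytes
	'K': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12, 'P': 1e15, 'E': 1e18,                // decimal (SI) suffixes
	'Ki': 1024, 'Mi': 1024 ** 2, 'Gi': GIB, 'Ti': 1024 ** 4, 'Pi': 1024 ** 5, 'Ei': 1024 ** 6 // binary suffixes
};

function convertToGibibyteStringSketch(value: string): string {
	const match = /^([0-9]*\.?[0-9]+)(m|[KMGTPE]i?)?$/.exec(value);
	if (!match) {
		throw new Error(`${value} is not a valid Kubernetes resource quantity`);
	}
	const bytes = parseFloat(match[1]) * multipliers[match[2] ?? ''];
	return (bytes / GIB).toString();
}

// convertToGibibyteStringSketch('256Mi')  -> '0.25'
// convertToGibibyteStringSketch('44000K') -> '0.040978...' (within the 0.001 tolerance used above)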
86
extensions/arc/src/test/mocks/fakeAzdataApi.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as azdataExt from 'azdata-ext';
|
||||
|
||||
/**
|
||||
* Simple fake Azdata Api used to mock the API during tests
|
||||
*/
|
||||
export class FakeAzdataApi implements azdataExt.IAzdataApi {
|
||||
|
||||
public postgresInstances: azdataExt.PostgresServerListResult[] = [];
|
||||
public miaaInstances: azdataExt.SqlMiListResult[] = [];
|
||||
|
||||
//
|
||||
// API Implementation
|
||||
//
|
||||
public get arc() {
|
||||
const self = this;
|
||||
return {
|
||||
dc: {
|
||||
create(_namespace: string, _name: string, _connectivityMode: string, _resourceGroup: string, _location: string, _subscription: string, _profileName?: string, _storageClass?: string): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); },
|
||||
endpoint: {
|
||||
async list(): Promise<azdataExt.AzdataOutput<azdataExt.DcEndpointListResult[]>> { return <any>{ result: [] }; }
|
||||
},
|
||||
config: {
|
||||
list(): Promise<azdataExt.AzdataOutput<azdataExt.DcConfigListResult[]>> { throw new Error('Method not implemented.'); },
|
||||
async show(): Promise<azdataExt.AzdataOutput<azdataExt.DcConfigShowResult>> { return <any>{ result: undefined! }; }
|
||||
}
|
||||
},
|
||||
postgres: {
|
||||
server: {
|
||||
delete(_name: string): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); },
|
||||
async list(): Promise<azdataExt.AzdataOutput<azdataExt.PostgresServerListResult[]>> { return <any>{ result: self.postgresInstances }; },
|
||||
show(_name: string): Promise<azdataExt.AzdataOutput<azdataExt.PostgresServerShowResult>> { throw new Error('Method not implemented.'); },
|
||||
edit(
|
||||
_name: string,
|
||||
_args: {
|
||||
adminPassword?: boolean,
|
||||
coresLimit?: string,
|
||||
coresRequest?: string,
|
||||
engineSettings?: string,
|
||||
extensions?: string,
|
||||
memoryLimit?: string,
|
||||
memoryRequest?: string,
|
||||
noWait?: boolean,
|
||||
port?: number,
|
||||
replaceEngineSettings?: boolean,
|
||||
workers?: number
|
||||
},
|
||||
_additionalEnvVars?: { [key: string]: string }): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); }
|
||||
}
|
||||
},
|
||||
sql: {
|
||||
mi: {
|
||||
delete(_name: string): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); },
|
||||
async list(): Promise<azdataExt.AzdataOutput<azdataExt.SqlMiListResult[]>> { return <any>{ result: self.miaaInstances }; },
|
||||
show(_name: string): Promise<azdataExt.AzdataOutput<azdataExt.SqlMiShowResult>> { throw new Error('Method not implemented.'); },
|
||||
edit(
|
||||
_name: string,
|
||||
_args: {
|
||||
coresLimit?: string,
|
||||
coresRequest?: string,
|
||||
memoryLimit?: string,
|
||||
memoryRequest?: string,
|
||||
noWait?: boolean
|
||||
}): Promise<azdataExt.AzdataOutput<void>> { throw new Error('Method not implemented.'); }
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
getPath(): Promise<string> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
login(_endpoint: string, _username: string, _password: string): Promise<azdataExt.AzdataOutput<any>> {
|
||||
return <any>undefined;
|
||||
}
|
||||
version(): Promise<azdataExt.AzdataOutput<string>> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
getSemVersion(): any {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
|
||||
}
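A hypothetical sketch of how a test might substitute this fake for the real azdata-ext exports; the stubbing approach and the instance fields shown here are assumptions, not taken from the actual test files.

// Hypothetical usage sketch only.
import * as sinon from 'sinon';
import * as vscode from 'vscode';
import { FakeAzdataApi } from './mocks/fakeAzdataApi';

const fakeApi = new FakeAzdataApi();
fakeApi.postgresInstances = [<any>{ name: 'pg1', state: 'Ready' }];
// ControllerModel resolves the API via vscode.extensions.getExtension(...)?.exports.azdata,
// so return an object exposing the fake through that property.
sinon.stub(vscode.extensions, 'getExtension').returns(<any>{ exports: { azdata: fakeApi } });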
91
extensions/arc/src/test/mocks/fakeRadioButton.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as azdata from 'azdata';
|
||||
import * as vscode from 'vscode';
|
||||
|
||||
export class FakeRadioButton implements azdata.RadioButtonComponent {
|
||||
|
||||
private _onDidClickEmitter = new vscode.EventEmitter<any>();
|
||||
|
||||
onDidClick = this._onDidClickEmitter.event;
|
||||
|
||||
constructor(props: azdata.RadioButtonProperties) {
|
||||
this.label = props.label;
|
||||
this.value = props.value;
|
||||
this.checked = props.checked;
|
||||
this.enabled = props.enabled;
|
||||
}
|
||||
|
||||
//#region RadioButtonProperties implementation
|
||||
label?: string;
|
||||
value?: string;
|
||||
checked?: boolean;
|
||||
//#endregion
|
||||
|
||||
click() {
|
||||
this.checked = true;
|
||||
this._onDidClickEmitter.fire(this);
|
||||
}
|
||||
//#region Component Implementation
|
||||
id: string = '';
|
||||
updateProperties(_properties: { [key: string]: any; }): Thenable<void> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
updateProperty(_key: string, _value: any): Thenable<void> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
updateCssStyles(_cssStyles: { [key: string]: string; }): Thenable<void> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
onValidityChanged: vscode.Event<boolean> = <vscode.Event<boolean>>{};
|
||||
valid: boolean = false;
|
||||
validate(): Thenable<boolean> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
focus(): Thenable<void> {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
ariaHidden?: boolean | undefined;
|
||||
//#endregion
|
||||
|
||||
//#region ComponentProperties Implementation
|
||||
height?: number | string;
|
||||
width?: number | string;
|
||||
/**
|
||||
* The position CSS property. Empty by default.
|
||||
* This is particularly useful if laying out components inside a FlexContainer and
|
||||
* the size of the component is meant to be a fixed size. In this case the position must be
|
||||
* set to 'absolute', with the parent FlexContainer having 'relative' position.
|
||||
* Without this the component will fail to correctly size itself
|
||||
*/
|
||||
position?: azdata.PositionType;
|
||||
/**
|
||||
* Whether the component is enabled in the DOM
|
||||
*/
|
||||
enabled?: boolean;
|
||||
/**
|
||||
* Corresponds to the display CSS property for the element
|
||||
*/
|
||||
display?: azdata.DisplayType;
|
||||
/**
|
||||
* Corresponds to the aria-label accessibility attribute for this component
|
||||
*/
|
||||
ariaLabel?: string;
|
||||
/**
|
||||
* Corresponds to the role accessibility attribute for this component
|
||||
*/
|
||||
ariaRole?: string;
|
||||
/**
|
||||
* Corresponds to the aria-selected accessibility attribute for this component
|
||||
*/
|
||||
ariaSelected?: boolean;
|
||||
/**
|
||||
* Matches the CSS style key and its available values.
|
||||
*/
|
||||
CSSStyles?: { [key: string]: string };
|
||||
//#endregion
|
||||
|
||||
}
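For illustration only (not part of this change), a hedged sketch of how FakeRadioButton might be exercised directly in a mocha/should test, mirroring the way the radioOptionsGroup tests drive it:
// Illustrative usage sketch; 'describe'/'it' come from mocha, as in the other arc tests.
import * as should from 'should';
import { FakeRadioButton } from './fakeRadioButton';

describe('FakeRadioButton (example)', function (): void {
	it('checks itself and fires onDidClick when clicked', function (): void {
		const button = new FakeRadioButton({ label: 'value1', value: 'value1', checked: false, enabled: true });
		let clickCount = 0;
		button.onDidClick(() => { clickCount++; });
		button.click();
		should(button.checked).be.true();
		should(clickCount).equal(1);
	});
});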
|
||||
@@ -43,7 +43,7 @@ describe('ControllerModel', function (): void {
|
||||
});
|
||||
|
||||
it('Reads password from cred store', async function (): Promise<void> {
|
||||
const password = 'password123';
|
||||
const password = 'password123'; // [SuppressMessage("Microsoft.Security", "CS001:SecretInline", Justification="Test password, not actually used")]
|
||||
|
||||
// Set up cred store to return our password
|
||||
const credProviderMock = TypeMoq.Mock.ofType<azdata.CredentialProvider>();
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
pytest==5.3.5
|
||||
notebook==6.0.3
|
||||
@@ -3,8 +3,66 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as azdata from 'azdata';
|
||||
import * as TypeMoq from 'typemoq';
|
||||
import * as vscode from 'vscode';
|
||||
|
||||
export function createModelViewMock() {
|
||||
const mockModelBuilder = TypeMoq.Mock.ofType<azdata.ModelBuilder>();
|
||||
const mockTextBuilder = setupMockComponentBuilder<azdata.TextComponent, azdata.TextComponentProperties>();
|
||||
const mockInputBoxBuilder = setupMockComponentBuilder<azdata.InputBoxComponent, azdata.InputBoxProperties>();
|
||||
const mockRadioButtonBuilder = setupMockComponentBuilder<azdata.RadioButtonComponent, azdata.RadioButtonProperties>();
|
||||
const mockDivBuilder = setupMockContainerBuilder<azdata.DivContainer, azdata.DivContainerProperties, azdata.DivBuilder>();
|
||||
const mockLoadingBuilder = setupMockLoadingBuilder();
|
||||
mockModelBuilder.setup(b => b.loadingComponent()).returns(() => mockLoadingBuilder.object);
|
||||
mockModelBuilder.setup(b => b.text()).returns(() => mockTextBuilder.object);
|
||||
mockModelBuilder.setup(b => b.inputBox()).returns(() => mockInputBoxBuilder.object);
|
||||
mockModelBuilder.setup(b => b.radioButton()).returns(() => mockRadioButtonBuilder.object);
|
||||
mockModelBuilder.setup(b => b.divContainer()).returns(() => mockDivBuilder.object);
|
||||
const mockModelView = TypeMoq.Mock.ofType<azdata.ModelView>();
|
||||
mockModelView.setup(mv => mv.modelBuilder).returns(() => mockModelBuilder.object);
|
||||
return { mockModelView, mockModelBuilder, mockTextBuilder, mockInputBoxBuilder, mockRadioButtonBuilder, mockDivBuilder };
|
||||
}
|
||||
|
||||
function setupMockLoadingBuilder(
|
||||
loadingBuilderGetter?: (item: azdata.Component) => azdata.LoadingComponentBuilder,
|
||||
mockLoadingBuilder?: TypeMoq.IMock<azdata.LoadingComponentBuilder>
|
||||
): TypeMoq.IMock<azdata.LoadingComponentBuilder> {
|
||||
mockLoadingBuilder = mockLoadingBuilder ?? setupMockComponentBuilder<azdata.LoadingComponent, azdata.LoadingComponentProperties, azdata.LoadingComponentBuilder>();
|
||||
let item: azdata.Component;
|
||||
mockLoadingBuilder.setup(b => b.withItem(TypeMoq.It.isAny())).callback((_item) => item = _item).returns(() => loadingBuilderGetter ? loadingBuilderGetter(item) : mockLoadingBuilder!.object);
|
||||
return mockLoadingBuilder;
|
||||
}
|
||||
|
||||
export function setupMockComponentBuilder<T extends azdata.Component, P extends azdata.ComponentProperties, B extends azdata.ComponentBuilder<T, P> = azdata.ComponentBuilder<T, P>>(
|
||||
componentGetter?: (props: P) => T,
|
||||
mockComponentBuilder?: TypeMoq.IMock<B>,
|
||||
): TypeMoq.IMock<B> {
|
||||
mockComponentBuilder = mockComponentBuilder ?? TypeMoq.Mock.ofType<B>();
|
||||
const returnComponent = TypeMoq.Mock.ofType<T>();
|
||||
// Need to set up 'then' for when a mocked object is awaited/resolved, otherwise the test will hang: https://github.com/florinn/typemoq/issues/66
|
||||
returnComponent.setup((x: any) => x.then).returns(() => { });
|
||||
let compProps: P;
|
||||
mockComponentBuilder.setup(b => b.withProperties(TypeMoq.It.isAny())).callback((props: P) => compProps = props).returns(() => mockComponentBuilder!.object);
|
||||
mockComponentBuilder.setup(b => b.component()).returns(() => {
|
||||
return componentGetter ? componentGetter(compProps) : Object.assign<T, P>(Object.assign({}, returnComponent.object), compProps);
|
||||
});
|
||||
|
||||
// For now just have these be passthrough - can hook up additional functionality later if needed
|
||||
mockComponentBuilder.setup(b => b.withValidation(TypeMoq.It.isAny())).returns(() => mockComponentBuilder!.object);
|
||||
return mockComponentBuilder;
|
||||
}
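As a hedged illustration of the helper above (the button component/properties pair below is only an example; any azdata component works the same way), the mocked builder simply echoes back whatever was passed to withProperties:
// Illustrative only: component() merges the captured properties onto the mocked component.
const mockButtonBuilder = setupMockComponentBuilder<azdata.ButtonComponent, azdata.ButtonProperties>();
const button = mockButtonBuilder.object
	.withProperties<azdata.ButtonProperties>({ label: 'Save', enabled: false })
	.component();
// button.label === 'Save' and button.enabled === false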
|
||||
|
||||
export function setupMockContainerBuilder<T extends azdata.Container<any, any>, P extends azdata.ComponentProperties, B extends azdata.ContainerBuilder<T, any, any, any> = azdata.ContainerBuilder<T, any, any, any>>(
|
||||
mockContainerBuilder?: TypeMoq.IMock<B>
|
||||
): TypeMoq.IMock<B> {
|
||||
mockContainerBuilder = mockContainerBuilder ?? setupMockComponentBuilder<T, P, B>();
|
||||
// For now just have these be passthrough - can hook up additional functionality later if needed
|
||||
mockContainerBuilder.setup(b => b.withItems(TypeMoq.It.isAny(), undefined)).returns(() => mockContainerBuilder!.object);
|
||||
mockContainerBuilder.setup(b => b.withLayout(TypeMoq.It.isAny())).returns(() => mockContainerBuilder!.object);
|
||||
return mockContainerBuilder;
|
||||
}
|
||||
|
||||
export class MockInputBox implements vscode.InputBox {
|
||||
private _value: string = '';
|
||||
public get value(): string {
|
||||
|
||||
@@ -1,111 +0,0 @@
|
||||
##---------------------------------------------------------------------------------------------
|
||||
## Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
## Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
##--------------------------------------------------------------------------------------------
|
||||
|
||||
import json
|
||||
import nbformat
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import sys
|
||||
import uuid
|
||||
from nbconvert.preprocessors import ExecutePreprocessor
|
||||
from subprocess import Popen, PIPE, TimeoutExpired
|
||||
|
||||
## Variables
|
||||
notebook_path = '../../notebooks/arcDeployment/'
|
||||
|
||||
## Helper functions
|
||||
def generate_name(prefix, length=8):
|
||||
return (prefix + '-' + ''.join(
|
||||
[random.choice(string.ascii_lowercase)
|
||||
for n in range(length - len(prefix) - 1)]))
|
||||
|
||||
def clear_env():
|
||||
for k in [k for k in os.environ.keys() if k.startswith('AZDATA_NB_VAR_')]:
|
||||
del os.environ[k]
|
||||
|
||||
def azdata(commands, timeout=None, stdin=None):
|
||||
commands.insert(0, "azdata")
|
||||
print('Executing command: \n', ' '.join(commands))
|
||||
proc = Popen(commands, stdin=PIPE if stdin is not None else None, stdout=PIPE, stderr=PIPE, shell=os.name=='nt')
|
||||
try:
|
||||
(stdout, stderr) = proc.communicate(input=stdin, timeout=timeout)
|
||||
except TimeoutExpired:
|
||||
# https://docs.python.org/3.5/library/subprocess.html#subprocess.Popen.communicate
|
||||
# The child process is not killed if the timeout expires, so in order to
|
||||
# cleanup properly we should kill the child process and finish communication.
|
||||
proc.kill()
|
||||
(stdout, stderr) = proc.communicate(timeout=timeout)
|
||||
sys.stdout.buffer.write(stdout)
|
||||
sys.stderr.buffer.write(stderr)
|
||||
raise
|
||||
|
||||
sys.stdout.buffer.write(stdout)
|
||||
if proc.returncode != 0:
|
||||
raise Exception(stderr)
|
||||
else:
|
||||
sys.stderr.buffer.write(stderr)
|
||||
|
||||
return (stdout.decode(sys.stdout.encoding),
|
||||
stderr.decode(sys.stderr.encoding))
|
||||
|
||||
## Tests
|
||||
def test_postgres_create():
|
||||
# Load the notebook
|
||||
with open(notebook_path + 'deploy.postgres.existing.arc.ipynb') as f:
|
||||
nb = nbformat.read(f, as_version=nbformat.NO_CONVERT)
|
||||
|
||||
name = generate_name('pg')
|
||||
try:
|
||||
# Setup the environment
|
||||
os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME'] = name
|
||||
subscription = os.environ['AZDATA_NB_VAR_ARC_SUBSCRIPTION'] = str(uuid.uuid4())
|
||||
resource_group = os.environ['AZDATA_NB_VAR_ARC_RESOURCE_GROUP_NAME'] = 'test'
|
||||
namespace = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAMESPACE'] = 'default'
|
||||
workers = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS'] = '1'
|
||||
service_type = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_SERVICE_TYPE'] = 'NodePort'
|
||||
data_size = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_DATA_SIZE'] = '512'
|
||||
port = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PORT'] = '5431'
|
||||
extensions = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_EXTENSIONS'] = 'pg_cron,postgis'
|
||||
cpu_min = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CPU_MIN'] = '1'
|
||||
cpu_max = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CPU_MAX'] = '2'
|
||||
memory_min = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_MIN'] = '256'
|
||||
memory_max = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_MAX'] = '1023'
|
||||
backup_sizes = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_SIZES'] = '512,1023'
|
||||
backup_full_interval = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_FULL_INTERVAL'] = '20'
|
||||
backup_delta_interval = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_DELTA_INTERVAL'] = '10'
|
||||
backup_retention_min = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_RETENTION_MIN'] = '1,1GB;2,2GB'
|
||||
backup_retention_max = os.environ['AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_RETENTION_MAX'] = '2,2GB;3,3GB'
|
||||
|
||||
# Execute the notebook that creates Postgres
|
||||
ExecutePreprocessor(timeout=1200).preprocess(nb, {'metadata': {'path': notebook_path}})
|
||||
|
||||
# Verify that Postgres was created successfully
|
||||
(out, _) = azdata(['postgres', 'server', 'show', '-n', name])
|
||||
db = json.loads(out)
|
||||
assert db['metadata']['name'] == name
|
||||
assert db['metadata']['namespace'] == namespace
|
||||
assert db['spec']['scale']['shards'] == int(workers)
|
||||
assert db['spec']['service']['type'] == service_type
|
||||
assert db['spec']['storage']['volumeSize'] == data_size + 'Mi'
|
||||
assert db['spec']['service']['port'] == int(port)
|
||||
assert [p['name'] for p in db['spec']['engine']['plugins']] == ['pg_cron', 'postgis']
|
||||
assert db['spec']['scheduling']['default']['resources']['requests']['cpu'] == cpu_min
|
||||
assert db['spec']['scheduling']['default']['resources']['limits']['cpu'] == cpu_max
|
||||
assert db['spec']['scheduling']['default']['resources']['requests']['memory'] == memory_min + 'Mi'
|
||||
assert db['spec']['scheduling']['default']['resources']['limits']['memory'] == memory_max + 'Mi'
|
||||
assert [t['storage']['volumeSize'] for t in db['spec']['backups']['tiers']] == [b + 'Mi' for b in backup_sizes.split(',')]
|
||||
assert db['spec']['backups']['fullMinutes'] == int(backup_full_interval)
|
||||
assert db['spec']['backups']['deltaMinutes'] == int(backup_delta_interval)
|
||||
for i in range(len(db['spec']['backups']['tiers'])):
|
||||
assert db['spec']['backups']['tiers'][i]['retention']['minimums'] == backup_retention_min.split(';')[i].split(',')
|
||||
assert db['spec']['backups']['tiers'][i]['retention']['maximums'] == backup_retention_max.split(';')[i].split(',')
|
||||
except Exception:
|
||||
# Capture cell outputs to help with debugging
|
||||
print([c['outputs'] for c in nb['cells'] if c.get('outputs')])
|
||||
raise
|
||||
finally:
|
||||
clear_env()
|
||||
azdata(['postgres', 'server', 'delete', '-n', name])
|
||||
@@ -0,0 +1,88 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as azdata from 'azdata';
|
||||
import * as should from 'should';
|
||||
import { getErrorMessage } from '../../../common/utils';
|
||||
import { RadioOptionsGroup, RadioOptionsInfo } from '../../../ui/components/radioOptionsGroup';
|
||||
import { FakeRadioButton } from '../../mocks/fakeRadioButton';
|
||||
import { setupMockComponentBuilder, createModelViewMock } from '../../stubs';
|
||||
|
||||
|
||||
const loadingError = new Error('Error loading options');
|
||||
const radioOptionsInfo = <RadioOptionsInfo>{
|
||||
values: [
|
||||
'value1',
|
||||
'value2'
|
||||
],
|
||||
defaultValue: 'value2'
|
||||
};
|
||||
const divItems: azdata.Component[] = [];
|
||||
let radioOptionsGroup: RadioOptionsGroup;
|
||||
|
||||
|
||||
describe('radioOptionsGroup', function (): void {
|
||||
beforeEach(async () => {
|
||||
const { mockModelView, mockRadioButtonBuilder, mockDivBuilder } = createModelViewMock();
|
||||
mockRadioButtonBuilder.reset(); // reset any previous mock so that we can set our own.
|
||||
setupMockComponentBuilder<azdata.RadioButtonComponent, azdata.RadioButtonProperties>(
|
||||
(props) => new FakeRadioButton(props),
|
||||
mockRadioButtonBuilder,
|
||||
);
|
||||
mockDivBuilder.reset(); // reset previous setups so the new setups we are about to create replace them instead of creating a recording chain
|
||||
// create new setups for the DivContainer with custom behavior
|
||||
setupMockComponentBuilder<azdata.DivContainer, azdata.DivContainerProperties, azdata.DivBuilder>(
|
||||
() => <azdata.DivContainer>{
|
||||
addItem: (item) => { divItems.push(item); },
|
||||
clearItems: () => { divItems.length = 0; },
|
||||
get items() { return divItems; },
|
||||
},
|
||||
mockDivBuilder
|
||||
);
|
||||
radioOptionsGroup = new RadioOptionsGroup(mockModelView.object, (_disposable) => { });
|
||||
await radioOptionsGroup.load(async () => radioOptionsInfo);
|
||||
});
|
||||
|
||||
it('verify construction and load', async () => {
|
||||
should(radioOptionsGroup).not.be.undefined();
|
||||
should(radioOptionsGroup.value).not.be.undefined();
|
||||
radioOptionsGroup.value!.should.equal('value2', 'radio options group should be the default checked value');
|
||||
// verify all the radioButtons created in the group
|
||||
verifyRadioGroup();
|
||||
});
|
||||
|
||||
it('onClick', async () => {
|
||||
// click the radioButton corresponding to 'value1'
|
||||
(divItems as FakeRadioButton[]).filter(r => r.value === 'value1').pop()!.click();
|
||||
radioOptionsGroup.value!.should.equal('value1', 'radio options group should correspond to the radioButton that we clicked');
|
||||
// verify all the radioButtons created in the group
|
||||
verifyRadioGroup();
|
||||
});
|
||||
|
||||
it('load throws', async () => {
|
||||
await radioOptionsGroup.load(() => { throw loadingError; });
|
||||
// in the error case the div does not hold radio buttons, but a single TextComponent whose value is the error message
divItems.length.should.equal(1, 'There should be only one element in the divContainer when a loading error happens');
|
||||
const label = divItems[0] as azdata.TextComponent;
|
||||
should(label.value).not.be.undefined();
|
||||
label.value!.should.deepEqual(getErrorMessage(loadingError));
|
||||
should(label.CSSStyles).not.be.undefined();
|
||||
should(label.CSSStyles!.color).not.be.undefined();
|
||||
label.CSSStyles!.color.should.equal('Red');
|
||||
});
|
||||
});
|
||||
|
||||
function verifyRadioGroup() {
|
||||
const radioButtons = divItems as FakeRadioButton[];
|
||||
radioButtons.length.should.equal(radioOptionsInfo.values!.length);
|
||||
radioButtons.forEach(rb => {
|
||||
should(rb.label).not.be.undefined();
|
||||
should(rb.value).not.be.undefined();
|
||||
should(rb.enabled).not.be.undefined();
|
||||
rb.label!.should.equal(rb.value);
|
||||
rb.enabled!.should.be.true();
|
||||
});
|
||||
}
|
||||
|
||||
@@ -3,16 +3,21 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { ControllerInfo } from 'arc';
|
||||
import { ControllerInfo, ResourceType } from 'arc';
|
||||
import 'mocha';
|
||||
import * as should from 'should';
|
||||
import * as TypeMoq from 'typemoq';
|
||||
import * as sinon from 'sinon';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import * as vscode from 'vscode';
|
||||
import * as azdataExt from 'azdata-ext';
|
||||
import { ControllerModel } from '../../../models/controllerModel';
|
||||
import { MiaaModel } from '../../../models/miaaModel';
|
||||
import { AzureArcTreeDataProvider } from '../../../ui/tree/azureArcTreeDataProvider';
|
||||
import { ControllerTreeNode } from '../../../ui/tree/controllerTreeNode';
|
||||
import { MiaaTreeNode } from '../../../ui/tree/miaaTreeNode';
|
||||
import { FakeControllerModel } from '../../mocks/fakeControllerModel';
|
||||
import { FakeAzdataApi } from '../../mocks/fakeAzdataApi';
|
||||
|
||||
describe('AzureArcTreeDataProvider tests', function (): void {
|
||||
let treeDataProvider: AzureArcTreeDataProvider;
|
||||
@@ -84,6 +89,27 @@ describe('AzureArcTreeDataProvider tests', function (): void {
|
||||
let children = await treeDataProvider.getChildren();
|
||||
should(children.length).equal(0, 'After loading we should have 0 children');
|
||||
});
|
||||
|
||||
it('should return all children of controller after loading', async function (): Promise<void> {
|
||||
const mockArcExtension = TypeMoq.Mock.ofType<vscode.Extension<any>>();
|
||||
const mockArcApi = TypeMoq.Mock.ofType<azdataExt.IExtension>();
|
||||
mockArcExtension.setup(x => x.exports).returns(() => {
|
||||
return mockArcApi.object;
|
||||
});
|
||||
const fakeAzdataApi = new FakeAzdataApi();
|
||||
fakeAzdataApi.postgresInstances = [{ name: 'pg1', state: '', workers: 0 }];
|
||||
fakeAzdataApi.miaaInstances = [{ name: 'miaa1', state: '', replicas: '', serverEndpoint: '' }];
|
||||
mockArcApi.setup(x => x.azdata).returns(() => fakeAzdataApi);
|
||||
|
||||
sinon.stub(vscode.extensions, 'getExtension').returns(mockArcExtension.object);
|
||||
const controllerModel = new ControllerModel(treeDataProvider, { id: uuid(), url: '127.0.0.1', name: 'my-arc', username: 'sa', rememberPassword: true, resources: [] }, 'mypassword');
|
||||
await treeDataProvider.addOrUpdateController(controllerModel, '');
|
||||
const controllerNode = treeDataProvider.getControllerNode(controllerModel);
|
||||
const children = await treeDataProvider.getChildren(controllerNode);
|
||||
should(children.filter(c => c.label === fakeAzdataApi.postgresInstances[0].name).length).equal(1, 'Should have a Postgres child');
|
||||
should(children.filter(c => c.label === fakeAzdataApi.miaaInstances[0].name).length).equal(1, 'Should have a MIAA child');
|
||||
should(children.length).equal(2, 'Should have exactly 2 children');
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeController', function (): void {
|
||||
@@ -104,4 +130,31 @@ describe('AzureArcTreeDataProvider tests', function (): void {
|
||||
should((await treeDataProvider.getChildren()).length).equal(0, 'Removing other node again should do nothing');
|
||||
});
|
||||
});
|
||||
|
||||
describe('openResourceDashboard', function (): void {
|
||||
it('Opening dashboard for nonexistent controller node throws', async function (): Promise<void> {
|
||||
const controllerModel = new ControllerModel(treeDataProvider, { id: uuid(), url: '127.0.0.1', name: 'my-arc', username: 'sa', rememberPassword: true, resources: [] });
|
||||
const openDashboardPromise = treeDataProvider.openResourceDashboard(controllerModel, ResourceType.sqlManagedInstances, '');
|
||||
await should(openDashboardPromise).be.rejected();
|
||||
});
|
||||
|
||||
it('Opening dashboard for nonexistent resource throws', async function (): Promise<void> {
|
||||
const controllerModel = new ControllerModel(treeDataProvider, { id: uuid(), url: '127.0.0.1', name: 'my-arc', username: 'sa', rememberPassword: true, resources: [] });
|
||||
await treeDataProvider.addOrUpdateController(controllerModel, '');
|
||||
const openDashboardPromise = treeDataProvider.openResourceDashboard(controllerModel, ResourceType.sqlManagedInstances, '');
|
||||
await should(openDashboardPromise).be.rejected();
|
||||
});
|
||||
|
||||
it('Opening dashboard for existing resource node succeeds', async function (): Promise<void> {
|
||||
const controllerModel = new ControllerModel(treeDataProvider, { id: uuid(), url: '127.0.0.1', name: 'my-arc', username: 'sa', rememberPassword: true, resources: [] });
|
||||
const miaaModel = new MiaaModel(controllerModel, { name: 'miaa-1', resourceType: ResourceType.sqlManagedInstances }, undefined!, treeDataProvider);
|
||||
await treeDataProvider.addOrUpdateController(controllerModel, '');
|
||||
const controllerNode = treeDataProvider.getControllerNode(controllerModel)!;
|
||||
const resourceNode = new MiaaTreeNode(miaaModel, controllerModel);
|
||||
sinon.stub(controllerNode, 'getResourceNode').returns(resourceNode);
|
||||
const openDashboardStub = sinon.stub(resourceNode, 'openDashboard');
await treeDataProvider.openResourceDashboard(controllerModel, ResourceType.sqlManagedInstances, '');
should(openDashboardStub.calledOnce).be.true('openDashboard should have been called exactly once');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
13
extensions/arc/src/typings/arc.d.ts
vendored
@@ -3,7 +3,6 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
declare module 'arc' {
|
||||
import * as vscode from 'vscode';
|
||||
|
||||
/**
|
||||
* Covers defining what the arc extension exports to other extensions
|
||||
@@ -20,6 +19,10 @@ declare module 'arc' {
|
||||
sqlManagedInstances = 'sqlManagedInstances'
|
||||
}
|
||||
|
||||
export type MiaaResourceInfo = ResourceInfo & {
|
||||
userName?: string
|
||||
};
|
||||
|
||||
export type ResourceInfo = {
|
||||
name: string,
|
||||
resourceType: ResourceType | string,
|
||||
@@ -35,7 +38,13 @@ declare module 'arc' {
|
||||
resources: ResourceInfo[]
|
||||
};
|
||||
|
||||
export interface DataController {
|
||||
label: string,
|
||||
info: ControllerInfo
|
||||
}
|
||||
export interface IExtension {
|
||||
getRegisteredDataControllers(): Promise<ControllerInfo[]>;
|
||||
getRegisteredDataControllers(): Promise<DataController[]>;
|
||||
getControllerPassword(controllerInfo: ControllerInfo): Promise<string>;
|
||||
reacquireControllerPassword(controllerInfo: ControllerInfo, password: string, retryCount?: number): Promise<string>;
|
||||
}
|
||||
}
|
||||
|
||||
1
extensions/arc/src/typings/refs.d.ts
vendored
@@ -8,3 +8,4 @@
|
||||
/// <reference path='../../../azurecore/src/azurecore.d.ts'/>
|
||||
/// <reference path='../../../../src/vs/vscode.d.ts'/>
|
||||
/// <reference path='../../../azdata/src/typings/azdata-ext.d.ts'/>
|
||||
/// <reference path='../../../resource-deployment/src/typings/resource-deployment.d.ts'/>
|
||||
|
||||
@@ -9,7 +9,7 @@ export abstract class Dashboard {
|
||||
|
||||
private dashboard!: azdata.window.ModelViewDashboard;
|
||||
|
||||
constructor(protected title: string) { }
|
||||
constructor(protected title: string, protected readonly name: string) { }
|
||||
|
||||
public async showDashboard(): Promise<void> {
|
||||
this.dashboard = this.createDashboard();
|
||||
@@ -17,7 +17,7 @@ export abstract class Dashboard {
|
||||
}
|
||||
|
||||
protected createDashboard(): azdata.window.ModelViewDashboard {
|
||||
const dashboard = azdata.window.createModelViewDashboard(this.title);
|
||||
const dashboard = azdata.window.createModelViewDashboard(this.title, this.name);
|
||||
dashboard.registerTabs(async modelView => {
|
||||
return await this.registerTabs(modelView);
|
||||
});
|
||||
|
||||
72
extensions/arc/src/ui/components/radioOptionsGroup.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
import * as azdata from 'azdata';
|
||||
import * as vscode from 'vscode';
|
||||
import { getErrorMessage } from '../../common/utils';
|
||||
|
||||
export interface RadioOptionsInfo {
|
||||
values?: string[],
|
||||
defaultValue: string
|
||||
}
|
||||
|
||||
export class RadioOptionsGroup {
|
||||
static id: number = 1;
|
||||
private _divContainer!: azdata.DivContainer;
|
||||
private _loadingBuilder: azdata.LoadingComponentBuilder;
|
||||
private _currentRadioOption!: azdata.RadioButtonComponent;
|
||||
|
||||
constructor(private _view: azdata.ModelView, private _onNewDisposableCreated: (disposable: vscode.Disposable) => void, private _groupName: string = `RadioOptionsGroup${RadioOptionsGroup.id++}`) {
|
||||
const divBuilder = this._view.modelBuilder.divContainer();
|
||||
const divBuilderWithProperties = divBuilder.withProperties<azdata.DivContainerProperties>({ clickable: false });
|
||||
this._divContainer = divBuilderWithProperties.component();
|
||||
const loadingComponentBuilder = this._view.modelBuilder.loadingComponent();
|
||||
this._loadingBuilder = loadingComponentBuilder.withItem(this._divContainer);
|
||||
}
|
||||
|
||||
public component(): azdata.LoadingComponent {
|
||||
return this._loadingBuilder.component();
|
||||
}
|
||||
|
||||
async load(optionsInfoGetter: () => Promise<RadioOptionsInfo>): Promise<void> {
|
||||
this.component().loading = true;
|
||||
this._divContainer.clearItems();
|
||||
try {
|
||||
const optionsInfo = await optionsInfoGetter();
|
||||
const options = optionsInfo.values!;
|
||||
let defaultValue: string = optionsInfo.defaultValue!;
|
||||
options.forEach((option: string) => {
|
||||
const radioOption = this._view!.modelBuilder.radioButton().withProperties<azdata.RadioButtonProperties>({
|
||||
label: option,
|
||||
checked: option === defaultValue,
|
||||
name: this._groupName,
|
||||
value: option,
|
||||
enabled: true
|
||||
}).component();
|
||||
if (radioOption.checked) {
|
||||
this._currentRadioOption = radioOption;
|
||||
}
|
||||
this._onNewDisposableCreated(radioOption.onDidClick(() => {
|
||||
if (this._currentRadioOption !== radioOption) {
|
||||
// Uncheck the previously selected radio option. The UI already renders correctly without this because
// the buttons share a 'groupName', but the checked property on the old button would otherwise stay stale,
// so we clear it explicitly to keep the component state consistent.
|
||||
this._currentRadioOption.checked = false;
|
||||
this._currentRadioOption = radioOption;
|
||||
}
|
||||
}));
|
||||
this._divContainer.addItem(radioOption);
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
const errorLabel = this._view!.modelBuilder.text().withProperties({ value: getErrorMessage(e), CSSStyles: { 'color': 'Red' } }).component();
|
||||
this._divContainer.addItem(errorLabel);
|
||||
}
|
||||
this.component().loading = false;
|
||||
}
|
||||
|
||||
get value(): string | undefined {
|
||||
return this._currentRadioOption?.value;
|
||||
}
|
||||
}
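A minimal, hypothetical hosting sketch (the surrounding model view, parent container, and disposables array are assumptions, not part of this file) showing the intended calling pattern for RadioOptionsGroup:
// Illustrative only: host the group in a model view, load its options, then read the selection.
const group = new RadioOptionsGroup(view, d => disposables.push(d));
parentContainer.addItem(group.component());
await group.load(async () => ({ values: ['value1', 'value2'], defaultValue: 'value2' }));
console.log(group.value); // 'value2' until the user clicks another option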
|
||||
@@ -12,7 +12,7 @@ import * as loc from '../../../localizedConstants';
|
||||
export class ControllerDashboard extends Dashboard {
|
||||
|
||||
constructor(private _controllerModel: ControllerModel) {
|
||||
super(loc.arcControllerDashboard(_controllerModel.info.name));
|
||||
super(loc.arcControllerDashboard(_controllerModel.info.name), 'ArcDataControllerDashboard');
|
||||
}
|
||||
|
||||
public async showDashboard(): Promise<void> {
|
||||
|
||||
@@ -7,8 +7,8 @@ import { ResourceType } from 'arc';
|
||||
import * as azdata from 'azdata';
|
||||
import * as azurecore from 'azurecore';
|
||||
import * as vscode from 'vscode';
|
||||
import { getConnectionModeDisplayText, getResourceTypeIcon, parseInstanceName, resourceTypeToDisplayName } from '../../../common/utils';
|
||||
import { cssStyles, Endpoints, IconPathHelper, iconSize } from '../../../constants';
|
||||
import { getConnectionModeDisplayText, getResourceTypeIcon, resourceTypeToDisplayName } from '../../../common/utils';
|
||||
import { cssStyles, Endpoints, IconPathHelper, controllerTroubleshootDocsUrl, iconSize } from '../../../constants';
|
||||
import * as loc from '../../../localizedConstants';
|
||||
import { ControllerModel } from '../../../models/controllerModel';
|
||||
import { DashboardPage } from '../../components/dashboardPage';
|
||||
@@ -93,7 +93,7 @@ export class ControllerDashboardOverviewPage extends DashboardPage {
|
||||
headerCssStyles: cssStyles.tableHeader,
|
||||
rowCssStyles: cssStyles.tableRow
|
||||
}, {
|
||||
displayName: loc.compute,
|
||||
displayName: loc.state,
|
||||
valueType: azdata.DeclarativeDataType.string,
|
||||
width: '34%',
|
||||
isReadOnly: true,
|
||||
@@ -178,18 +178,30 @@ export class ControllerDashboardOverviewPage extends DashboardPage {
|
||||
this._openInAzurePortalButton.onDidClick(async () => {
|
||||
const config = this._controllerModel.controllerConfig;
|
||||
if (config) {
|
||||
vscode.env.openExternal(vscode.Uri.parse(
|
||||
await vscode.env.openExternal(vscode.Uri.parse(
|
||||
`https://portal.azure.com/#resource/subscriptions/${config.spec.settings.azure.subscription}/resourceGroups/${config.spec.settings.azure.resourceGroup}/providers/Microsoft.AzureData/${ResourceType.dataControllers}/${config.metadata.name}`));
|
||||
} else {
|
||||
vscode.window.showErrorMessage(loc.couldNotFindControllerRegistration);
|
||||
}
|
||||
}));
|
||||
|
||||
const troubleshootButton = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
|
||||
label: loc.troubleshoot,
|
||||
iconPath: IconPathHelper.wrench
|
||||
}).component();
|
||||
|
||||
this.disposables.push(
|
||||
troubleshootButton.onDidClick(async () => {
|
||||
await vscode.env.openExternal(vscode.Uri.parse(controllerTroubleshootDocsUrl));
|
||||
})
|
||||
);
|
||||
|
||||
return this.modelView.modelBuilder.toolbarContainer().withToolbarItems(
|
||||
[
|
||||
{ component: newInstance },
|
||||
{ component: refreshButton, toolbarSeparatorAfter: true },
|
||||
{ component: this._openInAzurePortalButton }
|
||||
{ component: this._openInAzurePortalButton, toolbarSeparatorAfter: true },
|
||||
{ component: troubleshootButton }
|
||||
]
|
||||
).component();
|
||||
}
|
||||
@@ -219,26 +231,18 @@ export class ControllerDashboardOverviewPage extends DashboardPage {
|
||||
iconHeight: iconSize,
|
||||
iconWidth: iconSize
|
||||
}).component();
|
||||
let nameComponent: azdata.Component;
|
||||
if (r.instanceType === ResourceType.postgresInstances) {
|
||||
nameComponent = this.modelView.modelBuilder.text()
|
||||
.withProperties<azdata.TextComponentProperties>({
|
||||
value: r.instanceName || '',
|
||||
CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
|
||||
}).component();
|
||||
} else {
|
||||
nameComponent = this.modelView.modelBuilder.hyperlink()
|
||||
.withProperties<azdata.HyperlinkComponentProperties>({
|
||||
label: r.instanceName || '',
|
||||
url: ''
|
||||
}).component();
|
||||
(<azdata.HyperlinkComponent>nameComponent).onDidClick(async () => {
|
||||
await this._controllerModel.treeDataProvider.openResourceDashboard(this._controllerModel, r.instanceType || '', parseInstanceName(r.instanceName));
|
||||
});
|
||||
}
|
||||
|
||||
// TODO chgagnon
|
||||
return [imageComponent, nameComponent, resourceTypeToDisplayName(r.instanceType), '-'/* loc.numVCores(r.vCores) */];
|
||||
const nameComponent = this.modelView.modelBuilder.hyperlink()
|
||||
.withProperties<azdata.HyperlinkComponentProperties>({
|
||||
label: r.instanceName || '',
|
||||
url: ''
|
||||
}).component();
|
||||
|
||||
this.disposables.push(nameComponent.onDidClick(async () => {
|
||||
await this._controllerModel.treeDataProvider.openResourceDashboard(this._controllerModel, r.instanceType || '', r.instanceName);
|
||||
}));
|
||||
|
||||
return [imageComponent, nameComponent, resourceTypeToDisplayName(r.instanceType), r.state];
|
||||
});
|
||||
this._arcResourcesLoadingComponent.loading = !this._controllerModel.registrationsLastUpdated;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,369 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as vscode from 'vscode';
|
||||
import * as azdata from 'azdata';
|
||||
import * as azdataExt from 'azdata-ext';
|
||||
import * as loc from '../../../localizedConstants';
|
||||
import { IconPathHelper, cssStyles } from '../../../constants';
|
||||
import { DashboardPage } from '../../components/dashboardPage';
|
||||
import { convertToGibibyteString } from '../../../common/utils';
|
||||
import { MiaaModel } from '../../../models/miaaModel';
|
||||
|
||||
export class MiaaComputeAndStoragePage extends DashboardPage {
|
||||
|
||||
private configurationContainer?: azdata.DivContainer;
|
||||
private coresLimitBox?: azdata.InputBoxComponent;
|
||||
private coresRequestBox?: azdata.InputBoxComponent;
|
||||
private memoryLimitBox?: azdata.InputBoxComponent;
|
||||
private memoryRequestBox?: azdata.InputBoxComponent;
|
||||
|
||||
private discardButton?: azdata.ButtonComponent;
|
||||
private saveButton?: azdata.ButtonComponent;
|
||||
|
||||
private saveArgs: {
|
||||
coresLimit?: string,
|
||||
coresRequest?: string,
|
||||
memoryLimit?: string,
|
||||
memoryRequest?: string
|
||||
} = {};
|
||||
|
||||
private readonly _azdataApi: azdataExt.IExtension;
|
||||
|
||||
constructor(protected modelView: azdata.ModelView, private _miaaModel: MiaaModel) {
|
||||
super(modelView);
|
||||
this._azdataApi = vscode.extensions.getExtension(azdataExt.extension.name)?.exports;
|
||||
|
||||
this.initializeConfigurationBoxes();
|
||||
|
||||
this.disposables.push(this._miaaModel.onConfigUpdated(
|
||||
() => this.eventuallyRunOnInitialized(() => this.handleServiceUpdated())));
|
||||
}
|
||||
|
||||
protected get title(): string {
|
||||
return loc.computeAndStorage;
|
||||
}
|
||||
|
||||
protected get id(): string {
|
||||
return 'miaa-compute-and-storage';
|
||||
}
|
||||
|
||||
protected get icon(): { dark: string; light: string; } {
|
||||
return IconPathHelper.computeStorage;
|
||||
}
|
||||
|
||||
protected get container(): azdata.Component {
|
||||
const root = this.modelView.modelBuilder.divContainer().component();
|
||||
const content = this.modelView.modelBuilder.divContainer().component();
|
||||
root.addItem(content, { CSSStyles: { 'margin': '20px' } });
|
||||
|
||||
content.addItem(this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
|
||||
value: loc.computeAndStorage,
|
||||
CSSStyles: { ...cssStyles.title }
|
||||
}).component());
|
||||
|
||||
const infoComputeStorage_p1 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
|
||||
value: loc.miaaComputeAndStorageDescriptionPartOne,
|
||||
CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px', 'max-width': 'auto' }
|
||||
}).component();
|
||||
|
||||
const memoryVCoreslink = this.modelView.modelBuilder.hyperlink().withProperties<azdata.HyperlinkComponentProperties>({
|
||||
label: loc.scalingCompute,
|
||||
url: 'https://docs.microsoft.com/azure/azure-arc/data/configure-managed-instance',
|
||||
CSSStyles: { 'margin-block-start': '0px', 'margin-block-end': '0px' }
|
||||
}).component();
|
||||
|
||||
const infoComputeStorage_p4 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
|
||||
value: loc.computeAndStorageDescriptionPartFour,
|
||||
CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
|
||||
}).component();
|
||||
|
||||
const infoComputeStorage_p5 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
|
||||
value: loc.computeAndStorageDescriptionPartFive,
|
||||
CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
|
||||
}).component();
|
||||
|
||||
const infoComputeStorage_p6 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
|
||||
value: loc.computeAndStorageDescriptionPartSix,
|
||||
CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
|
||||
}).component();
|
||||
|
||||
const computeInfoAndLinks = this.modelView.modelBuilder.flexContainer()
|
||||
.withLayout({ flexWrap: 'wrap' })
|
||||
.withItems([
|
||||
infoComputeStorage_p1,
|
||||
memoryVCoreslink,
|
||||
infoComputeStorage_p4,
|
||||
infoComputeStorage_p5,
|
||||
infoComputeStorage_p6
|
||||
], { CSSStyles: { 'margin-right': '5px' } }).component();
|
||||
content.addItem(computeInfoAndLinks, { CSSStyles: { 'min-height': '30px' } });
|
||||
|
||||
this.configurationContainer = this.modelView.modelBuilder.divContainer().component();
|
||||
this.configurationContainer.addItems(this.createUserInputSection(), { CSSStyles: { 'min-height': '30px' } });
|
||||
content.addItem(this.configurationContainer, { CSSStyles: { 'margin-top': '30px' } });
|
||||
|
||||
this.initialized = true;
|
||||
|
||||
return root;
|
||||
}
|
||||
|
||||
protected get toolbarContainer(): azdata.ToolbarContainer {
|
||||
// Save Edits
|
||||
this.saveButton = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
|
||||
label: loc.saveText,
|
||||
iconPath: IconPathHelper.save,
|
||||
enabled: false
|
||||
}).component();
|
||||
|
||||
this.disposables.push(
|
||||
this.saveButton.onDidClick(async () => {
|
||||
this.saveButton!.enabled = false;
|
||||
try {
|
||||
await vscode.window.withProgress(
|
||||
{
|
||||
location: vscode.ProgressLocation.Notification,
|
||||
title: loc.updatingInstance(this._miaaModel.info.name),
|
||||
cancellable: false
|
||||
},
|
||||
async (_progress, _token): Promise<void> => {
|
||||
try {
|
||||
await this._azdataApi.azdata.arc.sql.mi.edit(
|
||||
this._miaaModel.info.name, this.saveArgs);
|
||||
} catch (err) {
|
||||
this.saveButton!.enabled = true;
|
||||
throw err;
|
||||
}
|
||||
|
||||
await this._miaaModel.refresh();
|
||||
}
|
||||
);
|
||||
|
||||
vscode.window.showInformationMessage(loc.instanceUpdated(this._miaaModel.info.name));
|
||||
|
||||
this.discardButton!.enabled = false;
|
||||
} catch (error) {
|
||||
vscode.window.showErrorMessage(loc.instanceUpdateFailed(this._miaaModel.info.name, error));
|
||||
}
|
||||
}));
|
||||
|
||||
// Discard
|
||||
this.discardButton = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
|
||||
label: loc.discardText,
|
||||
iconPath: IconPathHelper.discard,
|
||||
enabled: false
|
||||
}).component();
|
||||
|
||||
this.disposables.push(
|
||||
this.discardButton.onDidClick(async () => {
|
||||
this.discardButton!.enabled = false;
|
||||
try {
|
||||
this.editCores();
|
||||
this.editMemory();
|
||||
} catch (error) {
|
||||
vscode.window.showErrorMessage(loc.pageDiscardFailed(error));
|
||||
} finally {
|
||||
this.saveButton!.enabled = false;
|
||||
}
|
||||
}));
|
||||
|
||||
return this.modelView.modelBuilder.toolbarContainer().withToolbarItems([
|
||||
{ component: this.saveButton },
|
||||
{ component: this.discardButton }
|
||||
]).component();
|
||||
}
|
||||
|
||||
private initializeConfigurationBoxes() {
|
||||
this.coresLimitBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
|
||||
readOnly: false,
|
||||
min: 1,
|
||||
validationErrorMessage: loc.coresValidationErrorMessage,
|
||||
inputType: 'number',
|
||||
placeHolder: loc.loading
|
||||
}).component();
|
||||
|
||||
this.disposables.push(
|
||||
this.coresLimitBox.onTextChanged(() => {
|
||||
if (!(this.handleOnTextChanged(this.coresLimitBox!))) {
|
||||
this.saveArgs.coresLimit = undefined;
|
||||
} else {
|
||||
this.saveArgs.coresLimit = this.coresLimitBox!.value;
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
this.coresRequestBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
|
||||
readOnly: false,
|
||||
min: 1,
|
||||
validationErrorMessage: loc.coresValidationErrorMessage,
|
||||
inputType: 'number',
|
||||
placeHolder: loc.loading
|
||||
}).component();
|
||||
|
||||
this.disposables.push(
|
||||
this.coresRequestBox.onTextChanged(() => {
|
||||
if (!(this.handleOnTextChanged(this.coresRequestBox!))) {
|
||||
this.saveArgs.coresRequest = undefined;
|
||||
} else {
|
||||
this.saveArgs.coresRequest = this.coresRequestBox!.value;
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
this.memoryLimitBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
|
||||
readOnly: false,
|
||||
min: 2,
|
||||
validationErrorMessage: loc.memoryLimitValidationErrorMessage,
|
||||
inputType: 'number',
|
||||
placeHolder: loc.loading
|
||||
}).component();
|
||||
|
||||
this.disposables.push(
|
||||
this.memoryLimitBox.onTextChanged(() => {
|
||||
if (!(this.handleOnTextChanged(this.memoryLimitBox!))) {
|
||||
this.saveArgs.memoryLimit = undefined;
|
||||
} else {
|
||||
this.saveArgs.memoryLimit = this.memoryLimitBox!.value + 'Gi';
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
this.memoryRequestBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
|
||||
readOnly: false,
|
||||
min: 2,
|
||||
validationErrorMessage: loc.memoryRequestValidationErrorMessage,
|
||||
inputType: 'number',
|
||||
placeHolder: loc.loading
|
||||
}).component();
|
||||
|
||||
this.disposables.push(
|
||||
this.memoryRequestBox.onTextChanged(() => {
|
||||
if (!(this.handleOnTextChanged(this.memoryRequestBox!))) {
|
||||
this.saveArgs.memoryRequest = undefined;
|
||||
} else {
|
||||
this.saveArgs.memoryRequest = this.memoryRequestBox!.value + 'Gi';
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
}
|
||||
|
||||
private createUserInputSection(): azdata.Component[] {
|
||||
if (this._miaaModel.configLastUpdated) {
|
||||
this.editCores();
|
||||
this.editMemory();
|
||||
}
|
||||
|
||||
return [
|
||||
this.createConfigurationSectionContainer(loc.coresRequest, this.coresRequestBox!),
|
||||
this.createConfigurationSectionContainer(loc.coresLimit, this.coresLimitBox!),
|
||||
this.createConfigurationSectionContainer(loc.memoryRequest, this.memoryRequestBox!),
|
||||
this.createConfigurationSectionContainer(loc.memoryLimit, this.memoryLimitBox!)
|
||||
|
||||
];
|
||||
}
|
||||
|
||||
private createConfigurationSectionContainer(key: string, input: azdata.Component): azdata.FlexContainer {
|
||||
const inputFlex = { flex: '0 1 150px' };
|
||||
const keyFlex = { flex: `0 1 250px` };
|
||||
|
||||
const flexContainer = this.modelView.modelBuilder.flexContainer().withLayout({
|
||||
flexWrap: 'wrap',
|
||||
alignItems: 'center'
|
||||
}).component();
|
||||
|
||||
const keyComponent = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
|
||||
value: key,
|
||||
CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
|
||||
}).component();
|
||||
|
||||
const keyContainer = this.modelView.modelBuilder.flexContainer().withLayout({ alignItems: 'center' }).component();
|
||||
keyContainer.addItem(keyComponent, { CSSStyles: { 'margin-right': '0px', 'margin-bottom': '15px' } });
|
||||
flexContainer.addItem(keyContainer, keyFlex);
|
||||
|
||||
const inputContainer = this.modelView.modelBuilder.flexContainer().withLayout({ alignItems: 'center' }).component();
|
||||
inputContainer.addItem(input, { CSSStyles: { 'margin-bottom': '15px', 'min-width': '50px', 'max-width': '225px' } });
|
||||
|
||||
flexContainer.addItem(inputContainer, inputFlex);
|
||||
|
||||
return flexContainer;
|
||||
}
|
||||
|
||||
private handleOnTextChanged(component: azdata.InputBoxComponent): boolean {
|
||||
if ((!component.value)) {
|
||||
// if there is no text in the input box component, return false
|
||||
return false;
|
||||
} else if ((!component.valid)) {
|
||||
// if the value given by the user is not valid, enable the discard button so the user
// can clear all inputs, and return false
|
||||
this.discardButton!.enabled = true;
|
||||
return false;
|
||||
} else {
|
||||
// if a valid value has been entered into the input box, enable the save and discard buttons
// so that the user can choose to either edit the instance or clear all inputs, and return true
|
||||
this.saveButton!.enabled = true;
|
||||
this.discardButton!.enabled = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private editCores(): void {
|
||||
let currentCPUSize = this._miaaModel.config?.spec?.requests?.vcores;
|
||||
|
||||
if (!currentCPUSize) {
|
||||
currentCPUSize = '';
|
||||
}
|
||||
|
||||
this.coresRequestBox!.placeHolder = currentCPUSize;
|
||||
this.coresRequestBox!.value = '';
|
||||
this.saveArgs.coresRequest = undefined;
|
||||
|
||||
currentCPUSize = this._miaaModel.config?.spec?.limits?.vcores;
|
||||
|
||||
if (!currentCPUSize) {
|
||||
currentCPUSize = '';
|
||||
}
|
||||
|
||||
this.coresLimitBox!.placeHolder = currentCPUSize;
|
||||
this.coresLimitBox!.value = '';
|
||||
this.saveArgs.coresLimit = undefined;
|
||||
}
|
||||
|
||||
private editMemory(): void {
|
||||
let currentMemSizeConversion: string;
|
||||
let currentMemorySize = this._miaaModel.config?.spec?.requests?.memory;
|
||||
|
||||
if (!currentMemorySize) {
|
||||
currentMemSizeConversion = '';
|
||||
} else {
|
||||
currentMemSizeConversion = convertToGibibyteString(currentMemorySize);
|
||||
}
|
||||
|
||||
this.memoryRequestBox!.placeHolder = currentMemSizeConversion!;
|
||||
this.memoryRequestBox!.value = '';
|
||||
|
||||
this.saveArgs.memoryRequest = undefined;
|
||||
|
||||
currentMemorySize = this._miaaModel.config?.spec?.limits?.memory;
|
||||
|
||||
if (!currentMemorySize) {
|
||||
currentMemSizeConversion = '';
|
||||
} else {
|
||||
currentMemSizeConversion = convertToGibibyteString(currentMemorySize);
|
||||
}
|
||||
|
||||
this.memoryLimitBox!.placeHolder = currentMemSizeConversion!;
|
||||
this.memoryLimitBox!.value = '';
|
||||
|
||||
this.saveArgs.memoryLimit = undefined;
|
||||
}
|
||||
|
||||
private handleServiceUpdated() {
|
||||
this.editCores();
|
||||
this.editMemory();
|
||||
}
|
||||
}
|
||||
@@ -91,8 +91,7 @@ export class MiaaConnectionStringsPage extends DashboardPage {
|
||||
$serverName = "${externalEndpoint.ip},${externalEndpoint.port}";
|
||||
$conn = sqlsrv_connect($serverName, $connectionInfo);`),
|
||||
new InputKeyValue(this.modelView.modelBuilder, 'Python', `dbname='master' user='${username}' host='${externalEndpoint.ip}' password='{your_password_here}' port='${externalEndpoint.port}' sslmode='true'`),
|
||||
new InputKeyValue(this.modelView.modelBuilder, 'Ruby', `host=${externalEndpoint.ip}; user=${username} password={your_password_here} port=${externalEndpoint.port} sslmode=require`),
|
||||
new InputKeyValue(this.modelView.modelBuilder, 'Web App', `Database=master; Data Source=${externalEndpoint.ip}; User Id=${username}; Password={your_password_here}`)
|
||||
new InputKeyValue(this.modelView.modelBuilder, 'Ruby', `host=${externalEndpoint.ip}; user=${username} password={your_password_here} port=${externalEndpoint.port} sslmode=require`)
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
@@ -10,11 +10,12 @@ import { ControllerModel } from '../../../models/controllerModel';
|
||||
import * as loc from '../../../localizedConstants';
|
||||
import { MiaaConnectionStringsPage } from './miaaConnectionStringsPage';
|
||||
import { MiaaModel } from '../../../models/miaaModel';
|
||||
import { MiaaComputeAndStoragePage } from './miaaComputeAndStoragePage';
|
||||
|
||||
export class MiaaDashboard extends Dashboard {
|
||||
|
||||
constructor(private _controllerModel: ControllerModel, private _miaaModel: MiaaModel) {
|
||||
super(loc.miaaDashboard(_miaaModel.info.name));
|
||||
super(loc.miaaDashboard(_miaaModel.info.name), 'ArcMiaaDashboard');
|
||||
}
|
||||
|
||||
public async showDashboard(): Promise<void> {
|
||||
@@ -27,12 +28,14 @@ export class MiaaDashboard extends Dashboard {
|
||||
protected async registerTabs(modelView: azdata.ModelView): Promise<(azdata.DashboardTab | azdata.DashboardTabGroup)[]> {
|
||||
const overviewPage = new MiaaDashboardOverviewPage(modelView, this._controllerModel, this._miaaModel);
|
||||
const connectionStringsPage = new MiaaConnectionStringsPage(modelView, this._controllerModel, this._miaaModel);
|
||||
const computeAndStoragePage = new MiaaComputeAndStoragePage(modelView, this._miaaModel);
|
||||
return [
|
||||
overviewPage.tab,
|
||||
{
|
||||
title: loc.settings,
|
||||
tabs: [
|
||||
connectionStringsPage.tab
|
||||
connectionStringsPage.tab,
|
||||
computeAndStoragePage.tab
|
||||
]
|
||||
},
|
||||
];
|
||||
|
||||
@@ -7,8 +7,8 @@ import * as azdata from 'azdata';
|
||||
import * as azdataExt from 'azdata-ext';
|
||||
import * as azurecore from 'azurecore';
|
||||
import * as vscode from 'vscode';
|
||||
import { getDatabaseStateDisplayText, promptForResourceDeletion } from '../../../common/utils';
|
||||
import { cssStyles, Endpoints, IconPathHelper } from '../../../constants';
|
||||
import { getDatabaseStateDisplayText, promptForInstanceDeletion } from '../../../common/utils';
|
||||
import { cssStyles, IconPathHelper, miaaTroubleshootDocsUrl } from '../../../constants';
|
||||
import * as loc from '../../../localizedConstants';
|
||||
import { ControllerModel } from '../../../models/controllerModel';
|
||||
import { MiaaModel } from '../../../models/miaaModel';
|
||||
@@ -198,13 +198,22 @@ export class MiaaDashboardOverviewPage extends DashboardPage {
|
||||
deleteButton.onDidClick(async () => {
|
||||
deleteButton.enabled = false;
|
||||
try {
|
||||
if (await promptForResourceDeletion(this._miaaModel.info.name)) {
|
||||
await this._azdataApi.azdata.arc.sql.mi.delete(this._miaaModel.info.name);
|
||||
if (await promptForInstanceDeletion(this._miaaModel.info.name)) {
|
||||
await vscode.window.withProgress(
|
||||
{
|
||||
location: vscode.ProgressLocation.Notification,
|
||||
title: loc.deletingInstance(this._miaaModel.info.name),
|
||||
cancellable: false
|
||||
},
|
||||
(_progress, _token) => {
|
||||
return this._azdataApi.azdata.arc.sql.mi.delete(this._miaaModel.info.name);
|
||||
}
|
||||
);
|
||||
await this._controllerModel.refreshTreeNode();
|
||||
vscode.window.showInformationMessage(loc.resourceDeleted(this._miaaModel.info.name));
|
||||
vscode.window.showInformationMessage(loc.instanceDeleted(this._miaaModel.info.name));
|
||||
}
|
||||
} catch (error) {
|
||||
vscode.window.showErrorMessage(loc.resourceDeletionFailed(this._miaaModel.info.name, error));
|
||||
vscode.window.showErrorMessage(loc.instanceDeletionFailed(this._miaaModel.info.name, error));
|
||||
} finally {
|
||||
deleteButton.enabled = true;
|
||||
}
|
||||
@@ -248,6 +257,17 @@ export class MiaaDashboardOverviewPage extends DashboardPage {
|
||||
}
|
||||
}));
|
||||
|
||||
const troubleshootButton = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
|
||||
label: loc.troubleshoot,
|
||||
iconPath: IconPathHelper.wrench
|
||||
}).component();
|
||||
|
||||
this.disposables.push(
|
||||
troubleshootButton.onDidClick(async () => {
|
||||
await vscode.env.openExternal(vscode.Uri.parse(miaaTroubleshootDocsUrl));
|
||||
})
|
||||
);
|
||||
|
||||
return this.modelView.modelBuilder.toolbarContainer().withToolbarItems(
|
||||
[
|
||||
{ component: deleteButton },
|
||||
@@ -332,19 +352,13 @@ export class MiaaDashboardOverviewPage extends DashboardPage {
|
||||
}
|
||||
|
||||
private refreshDashboardLinks(): void {
|
||||
const kibanaEndpoint = this._controllerModel.getEndpoint(Endpoints.logsui);
|
||||
if (kibanaEndpoint && this._miaaModel.config) {
|
||||
const kibanaQuery = `kubernetes_namespace:"${this._miaaModel.config.metadata.namespace}" and custom_resource_name :"${this._miaaModel.config.metadata.name}"`;
|
||||
const kibanaUrl = `${kibanaEndpoint.endpoint}/app/kibana#/discover?_a=(query:(language:kuery,query:'${kibanaQuery}'))`;
|
||||
if (this._miaaModel.config) {
|
||||
const kibanaUrl = this._miaaModel.config.status.logSearchDashboard ?? '';
|
||||
this._kibanaLink.label = kibanaUrl;
|
||||
this._kibanaLink.url = kibanaUrl;
|
||||
this._kibanaLoading!.loading = false;
|
||||
}
|
||||
|
||||
const grafanaEndpoint = this._controllerModel.getEndpoint(Endpoints.metricsui);
|
||||
if (grafanaEndpoint && this._miaaModel.config) {
|
||||
const grafanaQuery = `var-hostname=${this._miaaModel.info.name}-0`;
|
||||
const grafanaUrl = grafanaEndpoint ? `${grafanaEndpoint.endpoint}/d/40q72HnGk/sql-managed-instance-metrics?${grafanaQuery}` : '';
|
||||
const grafanaUrl = this._miaaModel.config.status.metricsDashboard ?? '';
|
||||
this._grafanaLink.label = grafanaUrl;
|
||||
this._grafanaLink.url = grafanaUrl;
|
||||
this._grafanaLoading!.loading = false;
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as azdata from 'azdata';
|
||||
import * as loc from '../../../localizedConstants';
|
||||
import { IconPathHelper } from '../../../constants';
|
||||
import { DashboardPage } from '../../components/dashboardPage';
|
||||
|
||||
export class PostgresBackupPage extends DashboardPage {
|
||||
protected get title(): string {
|
||||
return loc.backup;
|
||||
}
|
||||
|
||||
protected get id(): string {
|
||||
return 'postgres-backup';
|
||||
}
|
||||
|
||||
protected get icon(): { dark: string; light: string; } {
|
||||
return IconPathHelper.backup;
|
||||
}
|
||||
|
||||
protected get container(): azdata.Component {
|
||||
return this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({ value: loc.backup }).component();
|
||||
}
|
||||
|
||||
protected get toolbarContainer(): azdata.ToolbarContainer {
|
||||
return this.modelView.modelBuilder.toolbarContainer().component();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,500 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as vscode from 'vscode';
import * as azdata from 'azdata';
import * as azdataExt from 'azdata-ext';
import * as loc from '../../../localizedConstants';
import { IconPathHelper, cssStyles } from '../../../constants';
import { DashboardPage } from '../../components/dashboardPage';
import { PostgresModel } from '../../../models/postgresModel';
import { convertToGibibyteString } from '../../../common/utils';

export class PostgresComputeAndStoragePage extends DashboardPage {
	private workerContainer?: azdata.DivContainer;

	private workerBox?: azdata.InputBoxComponent;
	private coresLimitBox?: azdata.InputBoxComponent;
	private coresRequestBox?: azdata.InputBoxComponent;
	private memoryLimitBox?: azdata.InputBoxComponent;
	private memoryRequestBox?: azdata.InputBoxComponent;

	private discardButton?: azdata.ButtonComponent;
	private saveButton?: azdata.ButtonComponent;

	private saveArgs: {
		workers?: number,
		coresLimit?: string,
		coresRequest?: string,
		memoryLimit?: string,
		memoryRequest?: string
	} = {};

	private readonly _azdataApi: azdataExt.IExtension;

	constructor(protected modelView: azdata.ModelView, private _postgresModel: PostgresModel) {
		super(modelView);
		this._azdataApi = vscode.extensions.getExtension(azdataExt.extension.name)?.exports;

		this.initializeConfigurationBoxes();

		this.disposables.push(this._postgresModel.onConfigUpdated(
			() => this.eventuallyRunOnInitialized(() => this.handleServiceUpdated())));
	}

	protected get title(): string {
		return loc.computeAndStorage;
	}

	protected get id(): string {
		return 'postgres-compute-and-storage';
	}

	protected get icon(): { dark: string; light: string; } {
		return IconPathHelper.computeStorage;
	}

	protected get container(): azdata.Component {
		const root = this.modelView.modelBuilder.divContainer().component();
		const content = this.modelView.modelBuilder.divContainer().component();
		root.addItem(content, { CSSStyles: { 'margin': '20px' } });

		content.addItem(this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.computeAndStorage,
			CSSStyles: { ...cssStyles.title }
		}).component());

		const infoComputeStorage_p1 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.postgresComputeAndStorageDescriptionPartOne,
			CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px', 'max-width': 'auto' }
		}).component();
		const infoComputeStorage_p2 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.postgresComputeAndStorageDescriptionPartTwo,
			CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const workerNodeslink = this.modelView.modelBuilder.hyperlink().withProperties<azdata.HyperlinkComponentProperties>({
			label: loc.addingWokerNodes,
			url: 'https://docs.microsoft.com/azure/azure-arc/data/scale-up-down-postgresql-hyperscale-server-group-using-cli',
			CSSStyles: { 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const infoComputeStorage_p3 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.computeAndStorageDescriptionPartThree,
			CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const memoryVCoreslink = this.modelView.modelBuilder.hyperlink().withProperties<azdata.HyperlinkComponentProperties>({
			label: loc.scalingCompute,
			url: 'https://docs.microsoft.com/azure/azure-arc/data/scale-up-down-postgresql-hyperscale-server-group-using-cli',
			CSSStyles: { 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const infoComputeStorage_p4 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.computeAndStorageDescriptionPartFour,
			CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const infoComputeStorage_p5 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.computeAndStorageDescriptionPartFive,
			CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const infoComputeStorage_p6 = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.computeAndStorageDescriptionPartSix,
			CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const computeInfoAndLinks = this.modelView.modelBuilder.flexContainer()
			.withLayout({ flexWrap: 'wrap' })
			.withItems([
				infoComputeStorage_p1,
				infoComputeStorage_p2,
				workerNodeslink,
				infoComputeStorage_p3,
				memoryVCoreslink,
				infoComputeStorage_p4,
				infoComputeStorage_p5,
				infoComputeStorage_p6
			], { CSSStyles: { 'margin-right': '5px' } })
			.component();
		content.addItem(computeInfoAndLinks, { CSSStyles: { 'min-height': '30px' } });

		content.addItem(this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.workerNodes,
			CSSStyles: { ...cssStyles.title, 'margin-top': '25px' }
		}).component());

		this.workerContainer = this.modelView.modelBuilder.divContainer().component();
		this.workerContainer.addItems(this.createUserInputSection(), { CSSStyles: { 'min-height': '30px' } });
		content.addItem(this.workerContainer, { CSSStyles: { 'min-height': '30px' } });

		this.initialized = true;

		return root;
	}

	protected get toolbarContainer(): azdata.ToolbarContainer {
		// Save Edits
		this.saveButton = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
			label: loc.saveText,
			iconPath: IconPathHelper.save,
			enabled: false
		}).component();

		this.disposables.push(
			this.saveButton.onDidClick(async () => {
				this.saveButton!.enabled = false;
				try {
					await vscode.window.withProgress(
						{
							location: vscode.ProgressLocation.Notification,
							title: loc.updatingInstance(this._postgresModel.info.name),
							cancellable: false
						},
						async (_progress, _token): Promise<void> => {
							try {
								await this._azdataApi.azdata.arc.postgres.server.edit(
									this._postgresModel.info.name, this.saveArgs);
							} catch (err) {
								// If an error occurs while editing the instance then re-enable the save button since
								// the edit wasn't successfully applied
								this.saveButton!.enabled = true;
								throw err;
							}
							await this._postgresModel.refresh();
						}
					);

					vscode.window.showInformationMessage(loc.instanceUpdated(this._postgresModel.info.name));

					this.discardButton!.enabled = false;

				} catch (error) {
					vscode.window.showErrorMessage(loc.instanceUpdateFailed(this._postgresModel.info.name, error));
				}
			}));

		// Discard
		this.discardButton = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
			label: loc.discardText,
			iconPath: IconPathHelper.discard,
			enabled: false
		}).component();

		this.disposables.push(
			this.discardButton.onDidClick(async () => {
				this.discardButton!.enabled = false;
				try {
					this.editWorkerNodeCount();
					this.editCores();
					this.editMemory();
				} catch (error) {
					vscode.window.showErrorMessage(loc.pageDiscardFailed(error));
				} finally {
					this.saveButton!.enabled = false;
				}
			}));

		return this.modelView.modelBuilder.toolbarContainer().withToolbarItems([
			{ component: this.saveButton },
			{ component: this.discardButton }
		]).component();
	}

	private initializeConfigurationBoxes() {
		this.workerBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
			readOnly: false,
			validationErrorMessage: loc.workerValidationErrorMessage,
			inputType: 'number',
			placeHolder: loc.loading
		}).component();

		this.disposables.push(
			this.workerBox.onTextChanged(() => {
				if (!(this.handleOnTextChanged(this.workerBox!))) {
					this.saveArgs.workers = undefined;
				} else {
					this.saveArgs.workers = parseInt(this.workerBox!.value!);
				}
			})
		);

		this.coresLimitBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
			readOnly: false,
			min: 1,
			validationErrorMessage: loc.coresValidationErrorMessage,
			inputType: 'number',
			placeHolder: loc.loading
		}).component();

		this.disposables.push(
			this.coresLimitBox.onTextChanged(() => {
				if (!(this.handleOnTextChanged(this.coresLimitBox!))) {
					this.saveArgs.coresLimit = undefined;
				} else {
					this.saveArgs.coresLimit = this.coresLimitBox!.value;
				}
			})
		);

		this.coresRequestBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
			readOnly: false,
			min: 1,
			validationErrorMessage: loc.coresValidationErrorMessage,
			inputType: 'number',
			placeHolder: loc.loading
		}).component();

		this.disposables.push(
			this.coresRequestBox.onTextChanged(() => {
				if (!(this.handleOnTextChanged(this.coresRequestBox!))) {
					this.saveArgs.coresRequest = undefined;
				} else {
					this.saveArgs.coresRequest = this.coresRequestBox!.value;
				}
			})
		);

		this.memoryLimitBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
			readOnly: false,
			min: 0.25,
			validationErrorMessage: loc.memoryLimitValidationErrorMessage,
			inputType: 'number',
			placeHolder: loc.loading
		}).component();

		this.disposables.push(
			this.memoryLimitBox.onTextChanged(() => {
				if (!(this.handleOnTextChanged(this.memoryLimitBox!))) {
					this.saveArgs.memoryLimit = undefined;
				} else {
					this.saveArgs.memoryLimit = this.memoryLimitBox!.value + 'Gi';
				}
			})
		);

		this.memoryRequestBox = this.modelView.modelBuilder.inputBox().withProperties<azdata.InputBoxProperties>({
			readOnly: false,
			min: 0.25,
			validationErrorMessage: loc.memoryRequestValidationErrorMessage,
			inputType: 'number',
			placeHolder: loc.loading
		}).component();

		this.disposables.push(
			this.memoryRequestBox.onTextChanged(() => {
				if (!(this.handleOnTextChanged(this.memoryRequestBox!))) {
					this.saveArgs.memoryRequest = undefined;
				} else {
					this.saveArgs.memoryRequest = this.memoryRequestBox!.value + 'Gi';
				}
			})
		);

	}

	private createUserInputSection(): azdata.Component[] {
		if (this._postgresModel.configLastUpdated) {
			this.editWorkerNodeCount();
			this.editCores();
			this.editMemory();
		}

		return [
			this.createWorkerNodesSectionContainer(),
			this.createCoresMemorySection(),
			this.createConfigurationSectionContainer(loc.coresRequest, this.coresRequestBox!),
			this.createConfigurationSectionContainer(loc.coresLimit, this.coresLimitBox!),
			this.createConfigurationSectionContainer(loc.memoryRequest, this.memoryRequestBox!),
			this.createConfigurationSectionContainer(loc.memoryLimit, this.memoryLimitBox!)

		];
	}

	private createWorkerNodesSectionContainer(): azdata.FlexContainer {
		const inputFlex = { flex: '0 1 150px' };
		const keyFlex = { flex: `0 1 250px` };

		const flexContainer = this.modelView.modelBuilder.flexContainer().withLayout({
			flexWrap: 'wrap',
			alignItems: 'center'
		}).component();

		const keyComponent = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.workerNodeCount,
			CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const keyContainer = this.modelView.modelBuilder.flexContainer().withLayout({ alignItems: 'center' }).component();
		keyContainer.addItem(keyComponent, { CSSStyles: { 'margin-right': '0px', 'margin-bottom': '15px' } });

		const information = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
			iconPath: IconPathHelper.information,
			title: loc.workerNodesInformation,
			width: '12px',
			height: '12px',
			enabled: false
		}).component();

		keyContainer.addItem(information, { CSSStyles: { 'margin-left': '5px', 'margin-bottom': '15px' } });
		flexContainer.addItem(keyContainer, keyFlex);

		const inputContainer = this.modelView.modelBuilder.flexContainer().withLayout({ alignItems: 'center' }).component();
		inputContainer.addItem(this.workerBox!, { CSSStyles: { 'margin-bottom': '15px', 'min-width': '50px', 'max-width': '225px' } });

		flexContainer.addItem(inputContainer, inputFlex);

		return flexContainer;
	}

	private createConfigurationSectionContainer(key: string, input: azdata.Component): azdata.FlexContainer {
		const inputFlex = { flex: '0 1 150px' };
		const keyFlex = { flex: `0 1 250px` };

		const flexContainer = this.modelView.modelBuilder.flexContainer().withLayout({
			flexWrap: 'wrap',
			alignItems: 'center'
		}).component();

		const keyComponent = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: key,
			CSSStyles: { ...cssStyles.text, 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const keyContainer = this.modelView.modelBuilder.flexContainer().withLayout({ alignItems: 'center' }).component();
		keyContainer.addItem(keyComponent, { CSSStyles: { 'margin-right': '0px', 'margin-bottom': '15px' } });
		flexContainer.addItem(keyContainer, keyFlex);

		const inputContainer = this.modelView.modelBuilder.flexContainer().withLayout({ alignItems: 'center' }).component();
		inputContainer.addItem(input, { CSSStyles: { 'margin-bottom': '15px', 'min-width': '50px', 'max-width': '225px' } });

		flexContainer.addItem(inputContainer, inputFlex);

		return flexContainer;
	}

	private handleOnTextChanged(component: azdata.InputBoxComponent): boolean {
		if ((!component.value)) {
			// if there is no text found in the inputbox component return false
			return false;
		} else if ((!component.valid)) {
			// if value given by user is not valid enable discard button for user
			// to clear all inputs and return false
			this.discardButton!.enabled = true;
			return false;
		} else {
			// if a valid value has been entered into the input box, enable save and discard buttons
			// so that user could choose to either edit instance or clear all inputs
			// return true
			this.saveButton!.enabled = true;
			this.discardButton!.enabled = true;
			return true;
		}

	}

	private editWorkerNodeCount() {
		// scale.shards was renamed to scale.workers. Check both for backwards compatibility.
		let scale = this._postgresModel.config?.spec.scale;
		let currentWorkers = scale?.workers ?? scale?.shards ?? 0;

		this.workerBox!.min = currentWorkers;
		this.workerBox!.placeHolder = currentWorkers.toString();
		this.workerBox!.value = '';

		this.saveArgs.workers = undefined;
	}

	private createCoresMemorySection(): azdata.DivContainer {
		const titleFlex = { flex: `0 1 250px` };

		const flexContainer = this.modelView.modelBuilder.flexContainer().withLayout({
			flexWrap: 'wrap',
			alignItems: 'center'
		}).component();

		const titleComponent = this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({
			value: loc.configurationPerNode,
			CSSStyles: { ...cssStyles.title, 'font-weight': 'bold', 'margin-block-start': '0px', 'margin-block-end': '0px' }
		}).component();

		const titleContainer = this.modelView.modelBuilder.flexContainer().withLayout({ alignItems: 'center' }).component();
		titleContainer.addItem(titleComponent, { CSSStyles: { 'margin-right': '0px', 'margin-bottom': '15px' } });

		const information = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
			iconPath: IconPathHelper.information,
			title: loc.postgresConfigurationInformation,
			width: '12px',
			height: '12px',
			enabled: false
		}).component();

		titleContainer.addItem(information, { CSSStyles: { 'margin-left': '5px', 'margin-bottom': '15px' } });
		flexContainer.addItem(titleContainer, titleFlex);

		let configurationSection = this.modelView.modelBuilder.divContainer().component();
		configurationSection.addItem(flexContainer);

		return configurationSection;
	}

	private editCores() {
		let currentCPUSize = this._postgresModel.config?.spec.scheduling?.default?.resources?.requests?.cpu;

		if (!currentCPUSize) {
			currentCPUSize = '';
		}

		this.coresRequestBox!.placeHolder = currentCPUSize;
		this.coresRequestBox!.value = '';
		this.saveArgs.coresRequest = undefined;

		currentCPUSize = this._postgresModel.config?.spec.scheduling?.default?.resources?.limits?.cpu;

		if (!currentCPUSize) {
			currentCPUSize = '';
		}

		this.coresLimitBox!.placeHolder = currentCPUSize;
		this.coresLimitBox!.value = '';
		this.saveArgs.coresLimit = undefined;
	}

	private editMemory() {
		let currentMemSizeConversion: string;
		let currentMemorySize = this._postgresModel.config?.spec.scheduling?.default?.resources?.requests?.memory;

		if (!currentMemorySize) {
			currentMemSizeConversion = '';
		} else {
			currentMemSizeConversion = convertToGibibyteString(currentMemorySize);
		}

		this.memoryRequestBox!.placeHolder = currentMemSizeConversion!;
		this.memoryRequestBox!.value = '';

		this.saveArgs.memoryRequest = undefined;

		currentMemorySize = this._postgresModel.config?.spec.scheduling?.default?.resources?.limits?.memory;

		if (!currentMemorySize) {
			currentMemSizeConversion = '';
		} else {
			currentMemSizeConversion = convertToGibibyteString(currentMemorySize);
		}

		this.memoryLimitBox!.placeHolder = currentMemSizeConversion!;
		this.memoryLimitBox!.value = '';

		this.saveArgs.memoryLimit = undefined;
	}

	private handleServiceUpdated() {
		this.editWorkerNodeCount();
		this.editCores();
		this.editMemory();
	}
}
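A note on the helper used by editMemory() above: convertToGibibyteString is imported from '../../../common/utils', but its implementation is not part of this diff. The sketch below is only a hypothetical stand-in (not the extension's actual code) illustrating the kind of conversion the page relies on — turning a Kubernetes memory quantity such as '512Mi' or '2Gi' into a gibibyte string that can be shown as the input box placeholder.

// Hypothetical sketch only; the real convertToGibibyteString lives in common/utils and may differ.
// Assumes a plain number is a byte count and supports the common binary suffixes.
function convertToGibibyteString(quantity: string): string {
	const units: { [suffix: string]: number } = { 'Ki': 1024, 'Mi': 1024 ** 2, 'Gi': 1024 ** 3, 'Ti': 1024 ** 4 };
	const match = quantity.match(/^([0-9]+(?:\.[0-9]+)?)(Ki|Mi|Gi|Ti)?$/);
	if (!match) {
		return '';
	}
	const bytes = parseFloat(match[1]) * (match[2] ? units[match[2]] : 1);
	// Report in GiB, trimming trailing zeros so '2Gi' round-trips to '2' and '512Mi' to '0.5'.
	return parseFloat((bytes / units['Gi']).toFixed(2)).toString();
}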
@@ -1,31 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as azdata from 'azdata';
import * as loc from '../../../localizedConstants';
import { IconPathHelper } from '../../../constants';
import { DashboardPage } from '../../components/dashboardPage';

export class PostgresComputeStoragePage extends DashboardPage {
	protected get title(): string {
		return loc.computeAndStorage;
	}

	protected get id(): string {
		return 'postgres-compute-storage';
	}

	protected get icon(): { dark: string; light: string; } {
		return IconPathHelper.computeStorage;
	}

	protected get container(): azdata.Component {
		return this.modelView.modelBuilder.text().withProperties<azdata.TextComponentProperties>({ value: loc.computeAndStorage }).component();
	}

	protected get toolbarContainer(): azdata.ToolbarContainer {
		return this.modelView.modelBuilder.toolbarContainer().component();
	}
}
@@ -3,7 +3,6 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as vscode from 'vscode';
import * as azdata from 'azdata';
import * as loc from '../../../localizedConstants';
import { IconPathHelper, cssStyles } from '../../../constants';
@@ -12,13 +11,12 @@ import { DashboardPage } from '../../components/dashboardPage';
import { PostgresModel } from '../../../models/postgresModel';

export class PostgresConnectionStringsPage extends DashboardPage {
	private loading?: azdata.LoadingComponent;
	private keyValueContainer?: KeyValueContainer;

	constructor(protected modelView: azdata.ModelView, private _postgresModel: PostgresModel) {
		super(modelView);

		this.disposables.push(this._postgresModel.onServiceUpdated(
		this.disposables.push(this._postgresModel.onConfigUpdated(
			() => this.eventuallyRunOnInitialized(() => this.handleServiceUpdated())));
	}

@@ -51,7 +49,7 @@ export class PostgresConnectionStringsPage extends DashboardPage {

		const link = this.modelView.modelBuilder.hyperlink().withProperties<azdata.HyperlinkComponentProperties>({
			label: loc.learnAboutPostgresClients,
			url: 'https://docs.microsoft.com/azure/postgresql/concepts-connection-libraries',
			url: 'https://docs.microsoft.com/azure/azure-arc/data/get-connection-endpoints-and-connection-strings-postgres-hyperscale',
		}).component();

		const infoAndLink = this.modelView.modelBuilder.flexContainer().withLayout({ flexWrap: 'wrap' }).component();
@@ -61,44 +59,20 @@ export class PostgresConnectionStringsPage extends DashboardPage {

		this.keyValueContainer = new KeyValueContainer(this.modelView.modelBuilder, this.getConnectionStrings());
		this.disposables.push(this.keyValueContainer);

		this.loading = this.modelView.modelBuilder.loadingComponent()
			.withItem(this.keyValueContainer.container)
			.withProperties<azdata.LoadingComponentProperties>({
				loading: !this._postgresModel.serviceLastUpdated
			}).component();

		content.addItem(this.loading);
		content.addItem(this.keyValueContainer.container);
		this.initialized = true;
		return root;
	}

	protected get toolbarContainer(): azdata.ToolbarContainer {
		const refreshButton = this.modelView.modelBuilder.button().withProperties<azdata.ButtonProperties>({
			label: loc.refresh,
			iconPath: IconPathHelper.refresh
		}).component();

		this.disposables.push(
			refreshButton.onDidClick(async () => {
				refreshButton.enabled = false;
				try {
					this.loading!.loading = true;
					await this._postgresModel.refresh();
				} catch (error) {
					vscode.window.showErrorMessage(loc.refreshFailed(error));
				} finally {
					refreshButton.enabled = true;
				}
			}));

		return this.modelView.modelBuilder.toolbarContainer().withToolbarItems([
			{ component: refreshButton }
		]).component();
		return this.modelView.modelBuilder.toolbarContainer().component();
	}

	private getConnectionStrings(): KeyValue[] {
		const endpoint: { ip?: string, port?: number } = this._postgresModel.endpoint;
		const endpoint = this._postgresModel.endpoint;
		if (!endpoint) {
			return [];
		}

		return [
			new InputKeyValue(this.modelView.modelBuilder, 'ADO.NET', `Server=${endpoint.ip};Database=postgres;Port=${endpoint.port};User Id=postgres;Password={your_password_here};Ssl Mode=Require;`),
@@ -108,13 +82,11 @@ export class PostgresConnectionStringsPage extends DashboardPage {
			new InputKeyValue(this.modelView.modelBuilder, 'PHP', `host=${endpoint.ip} port=${endpoint.port} dbname=postgres user=postgres password={your_password_here} sslmode=require`),
			new InputKeyValue(this.modelView.modelBuilder, 'psql', `psql "host=${endpoint.ip} port=${endpoint.port} dbname=postgres user=postgres password={your_password_here} sslmode=require"`),
			new InputKeyValue(this.modelView.modelBuilder, 'Python', `dbname='postgres' user='postgres' host='${endpoint.ip}' password='{your_password_here}' port='${endpoint.port}' sslmode='true'`),
			new InputKeyValue(this.modelView.modelBuilder, 'Ruby', `host=${endpoint.ip}; dbname=postgres user=postgres password={your_password_here} port=${endpoint.port} sslmode=require`),
			new InputKeyValue(this.modelView.modelBuilder, 'Web App', `Database=postgres; Data Source=${endpoint.ip}; User Id=postgres; Password={your_password_here}`)
			new InputKeyValue(this.modelView.modelBuilder, 'Ruby', `host=${endpoint.ip}; dbname=postgres user=postgres password={your_password_here} port=${endpoint.port} sslmode=require`)
		];
	}

	private handleServiceUpdated() {
		this.keyValueContainer?.refresh(this.getConnectionStrings());
		this.loading!.loading = false;
	}
}

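For reference, the templates in getConnectionStrings() are filled from this._postgresModel.endpoint. With a hypothetical endpoint of ip '20.10.5.40' and port 5432 (example values only, not taken from this diff), the psql entry resolves as shown below.

// Example values only — at runtime ip and port come from this._postgresModel.endpoint.
const endpoint = { ip: '20.10.5.40', port: 5432 };
const psql = `psql "host=${endpoint.ip} port=${endpoint.port} dbname=postgres user=postgres password={your_password_here} sslmode=require"`;
// => psql "host=20.10.5.40 port=5432 dbname=postgres user=postgres password={your_password_here} sslmode=require"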
@@ -10,15 +10,14 @@ import { ControllerModel } from '../../../models/controllerModel';
import { PostgresModel } from '../../../models/postgresModel';
import { PostgresOverviewPage } from './postgresOverviewPage';
import { PostgresConnectionStringsPage } from './postgresConnectionStringsPage';
import { PostgresPropertiesPage } from './postgresPropertiesPage';
import { Dashboard } from '../../components/dashboard';
import { PostgresDiagnoseAndSolveProblemsPage } from './postgresDiagnoseAndSolveProblemsPage';
import { PostgresSupportRequestPage } from './postgresSupportRequestPage';
import { PostgresResourceHealthPage } from './postgresResourceHealthPage';
import { PostgresComputeAndStoragePage } from './postgresComputeAndStoragePage';

export class PostgresDashboard extends Dashboard {
	constructor(private _context: vscode.ExtensionContext, private _controllerModel: ControllerModel, private _postgresModel: PostgresModel) {
		super(loc.postgresDashboard(_postgresModel.name));
		super(loc.postgresDashboard(_postgresModel.info.name), 'ArcPgDashboard');
	}

	public async showDashboard(): Promise<void> {
@@ -32,10 +31,11 @@ export class PostgresDashboard extends Dashboard {
	protected async registerTabs(modelView: azdata.ModelView): Promise<(azdata.DashboardTab | azdata.DashboardTabGroup)[]> {
		const overviewPage = new PostgresOverviewPage(modelView, this._controllerModel, this._postgresModel);
		const connectionStringsPage = new PostgresConnectionStringsPage(modelView, this._postgresModel);
		const propertiesPage = new PostgresPropertiesPage(modelView, this._controllerModel, this._postgresModel);
		const resourceHealthPage = new PostgresResourceHealthPage(modelView, this._postgresModel);
		const computeAndStoragePage = new PostgresComputeAndStoragePage(modelView, this._postgresModel);
		// TODO: Removed properties page while investigating bug where refreshed values don't appear in UI
		// const propertiesPage = new PostgresPropertiesPage(modelView, this._controllerModel, this._postgresModel);
		const diagnoseAndSolveProblemsPage = new PostgresDiagnoseAndSolveProblemsPage(modelView, this._context, this._postgresModel);
		const supportRequestPage = new PostgresSupportRequestPage(modelView);
		const supportRequestPage = new PostgresSupportRequestPage(modelView, this._controllerModel, this._postgresModel);

		return [
			overviewPage.tab,
@@ -43,13 +43,12 @@ export class PostgresDashboard extends Dashboard {
				title: loc.settings,
				tabs: [
					connectionStringsPage.tab,
					propertiesPage.tab
					computeAndStoragePage.tab
				]
			},
			{
				title: loc.supportAndTroubleshooting,
				tabs: [
					resourceHealthPage.tab,
					diagnoseAndSolveProblemsPage.tab,
					supportRequestPage.tab
				]

@@ -50,8 +50,9 @@ export class PostgresDiagnoseAndSolveProblemsPage extends DashboardPage {

		this.disposables.push(
			troubleshootButton.onDidClick(() => {
				process.env['POSTGRES_SERVER_NAMESPACE'] = this._postgresModel.namespace;
				process.env['POSTGRES_SERVER_NAME'] = this._postgresModel.name;
				process.env['POSTGRES_SERVER_NAMESPACE'] = this._postgresModel.config?.metadata.namespace;
				process.env['POSTGRES_SERVER_NAME'] = this._postgresModel.info.name;
				process.env['POSTGRES_SERVER_VERSION'] = this._postgresModel.engineVersion;
				vscode.commands.executeCommand('bookTreeView.openBook', this._context.asAbsolutePath('notebooks/arcDataServices'), true, 'postgres/tsg100-troubleshoot-postgres');
			}));

Some files were not shown because too many files have changed in this diff