mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-24 18:47:10 -05:00
Compare commits
749 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4d4917d328 | ||
|
|
edea311757 | ||
|
|
e7eacc32c0 | ||
|
|
12f50cca8d | ||
|
|
88a4dba695 | ||
|
|
634ea0ab6a | ||
|
|
cd0b5cbc7a | ||
|
|
0b7de6608a | ||
|
|
8c6bd8c857 | ||
|
|
def5775e00 | ||
|
|
91da9aea98 | ||
|
|
1c898402f8 | ||
|
|
54210cf479 | ||
|
|
cbcea87a82 | ||
|
|
2d50d2c5d1 | ||
|
|
7448c6c32c | ||
|
|
3196cf5be0 | ||
|
|
666726a5fa | ||
|
|
818a3d204e | ||
|
|
d45758b4f4 | ||
|
|
1eda5eb33a | ||
|
|
6ac5b7c8a5 | ||
|
|
397354ebc3 | ||
|
|
2a7b90fd70 | ||
|
|
1554e51932 | ||
|
|
d060f1b9a0 | ||
|
|
c8632c255a | ||
|
|
2ac03b9ef4 | ||
|
|
a0d89449cc | ||
|
|
b03a914934 | ||
|
|
d3bcb942f5 | ||
|
|
84822b23ac | ||
|
|
40ca82c63d | ||
|
|
f4a6b42b3a | ||
|
|
7ad631d4a5 | ||
|
|
4b7baa652f | ||
|
|
148e802f4a | ||
|
|
7bb4d00073 | ||
|
|
3b20e8a61c | ||
|
|
6e0a4f27de | ||
|
|
21ddf30a7b | ||
|
|
2ade45858e | ||
|
|
e0b1a3460d | ||
|
|
f44c714cf2 | ||
|
|
f72e12fe32 | ||
|
|
0b6fb504dc | ||
|
|
145b2491df | ||
|
|
aa30b52d03 | ||
|
|
6edcbbb738 | ||
|
|
815c61315c | ||
|
|
172a044ba7 | ||
|
|
2a81a0a70f | ||
|
|
749989cd0b | ||
|
|
8d42182db8 | ||
|
|
bb2a1db6e8 | ||
|
|
c579ecb111 | ||
|
|
175d46d508 | ||
|
|
870ff39527 | ||
|
|
ddd0b8b4bc | ||
|
|
c7cca5afea | ||
|
|
e63e4f0901 | ||
|
|
ddc8c00090 | ||
|
|
34170e7741 | ||
|
|
f5e4b32d01 | ||
|
|
28fef53731 | ||
|
|
438bc67072 | ||
|
|
472c9decfa | ||
|
|
6cd2d6c942 | ||
|
|
39e1181c5d | ||
|
|
c898b50b94 | ||
|
|
271fe62344 | ||
|
|
c18a54bc1d | ||
|
|
b57bf53b67 | ||
|
|
c699179e15 | ||
|
|
690937443c | ||
|
|
698b79f0f3 | ||
|
|
798af5fc2d | ||
|
|
af55dcfb42 | ||
|
|
76781d6cf4 | ||
|
|
99e3da5b48 | ||
|
|
6b657259a5 | ||
|
|
cbe2ba0901 | ||
|
|
b3d99117ca | ||
|
|
32ac586431 | ||
|
|
bd4676ac8c | ||
|
|
536628603e | ||
|
|
ea7fe08b98 | ||
|
|
6c920f6d54 | ||
|
|
a2f7136728 | ||
|
|
a082c1e478 | ||
|
|
32a6385fef | ||
|
|
468119caa4 | ||
|
|
abdf575885 | ||
|
|
07df54ee61 | ||
|
|
850422164c | ||
|
|
865d49c2fb | ||
|
|
b17ce7a531 | ||
|
|
d64062dfe0 | ||
|
|
1b0f3af094 | ||
|
|
fa608f9f80 | ||
|
|
b32e5f8f25 | ||
|
|
c37f178e71 | ||
|
|
3be00b1635 | ||
|
|
b397150264 | ||
|
|
8c6a966bb9 | ||
|
|
b83da2dfa8 | ||
|
|
b4dd0442c5 | ||
|
|
d4e8053797 | ||
|
|
8bbcfff119 | ||
|
|
d7a6b55f82 | ||
|
|
3dd74971d8 | ||
|
|
e076062d4f | ||
|
|
fbe8e9d9f3 | ||
|
|
1c9322c0e8 | ||
|
|
0d49744061 | ||
|
|
7cd4964f35 | ||
|
|
689c7ab27e | ||
|
|
8cf5e4c9fd | ||
|
|
9d766198b5 | ||
|
|
295aa99e05 | ||
|
|
3f76d343a5 | ||
|
|
fa9a38d74a | ||
|
|
8b73391845 | ||
|
|
5e00dcb78c | ||
|
|
c8db2b60f3 | ||
|
|
aaa8831a7d | ||
|
|
e83d3ba57f | ||
|
|
8b40e8e4bf | ||
|
|
712c6ae5d8 | ||
|
|
bb35276652 | ||
|
|
335c6bf544 | ||
|
|
054583e0de | ||
|
|
cd6fa08543 | ||
|
|
a2265f8ccd | ||
|
|
e4390db779 | ||
|
|
fe546e3791 | ||
|
|
415a20f9f7 | ||
|
|
6c37ac56b7 | ||
|
|
27fbad5884 | ||
|
|
bc452ec9be | ||
|
|
9225d6d6aa | ||
|
|
5174f3b2f6 | ||
|
|
8877d74034 | ||
|
|
d1f26844ef | ||
|
|
fd8f88db4b | ||
|
|
bfb2c20e0f | ||
|
|
6b2c409cff | ||
|
|
4af67c9734 | ||
|
|
06a4b0d1a2 | ||
|
|
3677a69c76 | ||
|
|
e51f16fa8d | ||
|
|
a0ee8b00fb | ||
|
|
eeb7da4ace | ||
|
|
b68d504f68 | ||
|
|
3ec1f7cc2b | ||
|
|
7bdb7c328a | ||
|
|
b175c97dfe | ||
|
|
f10ac10f6d | ||
|
|
64510506e8 | ||
|
|
e6c265b254 | ||
|
|
342ff47e51 | ||
|
|
ba80000e27 | ||
|
|
f792684763 | ||
|
|
036faeb06d | ||
|
|
04117b2333 | ||
|
|
7559d8463f | ||
|
|
351516f08d | ||
|
|
b2e06fd440 | ||
|
|
338beaff29 | ||
|
|
cb1c8503b0 | ||
|
|
d7767c7d91 | ||
|
|
c6f72e6504 | ||
|
|
da6f800f11 | ||
|
|
2f571d868b | ||
|
|
3015845093 | ||
|
|
341f7aa7ad | ||
|
|
8717a03466 | ||
|
|
3b18f9f8ec | ||
|
|
69527f91b0 | ||
|
|
17073f9d2a | ||
|
|
4f96ac46be | ||
|
|
76625012dd | ||
|
|
dfb40e0159 | ||
|
|
82d5fe3821 | ||
|
|
f5fc5c648e | ||
|
|
680dc1b5da | ||
|
|
ac476ba973 | ||
|
|
7857e8aeb9 | ||
|
|
d450588e39 | ||
|
|
e31d563f61 | ||
|
|
7819d25c95 | ||
|
|
8c956cdb79 | ||
|
|
e15ad17967 | ||
|
|
66da2a46c5 | ||
|
|
10f6fe2d09 | ||
|
|
dd77c79090 | ||
|
|
9fcd85e640 | ||
|
|
d1b8c15e11 | ||
|
|
e679d70a4b | ||
|
|
31817c5494 | ||
|
|
9d776863a1 | ||
|
|
1d4398388c | ||
|
|
281592fa97 | ||
|
|
dccccd0110 | ||
|
|
5c474d8614 | ||
|
|
429d8fe584 | ||
|
|
86357b45b0 | ||
|
|
42e16b1752 | ||
|
|
e2c9d3899b | ||
|
|
4587fd06c3 | ||
|
|
f9d34cb18b | ||
|
|
f97ae9e570 | ||
|
|
eec6f64d62 | ||
|
|
20ed569a71 | ||
|
|
79800902db | ||
|
|
1e3c9b722e | ||
|
|
c2bd11fa9e | ||
|
|
791dee1457 | ||
|
|
2db51ca243 | ||
|
|
2bc2f7f520 | ||
|
|
7222c698aa | ||
|
|
1c6d7866e8 | ||
|
|
c810c5a0bb | ||
|
|
ff45bdd072 | ||
|
|
3ad39bd0d3 | ||
|
|
cb30dd1893 | ||
|
|
bf9fd5a3b8 | ||
|
|
4c4d2d4463 | ||
|
|
2d182fcd03 | ||
|
|
c7ab69d46d | ||
|
|
89b935e2df | ||
|
|
3ef2650e69 | ||
|
|
dbb30110ac | ||
|
|
ff8e451af9 | ||
|
|
c97f75283b | ||
|
|
6550c032ee | ||
|
|
dfb1d5411e | ||
|
|
98774527bc | ||
|
|
de7cc8ea53 | ||
|
|
a427606050 | ||
|
|
656d727854 | ||
|
|
4184c28ce7 | ||
|
|
ab7aaf8d2f | ||
|
|
09d559e7e0 | ||
|
|
94b34350a3 | ||
|
|
bed70ebd09 | ||
|
|
660c1d6f21 | ||
|
|
f783a26a56 | ||
|
|
fcec690546 | ||
|
|
c6b3b797c5 | ||
|
|
1e916e93ad | ||
|
|
81122538d2 | ||
|
|
c4f649a849 | ||
|
|
eb36a275a2 | ||
|
|
873668a7cf | ||
|
|
72f7e8de52 | ||
|
|
a1c8d4d34a | ||
|
|
f96a96a60c | ||
|
|
2801e59edc | ||
|
|
39b6cc193f | ||
|
|
49983a6f05 | ||
|
|
767465edbf | ||
|
|
f6949d834b | ||
|
|
f4c7ab29f0 | ||
|
|
36f758dfca | ||
|
|
545a5504e0 | ||
|
|
b460b7834c | ||
|
|
c7e4cf7ca4 | ||
|
|
607447365d | ||
|
|
f72453fc59 | ||
|
|
a88669677f | ||
|
|
52e7bcdf09 | ||
|
|
581774d413 | ||
|
|
34079d1612 | ||
|
|
08219b2d8e | ||
|
|
ee0b87544b | ||
|
|
729378b2d7 | ||
|
|
7bc26cc493 | ||
|
|
a0c03784f2 | ||
|
|
35e79f7173 | ||
|
|
9fdb5037bc | ||
|
|
3251b26317 | ||
|
|
50ec75ec57 | ||
|
|
1c279675c8 | ||
|
|
10d3a6b2ba | ||
|
|
bc3527d310 | ||
|
|
108891ba2e | ||
|
|
f61ffae15c | ||
|
|
82726a9119 | ||
|
|
53f00bee12 | ||
|
|
0f6bb683d6 | ||
|
|
ef8e86a78d | ||
|
|
daf40393d9 | ||
|
|
c04823e2d8 | ||
|
|
846ed9cd26 | ||
|
|
653b442b60 | ||
|
|
9389a92896 | ||
|
|
d9ca879533 | ||
|
|
45cbaca31f | ||
|
|
ba85d370d6 | ||
|
|
5a49b99b8a | ||
|
|
0be752b64c | ||
|
|
1935ce1adb | ||
|
|
2820fb4f15 | ||
|
|
b910bf2f33 | ||
|
|
3e0135b6b3 | ||
|
|
5fb7c879ed | ||
|
|
e61cc474a3 | ||
|
|
bd56c49538 | ||
|
|
18f87ab03c | ||
|
|
27b69b36f7 | ||
|
|
56d2a66ac5 | ||
|
|
acc58a0d1c | ||
|
|
280a9d20f9 | ||
|
|
830cef06db | ||
|
|
23e2a5dd12 | ||
|
|
97cd9dba9f | ||
|
|
adc3fde2e4 | ||
|
|
c1267a9387 | ||
|
|
825663fd77 | ||
|
|
288288df03 | ||
|
|
1dea5f8f7b | ||
|
|
f633e07ed1 | ||
|
|
726f0cef0e | ||
|
|
253ed55412 | ||
|
|
c679d5e1f0 | ||
|
|
362605cea0 | ||
|
|
2af8982640 | ||
|
|
753d785076 | ||
|
|
6ff1e3866b | ||
|
|
58d02b76db | ||
|
|
572310b906 | ||
|
|
6dd7c7d0f0 | ||
|
|
07d4579c19 | ||
|
|
9652007266 | ||
|
|
1df74e8c4a | ||
|
|
b574f2aa32 | ||
|
|
8bf1859d36 | ||
|
|
253f7774de | ||
|
|
9b7a1d7a26 | ||
|
|
4965f3de1a | ||
|
|
2da0fafcd5 | ||
|
|
39e43d2401 | ||
|
|
02afd2dc39 | ||
|
|
54b24d935c | ||
|
|
8e30b9a6e3 | ||
|
|
c9ec18670c | ||
|
|
fac642de1e | ||
|
|
7bfea07b9b | ||
|
|
ab85028a94 | ||
|
|
1cd94518a8 | ||
|
|
0583e1db8a | ||
|
|
5681d6b9e3 | ||
|
|
b3578d058f | ||
|
|
82648fab3e | ||
|
|
3c4df5332e | ||
|
|
b8de69dfac | ||
|
|
fd5acf7ab1 | ||
|
|
5de1c10dd1 | ||
|
|
a91b965a33 | ||
|
|
a2552c1cc1 | ||
|
|
253aa78650 | ||
|
|
2376e7b384 | ||
|
|
cf9754f627 | ||
|
|
44416abe6e | ||
|
|
5718e6b471 | ||
|
|
c79cfd709a | ||
|
|
34a6200a47 | ||
|
|
4ec5991a13 | ||
|
|
5396ed855c | ||
|
|
037d638927 | ||
|
|
62947a3e8a | ||
|
|
56c989ac7b | ||
|
|
7e74465fb1 | ||
|
|
94d0e1972e | ||
|
|
e1a9ed0d1e | ||
|
|
3220f8a0b4 | ||
|
|
28c7188984 | ||
|
|
49de1f80cf | ||
|
|
9780eebb12 | ||
|
|
bf9646ba98 | ||
|
|
1ea33d83bf | ||
|
|
23e1141484 | ||
|
|
96ea3d8273 | ||
|
|
2673eb2c1d | ||
|
|
47532f1287 | ||
|
|
dd22b195bd | ||
|
|
f47c5dcc75 | ||
|
|
807a4ae8c4 | ||
|
|
8bc7079d78 | ||
|
|
4e07685588 | ||
|
|
e1235a7346 | ||
|
|
4a089131fc | ||
|
|
f8eb203643 | ||
|
|
c26963e848 | ||
|
|
cf8283a2b0 | ||
|
|
4f433772af | ||
|
|
95752a7de5 | ||
|
|
3fd92adf78 | ||
|
|
3bc4178ea8 | ||
|
|
0f9b1a7d0c | ||
|
|
2dd9aafb83 | ||
|
|
cb97072ae2 | ||
|
|
a91bf1cecc | ||
|
|
5f4546488c | ||
|
|
fe3d1ff48e | ||
|
|
41ace44f32 | ||
|
|
50dbe65e1d | ||
|
|
1c21c2956d | ||
|
|
21d9485ca7 | ||
|
|
db902beb24 | ||
|
|
f9e9cc76ea | ||
|
|
92a8147c8d | ||
|
|
1054164533 | ||
|
|
9e29c7ab19 | ||
|
|
e892723c58 | ||
|
|
3f77b371b3 | ||
|
|
5bc817f25b | ||
|
|
b110e4dea1 | ||
|
|
384f593c80 | ||
|
|
ca4663001b | ||
|
|
2feaca5537 | ||
|
|
82749989e6 | ||
|
|
ffbb1b3917 | ||
|
|
e8624f2de7 | ||
|
|
117ecfebd1 | ||
|
|
63adfd4d38 | ||
|
|
b05cbe5356 | ||
|
|
6ae0fc9aef | ||
|
|
7c57a82589 | ||
|
|
087f4a260e | ||
|
|
e06980a664 | ||
|
|
bf49788296 | ||
|
|
d00f94f51c | ||
|
|
026f59285a | ||
|
|
54ed8bb2a8 | ||
|
|
4b37ca7d53 | ||
|
|
1355431a36 | ||
|
|
949f40d07e | ||
|
|
de6089609a | ||
|
|
67f317ec93 | ||
|
|
81b9b98250 | ||
|
|
54b5390d03 | ||
|
|
2162e2f50c | ||
|
|
c50067b6d2 | ||
|
|
19566e0d9a | ||
|
|
89d5c5febc | ||
|
|
cac14ff181 | ||
|
|
eea35d4920 | ||
|
|
a9f78694ee | ||
|
|
36d78242f7 | ||
|
|
1f93992736 | ||
|
|
068ba5b4f4 | ||
|
|
2aa00eba80 | ||
|
|
cac686b9e6 | ||
|
|
ee523fb512 | ||
|
|
a0886b9152 | ||
|
|
09cb0764d7 | ||
|
|
77f28083e3 | ||
|
|
da8963d1e5 | ||
|
|
ba44a2f02e | ||
|
|
945e04ed92 | ||
|
|
1ff815fe5a | ||
|
|
451bbe890b | ||
|
|
fca8b85a72 | ||
|
|
be1e0b3c8d | ||
|
|
0cd242cd0c | ||
|
|
8b9103208d | ||
|
|
c4859f665b | ||
|
|
f62020e1ec | ||
|
|
58252bcf97 | ||
|
|
ed65a5124e | ||
|
|
9a32f1a816 | ||
|
|
d793910306 | ||
|
|
dcc8ef54b9 | ||
|
|
78de48391d | ||
|
|
ffb81d88fd | ||
|
|
7e76f8cb20 | ||
|
|
69f5bc1725 | ||
|
|
8aee87e211 | ||
|
|
4dd04cb250 | ||
|
|
eaaaae0a83 | ||
|
|
47e86e6133 | ||
|
|
19519a6d7c | ||
|
|
4cc9cbb0c5 | ||
|
|
233a1aefce | ||
|
|
14b534eb64 | ||
|
|
f79ff99d0b | ||
|
|
9cf80113fc | ||
|
|
92ed830564 | ||
|
|
caeb33248e | ||
|
|
0be5f67621 | ||
|
|
908a15d6a8 | ||
|
|
725e1b2ee3 | ||
|
|
95b76f08f2 | ||
|
|
e75b3d69f6 | ||
|
|
8d76985276 | ||
|
|
23c16ebfb3 | ||
|
|
7a524d7a35 | ||
|
|
cea9194595 | ||
|
|
08050956c9 | ||
|
|
e16d3ed827 | ||
|
|
8cc8dcc89c | ||
|
|
173a715a4d | ||
|
|
4fee0210f6 | ||
|
|
f99adf3de4 | ||
|
|
3d81da9762 | ||
|
|
a567ff6de4 | ||
|
|
61ceb72cea | ||
|
|
58d3b969a2 | ||
|
|
f8d725e15b | ||
|
|
a823e44393 | ||
|
|
6929a803dc | ||
|
|
8a6c776a5f | ||
|
|
d1ba3e23f4 | ||
|
|
6670289057 | ||
|
|
0f8fa0ccef | ||
|
|
02ddfc20f1 | ||
|
|
1f0cdf82e4 | ||
|
|
baa12d725f | ||
|
|
133ff73a43 | ||
|
|
7df132b307 | ||
|
|
cd8102535b | ||
|
|
974e832f78 | ||
|
|
e3ec6bf9c5 | ||
|
|
1528c642d1 | ||
|
|
5730940492 | ||
|
|
3ff9df8e0b | ||
|
|
ff61eae164 | ||
|
|
773ff0e62a | ||
|
|
869d071f73 | ||
|
|
d01dc5a5c7 | ||
|
|
adefa213e2 | ||
|
|
2d29ef7eca | ||
|
|
dad31bc387 | ||
|
|
afb872b3e1 | ||
|
|
6dcc832983 | ||
|
|
7a744bc7f2 | ||
|
|
8a8137e96c | ||
|
|
5ae9495bc6 | ||
|
|
3a0be70783 | ||
|
|
a840057cd8 | ||
|
|
f56e09cfa1 | ||
|
|
9ed274fb39 | ||
|
|
e2b5e9bd66 | ||
|
|
f6c63f2dcb | ||
|
|
2b33c7c27f | ||
|
|
d9b4af1217 | ||
|
|
7231df34ce | ||
|
|
68709c02fe | ||
|
|
3134449b38 | ||
|
|
2b2b9ff44d | ||
|
|
c5d0c6f623 | ||
|
|
3d7d1b23cb | ||
|
|
7cbf471913 | ||
|
|
3b2cd653a7 | ||
|
|
6718c7565d | ||
|
|
b8d0e2a9e3 | ||
|
|
fa5bfee0cf | ||
|
|
704222b8d7 | ||
|
|
503090856a | ||
|
|
bbe5b98a2c | ||
|
|
84ca18b428 | ||
|
|
f6b46a1c5c | ||
|
|
59bb827d2e | ||
|
|
c2320831f7 | ||
|
|
1d12823f09 | ||
|
|
a69ce7ec62 | ||
|
|
1e49e47a37 | ||
|
|
4cc3a3f788 | ||
|
|
a57379be49 | ||
|
|
787245b058 | ||
|
|
1e5ab4d9f0 | ||
|
|
9004769865 | ||
|
|
b1ce53ade3 | ||
|
|
1b4f6f8934 | ||
|
|
4eb98a9dcc | ||
|
|
a63578e6f7 | ||
|
|
39d9eed585 | ||
|
|
78ff0c7d93 | ||
|
|
80b85ad74c | ||
|
|
1c4e65ebe1 | ||
|
|
b7c2eaa65d | ||
|
|
528fbb14ea | ||
|
|
f29127d515 | ||
|
|
d97993e518 | ||
|
|
887ce88100 | ||
|
|
6be8c1d54b | ||
|
|
c5e90d0236 | ||
|
|
65e253ae0d | ||
|
|
a6314b57b7 | ||
|
|
e6d250c640 | ||
|
|
c982ea338d | ||
|
|
8b8bef0401 | ||
|
|
bfe694763b | ||
|
|
f174dc1c77 | ||
|
|
2d33c8dd82 | ||
|
|
d8dcc90857 | ||
|
|
9dde80ce1c | ||
|
|
7b099e703a | ||
|
|
5d4afae616 | ||
|
|
6cbfc0fb1c | ||
|
|
2b59cc0185 | ||
|
|
b0211b434a | ||
|
|
7ceee95f52 | ||
|
|
34e317a559 | ||
|
|
177d9bef39 | ||
|
|
8f8d01cee2 | ||
|
|
5cba0b4f7c | ||
|
|
48d4b806ad | ||
|
|
3c6796938d | ||
|
|
53081cfca9 | ||
|
|
96a6d0674a | ||
|
|
2bc1a41ec4 | ||
|
|
635da9a2b2 | ||
|
|
23bd05ea68 | ||
|
|
85e6d785ff | ||
|
|
02248fc065 | ||
|
|
a5b1e027c1 | ||
|
|
a35c267214 | ||
|
|
98d8c19b07 | ||
|
|
b6e45b69a6 | ||
|
|
2692df3cc7 | ||
|
|
22c88cdd2e | ||
|
|
81e81f1c49 | ||
|
|
f3befb0f4d | ||
|
|
f7c7274463 | ||
|
|
cb1d892747 | ||
|
|
7495259e13 | ||
|
|
61e5003931 | ||
|
|
8d88791a0b | ||
|
|
4ad73d381c | ||
|
|
404260b8a0 | ||
|
|
3d0dd0de98 | ||
|
|
1984c4ca97 | ||
|
|
ae830d9e64 | ||
|
|
b4a3325a21 | ||
|
|
48e437c4a5 | ||
|
|
e9b00062a4 | ||
|
|
47c1204e89 | ||
|
|
ceb4df5b8b | ||
|
|
d8aa9b8d7f | ||
|
|
94958236cd | ||
|
|
d59063ebab | ||
|
|
795300347b | ||
|
|
4789f2165b | ||
|
|
e9c5a6dfbd | ||
|
|
100072cabd | ||
|
|
57ce9fae6f | ||
|
|
c04f2aa110 | ||
|
|
70399be699 | ||
|
|
bf278c39bd | ||
|
|
9824118a07 | ||
|
|
00c7600b05 | ||
|
|
b715e6ed82 | ||
|
|
75812c71df | ||
|
|
21c8609eb7 | ||
|
|
fa664bc92f | ||
|
|
c4e06f4db3 | ||
|
|
5a301f9073 | ||
|
|
2f94307635 | ||
|
|
f7279cb1f5 | ||
|
|
a3121c0b2d | ||
|
|
15017917b3 | ||
|
|
4659d727b7 | ||
|
|
8b52e7200c | ||
|
|
b95e20b6ee | ||
|
|
82132583af | ||
|
|
7b54abbc96 | ||
|
|
8e74ce1881 | ||
|
|
52ec96e4f1 | ||
|
|
969932743a | ||
|
|
54826b5fe3 | ||
|
|
bd2ab6071d | ||
|
|
18a6879d64 | ||
|
|
384553c231 | ||
|
|
0e4e8c304c | ||
|
|
d96e83c3f0 | ||
|
|
d8f9cdeacb | ||
|
|
3e515f2f59 | ||
|
|
91065ebc38 | ||
|
|
9133bef329 | ||
|
|
1eb2e5f41a | ||
|
|
9a472cf8ec | ||
|
|
9d680be37a | ||
|
|
f4f4271115 | ||
|
|
faff61bb82 | ||
|
|
061052e4f3 | ||
|
|
a6efd56844 | ||
|
|
b4d61a067e | ||
|
|
d073cd595b | ||
|
|
1b02fb6906 | ||
|
|
31323d6efb | ||
|
|
84d21b1e76 | ||
|
|
b2a1738836 | ||
|
|
9c81db574e | ||
|
|
18dc7e75ff | ||
|
|
adfdd56907 | ||
|
|
ca2b893c2c | ||
|
|
7029276f16 | ||
|
|
bcce80094d | ||
|
|
9861ca77e0 | ||
|
|
9327624930 | ||
|
|
69a96a7d5d | ||
|
|
29fbc7d952 | ||
|
|
fefb47da83 | ||
|
|
de3c6e6e99 | ||
|
|
a3ae2df0ce | ||
|
|
3c538d1c2d | ||
|
|
d2e4eeac88 | ||
|
|
97b6d71a06 | ||
|
|
43f08e7efb | ||
|
|
60c62c0668 | ||
|
|
e90341b3d2 | ||
|
|
0f063d3a2e | ||
|
|
62d60f62f3 | ||
|
|
2bfba53e21 | ||
|
|
6153b7ad06 | ||
|
|
c4b524237c | ||
|
|
17856855f6 | ||
|
|
b16c6f3faa | ||
|
|
fc9d08a62b | ||
|
|
92db8df000 | ||
|
|
b7e12cb8a7 | ||
|
|
3192d056f2 | ||
|
|
273f6b658a | ||
|
|
e90578694b | ||
|
|
3b2e53d270 | ||
|
|
a34a72795b | ||
|
|
ff2d2d0339 | ||
|
|
1ee4af52b4 | ||
|
|
28aa9b7912 | ||
|
|
8cf82c1f8b | ||
|
|
a69b4bf662 | ||
|
|
f3a48da3fa | ||
|
|
d68433ec22 | ||
|
|
c66726cdb5 | ||
|
|
d547dd8ba3 | ||
|
|
cc8989c2a5 | ||
|
|
053a2c7446 | ||
|
|
6e306461d7 | ||
|
|
e40a81e8e1 | ||
|
|
094ee7c50c | ||
|
|
d96fe82fbc | ||
|
|
bc44014532 | ||
|
|
8cbf73dff6 | ||
|
|
28b0d827b9 | ||
|
|
01ea89a461 | ||
|
|
00af075fb3 |
@@ -73,6 +73,7 @@ RUN apt-get update \
|
|||||||
libnss3 \
|
libnss3 \
|
||||||
libxss1 \
|
libxss1 \
|
||||||
libasound2 \
|
libasound2 \
|
||||||
|
libgbm1 \
|
||||||
xfonts-base \
|
xfonts-base \
|
||||||
xfonts-terminus \
|
xfonts-terminus \
|
||||||
fonts-noto \
|
fonts-noto \
|
||||||
|
|||||||
@@ -5,6 +5,7 @@
|
|||||||
**/vs/loader.js
|
**/vs/loader.js
|
||||||
**/insane/**
|
**/insane/**
|
||||||
**/marked/**
|
**/marked/**
|
||||||
|
**/markjs/**
|
||||||
**/test/**/*.js
|
**/test/**/*.js
|
||||||
**/node_modules/**
|
**/node_modules/**
|
||||||
**/vscode-api-tests/testWorkspace/**
|
**/vscode-api-tests/testWorkspace/**
|
||||||
|
|||||||
@@ -682,6 +682,20 @@
|
|||||||
"**/{vs,sql}/workbench/services/**/{common,browser}/**"
|
"**/{vs,sql}/workbench/services/**/{common,browser}/**"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"target": "**/{vs,sql}/workbench/contrib/notebook/common/**",
|
||||||
|
"restrictions": [
|
||||||
|
"vs/nls",
|
||||||
|
"vs/css!./**/*",
|
||||||
|
"**/{vs,sql}/base/**/{common,worker}/**",
|
||||||
|
"**/{vs,sql}/platform/**/common/**",
|
||||||
|
"**/{vs,sql}/editor/**",
|
||||||
|
"**/{vs,sql}/workbench/common/**",
|
||||||
|
"**/{vs,sql}/workbench/api/common/**",
|
||||||
|
"**/{vs,sql}/workbench/services/**/common/**",
|
||||||
|
"**/{vs,sql}/workbench/contrib/**/common/**"
|
||||||
|
]
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"target": "**/{vs,sql}/workbench/contrib/**/common/**",
|
"target": "**/{vs,sql}/workbench/contrib/**/common/**",
|
||||||
"restrictions": [
|
"restrictions": [
|
||||||
@@ -717,7 +731,9 @@
|
|||||||
"chart.js",
|
"chart.js",
|
||||||
"plotly.js-dist-min",
|
"plotly.js-dist-min",
|
||||||
"angular2-grid",
|
"angular2-grid",
|
||||||
"html-query-plan"
|
"html-query-plan",
|
||||||
|
"turndown",
|
||||||
|
"mark.js"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
11
.github/CODEOWNERS
vendored
Normal file
11
.github/CODEOWNERS
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# Lines starting with '#' are comments.
|
||||||
|
# Each line is a file pattern followed by one or more owners.
|
||||||
|
# Syntax can be found here: https://docs.github.com/free-pro-team@latest/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
|
||||||
|
|
||||||
|
/src/sql/*.d.ts @alanrenmsft @Charles-Gagnon @ranasaria
|
||||||
|
/extensions/resource-deployment/ @ranasaria
|
||||||
|
/extensions/arc/ @ranasaria
|
||||||
|
/extensions/azdata/ @ranasaria
|
||||||
|
/extensions/dacpac/ @kisantia
|
||||||
|
/extensions/schema-compare/ @kisantia
|
||||||
|
/extensions/sql-database-projects/ @Benjin @kisantia
|
||||||
7
.github/subscribers.json
vendored
Normal file
7
.github/subscribers.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"label-to-subscribe-to": [
|
||||||
|
"list of usernames to subscribe",
|
||||||
|
"such as:",
|
||||||
|
"JacksonKearl"
|
||||||
|
]
|
||||||
|
}
|
||||||
15
.github/workflows/ci.yml
vendored
15
.github/workflows/ci.yml
vendored
@@ -31,7 +31,10 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
node-version: 10
|
node-version: 10
|
||||||
# TODO: cache node modules
|
# TODO: cache node modules
|
||||||
- run: yarn --frozen-lockfile
|
# Increase timeout to get around latency issues when fetching certain packages
|
||||||
|
- run: |
|
||||||
|
yarn config set network-timeout 300000
|
||||||
|
yarn --frozen-lockfile
|
||||||
name: Install Dependencies
|
name: Install Dependencies
|
||||||
- run: yarn electron x64
|
- run: yarn electron x64
|
||||||
name: Download Electron
|
name: Download Electron
|
||||||
@@ -79,7 +82,10 @@ jobs:
|
|||||||
- uses: actions/setup-python@v1
|
- uses: actions/setup-python@v1
|
||||||
with:
|
with:
|
||||||
python-version: '2.x'
|
python-version: '2.x'
|
||||||
- run: yarn --frozen-lockfile
|
# Increase timeout to get around latency issues when fetching certain packages
|
||||||
|
- run: |
|
||||||
|
yarn config set network-timeout 300000
|
||||||
|
yarn --frozen-lockfile
|
||||||
name: Install Dependencies
|
name: Install Dependencies
|
||||||
- run: yarn electron
|
- run: yarn electron
|
||||||
name: Download Electron
|
name: Download Electron
|
||||||
@@ -112,7 +118,10 @@ jobs:
|
|||||||
- uses: actions/setup-node@v1
|
- uses: actions/setup-node@v1
|
||||||
with:
|
with:
|
||||||
node-version: 10
|
node-version: 10
|
||||||
- run: yarn --frozen-lockfile
|
# Increase timeout to get around latency issues when fetching certain packages
|
||||||
|
- run: |
|
||||||
|
yarn config set network-timeout 300000
|
||||||
|
yarn --frozen-lockfile
|
||||||
name: Install Dependencies
|
name: Install Dependencies
|
||||||
- run: yarn electron x64
|
- run: yarn electron x64
|
||||||
name: Download Electron
|
name: Download Electron
|
||||||
|
|||||||
50
.github/workflows/deep-classifier-runner.yml
vendored
Normal file
50
.github/workflows/deep-classifier-runner.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
name: "Deep Classifier: Runner"
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: 0 * * * *
|
||||||
|
repository_dispatch:
|
||||||
|
types: [trigger-deep-classifier-runner]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
main:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout Actions
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
repository: 'microsoft/vscode-github-triage-actions'
|
||||||
|
ref: v35
|
||||||
|
path: ./actions
|
||||||
|
- name: Install Actions
|
||||||
|
run: npm install --production --prefix ./actions
|
||||||
|
- name: Install Additional Dependencies
|
||||||
|
# Pulls in a bunch of other packages that arent needed for the rest of the actions
|
||||||
|
run: npm install @azure/storage-blob@12.1.1
|
||||||
|
- name: "Run Classifier: Scraper"
|
||||||
|
uses: ./actions/classifier-deep/apply/fetch-sources
|
||||||
|
with:
|
||||||
|
# slightly overlapping to protect against issues slipping through the cracks if a run is delayed
|
||||||
|
from: 80
|
||||||
|
until: 5
|
||||||
|
configPath: classifier
|
||||||
|
blobContainerName: vscode-issue-classifier
|
||||||
|
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
|
||||||
|
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
|
||||||
|
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
|
||||||
|
- name: Set up Python 3.7
|
||||||
|
uses: actions/setup-python@v1
|
||||||
|
with:
|
||||||
|
python-version: 3.7
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install --upgrade numpy scipy scikit-learn joblib nltk simpletransformers torch torchvision
|
||||||
|
- name: "Run Classifier: Generator"
|
||||||
|
run: python ./actions/classifier-deep/apply/generate-labels/main.py
|
||||||
|
- name: "Run Classifier: Labeler"
|
||||||
|
uses: ./actions/classifier-deep/apply/apply-labels
|
||||||
|
with:
|
||||||
|
configPath: classifier
|
||||||
|
allowLabels: "needs more info|new release"
|
||||||
|
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
|
||||||
|
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
|
||||||
27
.github/workflows/deep-classifier-scraper.yml
vendored
Normal file
27
.github/workflows/deep-classifier-scraper.yml
vendored
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
name: "Deep Classifier: Scraper"
|
||||||
|
on:
|
||||||
|
repository_dispatch:
|
||||||
|
types: [trigger-deep-classifier-scraper]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
main:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout Actions
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
repository: 'microsoft/vscode-github-triage-actions'
|
||||||
|
ref: v35
|
||||||
|
path: ./actions
|
||||||
|
- name: Install Actions
|
||||||
|
run: npm install --production --prefix ./actions
|
||||||
|
- name: Install Additional Dependencies
|
||||||
|
# Pulls in a bunch of other packages that arent needed for the rest of the actions
|
||||||
|
run: npm install @azure/storage-blob@12.1.1
|
||||||
|
- name: "Run Classifier: Scraper"
|
||||||
|
uses: ./actions/classifier-deep/train/fetch-issues
|
||||||
|
with:
|
||||||
|
blobContainerName: vscode-issue-classifier
|
||||||
|
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
|
||||||
|
token: ${{secrets.ISSUE_SCRAPER_TOKEN}}
|
||||||
|
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
|
||||||
27
.github/workflows/latest-release-monitor.yml
vendored
Normal file
27
.github/workflows/latest-release-monitor.yml
vendored
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
name: Latest Release Monitor
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: 0/5 * * * *
|
||||||
|
repository_dispatch:
|
||||||
|
types: [trigger-latest-release-monitor]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
main:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout Actions
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
with:
|
||||||
|
repository: 'microsoft/vscode-github-triage-actions'
|
||||||
|
path: ./actions
|
||||||
|
ref: v35
|
||||||
|
- name: Install Actions
|
||||||
|
run: npm install --production --prefix ./actions
|
||||||
|
- name: Install Storage Module
|
||||||
|
run: npm install @azure/storage-blob@12.1.1
|
||||||
|
- name: Run Latest Release Monitor
|
||||||
|
uses: ./actions/latest-release-monitor
|
||||||
|
with:
|
||||||
|
storageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
|
||||||
|
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
|
||||||
|
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
|
||||||
15
.vscode/launch.json
vendored
15
.vscode/launch.json
vendored
@@ -19,7 +19,8 @@
|
|||||||
"timeout": 30000,
|
"timeout": 30000,
|
||||||
"port": 5870,
|
"port": 5870,
|
||||||
"outFiles": [
|
"outFiles": [
|
||||||
"${workspaceFolder}/out/**/*.js"
|
"${workspaceFolder}/out/**/*.js",
|
||||||
|
"${workspaceFolder}/extensions/*/out/**/*.js"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -68,10 +69,9 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "pwa-chrome",
|
"type": "chrome",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"name": "Attach to azuredatastudio",
|
"name": "Attach to azuredatastudio",
|
||||||
"timeout": 50000,
|
|
||||||
"port": 9222
|
"port": 9222
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -100,7 +100,9 @@
|
|||||||
"--no-cached-data",
|
"--no-cached-data",
|
||||||
],
|
],
|
||||||
"webRoot": "${workspaceFolder}",
|
"webRoot": "${workspaceFolder}",
|
||||||
// Settings for js-debug:
|
"cascadeTerminateToConfigurations": [
|
||||||
|
"Attach to Extension Host"
|
||||||
|
],
|
||||||
"userDataDir": false,
|
"userDataDir": false,
|
||||||
"pauseForSourceMap": false,
|
"pauseForSourceMap": false,
|
||||||
"outFiles": [
|
"outFiles": [
|
||||||
@@ -110,10 +112,10 @@
|
|||||||
"preLaunchTask": "Ensure Prelaunch Dependencies",
|
"preLaunchTask": "Ensure Prelaunch Dependencies",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "chrome",
|
"type": "node",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"name": "Launch ADS (Web) (TBD)",
|
"name": "Launch ADS (Web) (TBD)",
|
||||||
"program": "${workspaceFolder}/resources/serverless/code-web.js",
|
"program": "${workspaceFolder}/resources/web/code-web.js",
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"group": "0_vscode",
|
"group": "0_vscode",
|
||||||
"order": 2
|
"order": 2
|
||||||
@@ -274,6 +276,7 @@
|
|||||||
"Attach to Extension Host",
|
"Attach to Extension Host",
|
||||||
"Attach to Shared Process",
|
"Attach to Shared Process",
|
||||||
],
|
],
|
||||||
|
"preLaunchTask": "Ensure Prelaunch Dependencies",
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"group": "0_vscode",
|
"group": "0_vscode",
|
||||||
"order": 1
|
"order": 1
|
||||||
|
|||||||
2
.vscode/notebooks/api.github-issues
vendored
2
.vscode/notebooks/api.github-issues
vendored
@@ -8,7 +8,7 @@
|
|||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"July 2020\"",
|
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"September 2020\"",
|
||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
9
.vscode/notebooks/inbox.github-issues
vendored
9
.vscode/notebooks/inbox.github-issues
vendored
@@ -8,17 +8,20 @@
|
|||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item "
|
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item ",
|
||||||
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Inbox tracking and Issue triage"
|
"value": "## Inbox tracking and Issue triage",
|
||||||
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/master/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/Microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or 
closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions."
|
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/master/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/Microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or 
closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
|
||||||
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
|
|||||||
2
.vscode/notebooks/my-work.github-issues
vendored
2
.vscode/notebooks/my-work.github-issues
vendored
@@ -8,7 +8,7 @@
|
|||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks\n\n// current milestone name\n$milestone=milestone:\"June 2020\"",
|
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks\n\n// current milestone name\n$milestone=milestone:\"September 2020\"",
|
||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
5
.vscode/notebooks/verification.github-issues
vendored
5
.vscode/notebooks/verification.github-issues
vendored
@@ -14,7 +14,7 @@
|
|||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"June 2020\"",
|
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"September 2020\"",
|
||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -44,7 +44,8 @@
|
|||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "### All"
|
"value": "### All",
|
||||||
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
|
|||||||
194
.vscode/searches/TrustedTypes.code-search
vendored
Normal file
194
.vscode/searches/TrustedTypes.code-search
vendored
Normal file
@@ -0,0 +1,194 @@
|
|||||||
|
# Query: .innerHTML =
|
||||||
|
# Flags: CaseSensitive WordMatch
|
||||||
|
# Including: src/vs/**/*.{t,j}s
|
||||||
|
# Excluding: *.test.ts
|
||||||
|
# ContextLines: 3
|
||||||
|
|
||||||
|
22 results - 14 files
|
||||||
|
|
||||||
|
src/vs/base/browser/markdownRenderer.ts:
|
||||||
|
161 const strValue = values[0];
|
||||||
|
162 const span = element.querySelector(`div[data-code="${id}"]`);
|
||||||
|
163 if (span) {
|
||||||
|
164: span.innerHTML = strValue;
|
||||||
|
165 }
|
||||||
|
166 }).catch(err => {
|
||||||
|
167 // ignore
|
||||||
|
|
||||||
|
243 return true;
|
||||||
|
244 }
|
||||||
|
245
|
||||||
|
246: element.innerHTML = insane(renderedMarkdown, {
|
||||||
|
247 allowedSchemes,
|
||||||
|
248 // allowedTags should included everything that markdown renders to.
|
||||||
|
249 // Since we have our own sanitize function for marked, it's possible we missed some tag so let insane make sure.
|
||||||
|
|
||||||
|
src/vs/base/browser/ui/contextview/contextview.ts:
|
||||||
|
157 this.shadowRootHostElement = DOM.$('.shadow-root-host');
|
||||||
|
158 this.container.appendChild(this.shadowRootHostElement);
|
||||||
|
159 this.shadowRoot = this.shadowRootHostElement.attachShadow({ mode: 'open' });
|
||||||
|
160: this.shadowRoot.innerHTML = `
|
||||||
|
161 <style>
|
||||||
|
162 ${SHADOW_ROOT_CSS}
|
||||||
|
163 </style>
|
||||||
|
|
||||||
|
src/vs/code/electron-sandbox/issue/issueReporterMain.ts:
|
||||||
|
57 const platformClass = platform.isWindows ? 'windows' : platform.isLinux ? 'linux' : 'mac';
|
||||||
|
58 addClass(document.body, platformClass); // used by our fonts
|
||||||
|
59
|
||||||
|
60: document.body.innerHTML = BaseHtml();
|
||||||
|
61 const issueReporter = new IssueReporter(configuration);
|
||||||
|
62 issueReporter.render();
|
||||||
|
63 document.body.style.display = 'block';
|
||||||
|
|
||||||
|
src/vs/code/electron-sandbox/processExplorer/processExplorerMain.ts:
|
||||||
|
320 content.push(`.highest { color: ${styles.highlightForeground}; }`);
|
||||||
|
321 }
|
||||||
|
322
|
||||||
|
323: styleTag.innerHTML = content.join('\n');
|
||||||
|
324 if (document.head) {
|
||||||
|
325 document.head.appendChild(styleTag);
|
||||||
|
326 }
|
||||||
|
|
||||||
|
src/vs/editor/browser/view/domLineBreaksComputer.ts:
|
||||||
|
107 allCharOffsets[i] = tmp[0];
|
||||||
|
108 allVisibleColumns[i] = tmp[1];
|
||||||
|
109 }
|
||||||
|
110: containerDomNode.innerHTML = sb.build();
|
||||||
|
111
|
||||||
|
112 containerDomNode.style.position = 'absolute';
|
||||||
|
113 containerDomNode.style.top = '10000';
|
||||||
|
|
||||||
|
src/vs/editor/browser/view/viewLayer.ts:
|
||||||
|
507 private _finishRenderingNewLines(ctx: IRendererContext<T>, domNodeIsEmpty: boolean, newLinesHTML: string, wasNew: boolean[]): void {
|
||||||
|
508 const lastChild = <HTMLElement>this.domNode.lastChild;
|
||||||
|
509 if (domNodeIsEmpty || !lastChild) {
|
||||||
|
510: this.domNode.innerHTML = newLinesHTML;
|
||||||
|
511 } else {
|
||||||
|
512 lastChild.insertAdjacentHTML('afterend', newLinesHTML);
|
||||||
|
513 }
|
||||||
|
|
||||||
|
525 private _finishRenderingInvalidLines(ctx: IRendererContext<T>, invalidLinesHTML: string, wasInvalid: boolean[]): void {
|
||||||
|
526 const hugeDomNode = document.createElement('div');
|
||||||
|
527
|
||||||
|
528: hugeDomNode.innerHTML = invalidLinesHTML;
|
||||||
|
529
|
||||||
|
530 for (let i = 0; i < ctx.linesLength; i++) {
|
||||||
|
531 const line = ctx.lines[i];
|
||||||
|
|
||||||
|
src/vs/editor/browser/widget/diffEditorWidget.ts:
|
||||||
|
2157
|
||||||
|
2158 let domNode = document.createElement('div');
|
||||||
|
2159 domNode.className = `view-lines line-delete ${MOUSE_CURSOR_TEXT_CSS_CLASS_NAME}`;
|
||||||
|
2160: domNode.innerHTML = sb.build();
|
||||||
|
2161 Configuration.applyFontInfoSlow(domNode, fontInfo);
|
||||||
|
2162
|
||||||
|
2163 let marginDomNode = document.createElement('div');
|
||||||
|
2164 marginDomNode.className = 'inline-deleted-margin-view-zone';
|
||||||
|
2165: marginDomNode.innerHTML = marginHTML.join('');
|
||||||
|
2166 Configuration.applyFontInfoSlow(marginDomNode, fontInfo);
|
||||||
|
2167
|
||||||
|
2168 return {
|
||||||
|
|
||||||
|
src/vs/editor/standalone/browser/colorizer.ts:
|
||||||
|
40 let text = domNode.firstChild ? domNode.firstChild.nodeValue : '';
|
||||||
|
41 domNode.className += ' ' + theme;
|
||||||
|
42 let render = (str: string) => {
|
||||||
|
43: domNode.innerHTML = str;
|
||||||
|
44 };
|
||||||
|
45 return this.colorize(modeService, text || '', mimeType, options).then(render, (err) => console.error(err));
|
||||||
|
46 }
|
||||||
|
|
||||||
|
src/vs/editor/standalone/browser/standaloneThemeServiceImpl.ts:
|
||||||
|
212 if (!this._globalStyleElement) {
|
||||||
|
213 this._globalStyleElement = dom.createStyleSheet();
|
||||||
|
214 this._globalStyleElement.className = 'monaco-colors';
|
||||||
|
215: this._globalStyleElement.innerHTML = this._css;
|
||||||
|
216 this._styleElements.push(this._globalStyleElement);
|
||||||
|
217 }
|
||||||
|
218 return Disposable.None;
|
||||||
|
|
||||||
|
221 private _registerShadowDomContainer(domNode: HTMLElement): IDisposable {
|
||||||
|
222 const styleElement = dom.createStyleSheet(domNode);
|
||||||
|
223 styleElement.className = 'monaco-colors';
|
||||||
|
224: styleElement.innerHTML = this._css;
|
||||||
|
225 this._styleElements.push(styleElement);
|
||||||
|
226 return {
|
||||||
|
227 dispose: () => {
|
||||||
|
|
||||||
|
291 ruleCollector.addRule(generateTokensCSSForColorMap(colorMap));
|
||||||
|
292
|
||||||
|
293 this._css = cssRules.join('\n');
|
||||||
|
294: this._styleElements.forEach(styleElement => styleElement.innerHTML = this._css);
|
||||||
|
295
|
||||||
|
296 TokenizationRegistry.setColorMap(colorMap);
|
||||||
|
297 this._onColorThemeChange.fire(theme);
|
||||||
|
|
||||||
|
src/vs/editor/test/browser/controller/imeTester.ts:
|
||||||
|
55 let content = this._model.getModelLineContent(i);
|
||||||
|
56 r += content + '<br/>';
|
||||||
|
57 }
|
||||||
|
58: output.innerHTML = r;
|
||||||
|
59 }
|
||||||
|
60 }
|
||||||
|
61
|
||||||
|
|
||||||
|
69 let title = document.createElement('div');
|
||||||
|
70 title.className = 'title';
|
||||||
|
71
|
||||||
|
72: title.innerHTML = description + '. Type <strong>' + inputStr + '</strong>';
|
||||||
|
73 container.appendChild(title);
|
||||||
|
74
|
||||||
|
75 let startBtn = document.createElement('button');
|
||||||
|
|
||||||
|
src/vs/workbench/contrib/notebook/browser/view/renderers/cellRenderer.ts:
|
||||||
|
454
|
||||||
|
455 private getMarkdownDragImage(templateData: MarkdownCellRenderTemplate): HTMLElement {
|
||||||
|
456 const dragImageContainer = DOM.$('.cell-drag-image.monaco-list-row.focused.markdown-cell-row');
|
||||||
|
457: dragImageContainer.innerHTML = templateData.container.outerHTML;
|
||||||
|
458
|
||||||
|
459 // Remove all rendered content nodes after the
|
||||||
|
460 const markdownContent = dragImageContainer.querySelector('.cell.markdown')!;
|
||||||
|
|
||||||
|
611 return null;
|
||||||
|
612 }
|
||||||
|
613
|
||||||
|
614: editorContainer.innerHTML = richEditorText;
|
||||||
|
615
|
||||||
|
616 return dragImageContainer;
|
||||||
|
617 }
|
||||||
|
|
||||||
|
src/vs/workbench/contrib/notebook/browser/view/renderers/webviewPreloads.ts:
|
||||||
|
375 addMouseoverListeners(outputNode, outputId);
|
||||||
|
376 const content = data.content;
|
||||||
|
377 if (content.type === RenderOutputType.Html) {
|
||||||
|
378: outputNode.innerHTML = content.htmlContent;
|
||||||
|
379 cellOutputContainer.appendChild(outputNode);
|
||||||
|
380 domEval(outputNode);
|
||||||
|
381 } else {
|
||||||
|
|
||||||
|
src/vs/workbench/contrib/webview/browser/pre/main.js:
|
||||||
|
386 // apply default styles
|
||||||
|
387 const defaultStyles = newDocument.createElement('style');
|
||||||
|
388 defaultStyles.id = '_defaultStyles';
|
||||||
|
389: defaultStyles.innerHTML = defaultCssRules;
|
||||||
|
390 newDocument.head.prepend(defaultStyles);
|
||||||
|
391
|
||||||
|
392 applyStyles(newDocument, newDocument.body);
|
||||||
|
|
||||||
|
src/vs/workbench/contrib/welcome/walkThrough/browser/walkThroughPart.ts:
|
||||||
|
281
|
||||||
|
282 const content = model.main.textEditorModel.getValue(EndOfLinePreference.LF);
|
||||||
|
283 if (!strings.endsWith(input.resource.path, '.md')) {
|
||||||
|
284: this.content.innerHTML = content;
|
||||||
|
285 this.updateSizeClasses();
|
||||||
|
286 this.decorateContent();
|
||||||
|
287 this.contentDisposables.push(this.keybindingService.onDidUpdateKeybindings(() => this.decorateContent()));
|
||||||
|
|
||||||
|
303 const innerContent = document.createElement('div');
|
||||||
|
304 innerContent.classList.add('walkThroughContent'); // only for markdown files
|
||||||
|
305 const markdown = this.expandMacros(content);
|
||||||
|
306: innerContent.innerHTML = marked(markdown, { renderer });
|
||||||
|
307 this.content.appendChild(innerContent);
|
||||||
|
308
|
||||||
|
309 model.snippets.forEach((snippet, i) => {
|
||||||
48
.vscode/searches/es6.code-search
vendored
48
.vscode/searches/es6.code-search
vendored
@@ -2,43 +2,31 @@
|
|||||||
# Flags: CaseSensitive WordMatch
|
# Flags: CaseSensitive WordMatch
|
||||||
# ContextLines: 2
|
# ContextLines: 2
|
||||||
|
|
||||||
14 results - 4 files
|
12 results - 4 files
|
||||||
|
|
||||||
src/vs/base/browser/dom.ts:
|
src/vs/base/browser/dom.ts:
|
||||||
81 };
|
83 };
|
||||||
82
|
84
|
||||||
83: /** @deprecated ES6 - use classList*/
|
|
||||||
84 export const hasClass: (node: HTMLElement | SVGElement, className: string) => boolean = _classList.hasClass.bind(_classList);
|
|
||||||
85: /** @deprecated ES6 - use classList*/
|
85: /** @deprecated ES6 - use classList*/
|
||||||
86 export const addClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.addClass.bind(_classList);
|
86 export const hasClass: (node: HTMLElement | SVGElement, className: string) => boolean = _classList.hasClass.bind(_classList);
|
||||||
87: /** @deprecated ES6 - use classList*/
|
87: /** @deprecated ES6 - use classList*/
|
||||||
88 export const addClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.addClasses.bind(_classList);
|
88 export const addClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.addClass.bind(_classList);
|
||||||
89: /** @deprecated ES6 - use classList*/
|
89: /** @deprecated ES6 - use classList*/
|
||||||
90 export const removeClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.removeClass.bind(_classList);
|
90 export const addClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.addClasses.bind(_classList);
|
||||||
91: /** @deprecated ES6 - use classList*/
|
91: /** @deprecated ES6 - use classList*/
|
||||||
92 export const removeClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.removeClasses.bind(_classList);
|
92 export const removeClass: (node: HTMLElement | SVGElement, className: string) => void = _classList.removeClass.bind(_classList);
|
||||||
93: /** @deprecated ES6 - use classList*/
|
93: /** @deprecated ES6 - use classList*/
|
||||||
94 export const toggleClass: (node: HTMLElement | SVGElement, className: string, shouldHaveIt?: boolean) => void = _classList.toggleClass.bind(_classList);
|
94 export const removeClasses: (node: HTMLElement | SVGElement, ...classNames: string[]) => void = _classList.removeClasses.bind(_classList);
|
||||||
95
|
95: /** @deprecated ES6 - use classList*/
|
||||||
|
96 export const toggleClass: (node: HTMLElement | SVGElement, className: string, shouldHaveIt?: boolean) => void = _classList.toggleClass.bind(_classList);
|
||||||
|
97
|
||||||
|
|
||||||
src/vs/base/common/arrays.ts:
|
src/vs/base/common/arrays.ts:
|
||||||
401
|
401
|
||||||
402 /**
|
402 /**
|
||||||
403: * @deprecated ES6: use `Array.findIndex`
|
403: * @deprecated ES6: use `Array.find`
|
||||||
404 */
|
404 */
|
||||||
405 export function firstIndex<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean): number {
|
405 export function first<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean, notFoundValue: T): T;
|
||||||
|
|
||||||
417
|
|
||||||
418 /**
|
|
||||||
419: * @deprecated ES6: use `Array.find`
|
|
||||||
420 */
|
|
||||||
421 export function first<T>(array: ReadonlyArray<T>, fn: (item: T) => boolean, notFoundValue: T): T;
|
|
||||||
|
|
||||||
569
|
|
||||||
570 /**
|
|
||||||
571: * @deprecated ES6: use `Array.find`
|
|
||||||
572 */
|
|
||||||
573 export function find<T>(arr: ArrayLike<T>, predicate: (value: T, index: number, arr: ArrayLike<T>) => any): T | undefined {
|
|
||||||
|
|
||||||
src/vs/base/common/objects.ts:
|
src/vs/base/common/objects.ts:
|
||||||
115
|
115
|
||||||
@@ -66,8 +54,8 @@ src/vs/base/common/strings.ts:
|
|||||||
170 */
|
170 */
|
||||||
171 export function endsWith(haystack: string, needle: string): boolean {
|
171 export function endsWith(haystack: string, needle: string): boolean {
|
||||||
|
|
||||||
861
|
857
|
||||||
862 /**
|
858 /**
|
||||||
863: * @deprecated ES6
|
859: * @deprecated ES6
|
||||||
864 */
|
860 */
|
||||||
865 export function repeat(s: string, count: number): string {
|
861 export function repeat(s: string, count: number): string {
|
||||||
|
|||||||
58
.vscode/searches/ts36031.code-search
vendored
58
.vscode/searches/ts36031.code-search
vendored
@@ -2,18 +2,52 @@
|
|||||||
# Flags: RegExp
|
# Flags: RegExp
|
||||||
# ContextLines: 2
|
# ContextLines: 2
|
||||||
|
|
||||||
2 results - 2 files
|
8 results - 4 files
|
||||||
|
|
||||||
src/vs/base/browser/ui/tree/asyncDataTree.ts:
|
src/vs/base/browser/ui/tree/asyncDataTree.ts:
|
||||||
243 } : () => 'treeitem',
|
241 } : () => 'treeitem',
|
||||||
244 isChecked: options.accessibilityProvider!.isChecked ? (e) => {
|
242 isChecked: options.accessibilityProvider!.isChecked ? (e) => {
|
||||||
245: return !!(options.accessibilityProvider?.isChecked!(e.element as T));
|
243: return !!(options.accessibilityProvider?.isChecked!(e.element as T));
|
||||||
246 } : undefined,
|
244 } : undefined,
|
||||||
247 getAriaLabel(e) {
|
245 getAriaLabel(e) {
|
||||||
|
|
||||||
src/vs/workbench/contrib/debug/browser/debugConfigurationManager.ts:
|
src/vs/platform/list/browser/listService.ts:
|
||||||
254
|
463
|
||||||
255 return debugDynamicExtensions.map(e => {
|
464 if (typeof options?.openOnSingleClick !== 'boolean' && options?.configurationService) {
|
||||||
256: const type = e.contributes?.debuggers![0].type!;
|
465: this.openOnSingleClick = options?.configurationService!.getValue(openModeSettingKey) !== 'doubleClick';
|
||||||
257 return {
|
466 this._register(options?.configurationService.onDidChangeConfiguration(() => {
|
||||||
258 label: this.getDebuggerLabel(type)!,
|
467: this.openOnSingleClick = options?.configurationService!.getValue(openModeSettingKey) !== 'doubleClick';
|
||||||
|
468 }));
|
||||||
|
469 } else {
|
||||||
|
|
||||||
|
src/vs/workbench/contrib/notebook/browser/notebookEditorWidget.ts:
|
||||||
|
1526
|
||||||
|
1527 await this._ensureActiveKernel();
|
||||||
|
1528: await this._activeKernel?.cancelNotebookCell!(this._notebookViewModel!.uri, undefined);
|
||||||
|
1529 }
|
||||||
|
1530
|
||||||
|
|
||||||
|
1535
|
||||||
|
1536 await this._ensureActiveKernel();
|
||||||
|
1537: await this._activeKernel?.executeNotebookCell!(this._notebookViewModel!.uri, undefined);
|
||||||
|
1538 }
|
||||||
|
1539
|
||||||
|
|
||||||
|
1553
|
||||||
|
1554 await this._ensureActiveKernel();
|
||||||
|
1555: await this._activeKernel?.cancelNotebookCell!(this._notebookViewModel!.uri, cell.handle);
|
||||||
|
1556 }
|
||||||
|
1557
|
||||||
|
|
||||||
|
1567
|
||||||
|
1568 await this._ensureActiveKernel();
|
||||||
|
1569: await this._activeKernel?.executeNotebookCell!(this._notebookViewModel!.uri, cell.handle);
|
||||||
|
1570 }
|
||||||
|
1571
|
||||||
|
|
||||||
|
src/vs/workbench/contrib/webview/electron-browser/iframeWebviewElement.ts:
|
||||||
|
89 .then(() => this._resourceRequestManager.ensureReady())
|
||||||
|
90 .then(() => {
|
||||||
|
91: this.element?.contentWindow!.postMessage({ channel, args: data }, '*');
|
||||||
|
92 });
|
||||||
|
93 }
|
||||||
|
|||||||
18
.vscode/tasks.json
vendored
18
.vscode/tasks.json
vendored
@@ -139,7 +139,7 @@
|
|||||||
"label": "Kill Build Web Extensions",
|
"label": "Kill Build Web Extensions",
|
||||||
"group": "build",
|
"group": "build",
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never",
|
"reveal": "never"
|
||||||
},
|
},
|
||||||
"problemMatcher": "$tsc"
|
"problemMatcher": "$tsc"
|
||||||
},
|
},
|
||||||
@@ -203,11 +203,25 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "node build/lib/prelaunch.js",
|
"command": "node build/lib/preLaunch.js",
|
||||||
"label": "Ensure Prelaunch Dependencies",
|
"label": "Ensure Prelaunch Dependencies",
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "silent"
|
"reveal": "silent"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"type": "npm",
|
||||||
|
"script": "tsec-compile-check",
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"base": "$tsc",
|
||||||
|
"applyTo": "allDocuments",
|
||||||
|
"owner": "tsec"
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"group": "build",
|
||||||
|
"label": "npm: tsec-compile-check",
|
||||||
|
"detail": "node_modules/tsec/bin/tsec -p src/tsconfig.json --noEmit"
|
||||||
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
2
.yarnrc
2
.yarnrc
@@ -1,3 +1,3 @@
|
|||||||
disturl "https://atom.io/download/electron"
|
disturl "https://atom.io/download/electron"
|
||||||
target "7.3.2"
|
target "9.3.0"
|
||||||
runtime "electron"
|
runtime "electron"
|
||||||
|
|||||||
71
CHANGELOG.md
71
CHANGELOG.md
@@ -1,5 +1,76 @@
|
|||||||
# Change Log
|
# Change Log
|
||||||
|
|
||||||
|
## Version 1.24.0
|
||||||
|
* Release date: November 12, 2020
|
||||||
|
* Release status: General Availability
|
||||||
|
* SQL Project improvements
|
||||||
|
* Notebook improvements, including in WYSIWYG editor enhancements
|
||||||
|
* Azure Arc improvements
|
||||||
|
* Azure SQL Deployment UX improvements
|
||||||
|
* Azure Browse Connections Preview
|
||||||
|
* Bug Fixes
|
||||||
|
|
||||||
|
## Version 1.23.0
|
||||||
|
* Release date: October 14, 2020
|
||||||
|
* Release status: General Availability
|
||||||
|
* Added deployments of Azure SQL DB and VM
|
||||||
|
* Added PowerShell kernel results streaming support
|
||||||
|
* Added improvements to SQL Database Projects extension
|
||||||
|
* Bug Fixes
|
||||||
|
* Extension Updates:
|
||||||
|
* SQL Server Import
|
||||||
|
* Machine Learning
|
||||||
|
* Schema Compare
|
||||||
|
* Kusto
|
||||||
|
* SQL Assessment
|
||||||
|
* SQL Database Projects
|
||||||
|
* Azure Arc
|
||||||
|
* azdata
|
||||||
|
|
||||||
|
## Version 1.22.1
|
||||||
|
* Release date: September 30, 2020
|
||||||
|
* Release status: General Availability
|
||||||
|
* Fix bug #12615 Active connection filter doesn't untoggle | [#12615](https://github.com/microsoft/azuredatastudio/issues/12615)
|
||||||
|
* Fix bug #12572 Edit Data grid doesn't escape special characters | [#12572](https://github.com/microsoft/azuredatastudio/issues/12572)
|
||||||
|
* Fix bug #12570 Dashboard Explorer table doesn't escape special characters | [#12570](https://github.com/microsoft/azuredatastudio/issues/12570)
|
||||||
|
* Fix bug #12582 Delete row on Edit Data fails | [#12582](https://github.com/microsoft/azuredatastudio/issues/12582)
|
||||||
|
* Fix bug #12646 SQL Notebooks: Cells being treated isolated | [#12646](https://github.com/microsoft/azuredatastudio/issues/12646)
|
||||||
|
|
||||||
|
## Version 1.22.0
|
||||||
|
* Release date: September 22, 2020
|
||||||
|
* Release status: General Availability
|
||||||
|
* New Notebook Features
|
||||||
|
* Supports brand new text cell editing experience based on rich text formatting and seamless conversion to markdown, also known as WYSIWYG toolbar (What You See Is What You Get)
|
||||||
|
* Supports Kusto kernel
|
||||||
|
* Supports pinning of notebooks
|
||||||
|
* Added support for new version of Jupyter Books
|
||||||
|
* Improved Jupyter Shortcuts
|
||||||
|
* Introduced perf loading improvements
|
||||||
|
* Added Azure Arc extension - Users can try out Azure Arc public preview through Azure Data Studio. This includes:
|
||||||
|
* Deploy data controller
|
||||||
|
* Deploy Postgres
|
||||||
|
* Deploy Managed Instance for Azure Arc
|
||||||
|
* Connect to data controller
|
||||||
|
* Access data service dashboards
|
||||||
|
* Azure Arc Jupyter Book
|
||||||
|
* Added new deployment options
|
||||||
|
* Azure SQL Database Edge
|
||||||
|
* (Edge will require Azure SQL Edge Deployment Extension)
|
||||||
|
* Added SQL Database Projects extension - The SQL Database Projects extension brings project-based database development to Azure Data Studio. In this preview release, SQL projects can be created and published from Azure Data Studio.
|
||||||
|
* Added Kusto (KQL) extension - Brings native Kusto experiences in Azure Data Studio for data exploration and data analytics against massive amount of real-time streaming data stored in Azure Data Explorer. This preview release supports connecting and browsing Azure Data Explorer clusters, writing KQL queries as well as authoring notebooks with Kusto kernel.
|
||||||
|
* SQL Server Import extension GA - Announcing the GA of the SQL Server Import extension, features no longer in preview. This extension facilitates importing csv/txt files. Learn more about the extension in [this article](sql-server-import-extension.md).
|
||||||
|
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22September+2020+Release%22+is%3Aclosed).
|
||||||
|
|
||||||
|
## Version 1.21.0
|
||||||
|
* Release date: August 12, 2020
|
||||||
|
* Release status: General Availability
|
||||||
|
* New Notebook Features
|
||||||
|
* Move cell locations changd
|
||||||
|
* Added action to convert cells to Text Cell or Code cell
|
||||||
|
* Jupyter Books picker to open Jupyter Books directly from Github
|
||||||
|
* Search bar added to Notebooks Viewlet for searching through Jupyter Books
|
||||||
|
* Address issues in [August 2020 Milestone](https://github.com/microsoft/azuredatastudio/milestone/59?closed=1)
|
||||||
|
|
||||||
## Version 1.20.1
|
## Version 1.20.1
|
||||||
* Release date: July 17, 2020
|
* Release date: July 17, 2020
|
||||||
* Release status: General Availability
|
* Release status: General Availability
|
||||||
|
|||||||
18
README.md
18
README.md
@@ -19,7 +19,7 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
|
|||||||
| [Linux DEB][linux-deb] |
|
| [Linux DEB][linux-deb] |
|
||||||
|
|
||||||
|
|
||||||
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
|
Go to our [download page](https://aka.ms/getazuredatastudio) for more specific instructions.
|
||||||
|
|
||||||
## Try out the latest insiders build from `main`:
|
## Try out the latest insiders build from `main`:
|
||||||
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
|
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
|
||||||
@@ -29,6 +29,8 @@ Go to our [download page](https://aka.ms/azuredatastudio) for more specific inst
|
|||||||
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
|
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
|
||||||
|
|
||||||
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/main/CHANGELOG.md) for additional details of what's in this release.
|
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/main/CHANGELOG.md) for additional details of what's in this release.
|
||||||
|
Go to our [download page](https://aka.ms/getazuredatastudio) for more specific instructions.
|
||||||
|
|
||||||
|
|
||||||
## **Feature Highlights**
|
## **Feature Highlights**
|
||||||
|
|
||||||
@@ -129,10 +131,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.
|
|||||||
|
|
||||||
Licensed under the [Source EULA](LICENSE.txt).
|
Licensed under the [Source EULA](LICENSE.txt).
|
||||||
|
|
||||||
[win-user]: https://go.microsoft.com/fwlink/?linkid=2135512
|
[win-user]: https://go.microsoft.com/fwlink/?linkid=2148607
|
||||||
[win-system]: https://go.microsoft.com/fwlink/?linkid=2135513
|
[win-system]: https://go.microsoft.com/fwlink/?linkid=2148907
|
||||||
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2135514
|
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2148908
|
||||||
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2135266
|
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2148710
|
||||||
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2135267
|
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2148708
|
||||||
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2135268
|
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2148709
|
||||||
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2135515
|
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2148806
|
||||||
|
|||||||
@@ -41,6 +41,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
|||||||
JupyterLab: https://github.com/jupyterlab/jupyterlab
|
JupyterLab: https://github.com/jupyterlab/jupyterlab
|
||||||
keytar: https://github.com/atom/node-keytar
|
keytar: https://github.com/atom/node-keytar
|
||||||
make-error: https://github.com/JsCommunity/make-error
|
make-error: https://github.com/JsCommunity/make-error
|
||||||
|
mark.js: https://github.com/julmot/mark.js
|
||||||
minimist: https://github.com/substack/minimist
|
minimist: https://github.com/substack/minimist
|
||||||
moment: https://github.com/moment/moment
|
moment: https://github.com/moment/moment
|
||||||
native-keymap: https://github.com/Microsoft/node-native-keymap
|
native-keymap: https://github.com/Microsoft/node-native-keymap
|
||||||
@@ -63,6 +64,8 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
|||||||
svg.js: https://github.com/svgdotjs/svg.js
|
svg.js: https://github.com/svgdotjs/svg.js
|
||||||
systemjs: https://github.com/systemjs/systemjs
|
systemjs: https://github.com/systemjs/systemjs
|
||||||
temp-write: https://github.com/sindresorhus/temp-write
|
temp-write: https://github.com/sindresorhus/temp-write
|
||||||
|
turndown: https://github.com/domchristie/turndown
|
||||||
|
turndown-plugin-gfm: https://github.com/domchristie/turndown-plugin-gfm
|
||||||
underscore: https://github.com/jashkenas/underscore
|
underscore: https://github.com/jashkenas/underscore
|
||||||
v8-profiler: https://github.com/node-inspector/v8-profiler
|
v8-profiler: https://github.com/node-inspector/v8-profiler
|
||||||
vscode: https://github.com/microsoft/vscode
|
vscode: https://github.com/microsoft/vscode
|
||||||
@@ -1254,6 +1257,32 @@ ISC © Julien Fontanet
|
|||||||
=========================================
|
=========================================
|
||||||
END OF make-error NOTICES AND INFORMATION
|
END OF make-error NOTICES AND INFORMATION
|
||||||
|
|
||||||
|
%% mark.js NOTICES AND INFORMATION BEGIN HERE
|
||||||
|
=========================================
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2014–2019 Julian Kühnel
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
=========================================
|
||||||
|
END OF mark.js NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% minimist NOTICES AND INFORMATION BEGIN HERE
|
%% minimist NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
This software is released under the MIT license:
|
This software is released under the MIT license:
|
||||||
@@ -2002,6 +2031,58 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
|
|||||||
=========================================
|
=========================================
|
||||||
END OF temp-write NOTICES AND INFORMATION
|
END OF temp-write NOTICES AND INFORMATION
|
||||||
|
|
||||||
|
%% turndown NOTICES AND INFORMATION BEGIN HERE
|
||||||
|
=========================================
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2017 Dom Christie
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
=========================================
|
||||||
|
END OF turndown NOTICES AND INFORMATION
|
||||||
|
|
||||||
|
%% turndown-plugin-gfm NOTICES AND INFORMATION BEGIN HERE
|
||||||
|
=========================================
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2017 Dom Christie
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
=========================================
|
||||||
|
END OF turndown-plugin-gfm NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% underscore NOTICES AND INFORMATION BEGIN HERE
|
%% underscore NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
|
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
|
||||||
|
|||||||
1
build/.gitattributes
vendored
Normal file
1
build/.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
* text eol=lf
|
||||||
@@ -15,7 +15,7 @@
|
|||||||
"keywords": [],
|
"keywords": [],
|
||||||
"author": "",
|
"author": "",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@actions/core": "^1.2.3",
|
"@actions/core": "^1.2.6",
|
||||||
"@actions/github": "^2.1.1",
|
"@actions/github": "^2.1.1",
|
||||||
"axios": "^0.19.2",
|
"axios": "^0.19.2",
|
||||||
"ts-node": "^8.6.2",
|
"ts-node": "^8.6.2",
|
||||||
|
|||||||
@@ -2,10 +2,10 @@
|
|||||||
# yarn lockfile v1
|
# yarn lockfile v1
|
||||||
|
|
||||||
|
|
||||||
"@actions/core@^1.2.3":
|
"@actions/core@^1.2.6":
|
||||||
version "1.2.3"
|
version "1.2.6"
|
||||||
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.3.tgz#e844b4fa0820e206075445079130868f95bfca95"
|
resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.2.6.tgz#a78d49f41a4def18e88ce47c2cac615d5694bf09"
|
||||||
integrity sha512-Wp4xnyokakM45Uuj4WLUxdsa8fJjKVl1fDTsPbTEcTcuu0Nb26IPQbOtjmnfaCPGcaoPOOqId8H9NapZ8gii4w==
|
integrity sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA==
|
||||||
|
|
||||||
"@actions/github@^2.1.1":
|
"@actions/github@^2.1.1":
|
||||||
version "2.1.1"
|
version "2.1.1"
|
||||||
@@ -286,9 +286,9 @@ nice-try@^1.0.4:
|
|||||||
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
|
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
|
||||||
|
|
||||||
node-fetch@^2.3.0:
|
node-fetch@^2.3.0:
|
||||||
version "2.6.0"
|
version "2.6.1"
|
||||||
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
|
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
|
||||||
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
|
integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
|
||||||
|
|
||||||
npm-run-path@^2.0.0:
|
npm-run-path@^2.0.0:
|
||||||
version "2.0.2"
|
version "2.0.2"
|
||||||
|
|||||||
@@ -53,7 +53,7 @@ async function uploadBlob(blobService: azure.BlobService, quality: string, blobN
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
|
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
|
||||||
}
|
}
|
||||||
|
|
||||||
function getEnv(name: string): string {
|
function getEnv(name: string): string {
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ const fileNames = [
|
|||||||
];
|
];
|
||||||
|
|
||||||
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
|
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
|
||||||
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
|
await new Promise<void>((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
|
||||||
}
|
}
|
||||||
|
|
||||||
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
|
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
|
||||||
@@ -33,7 +33,7 @@ async function uploadBlob(blobService: azure.BlobService, container: string, blo
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
|
await new Promise<void>((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
|
||||||
}
|
}
|
||||||
|
|
||||||
async function publish(commit: string, files: readonly string[]): Promise<void> {
|
async function publish(commit: string, files: readonly string[]): Promise<void> {
|
||||||
|
|||||||
@@ -43,6 +43,7 @@ function createDefaultConfig(quality: string): Config {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function getConfig(quality: string): Promise<Config> {
|
function getConfig(quality: string): Promise<Config> {
|
||||||
|
console.log(`Getting config for quality ${quality}`);
|
||||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||||
const collection = 'dbs/builds/colls/config';
|
const collection = 'dbs/builds/colls/config';
|
||||||
const query = {
|
const query = {
|
||||||
@@ -52,13 +53,13 @@ function getConfig(quality: string): Promise<Config> {
|
|||||||
]
|
]
|
||||||
};
|
};
|
||||||
|
|
||||||
return new Promise<Config>((c, e) => {
|
return retry(() => new Promise<Config>((c, e) => {
|
||||||
client.queryDocuments(collection, query, { enableCrossPartitionQuery: true }).toArray((err, results) => {
|
client.queryDocuments(collection, query, { enableCrossPartitionQuery: true }).toArray((err, results) => {
|
||||||
if (err && err.code !== 409) { return e(err); }
|
if (err && err.code !== 409) { return e(err); }
|
||||||
|
|
||||||
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
|
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
|
||||||
});
|
});
|
||||||
});
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
interface Asset {
|
interface Asset {
|
||||||
@@ -86,6 +87,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
|
|||||||
updateTries++;
|
updateTries++;
|
||||||
|
|
||||||
return new Promise<void>((c, e) => {
|
return new Promise<void>((c, e) => {
|
||||||
|
console.log(`Querying existing documents to update...`);
|
||||||
client.queryDocuments(collection, updateQuery, { enableCrossPartitionQuery: true }).toArray((err, results) => {
|
client.queryDocuments(collection, updateQuery, { enableCrossPartitionQuery: true }).toArray((err, results) => {
|
||||||
if (err) { return e(err); }
|
if (err) { return e(err); }
|
||||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
if (results.length !== 1) { return e(new Error('No documents')); }
|
||||||
@@ -101,6 +103,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
|
|||||||
release.updates[platform] = type;
|
release.updates[platform] = type;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
console.log(`Replacing existing document with updated version`);
|
||||||
client.replaceDocument(release._self, release, err => {
|
client.replaceDocument(release._self, release, err => {
|
||||||
if (err && err.code === 409 && updateTries < 5) { return c(update()); }
|
if (err && err.code === 409 && updateTries < 5) { return c(update()); }
|
||||||
if (err) { return e(err); }
|
if (err) { return e(err); }
|
||||||
@@ -112,7 +115,8 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return new Promise<void>((c, e) => {
|
return retry(() => new Promise<void>((c, e) => {
|
||||||
|
console.log(`Attempting to create document`);
|
||||||
client.createDocument(collection, release, err => {
|
client.createDocument(collection, release, err => {
|
||||||
if (err && err.code === 409) { return c(update()); }
|
if (err && err.code === 409) { return c(update()); }
|
||||||
if (err) { return e(err); }
|
if (err) { return e(err); }
|
||||||
@@ -120,7 +124,7 @@ function createOrUpdate(commit: string, quality: string, platform: string, type:
|
|||||||
console.log('Build successfully published.');
|
console.log('Build successfully published.');
|
||||||
c();
|
c();
|
||||||
});
|
});
|
||||||
});
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
|
async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
|
||||||
@@ -188,7 +192,6 @@ async function publish(commit: string, quality: string, platform: string, type:
|
|||||||
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
|
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log('Uploading blobs to Azure storage...');
|
console.log('Uploading blobs to Azure storage...');
|
||||||
|
|
||||||
await uploadBlob(blobService, quality, blobName, file);
|
await uploadBlob(blobService, quality, blobName, file);
|
||||||
@@ -247,6 +250,22 @@ async function publish(commit: string, quality: string, platform: string, type:
|
|||||||
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
|
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const RETRY_TIMES = 10;
|
||||||
|
async function retry<T>(fn: () => Promise<T>): Promise<T> {
|
||||||
|
for (let run = 1; run <= RETRY_TIMES; run++) {
|
||||||
|
try {
|
||||||
|
return await fn();
|
||||||
|
} catch (err) {
|
||||||
|
if (!/ECONNRESET/.test(err.message)) {
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
console.log(`Caught error ${err} - ${run}/${RETRY_TIMES}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('Retried too many times');
|
||||||
|
}
|
||||||
|
|
||||||
function main(): void {
|
function main(): void {
|
||||||
const commit = process.env['BUILD_SOURCEVERSION'];
|
const commit = process.env['BUILD_SOURCEVERSION'];
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||||
inputs:
|
inputs:
|
||||||
@@ -50,7 +50,7 @@ steps:
|
|||||||
displayName: Run Unit Tests (Electron)
|
displayName: Run Unit Tests (Electron)
|
||||||
|
|
||||||
# - script: | {{SQL CARBON EDIT}} disable
|
# - script: | {{SQL CARBON EDIT}} disable
|
||||||
# yarn test-browser --browser chromium --browser webkit --browser firefox
|
# yarn test-browser --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
||||||
# displayName: Run Unit Tests (Browser)
|
# displayName: Run Unit Tests (Browser)
|
||||||
|
|
||||||
# - script: | {{SQL CARBON EDIT}} disable
|
# - script: | {{SQL CARBON EDIT}} disable
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -87,10 +87,6 @@ steps:
|
|||||||
set -e
|
set -e
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
yarn gulp vscode-darwin-min-ci
|
yarn gulp vscode-darwin-min-ci
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
|
||||||
yarn gulp vscode-reh-darwin-min-ci
|
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
|
||||||
yarn gulp vscode-reh-web-darwin-min-ci
|
|
||||||
displayName: Build
|
displayName: Build
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
@@ -101,7 +97,7 @@ steps:
|
|||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn test-browser --build --browser chromium --browser webkit --browser firefox
|
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
||||||
displayName: Run unit tests (Browser)
|
displayName: Run unit tests (Browser)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
@@ -118,6 +114,13 @@ steps:
|
|||||||
displayName: Run integration tests (Electron)
|
displayName: Run integration tests (Electron)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
|
||||||
|
./resources/server/test/test-web-integration.sh --browser webkit
|
||||||
|
displayName: Run integration tests (Browser)
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
|
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
|
||||||
@@ -128,13 +131,6 @@ steps:
|
|||||||
displayName: Run remote integration tests (Electron)
|
displayName: Run remote integration tests (Electron)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
|
|
||||||
./resources/server/test/test-web-integration.sh --browser webkit
|
|
||||||
displayName: Run integration tests (Browser)
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
|
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
|
||||||
@@ -160,6 +156,13 @@ steps:
|
|||||||
continueOnError: true
|
continueOnError: true
|
||||||
condition: failed()
|
condition: failed()
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: Publish Tests Results
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: '*-results.xml'
|
||||||
|
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
||||||
|
condition: succeededOrFailed()
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
|
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
|
||||||
@@ -204,13 +207,6 @@ steps:
|
|||||||
zip -d $(agent.builddirectory)/VSCode-darwin.zip "*.pkg"
|
zip -d $(agent.builddirectory)/VSCode-darwin.zip "*.pkg"
|
||||||
displayName: Clean Archive
|
displayName: Clean Archive
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
|
||||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
|
||||||
node build/azure-pipelines/common/createAsset.js darwin-unnotarized archive "VSCode-darwin-$VSCODE_QUALITY.zip" $(agent.builddirectory)/VSCode-darwin.zip
|
|
||||||
displayName: Publish Unnotarized Build
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
|
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
|
||||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||||
|
|||||||
@@ -96,8 +96,6 @@ steps:
|
|||||||
set -e
|
set -e
|
||||||
yarn gulp package-rebuild-extensions
|
yarn gulp package-rebuild-extensions
|
||||||
yarn gulp vscode-darwin-min-ci
|
yarn gulp vscode-darwin-min-ci
|
||||||
yarn gulp vscode-reh-darwin-min-ci
|
|
||||||
yarn gulp vscode-reh-web-darwin-min-ci
|
|
||||||
displayName: Build
|
displayName: Build
|
||||||
env:
|
env:
|
||||||
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
@@ -125,8 +123,9 @@ steps:
|
|||||||
set -e
|
set -e
|
||||||
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin
|
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin
|
||||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||||
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots"
|
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log"
|
||||||
displayName: Run smoke tests (Electron)
|
displayName: Run smoke tests (Electron)
|
||||||
|
continueOnError: true
|
||||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
# - script: |
|
# - script: |
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ pr:
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ pr:
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
@@ -31,10 +31,10 @@ steps:
|
|||||||
git config user.email "vscode@microsoft.com"
|
git config user.email "vscode@microsoft.com"
|
||||||
git config user.name "VSCode"
|
git config user.name "VSCode"
|
||||||
|
|
||||||
git checkout origin/electron-x.y.z
|
git checkout origin/electron-11.x.y
|
||||||
git merge origin/master
|
git merge origin/master
|
||||||
|
|
||||||
# Push master branch into exploration branch
|
# Push master branch into exploration branch
|
||||||
git push origin HEAD:electron-x.y.z
|
git push origin HEAD:electron-11.x.y
|
||||||
|
|
||||||
displayName: Sync & Merge Exploration
|
displayName: Sync & Merge Exploration
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
inputs:
|
inputs:
|
||||||
@@ -44,8 +44,8 @@ steps:
|
|||||||
|
|
||||||
- script: | # {{SQL CARBON EDIT}} add strict null check
|
- script: | # {{SQL CARBON EDIT}} add strict null check
|
||||||
yarn strict-vscode
|
yarn strict-vscode
|
||||||
|
|
||||||
displayName: Run Strict Null Check
|
displayName: Run Strict Null Check
|
||||||
|
|
||||||
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
|
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
|
||||||
# yarn monaco-compile-check
|
# yarn monaco-compile-check
|
||||||
# displayName: Run Monaco Editor Checks
|
# displayName: Run Monaco Editor Checks
|
||||||
@@ -67,7 +67,7 @@ steps:
|
|||||||
displayName: Run Unit Tests (Electron)
|
displayName: Run Unit Tests (Electron)
|
||||||
|
|
||||||
# - script: | {{SQL CARBON EDIT}} disable
|
# - script: | {{SQL CARBON EDIT}} disable
|
||||||
# DISPLAY=:10 yarn test-browser --browser chromium
|
# DISPLAY=:10 yarn test-browser --browser chromium --tfs "Browser Unit Tests"
|
||||||
# displayName: Run Unit Tests (Browser)
|
# displayName: Run Unit Tests (Browser)
|
||||||
|
|
||||||
# - script: | {{SQL CARBON EDIT}} disable
|
# - script: | {{SQL CARBON EDIT}} disable
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -52,21 +52,25 @@ steps:
|
|||||||
git merge $(node -p "require('./package.json').distro")
|
git merge $(node -p "require('./package.json').distro")
|
||||||
displayName: Merge distro
|
displayName: Merge distro
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
echo -n $VSCODE_ARCH > .build/arch
|
||||||
|
displayName: Prepare arch cache flag
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||||
vstsFeed: 'npm-vscode'
|
vstsFeed: 'npm-vscode'
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
CHILD_CONCURRENCY=1 npm_config_arch=$(NPM_ARCH) yarn --frozen-lockfile
|
||||||
displayName: Install dependencies
|
displayName: Install dependencies
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||||
vstsFeed: 'npm-vscode'
|
vstsFeed: 'npm-vscode'
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
@@ -85,80 +89,98 @@ steps:
|
|||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
yarn gulp vscode-linux-x64-min-ci
|
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
yarn gulp vscode-reh-linux-x64-min-ci
|
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
yarn gulp vscode-reh-web-linux-x64-min-ci
|
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
|
||||||
displayName: Build
|
displayName: Build
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
service xvfb start
|
service xvfb start
|
||||||
displayName: Start xvfb
|
displayName: Start xvfb
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
||||||
displayName: Run unit tests (Electron)
|
displayName: Run unit tests (Electron)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
DISPLAY=:10 yarn test-browser --build --browser chromium
|
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
|
||||||
displayName: Run unit tests (Browser)
|
displayName: Run unit tests (Browser)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
# Figure out the full absolute path of the product we just built
|
# Figure out the full absolute path of the product we just built
|
||||||
# including the remote server and configure the integration tests
|
# including the remote server and configure the integration tests
|
||||||
# to run with these builds instead of running out of sources.
|
# to run with these builds instead of running out of sources.
|
||||||
set -e
|
set -e
|
||||||
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
|
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
|
||||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
|
||||||
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
|
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||||
displayName: Run integration tests (Electron)
|
displayName: Run integration tests (Electron)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
|
||||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
|
||||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
|
||||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
|
|
||||||
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
|
|
||||||
displayName: Run remote integration tests (Electron)
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-x64" \
|
|
||||||
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
|
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
|
||||||
displayName: Run integration tests (Browser)
|
displayName: Run integration tests (Browser)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
|
||||||
|
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||||
|
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||||
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
|
||||||
|
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
|
||||||
|
displayName: Run remote integration tests (Electron)
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- task: PublishPipelineArtifact@0
|
- task: PublishPipelineArtifact@0
|
||||||
inputs:
|
inputs:
|
||||||
artifactName: crash-dump-linux
|
artifactName: 'crash-dump-linux-$(VSCODE_ARCH)'
|
||||||
targetPath: .build/crashes
|
targetPath: .build/crashes
|
||||||
displayName: 'Publish Crash Reports'
|
displayName: 'Publish Crash Reports'
|
||||||
continueOnError: true
|
continueOnError: true
|
||||||
condition: failed()
|
condition: failed()
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: Publish Tests Results
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: '*-results.xml'
|
||||||
|
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
||||||
|
condition: succeededOrFailed()
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn gulp "vscode-linux-x64-build-deb"
|
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
|
||||||
yarn gulp "vscode-linux-x64-build-rpm"
|
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
|
||||||
yarn gulp "vscode-linux-x64-prepare-snap"
|
displayName: Build deb, rpm packages
|
||||||
displayName: Build packages
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
|
||||||
|
displayName: Prepare snap package
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||||
|
|
||||||
|
# needed for code signing
|
||||||
|
- task: UseDotNet@2
|
||||||
|
displayName: 'Install .NET Core SDK 2.x'
|
||||||
|
inputs:
|
||||||
|
version: 2.x
|
||||||
|
|
||||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
inputs:
|
inputs:
|
||||||
ConnectedServiceName: 'ESRP CodeSign'
|
ConnectedServiceName: 'ESRP CodeSign'
|
||||||
FolderPath: '.build/linux/rpm/x86_64'
|
FolderPath: '.build/linux/rpm'
|
||||||
Pattern: '*.rpm'
|
Pattern: '*.rpm'
|
||||||
signConfigType: inlineSignParams
|
signConfigType: inlineSignParams
|
||||||
inlineOperation: |
|
inlineOperation: |
|
||||||
@@ -179,14 +201,16 @@ steps:
|
|||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
|
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
||||||
./build/azure-pipelines/linux/publish.sh
|
./build/azure-pipelines/linux/publish.sh
|
||||||
displayName: Publish
|
displayName: Publish
|
||||||
|
|
||||||
- task: PublishPipelineArtifact@0
|
- task: PublishPipelineArtifact@0
|
||||||
displayName: 'Publish Pipeline Artifact'
|
displayName: 'Publish Pipeline Artifact'
|
||||||
inputs:
|
inputs:
|
||||||
artifactName: snap-x64
|
artifactName: 'snap-$(VSCODE_ARCH)'
|
||||||
targetPath: .build/linux/snap-tarball
|
targetPath: .build/linux/snap-tarball
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||||
displayName: 'Component Detection'
|
displayName: 'Component Detection'
|
||||||
|
|||||||
@@ -4,11 +4,10 @@ REPO="$(pwd)"
|
|||||||
ROOT="$REPO/.."
|
ROOT="$REPO/.."
|
||||||
|
|
||||||
# Publish tarball
|
# Publish tarball
|
||||||
PLATFORM_LINUX="linux-x64"
|
PLATFORM_LINUX="linux-$VSCODE_ARCH"
|
||||||
BUILDNAME="VSCode-$PLATFORM_LINUX"
|
BUILDNAME="VSCode-$PLATFORM_LINUX"
|
||||||
BUILD="$ROOT/$BUILDNAME"
|
|
||||||
BUILD_VERSION="$(date +%s)"
|
BUILD_VERSION="$(date +%s)"
|
||||||
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
|
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$VSCODE_ARCH-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$VSCODE_ARCH-$BUILD_VERSION.tar.gz"
|
||||||
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
||||||
|
|
||||||
rm -rf $ROOT/code-*.tar.*
|
rm -rf $ROOT/code-*.tar.*
|
||||||
@@ -28,24 +27,36 @@ rm -rf $ROOT/vscode-server-*.tar.*
|
|||||||
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
||||||
|
|
||||||
# Publish DEB
|
# Publish DEB
|
||||||
PLATFORM_DEB="linux-deb-x64"
|
case $VSCODE_ARCH in
|
||||||
DEB_ARCH="amd64"
|
x64) DEB_ARCH="amd64" ;;
|
||||||
|
*) DEB_ARCH="$VSCODE_ARCH" ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
|
||||||
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
||||||
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
||||||
|
|
||||||
node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"
|
node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"
|
||||||
|
|
||||||
# Publish RPM
|
# Publish RPM
|
||||||
PLATFORM_RPM="linux-rpm-x64"
|
case $VSCODE_ARCH in
|
||||||
RPM_ARCH="x86_64"
|
x64) RPM_ARCH="x86_64" ;;
|
||||||
|
armhf) RPM_ARCH="armv7hl" ;;
|
||||||
|
arm64) RPM_ARCH="aarch64" ;;
|
||||||
|
*) RPM_ARCH="$VSCODE_ARCH" ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
|
||||||
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
||||||
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
||||||
|
|
||||||
node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"
|
node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"
|
||||||
|
|
||||||
# Publish Snap
|
if [ "$VSCODE_ARCH" == "x64" ]; then
|
||||||
# Pack snap tarball artifact, in order to preserve file perms
|
# Publish Snap
|
||||||
mkdir -p $REPO/.build/linux/snap-tarball
|
# Pack snap tarball artifact, in order to preserve file perms
|
||||||
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
|
mkdir -p $REPO/.build/linux/snap-tarball
|
||||||
rm -rf $SNAP_TARBALL_PATH
|
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
|
||||||
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
|
rm -rf $SNAP_TARBALL_PATH
|
||||||
|
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
|
||||||
|
fi
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
@@ -91,8 +91,7 @@ steps:
|
|||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn gulp vscode-linux-x64-min-ci
|
yarn gulp vscode-linux-x64-min-ci
|
||||||
yarn gulp vscode-reh-linux-x64-min-ci
|
yarn gulp vscode-web-min-ci
|
||||||
yarn gulp vscode-reh-web-linux-x64-min-ci
|
|
||||||
displayName: Build
|
displayName: Build
|
||||||
env:
|
env:
|
||||||
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
@@ -134,7 +133,8 @@ steps:
|
|||||||
set -e
|
set -e
|
||||||
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
||||||
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
export INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||||
|
export NO_CLEANUP=1
|
||||||
DISPLAY=:10 node ./scripts/test-extensions-unit.js ${{ extension }}
|
DISPLAY=:10 node ./scripts/test-extensions-unit.js ${{ extension }}
|
||||||
displayName: 'Run ${{ extension }} Stable Extension Unit Tests'
|
displayName: 'Run ${{ extension }} Stable Extension Unit Tests'
|
||||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
@@ -149,6 +149,15 @@ steps:
|
|||||||
continueOnError: true
|
continueOnError: true
|
||||||
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
|
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
mkdir -p $(Build.ArtifactStagingDirectory)/logs/linux-x64
|
||||||
|
cd /tmp
|
||||||
|
tar -czvf $(Build.ArtifactStagingDirectory)/logs/linux-x64/logs-linux-x64.tar.gz adsuser*
|
||||||
|
displayName: Archive Logs
|
||||||
|
continueOnError: true
|
||||||
|
condition: succeededOrFailed()
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn gulp vscode-linux-x64-build-deb
|
yarn gulp vscode-linux-x64-build-deb
|
||||||
@@ -159,6 +168,45 @@ steps:
|
|||||||
yarn gulp vscode-linux-x64-build-rpm
|
yarn gulp vscode-linux-x64-build-rpm
|
||||||
displayName: Build Rpm
|
displayName: Build Rpm
|
||||||
|
|
||||||
|
- task: UseDotNet@2
|
||||||
|
displayName: 'Install .NET Core sdk for signing'
|
||||||
|
inputs:
|
||||||
|
packageType: sdk
|
||||||
|
version: 2.1.x
|
||||||
|
installationPath: $(Agent.ToolsDirectory)/dotnet
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: 'Code Signing'
|
||||||
|
FolderPath: '$(Build.SourcesDirectory)/.build'
|
||||||
|
Pattern: 'extensions/*.vsix'
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-233016",
|
||||||
|
"operationSetCode": "OpcSign",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "FileDigest",
|
||||||
|
"parameterValue": "/fd \"SHA256\""
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "sign",
|
||||||
|
"toolVersion": "1.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"keyCode": "CP-233016",
|
||||||
|
"operationSetCode": "OpcVerify",
|
||||||
|
"parameters": [],
|
||||||
|
"toolName": "sign",
|
||||||
|
"toolVersion": "1.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 120
|
||||||
|
displayName: 'Signing Extensions'
|
||||||
|
condition: and(succeeded(), eq(variables['signed'], true))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
./build/azure-pipelines/linux/createDrop.sh
|
./build/azure-pipelines/linux/createDrop.sh
|
||||||
@@ -170,6 +218,7 @@ steps:
|
|||||||
mkdir -p $(Build.ArtifactStagingDirectory)/test-results/coverage
|
mkdir -p $(Build.ArtifactStagingDirectory)/test-results/coverage
|
||||||
cp --parents -r $(Build.SourcesDirectory)/extensions/*/coverage/** $(Build.ArtifactStagingDirectory)/test-results/coverage
|
cp --parents -r $(Build.SourcesDirectory)/extensions/*/coverage/** $(Build.ArtifactStagingDirectory)/test-results/coverage
|
||||||
displayName: Copy Coverage
|
displayName: Copy Coverage
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
- task: PublishTestResults@2
|
- task: PublishTestResults@2
|
||||||
displayName: 'Publish Test Results test-results.xml'
|
displayName: 'Publish Test Results test-results.xml'
|
||||||
@@ -181,6 +230,7 @@ steps:
|
|||||||
|
|
||||||
- task: PublishBuildArtifacts@1
|
- task: PublishBuildArtifacts@1
|
||||||
displayName: 'Publish Artifact: drop'
|
displayName: 'Publish Artifact: drop'
|
||||||
|
condition: succeededOrFailed()
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||||
displayName: 'Component Detection'
|
displayName: 'Component Detection'
|
||||||
|
|||||||
@@ -1,157 +1,3 @@
|
|||||||
resources:
|
|
||||||
containers:
|
|
||||||
- container: vscode-x64
|
|
||||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
|
|
||||||
endpoint: VSCodeHub
|
|
||||||
- container: snapcraft
|
|
||||||
image: snapcore/snapcraft:stable
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
- job: Compile
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
container: vscode-x64
|
|
||||||
steps:
|
|
||||||
- template: product-compile.yml
|
|
||||||
|
|
||||||
- job: Windows
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: VS2017-Win2016
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: x64
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: win32/product-build-win32.yml
|
|
||||||
|
|
||||||
- job: Windows32
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: VS2017-Win2016
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: ia32
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: win32/product-build-win32.yml
|
|
||||||
|
|
||||||
- job: WindowsARM64
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: VS2017-Win2016
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: arm64
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: win32/product-build-win32-arm64.yml
|
|
||||||
|
|
||||||
- job: Linux
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
container: vscode-x64
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: linux/product-build-linux.yml
|
|
||||||
|
|
||||||
- job: LinuxSnap
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
container: snapcraft
|
|
||||||
dependsOn: Linux
|
|
||||||
steps:
|
|
||||||
- template: linux/snap-build-linux.yml
|
|
||||||
|
|
||||||
- job: LinuxArmhf
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: armhf
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: linux/product-build-linux-multiarch.yml
|
|
||||||
|
|
||||||
- job: LinuxArm64
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: arm64
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: linux/product-build-linux-multiarch.yml
|
|
||||||
|
|
||||||
- job: LinuxAlpine
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: alpine
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: linux/product-build-linux-multiarch.yml
|
|
||||||
|
|
||||||
- job: LinuxWeb
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_WEB'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: x64
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: web/product-build-web.yml
|
|
||||||
|
|
||||||
- job: macOS
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: macOS-latest
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: darwin/product-build-darwin.yml
|
|
||||||
|
|
||||||
- job: Release
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
dependsOn:
|
|
||||||
- Windows
|
|
||||||
- Windows32
|
|
||||||
- Linux
|
|
||||||
- LinuxSnap
|
|
||||||
- LinuxArmhf
|
|
||||||
- LinuxArm64
|
|
||||||
- LinuxAlpine
|
|
||||||
- macOS
|
|
||||||
steps:
|
|
||||||
- template: release.yml
|
|
||||||
|
|
||||||
- job: Mooncake
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
|
||||||
dependsOn:
|
|
||||||
- Windows
|
|
||||||
- Windows32
|
|
||||||
- Linux
|
|
||||||
- LinuxSnap
|
|
||||||
- LinuxArmhf
|
|
||||||
- LinuxArm64
|
|
||||||
- LinuxAlpine
|
|
||||||
- LinuxWeb
|
|
||||||
- macOS
|
|
||||||
steps:
|
|
||||||
- template: sync-mooncake.yml
|
|
||||||
|
|
||||||
trigger: none
|
trigger: none
|
||||||
pr: none
|
pr: none
|
||||||
|
|
||||||
@@ -160,4 +6,154 @@ schedules:
|
|||||||
displayName: Mon-Fri at 7:00
|
displayName: Mon-Fri at 7:00
|
||||||
branches:
|
branches:
|
||||||
include:
|
include:
|
||||||
- main
|
- master
|
||||||
|
|
||||||
|
resources:
|
||||||
|
containers:
|
||||||
|
- container: vscode-x64
|
||||||
|
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
|
||||||
|
endpoint: VSCodeHub
|
||||||
|
- container: vscode-arm64
|
||||||
|
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
|
||||||
|
endpoint: VSCodeHub
|
||||||
|
- container: vscode-armhf
|
||||||
|
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-armhf
|
||||||
|
endpoint: VSCodeHub
|
||||||
|
- container: snapcraft
|
||||||
|
image: snapcore/snapcraft:stable
|
||||||
|
|
||||||
|
stages:
|
||||||
|
- stage: Compile
|
||||||
|
jobs:
|
||||||
|
- job: Compile
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
container: vscode-x64
|
||||||
|
steps:
|
||||||
|
- template: product-compile.yml
|
||||||
|
|
||||||
|
- stage: Windows
|
||||||
|
dependsOn:
|
||||||
|
- Compile
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||||
|
pool:
|
||||||
|
vmImage: VS2017-Win2016
|
||||||
|
jobs:
|
||||||
|
- job: Windows
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: x64
|
||||||
|
steps:
|
||||||
|
- template: win32/product-build-win32.yml
|
||||||
|
|
||||||
|
- job: Windows32
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: ia32
|
||||||
|
steps:
|
||||||
|
- template: win32/product-build-win32.yml
|
||||||
|
|
||||||
|
- job: WindowsARM64
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: arm64
|
||||||
|
steps:
|
||||||
|
- template: win32/product-build-win32-arm64.yml
|
||||||
|
|
||||||
|
- stage: Linux
|
||||||
|
dependsOn:
|
||||||
|
- Compile
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
jobs:
|
||||||
|
- job: Linux
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
||||||
|
container: vscode-x64
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: x64
|
||||||
|
NPM_ARCH: x64
|
||||||
|
steps:
|
||||||
|
- template: linux/product-build-linux.yml
|
||||||
|
|
||||||
|
- job: LinuxSnap
|
||||||
|
dependsOn:
|
||||||
|
- Linux
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
|
||||||
|
container: snapcraft
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: x64
|
||||||
|
steps:
|
||||||
|
- template: linux/snap-build-linux.yml
|
||||||
|
|
||||||
|
- job: LinuxArmhf
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
|
||||||
|
container: vscode-armhf
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: armhf
|
||||||
|
NPM_ARCH: armv7l
|
||||||
|
steps:
|
||||||
|
- template: linux/product-build-linux.yml
|
||||||
|
|
||||||
|
- job: LinuxArm64
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
|
||||||
|
container: vscode-arm64
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: arm64
|
||||||
|
NPM_ARCH: arm64
|
||||||
|
steps:
|
||||||
|
- template: linux/product-build-linux.yml
|
||||||
|
|
||||||
|
- job: LinuxAlpine
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: alpine
|
||||||
|
steps:
|
||||||
|
- template: linux/product-build-linux-multiarch.yml
|
||||||
|
|
||||||
|
- job: LinuxWeb
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: x64
|
||||||
|
steps:
|
||||||
|
- template: web/product-build-web.yml
|
||||||
|
|
||||||
|
- stage: macOS
|
||||||
|
dependsOn:
|
||||||
|
- Compile
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||||
|
pool:
|
||||||
|
vmImage: macOS-latest
|
||||||
|
jobs:
|
||||||
|
- job: macOS
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
|
||||||
|
steps:
|
||||||
|
- template: darwin/product-build-darwin.yml
|
||||||
|
|
||||||
|
- stage: Mooncake
|
||||||
|
dependsOn:
|
||||||
|
- Windows
|
||||||
|
- Linux
|
||||||
|
- macOS
|
||||||
|
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
jobs:
|
||||||
|
- job: SyncMooncake
|
||||||
|
displayName: Sync Mooncake
|
||||||
|
steps:
|
||||||
|
- template: sync-mooncake.yml
|
||||||
|
|
||||||
|
- stage: Publish
|
||||||
|
dependsOn:
|
||||||
|
- Windows
|
||||||
|
- Linux
|
||||||
|
- macOS
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
jobs:
|
||||||
|
- job: BuildService
|
||||||
|
displayName: Build Service
|
||||||
|
steps:
|
||||||
|
- template: release.yml
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
@@ -52,9 +52,13 @@ steps:
|
|||||||
displayName: Merge distro
|
displayName: Merge distro
|
||||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
echo -n $VSCODE_ARCH > .build/arch
|
||||||
|
displayName: Prepare arch cache flag
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||||
vstsFeed: 'npm-vscode'
|
vstsFeed: 'npm-vscode'
|
||||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
@@ -67,7 +71,7 @@ steps:
|
|||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.build/arch, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||||
vstsFeed: 'npm-vscode'
|
vstsFeed: 'npm-vscode'
|
||||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||||
@@ -112,8 +116,8 @@ steps:
|
|||||||
yarn gulp compile-build
|
yarn gulp compile-build
|
||||||
yarn gulp compile-extensions-build
|
yarn gulp compile-extensions-build
|
||||||
yarn gulp minify-vscode
|
yarn gulp minify-vscode
|
||||||
yarn gulp minify-vscode-reh
|
yarn gulp vscode-reh-linux-x64-min
|
||||||
yarn gulp minify-vscode-reh-web
|
yarn gulp vscode-reh-web-linux-x64-min
|
||||||
displayName: Compile
|
displayName: Compile
|
||||||
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ pr: none
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
@@ -45,7 +45,7 @@ function repeat(str: string, times: number): string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function convertTabsToSpaces(str: string): string {
|
function convertTabsToSpaces(str: string): string {
|
||||||
return str.replace(/^\t+/gm, value => repeat(' ', value.length));
|
return str.replace(/\t/gm, value => repeat(' ', value.length));
|
||||||
}
|
}
|
||||||
|
|
||||||
function getNewFileContent(content: string, tag: string) {
|
function getNewFileContent(content: string, tag: string) {
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ jobs:
|
|||||||
- template: sql-product-compile.yml
|
- template: sql-product-compile.yml
|
||||||
|
|
||||||
- job: macOS
|
- job: macOS
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
|
||||||
pool:
|
pool:
|
||||||
vmImage: macOS-latest
|
vmImage: macOS-latest
|
||||||
dependsOn:
|
dependsOn:
|
||||||
@@ -27,7 +27,7 @@ jobs:
|
|||||||
timeoutInMinutes: 180
|
timeoutInMinutes: 180
|
||||||
|
|
||||||
- job: macOS_Signing
|
- job: macOS_Signing
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), eq(variables['signed'], true))
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), eq(variables['signed'], true), ne(variables['VSCODE_QUALITY'], 'saw'))
|
||||||
pool:
|
pool:
|
||||||
vmImage: macOS-latest
|
vmImage: macOS-latest
|
||||||
dependsOn:
|
dependsOn:
|
||||||
@@ -46,11 +46,11 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- template: linux/sql-product-build-linux.yml
|
- template: linux/sql-product-build-linux.yml
|
||||||
parameters:
|
parameters:
|
||||||
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects"]
|
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
|
||||||
timeoutInMinutes: 70
|
timeoutInMinutes: 70
|
||||||
|
|
||||||
- job: LinuxWeb
|
- job: LinuxWeb
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
|
||||||
pool:
|
pool:
|
||||||
vmImage: 'Ubuntu-16.04'
|
vmImage: 'Ubuntu-16.04'
|
||||||
container: linux-x64
|
container: linux-x64
|
||||||
@@ -61,15 +61,15 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- template: web/sql-product-build-web.yml
|
- template: web/sql-product-build-web.yml
|
||||||
|
|
||||||
- job: Docker
|
# - job: Docker
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
|
# condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
|
||||||
pool:
|
# pool:
|
||||||
vmImage: 'Ubuntu-16.04'
|
# vmImage: 'Ubuntu-16.04'
|
||||||
container: linux-x64
|
# container: linux-x64
|
||||||
dependsOn:
|
# dependsOn:
|
||||||
- Linux
|
# - Linux
|
||||||
steps:
|
# steps:
|
||||||
- template: docker/sql-product-build-docker.yml
|
# - template: docker/sql-product-build-docker.yml
|
||||||
|
|
||||||
- job: Windows
|
- job: Windows
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
|
||||||
@@ -98,7 +98,7 @@ jobs:
|
|||||||
dependsOn:
|
dependsOn:
|
||||||
- macOS
|
- macOS
|
||||||
- Linux
|
- Linux
|
||||||
- Docker
|
# - Docker
|
||||||
- Windows
|
- Windows
|
||||||
- Windows_Test
|
- Windows_Test
|
||||||
- LinuxWeb
|
- LinuxWeb
|
||||||
|
|||||||
@@ -96,8 +96,8 @@ steps:
|
|||||||
yarn gulp compile-build
|
yarn gulp compile-build
|
||||||
yarn gulp compile-extensions-build
|
yarn gulp compile-extensions-build
|
||||||
yarn gulp minify-vscode
|
yarn gulp minify-vscode
|
||||||
yarn gulp minify-vscode-reh
|
yarn gulp vscode-reh-linux-x64-min
|
||||||
yarn gulp minify-vscode-reh-web
|
yarn gulp vscode-reh-web-linux-x64-min
|
||||||
displayName: Compile
|
displayName: Compile
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||||
inputs:
|
inputs:
|
||||||
@@ -57,7 +57,7 @@ steps:
|
|||||||
displayName: Run Unit Tests (Electron)
|
displayName: Run Unit Tests (Electron)
|
||||||
|
|
||||||
# - powershell: | {{SQL CARBON EDIT}} disable
|
# - powershell: | {{SQL CARBON EDIT}} disable
|
||||||
# yarn test-browser --browser chromium --browser firefox
|
# yarn test-browser --browser chromium --browser firefox --tfs "Browser Unit Tests"
|
||||||
# displayName: Run Unit Tests (Browser)
|
# displayName: Run Unit Tests (Browser)
|
||||||
|
|
||||||
# - powershell: | {{SQL CARBON EDIT}} disable
|
# - powershell: | {{SQL CARBON EDIT}} disable
|
||||||
|
|||||||
@@ -12,9 +12,9 @@ $ServerZipLocation = "$Repo\.build\win32-$Arch\server"
|
|||||||
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"
|
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"
|
||||||
|
|
||||||
# Create server archive
|
# Create server archive
|
||||||
New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
|
# New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
|
||||||
$global:LASTEXITCODE = 0
|
$global:LASTEXITCODE = 0
|
||||||
exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
|
# exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
|
||||||
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
|
# exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
|
||||||
|
|
||||||
exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
|
exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.13.0"
|
versionSpec: "12.14.1"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -115,7 +115,7 @@ steps:
|
|||||||
- powershell: |
|
- powershell: |
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
exec { yarn test-browser --build --browser chromium --browser firefox }
|
exec { yarn test-browser --build --browser chromium --browser firefox --tfs "Browser Unit Tests" }
|
||||||
displayName: Run unit tests (Browser)
|
displayName: Run unit tests (Browser)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
@@ -135,18 +135,18 @@ steps:
|
|||||||
- powershell: |
|
- powershell: |
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
|
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
|
||||||
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
displayName: Run integration tests (Browser)
|
||||||
$AppNameShort = $AppProductJson.nameShort
|
|
||||||
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
|
|
||||||
displayName: Run remote integration tests (Electron)
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- powershell: |
|
- powershell: |
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
|
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
|
||||||
displayName: Run integration tests (Browser)
|
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
||||||
|
$AppNameShort = $AppProductJson.nameShort
|
||||||
|
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
|
||||||
|
displayName: Run remote integration tests (Electron)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- task: PublishPipelineArtifact@0
|
- task: PublishPipelineArtifact@0
|
||||||
@@ -157,6 +157,13 @@ steps:
|
|||||||
continueOnError: true
|
continueOnError: true
|
||||||
condition: failed()
|
condition: failed()
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: Publish Tests Results
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: '*-results.xml'
|
||||||
|
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
||||||
|
condition: succeededOrFailed()
|
||||||
|
|
||||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
inputs:
|
inputs:
|
||||||
ConnectedServiceName: 'ESRP CodeSign'
|
ConnectedServiceName: 'ESRP CodeSign'
|
||||||
|
|||||||
@@ -95,8 +95,8 @@ steps:
|
|||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
exec { yarn gulp "package-rebuild-extensions" }
|
exec { yarn gulp "package-rebuild-extensions" }
|
||||||
exec { yarn gulp "vscode-win32-x64-min-ci" }
|
exec { yarn gulp "vscode-win32-x64-min-ci" }
|
||||||
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
|
exec { yarn gulp "vscode-reh-win32-x64-min" }
|
||||||
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
|
exec { yarn gulp "vscode-reh-web-win32-x64-min" }
|
||||||
exec { yarn gulp "vscode-win32-x64-code-helper" }
|
exec { yarn gulp "vscode-win32-x64-code-helper" }
|
||||||
exec { yarn gulp "vscode-win32-x64-inno-updater" }
|
exec { yarn gulp "vscode-win32-x64-inno-updater" }
|
||||||
displayName: Build
|
displayName: Build
|
||||||
@@ -131,7 +131,7 @@ steps:
|
|||||||
$AppRoot = "$(agent.builddirectory)\azuredatastudio-win32-x64"
|
$AppRoot = "$(agent.builddirectory)\azuredatastudio-win32-x64"
|
||||||
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
||||||
$AppNameShort = $AppProductJson.nameShort
|
$AppNameShort = $AppProductJson.nameShort
|
||||||
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\azuredatastudio-reh-win32-x64"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
# exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\azuredatastudio-reh-win32-x64"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
||||||
displayName: Run integration tests (Electron)
|
displayName: Run integration tests (Electron)
|
||||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
|||||||
@@ -96,6 +96,7 @@ const indentationFilter = [
|
|||||||
'!**/*.dockerfile',
|
'!**/*.dockerfile',
|
||||||
'!extensions/markdown-language-features/media/*.js',
|
'!extensions/markdown-language-features/media/*.js',
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
|
'!**/*.gif',
|
||||||
'!build/actions/**/*.js',
|
'!build/actions/**/*.js',
|
||||||
'!**/*.{xlf,docx,sql,vsix,bacpac,ipynb,jpg}',
|
'!**/*.{xlf,docx,sql,vsix,bacpac,ipynb,jpg}',
|
||||||
'!extensions/mssql/sqltoolsservice/**',
|
'!extensions/mssql/sqltoolsservice/**',
|
||||||
@@ -103,6 +104,7 @@ const indentationFilter = [
|
|||||||
'!extensions/admin-tool-ext-win/ssmsmin/**',
|
'!extensions/admin-tool-ext-win/ssmsmin/**',
|
||||||
'!extensions/resource-deployment/notebooks/**',
|
'!extensions/resource-deployment/notebooks/**',
|
||||||
'!extensions/mssql/notebooks/**',
|
'!extensions/mssql/notebooks/**',
|
||||||
|
'!extensions/azurehybridtoolkit/notebooks/**',
|
||||||
'!extensions/integration-tests/testData/**',
|
'!extensions/integration-tests/testData/**',
|
||||||
'!extensions/arc/src/controller/generated/**',
|
'!extensions/arc/src/controller/generated/**',
|
||||||
'!extensions/sql-database-projects/resources/templates/*.xml',
|
'!extensions/sql-database-projects/resources/templates/*.xml',
|
||||||
@@ -138,6 +140,7 @@ const copyrightFilter = [
|
|||||||
'!resources/linux/snap/snapcraft.yaml',
|
'!resources/linux/snap/snapcraft.yaml',
|
||||||
'!resources/linux/snap/electron-launch',
|
'!resources/linux/snap/electron-launch',
|
||||||
'!resources/win32/bin/code.js',
|
'!resources/win32/bin/code.js',
|
||||||
|
'!resources/web/code-web.js',
|
||||||
'!resources/completions/**',
|
'!resources/completions/**',
|
||||||
'!extensions/markdown-language-features/media/highlight.css',
|
'!extensions/markdown-language-features/media/highlight.css',
|
||||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||||
@@ -145,6 +148,7 @@ const copyrightFilter = [
|
|||||||
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
||||||
'!scripts/code-web.js',
|
'!scripts/code-web.js',
|
||||||
'!resources/serverless/code-web.js',
|
'!resources/serverless/code-web.js',
|
||||||
|
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
'!extensions/notebook/src/intellisense/text.ts',
|
'!extensions/notebook/src/intellisense/text.ts',
|
||||||
'!extensions/mssql/src/hdfs/webhdfs.ts',
|
'!extensions/mssql/src/hdfs/webhdfs.ts',
|
||||||
@@ -168,12 +172,16 @@ const copyrightFilter = [
|
|||||||
'!extensions/markdown-language-features/media/tomorrow.css',
|
'!extensions/markdown-language-features/media/tomorrow.css',
|
||||||
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
|
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
|
||||||
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
|
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
|
||||||
|
'!src/sql/workbench/contrib/notebook/browser/turndownPluginGfm.ts',
|
||||||
'!extensions/mssql/sqltoolsservice/**',
|
'!extensions/mssql/sqltoolsservice/**',
|
||||||
'!extensions/import/flatfileimportservice/**',
|
'!extensions/import/flatfileimportservice/**',
|
||||||
'!extensions/notebook/src/prompts/**',
|
'!extensions/notebook/src/prompts/**',
|
||||||
'!extensions/mssql/src/prompts/**',
|
'!extensions/mssql/src/prompts/**',
|
||||||
|
'!extensions/kusto/src/prompts/**',
|
||||||
'!extensions/notebook/resources/jupyter_config/**',
|
'!extensions/notebook/resources/jupyter_config/**',
|
||||||
|
'!extensions/azurehybridtoolkit/notebooks/**',
|
||||||
'!extensions/query-history/images/**',
|
'!extensions/query-history/images/**',
|
||||||
|
'!extensions/sql/build/update-grammar.js',
|
||||||
'!**/*.gif',
|
'!**/*.gif',
|
||||||
'!**/*.xlf',
|
'!**/*.xlf',
|
||||||
'!**/*.dacpac',
|
'!**/*.dacpac',
|
||||||
|
|||||||
@@ -42,6 +42,7 @@ const vscodeEntryPoints = _.flatten([
|
|||||||
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
|
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
|
||||||
buildfile.base,
|
buildfile.base,
|
||||||
buildfile.workerExtensionHost,
|
buildfile.workerExtensionHost,
|
||||||
|
buildfile.workerNotebook,
|
||||||
buildfile.workbenchDesktop,
|
buildfile.workbenchDesktop,
|
||||||
buildfile.code
|
buildfile.code
|
||||||
]);
|
]);
|
||||||
@@ -76,8 +77,7 @@ const vscodeResources = [
|
|||||||
'out-build/vs/platform/files/**/*.md',
|
'out-build/vs/platform/files/**/*.md',
|
||||||
'out-build/vs/code/electron-browser/workbench/**',
|
'out-build/vs/code/electron-browser/workbench/**',
|
||||||
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
|
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
|
||||||
'out-build/vs/code/electron-browser/issue/issueReporter.js',
|
'out-build/vs/code/electron-sandbox/issue/issueReporter.js',
|
||||||
'out-build/sql/workbench/electron-browser/splashscreen/*', // {{SQL CARBON EDIT}} STart
|
|
||||||
'out-build/sql/**/*.{svg,png,cur,html}',
|
'out-build/sql/**/*.{svg,png,cur,html}',
|
||||||
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
|
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
|
||||||
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
|
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
|
||||||
@@ -97,7 +97,7 @@ const vscodeResources = [
|
|||||||
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
|
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
|
||||||
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
|
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
|
||||||
'out-build/vs/code/electron-sandbox/processExplorer/processExplorer.js',
|
'out-build/vs/code/electron-sandbox/processExplorer/processExplorer.js',
|
||||||
'out-build/vs/platform/auth/common/auth.css',
|
'out-build/vs/code/electron-sandbox/proxy/auth.js',
|
||||||
'!**/test/**'
|
'!**/test/**'
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -261,7 +261,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
.pipe(fileLengthFilter.restore)
|
.pipe(fileLengthFilter.restore)
|
||||||
.pipe(util.skipDirectories())
|
.pipe(util.skipDirectories())
|
||||||
.pipe(util.fixWin32DirectoryPermissions())
|
.pipe(util.fixWin32DirectoryPermissions())
|
||||||
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
|
.pipe(electron(_.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, ffmpegChromium: true })))
|
||||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
|
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
|
||||||
|
|
||||||
if (platform === 'linux') {
|
if (platform === 'linux') {
|
||||||
@@ -345,7 +345,7 @@ const BUILD_TARGETS = [
|
|||||||
{ platform: 'darwin', arch: null, opts: { stats: true } },
|
{ platform: 'darwin', arch: null, opts: { stats: true } },
|
||||||
{ platform: 'linux', arch: 'ia32' },
|
{ platform: 'linux', arch: 'ia32' },
|
||||||
{ platform: 'linux', arch: 'x64' },
|
{ platform: 'linux', arch: 'x64' },
|
||||||
{ platform: 'linux', arch: 'arm' },
|
{ platform: 'linux', arch: 'armhf' },
|
||||||
{ platform: 'linux', arch: 'arm64' },
|
{ platform: 'linux', arch: 'arm64' },
|
||||||
];
|
];
|
||||||
BUILD_TARGETS.forEach(buildTarget => {
|
BUILD_TARGETS.forEach(buildTarget => {
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ const commit = util.getVersion(root);
|
|||||||
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
|
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
|
||||||
|
|
||||||
function getDebPackageArch(arch) {
|
function getDebPackageArch(arch) {
|
||||||
return { x64: 'amd64', arm: 'armhf', arm64: 'arm64' }[arch];
|
return { x64: 'amd64', armhf: 'armhf', arm64: 'arm64' }[arch];
|
||||||
}
|
}
|
||||||
|
|
||||||
function prepareDebPackage(arch) {
|
function prepareDebPackage(arch) {
|
||||||
@@ -53,6 +53,11 @@ function prepareDebPackage(arch) {
|
|||||||
.pipe(replace('@@LICENSE@@', product.licenseName))
|
.pipe(replace('@@LICENSE@@', product.licenseName))
|
||||||
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
||||||
|
|
||||||
|
const workspaceMime = gulp.src('resources/linux/code-workspace.xml', { base: '.' })
|
||||||
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
|
.pipe(rename('usr/share/mime/packages/' + product.applicationName + '-workspace.xml'));
|
||||||
|
|
||||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||||
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
||||||
|
|
||||||
@@ -96,7 +101,7 @@ function prepareDebPackage(arch) {
|
|||||||
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
||||||
.pipe(rename('DEBIAN/postinst'));
|
.pipe(rename('DEBIAN/postinst'));
|
||||||
|
|
||||||
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, bash_completion, zsh_completion, code);
|
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, code);
|
||||||
|
|
||||||
return all.pipe(vfs.dest(destination));
|
return all.pipe(vfs.dest(destination));
|
||||||
};
|
};
|
||||||
@@ -116,7 +121,7 @@ function getRpmBuildPath(rpmArch) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function getRpmPackageArch(arch) {
|
function getRpmPackageArch(arch) {
|
||||||
return { x64: 'x86_64', arm: 'armhf', arm64: 'arm64' }[arch];
|
return { x64: 'x86_64', armhf: 'armv7hl', arm64: 'aarch64' }[arch];
|
||||||
}
|
}
|
||||||
|
|
||||||
function prepareRpmPackage(arch) {
|
function prepareRpmPackage(arch) {
|
||||||
@@ -145,6 +150,11 @@ function prepareRpmPackage(arch) {
|
|||||||
.pipe(replace('@@LICENSE@@', product.licenseName))
|
.pipe(replace('@@LICENSE@@', product.licenseName))
|
||||||
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
||||||
|
|
||||||
|
const workspaceMime = gulp.src('resources/linux/code-workspace.xml', { base: '.' })
|
||||||
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
|
.pipe(rename('BUILD/usr/share/mime/packages/' + product.applicationName + '-workspace.xml'));
|
||||||
|
|
||||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||||
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
||||||
|
|
||||||
@@ -175,7 +185,7 @@ function prepareRpmPackage(arch) {
|
|||||||
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
|
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
|
||||||
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
|
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
|
||||||
|
|
||||||
const all = es.merge(code, desktops, appdata, icon, bash_completion, zsh_completion, spec, specIcon);
|
const all = es.merge(code, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, spec, specIcon);
|
||||||
|
|
||||||
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
|
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
|
||||||
};
|
};
|
||||||
@@ -249,33 +259,23 @@ function buildSnapPackage(arch) {
|
|||||||
|
|
||||||
const BUILD_TARGETS = [
|
const BUILD_TARGETS = [
|
||||||
{ arch: 'x64' },
|
{ arch: 'x64' },
|
||||||
{ arch: 'arm' },
|
{ arch: 'armhf' },
|
||||||
{ arch: 'arm64' },
|
{ arch: 'arm64' },
|
||||||
];
|
];
|
||||||
|
|
||||||
BUILD_TARGETS.forEach((buildTarget) => {
|
BUILD_TARGETS.forEach(({ arch }) => {
|
||||||
const arch = buildTarget.arch;
|
const debArch = getDebPackageArch(arch);
|
||||||
|
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
|
||||||
|
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
|
||||||
|
gulp.task(buildDebTask);
|
||||||
|
|
||||||
{
|
const rpmArch = getRpmPackageArch(arch);
|
||||||
const debArch = getDebPackageArch(arch);
|
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
|
||||||
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
|
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
|
||||||
// gulp.task(prepareDebTask);
|
gulp.task(buildRpmTask);
|
||||||
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
|
|
||||||
gulp.task(buildDebTask);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
|
||||||
const rpmArch = getRpmPackageArch(arch);
|
gulp.task(prepareSnapTask);
|
||||||
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
|
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
|
||||||
// gulp.task(prepareRpmTask);
|
gulp.task(buildSnapTask);
|
||||||
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
|
|
||||||
gulp.task(buildRpmTask);
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
|
|
||||||
gulp.task(prepareSnapTask);
|
|
||||||
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
|
|
||||||
gulp.task(buildSnapTask);
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -53,7 +53,9 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
|
|||||||
const insertFile = (relativePath, stat, shouldUnpack) => {
|
const insertFile = (relativePath, stat, shouldUnpack) => {
|
||||||
insertDirectoryForFile(relativePath);
|
insertDirectoryForFile(relativePath);
|
||||||
pendingInserts++;
|
pendingInserts++;
|
||||||
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
|
// Do not pass `onFileInserted` directly because it gets overwritten below.
|
||||||
|
// Create a closure capturing `onFileInserted`.
|
||||||
|
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}).then(() => onFileInserted(), () => onFileInserted());
|
||||||
};
|
};
|
||||||
return es.through(function (file) {
|
return es.through(function (file) {
|
||||||
if (file.stat.isDirectory()) {
|
if (file.stat.isDirectory()) {
|
||||||
|
|||||||
@@ -8,10 +8,17 @@
|
|||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import * as es from 'event-stream';
|
import * as es from 'event-stream';
|
||||||
const pickle = require('chromium-pickle-js');
|
const pickle = require('chromium-pickle-js');
|
||||||
const Filesystem = require('asar/lib/filesystem');
|
const Filesystem = <typeof AsarFilesystem>require('asar/lib/filesystem');
|
||||||
import * as VinylFile from 'vinyl';
|
import * as VinylFile from 'vinyl';
|
||||||
import * as minimatch from 'minimatch';
|
import * as minimatch from 'minimatch';
|
||||||
|
|
||||||
|
declare class AsarFilesystem {
|
||||||
|
readonly header: unknown;
|
||||||
|
constructor(src: string);
|
||||||
|
insertDirectory(path: string, shouldUnpack?: boolean): unknown;
|
||||||
|
insertFile(path: string, shouldUnpack: boolean, file: { stat: { size: number; mode: number; }; }, options: {}): Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
export function createAsar(folderPath: string, unpackGlobs: string[], destFilename: string): NodeJS.ReadWriteStream {
|
export function createAsar(folderPath: string, unpackGlobs: string[], destFilename: string): NodeJS.ReadWriteStream {
|
||||||
|
|
||||||
const shouldUnpackFile = (file: VinylFile): boolean => {
|
const shouldUnpackFile = (file: VinylFile): boolean => {
|
||||||
@@ -61,7 +68,9 @@ export function createAsar(folderPath: string, unpackGlobs: string[], destFilena
|
|||||||
const insertFile = (relativePath: string, stat: { size: number; mode: number; }, shouldUnpack: boolean) => {
|
const insertFile = (relativePath: string, stat: { size: number; mode: number; }, shouldUnpack: boolean) => {
|
||||||
insertDirectoryForFile(relativePath);
|
insertDirectoryForFile(relativePath);
|
||||||
pendingInserts++;
|
pendingInserts++;
|
||||||
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
|
// Do not pass `onFileInserted` directly because it gets overwritten below.
|
||||||
|
// Create a closure capturing `onFileInserted`.
|
||||||
|
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}).then(() => onFileInserted(), () => onFileInserted());
|
||||||
};
|
};
|
||||||
|
|
||||||
return es.through(function (file) {
|
return es.through(function (file) {
|
||||||
|
|||||||
@@ -18,7 +18,9 @@ const fancyLog = require('fancy-log');
|
|||||||
const ansiColors = require('ansi-colors');
|
const ansiColors = require('ansi-colors');
|
||||||
|
|
||||||
const root = path.dirname(path.dirname(__dirname));
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
const builtInExtensions = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8')).builtInExtensions;
|
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
|
||||||
|
const builtInExtensions = productjson.builtInExtensions;
|
||||||
|
const webBuiltInExtensions = productjson.webBuiltInExtensions;
|
||||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||||
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
|
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
|
||||||
|
|
||||||
@@ -107,7 +109,7 @@ exports.getBuiltInExtensions = function getBuiltInExtensions() {
|
|||||||
const control = readControlFile();
|
const control = readControlFile();
|
||||||
const streams = [];
|
const streams = [];
|
||||||
|
|
||||||
for (const extension of builtInExtensions) {
|
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
|
||||||
let controlState = control[extension.name] || 'marketplace';
|
let controlState = control[extension.name] || 'marketplace';
|
||||||
control[extension.name] = controlState;
|
control[extension.name] = controlState;
|
||||||
|
|
||||||
|
|||||||
@@ -55,7 +55,7 @@ function getElectron(arch) {
|
|||||||
return () => {
|
return () => {
|
||||||
const electronOpts = _.extend({}, exports.config, {
|
const electronOpts = _.extend({}, exports.config, {
|
||||||
platform: process.platform,
|
platform: process.platform,
|
||||||
arch,
|
arch: arch === 'armhf' ? 'arm' : arch,
|
||||||
ffmpegChromium: true,
|
ffmpegChromium: true,
|
||||||
keepDefaultApp: true
|
keepDefaultApp: true
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ function getElectron(arch: string): () => NodeJS.ReadWriteStream {
|
|||||||
return () => {
|
return () => {
|
||||||
const electronOpts = _.extend({}, config, {
|
const electronOpts = _.extend({}, config, {
|
||||||
platform: process.platform,
|
platform: process.platform,
|
||||||
arch,
|
arch: arch === 'armhf' ? 'arm' : arch,
|
||||||
ffmpegChromium: true,
|
ffmpegChromium: true,
|
||||||
keepDefaultApp: true
|
keepDefaultApp: true
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -207,23 +207,25 @@ const externalExtensions = [
|
|||||||
// they get packaged separately. Adding extension name here, will make the build to create
|
// they get packaged separately. Adding extension name here, will make the build to create
|
||||||
// a separate vsix package for the extension and the extension will be excluded from the main package.
|
// a separate vsix package for the extension and the extension will be excluded from the main package.
|
||||||
// Any extension not included here will be installed by default.
|
// Any extension not included here will be installed by default.
|
||||||
|
'admin-pack',
|
||||||
'admin-tool-ext-win',
|
'admin-tool-ext-win',
|
||||||
'agent',
|
'agent',
|
||||||
'arc',
|
'arc',
|
||||||
'azdata',
|
|
||||||
'import',
|
|
||||||
'profiler',
|
|
||||||
'admin-pack',
|
|
||||||
'dacpac',
|
|
||||||
'schema-compare',
|
|
||||||
'cms',
|
|
||||||
'query-history',
|
|
||||||
'liveshare',
|
|
||||||
'sql-database-projects',
|
|
||||||
'machine-learning',
|
|
||||||
'sql-assessment',
|
|
||||||
'asde-deployment',
|
'asde-deployment',
|
||||||
'sql-migration'
|
'azdata',
|
||||||
|
'azurehybridtoolkit',
|
||||||
|
'cms',
|
||||||
|
'dacpac',
|
||||||
|
'import',
|
||||||
|
'kusto',
|
||||||
|
'liveshare',
|
||||||
|
'machine-learning',
|
||||||
|
'profiler',
|
||||||
|
'query-history',
|
||||||
|
'schema-compare',
|
||||||
|
'sql-assessment',
|
||||||
|
'sql-database-projects',
|
||||||
|
'sql-migration',
|
||||||
];
|
];
|
||||||
// extensions that require a rebuild since they have native parts
|
// extensions that require a rebuild since they have native parts
|
||||||
const rebuildExtensions = [
|
const rebuildExtensions = [
|
||||||
@@ -254,7 +256,6 @@ function packageLocalExtensionsStream(forWeb) {
|
|||||||
const extensionName = path.basename(extensionPath);
|
const extensionName = path.basename(extensionPath);
|
||||||
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
|
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
|
||||||
})
|
})
|
||||||
.filter(({ name }) => (name === 'vscode-web-playground' ? forWeb : true)) // package vscode-web-playground only for web
|
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||||
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package
|
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package
|
||||||
|
|||||||
@@ -241,23 +241,25 @@ const externalExtensions = [
|
|||||||
// they get packaged separately. Adding extension name here, will make the build to create
|
// they get packaged separately. Adding extension name here, will make the build to create
|
||||||
// a separate vsix package for the extension and the extension will be excluded from the main package.
|
// a separate vsix package for the extension and the extension will be excluded from the main package.
|
||||||
// Any extension not included here will be installed by default.
|
// Any extension not included here will be installed by default.
|
||||||
|
'admin-pack',
|
||||||
'admin-tool-ext-win',
|
'admin-tool-ext-win',
|
||||||
'agent',
|
'agent',
|
||||||
'arc',
|
'arc',
|
||||||
'azdata',
|
|
||||||
'import',
|
|
||||||
'profiler',
|
|
||||||
'admin-pack',
|
|
||||||
'dacpac',
|
|
||||||
'schema-compare',
|
|
||||||
'cms',
|
|
||||||
'query-history',
|
|
||||||
'liveshare',
|
|
||||||
'sql-database-projects',
|
|
||||||
'machine-learning',
|
|
||||||
'sql-assessment',
|
|
||||||
'asde-deployment',
|
'asde-deployment',
|
||||||
'sql-migration'
|
'azdata',
|
||||||
|
'azurehybridtoolkit',
|
||||||
|
'cms',
|
||||||
|
'dacpac',
|
||||||
|
'import',
|
||||||
|
'kusto',
|
||||||
|
'liveshare',
|
||||||
|
'machine-learning',
|
||||||
|
'profiler',
|
||||||
|
'query-history',
|
||||||
|
'schema-compare',
|
||||||
|
'sql-assessment',
|
||||||
|
'sql-database-projects',
|
||||||
|
'sql-migration',
|
||||||
];
|
];
|
||||||
|
|
||||||
// extensions that require a rebuild since they have native parts
|
// extensions that require a rebuild since they have native parts
|
||||||
@@ -307,7 +309,6 @@ export function packageLocalExtensionsStream(forWeb: boolean): Stream {
|
|||||||
const extensionName = path.basename(extensionPath);
|
const extensionName = path.basename(extensionPath);
|
||||||
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
|
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
|
||||||
})
|
})
|
||||||
.filter(({ name }) => (name === 'vscode-web-playground' ? forWeb : true)) // package vscode-web-playground only for web
|
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||||
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package
|
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package
|
||||||
|
|||||||
@@ -94,6 +94,10 @@
|
|||||||
"name": "vs/workbench/contrib/issue",
|
"name": "vs/workbench/contrib/issue",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/contrib/keybindings",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/markers",
|
"name": "vs/workbench/contrib/markers",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -202,6 +206,10 @@
|
|||||||
"name": "vs/workbench/contrib/webview",
|
"name": "vs/workbench/contrib/webview",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/contrib/webviewPanel",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/customEditor",
|
"name": "vs/workbench/contrib/customEditor",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -246,10 +254,6 @@
|
|||||||
"name": "vs/workbench/services/configurationResolver",
|
"name": "vs/workbench/services/configurationResolver",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"name": "vs/workbench/services/crashReporter",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/dialogs",
|
"name": "vs/workbench/services/dialogs",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
|
|||||||
@@ -1004,7 +1004,7 @@ function createResource(project: string, slug: string, xlfFile: File, apiHostnam
|
|||||||
* https://dev.befoolish.co/tx-docs/public/projects/updating-content#what-happens-when-you-update-files
|
* https://dev.befoolish.co/tx-docs/public/projects/updating-content#what-happens-when-you-update-files
|
||||||
*/
|
*/
|
||||||
function updateResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: string): Promise<any> {
|
function updateResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: string): Promise<any> {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise<void>((resolve, reject) => {
|
||||||
const data = JSON.stringify({ content: xlfFile.contents.toString() });
|
const data = JSON.stringify({ content: xlfFile.contents.toString() });
|
||||||
const options = {
|
const options = {
|
||||||
hostname: apiHostname,
|
hostname: apiHostname,
|
||||||
|
|||||||
@@ -53,6 +53,13 @@ const CORE_TYPES = [
|
|||||||
'trimLeft',
|
'trimLeft',
|
||||||
'trimRight'
|
'trimRight'
|
||||||
];
|
];
|
||||||
|
// Types that are defined in a common layer but are known to be only
|
||||||
|
// available in native environments should not be allowed in browser
|
||||||
|
const NATIVE_TYPES = [
|
||||||
|
'NativeParsedArgs',
|
||||||
|
'INativeEnvironmentService',
|
||||||
|
'INativeWindowConfiguration'
|
||||||
|
];
|
||||||
const RULES = [
|
const RULES = [
|
||||||
// Tests: skip
|
// Tests: skip
|
||||||
{
|
{
|
||||||
@@ -68,6 +75,37 @@ const RULES = [
|
|||||||
'MessageEvent',
|
'MessageEvent',
|
||||||
'data'
|
'data'
|
||||||
],
|
],
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
|
disallowedDefinitions: [
|
||||||
|
'lib.dom.d.ts',
|
||||||
|
'@types/node' // no node.js
|
||||||
|
]
|
||||||
|
},
|
||||||
|
// Common: vs/platform/environment/common/argv.ts
|
||||||
|
{
|
||||||
|
target: '**/{vs,sql}/platform/environment/common/argv.ts',
|
||||||
|
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||||
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedDefinitions: [
|
||||||
|
'lib.dom.d.ts',
|
||||||
|
'@types/node' // no node.js
|
||||||
|
]
|
||||||
|
},
|
||||||
|
// Common: vs/platform/environment/common/environment.ts
|
||||||
|
{
|
||||||
|
target: '**/{vs,sql}/platform/environment/common/environment.ts',
|
||||||
|
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||||
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedDefinitions: [
|
||||||
|
'lib.dom.d.ts',
|
||||||
|
'@types/node' // no node.js
|
||||||
|
]
|
||||||
|
},
|
||||||
|
// Common: vs/platform/windows/common/windows.ts
|
||||||
|
{
|
||||||
|
target: '**/{vs,sql}/platform/windows/common/windows.ts',
|
||||||
|
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||||
|
allowedTypes: CORE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'lib.dom.d.ts',
|
'lib.dom.d.ts',
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
@@ -81,6 +119,7 @@ const RULES = [
|
|||||||
// Safe access to global
|
// Safe access to global
|
||||||
'global'
|
'global'
|
||||||
],
|
],
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'lib.dom.d.ts',
|
'lib.dom.d.ts',
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
@@ -90,6 +129,7 @@ const RULES = [
|
|||||||
{
|
{
|
||||||
target: '**/{vs,sql}/**/common/**',
|
target: '**/{vs,sql}/**/common/**',
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'lib.dom.d.ts',
|
'lib.dom.d.ts',
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
@@ -99,6 +139,7 @@ const RULES = [
|
|||||||
{
|
{
|
||||||
target: '**/{vs,sql}/**/browser/**',
|
target: '**/{vs,sql}/**/browser/**',
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
]
|
]
|
||||||
@@ -107,6 +148,7 @@ const RULES = [
|
|||||||
{
|
{
|
||||||
target: '**/src/{vs,sql}/editor/contrib/**',
|
target: '**/src/{vs,sql}/editor/contrib/**',
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
]
|
]
|
||||||
@@ -132,7 +174,7 @@ const RULES = [
|
|||||||
},
|
},
|
||||||
// Electron (sandbox)
|
// Electron (sandbox)
|
||||||
{
|
{
|
||||||
target: '**/vs/**/electron-sandbox/**',
|
target: '**/{vs,sql}/**/electron-sandbox/**',
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
@@ -162,7 +204,7 @@ let hasErrors = false;
|
|||||||
function checkFile(program, sourceFile, rule) {
|
function checkFile(program, sourceFile, rule) {
|
||||||
checkNode(sourceFile);
|
checkNode(sourceFile);
|
||||||
function checkNode(node) {
|
function checkNode(node) {
|
||||||
var _a;
|
var _a, _b;
|
||||||
if (node.kind !== ts.SyntaxKind.Identifier) {
|
if (node.kind !== ts.SyntaxKind.Identifier) {
|
||||||
return ts.forEachChild(node, checkNode); // recurse down
|
return ts.forEachChild(node, checkNode); // recurse down
|
||||||
}
|
}
|
||||||
@@ -170,6 +212,12 @@ function checkFile(program, sourceFile, rule) {
|
|||||||
if ((_a = rule.allowedTypes) === null || _a === void 0 ? void 0 : _a.some(allowed => allowed === text)) {
|
if ((_a = rule.allowedTypes) === null || _a === void 0 ? void 0 : _a.some(allowed => allowed === text)) {
|
||||||
return; // override
|
return; // override
|
||||||
}
|
}
|
||||||
|
if ((_b = rule.disallowedTypes) === null || _b === void 0 ? void 0 : _b.some(disallowed => disallowed === text)) {
|
||||||
|
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
|
||||||
|
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
|
||||||
|
hasErrors = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
const checker = program.getTypeChecker();
|
const checker = program.getTypeChecker();
|
||||||
const symbol = checker.getSymbolAtLocation(node);
|
const symbol = checker.getSymbolAtLocation(node);
|
||||||
if (symbol) {
|
if (symbol) {
|
||||||
|
|||||||
@@ -55,6 +55,14 @@ const CORE_TYPES = [
|
|||||||
'trimRight'
|
'trimRight'
|
||||||
];
|
];
|
||||||
|
|
||||||
|
// Types that are defined in a common layer but are known to be only
|
||||||
|
// available in native environments should not be allowed in browser
|
||||||
|
const NATIVE_TYPES = [
|
||||||
|
'NativeParsedArgs',
|
||||||
|
'INativeEnvironmentService',
|
||||||
|
'INativeWindowConfiguration'
|
||||||
|
];
|
||||||
|
|
||||||
const RULES = [
|
const RULES = [
|
||||||
|
|
||||||
// Tests: skip
|
// Tests: skip
|
||||||
@@ -73,6 +81,40 @@ const RULES = [
|
|||||||
'MessageEvent',
|
'MessageEvent',
|
||||||
'data'
|
'data'
|
||||||
],
|
],
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
|
disallowedDefinitions: [
|
||||||
|
'lib.dom.d.ts', // no DOM
|
||||||
|
'@types/node' // no node.js
|
||||||
|
]
|
||||||
|
},
|
||||||
|
|
||||||
|
// Common: vs/platform/environment/common/argv.ts
|
||||||
|
{
|
||||||
|
target: '**/{vs,sql}/platform/environment/common/argv.ts',
|
||||||
|
disallowedTypes: [/* Ignore native types that are defined from here */],
|
||||||
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedDefinitions: [
|
||||||
|
'lib.dom.d.ts', // no DOM
|
||||||
|
'@types/node' // no node.js
|
||||||
|
]
|
||||||
|
},
|
||||||
|
|
||||||
|
// Common: vs/platform/environment/common/environment.ts
|
||||||
|
{
|
||||||
|
target: '**/{vs,sql}/platform/environment/common/environment.ts',
|
||||||
|
disallowedTypes: [/* Ignore native types that are defined from here */],
|
||||||
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedDefinitions: [
|
||||||
|
'lib.dom.d.ts', // no DOM
|
||||||
|
'@types/node' // no node.js
|
||||||
|
]
|
||||||
|
},
|
||||||
|
|
||||||
|
// Common: vs/platform/windows/common/windows.ts
|
||||||
|
{
|
||||||
|
target: '**/{vs,sql}/platform/windows/common/windows.ts',
|
||||||
|
disallowedTypes: [/* Ignore native types that are defined from here */],
|
||||||
|
allowedTypes: CORE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'lib.dom.d.ts', // no DOM
|
'lib.dom.d.ts', // no DOM
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
@@ -88,6 +130,7 @@ const RULES = [
|
|||||||
// Safe access to global
|
// Safe access to global
|
||||||
'global'
|
'global'
|
||||||
],
|
],
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'lib.dom.d.ts', // no DOM
|
'lib.dom.d.ts', // no DOM
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
@@ -98,6 +141,7 @@ const RULES = [
|
|||||||
{
|
{
|
||||||
target: '**/{vs,sql}/**/common/**',
|
target: '**/{vs,sql}/**/common/**',
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'lib.dom.d.ts', // no DOM
|
'lib.dom.d.ts', // no DOM
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
@@ -108,6 +152,7 @@ const RULES = [
|
|||||||
{
|
{
|
||||||
target: '**/{vs,sql}/**/browser/**',
|
target: '**/{vs,sql}/**/browser/**',
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
]
|
]
|
||||||
@@ -117,6 +162,7 @@ const RULES = [
|
|||||||
{
|
{
|
||||||
target: '**/src/{vs,sql}/editor/contrib/**',
|
target: '**/src/{vs,sql}/editor/contrib/**',
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedTypes: NATIVE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
]
|
]
|
||||||
@@ -145,7 +191,7 @@ const RULES = [
|
|||||||
|
|
||||||
// Electron (sandbox)
|
// Electron (sandbox)
|
||||||
{
|
{
|
||||||
target: '**/vs/**/electron-sandbox/**',
|
target: '**/{vs,sql}/**/electron-sandbox/**',
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
@@ -181,6 +227,7 @@ interface IRule {
|
|||||||
skip?: boolean;
|
skip?: boolean;
|
||||||
allowedTypes?: string[];
|
allowedTypes?: string[];
|
||||||
disallowedDefinitions?: string[];
|
disallowedDefinitions?: string[];
|
||||||
|
disallowedTypes?: string[];
|
||||||
}
|
}
|
||||||
|
|
||||||
let hasErrors = false;
|
let hasErrors = false;
|
||||||
@@ -199,6 +246,14 @@ function checkFile(program: ts.Program, sourceFile: ts.SourceFile, rule: IRule)
|
|||||||
return; // override
|
return; // override
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (rule.disallowedTypes?.some(disallowed => disallowed === text)) {
|
||||||
|
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
|
||||||
|
console.log(`[build/lib/layersChecker.ts]: Reference to '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
|
||||||
|
|
||||||
|
hasErrors = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
const checker = program.getTypeChecker();
|
const checker = program.getTypeChecker();
|
||||||
const symbol = checker.getSymbolAtLocation(node);
|
const symbol = checker.getSymbolAtLocation(node);
|
||||||
if (symbol) {
|
if (symbol) {
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
|
|||||||
const rootDir = path.resolve(__dirname, '..', '..');
|
const rootDir = path.resolve(__dirname, '..', '..');
|
||||||
|
|
||||||
function runProcess(command: string, args: ReadonlyArray<string> = []) {
|
function runProcess(command: string, args: ReadonlyArray<string> = []) {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise<void>((resolve, reject) => {
|
||||||
const child = spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
|
const child = spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
|
||||||
child.on('exit', err => !err ? resolve() : process.exit(err ?? 1));
|
child.on('exit', err => !err ? resolve() : process.exit(err ?? 1));
|
||||||
child.on('error', reject);
|
child.on('error', reject);
|
||||||
|
|||||||
@@ -50,7 +50,7 @@
|
|||||||
"rollup-plugin-commonjs": "^10.1.0",
|
"rollup-plugin-commonjs": "^10.1.0",
|
||||||
"rollup-plugin-node-resolve": "^5.2.0",
|
"rollup-plugin-node-resolve": "^5.2.0",
|
||||||
"terser": "4.3.8",
|
"terser": "4.3.8",
|
||||||
"typescript": "^4.0.0-dev.20200803",
|
"typescript": "^4.1.0-dev.20200824",
|
||||||
"vsce": "1.48.0",
|
"vsce": "1.48.0",
|
||||||
"vscode-telemetry-extractor": "^1.6.0",
|
"vscode-telemetry-extractor": "^1.6.0",
|
||||||
"xml2js": "^0.4.17"
|
"xml2js": "^0.4.17"
|
||||||
|
|||||||
@@ -88,7 +88,7 @@ Source: "{#ProductJsonPath}"; DestDir: "{code:GetDestDir}\resources\app"; Flags:
|
|||||||
|
|
||||||
[Icons]
|
[Icons]
|
||||||
Name: "{group}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; AppUserModelID: "{#AppUserId}"
|
Name: "{group}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; AppUserModelID: "{#AppUserId}"
|
||||||
Name: "{commondesktop}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: desktopicon; AppUserModelID: "{#AppUserId}"
|
Name: "{autodesktop}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: desktopicon; AppUserModelID: "{#AppUserId}"
|
||||||
Name: "{userappdata}\Microsoft\Internet Explorer\Quick Launch\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: quicklaunchicon; AppUserModelID: "{#AppUserId}"
|
Name: "{userappdata}\Microsoft\Internet Explorer\Quick Launch\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: quicklaunchicon; AppUserModelID: "{#AppUserId}"
|
||||||
|
|
||||||
[Run]
|
[Run]
|
||||||
|
|||||||
@@ -2553,9 +2553,9 @@ node-abort-controller@^1.0.4:
|
|||||||
integrity sha512-7cNtLKTAg0LrW3ViS2C7UfIzbL3rZd8L0++5MidbKqQVJ8yrH6+1VRSHl33P0ZjBTbOJd37d9EYekvHyKkB0QQ==
|
integrity sha512-7cNtLKTAg0LrW3ViS2C7UfIzbL3rZd8L0++5MidbKqQVJ8yrH6+1VRSHl33P0ZjBTbOJd37d9EYekvHyKkB0QQ==
|
||||||
|
|
||||||
node-fetch@^2.6.0:
|
node-fetch@^2.6.0:
|
||||||
version "2.6.0"
|
version "2.6.1"
|
||||||
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
|
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
|
||||||
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
|
integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
|
||||||
|
|
||||||
node-pre-gyp@^0.10.0:
|
node-pre-gyp@^0.10.0:
|
||||||
version "0.10.3"
|
version "0.10.3"
|
||||||
@@ -3544,10 +3544,10 @@ typescript@^3.0.1:
|
|||||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977"
|
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977"
|
||||||
integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g==
|
integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g==
|
||||||
|
|
||||||
typescript@^4.0.0-dev.20200803:
|
typescript@^4.1.0-dev.20200824:
|
||||||
version "4.0.0-dev.20200803"
|
version "4.1.0-dev.20200824"
|
||||||
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.0.0-dev.20200803.tgz#ea8b0e9fb2ee3085598ff200c8568f04f4cbb2ba"
|
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.1.0-dev.20200824.tgz#34c92d9b6e5124600658c0d4e9b8c125beaf577d"
|
||||||
integrity sha512-f/jDkFqCs0gbUd5MCUijO9u3AOMx1x1HdRDDHSidlc6uPVEkRduxjeTFhIXbGutO7ivzv+aC2sxH+1FQwsyBcg==
|
integrity sha512-hTJfocmebnMKoqRw/xs3bL61z87XXtvOUwYtM7zaCX9mAvnfdo1x1bzQlLZAsvdzRIgAHPJQYbqYHKygWkDw6g==
|
||||||
|
|
||||||
typical@^4.0.0:
|
typical@^4.0.0:
|
||||||
version "4.0.0"
|
version "4.0.0"
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
"git": {
|
"git": {
|
||||||
"name": "chromium",
|
"name": "chromium",
|
||||||
"repositoryUrl": "https://chromium.googlesource.com/chromium/src",
|
"repositoryUrl": "https://chromium.googlesource.com/chromium/src",
|
||||||
"commitHash": "e4745133a1d3745f066e068b8033c6a269b59caf"
|
"commitHash": "894fb9eb56c6cbda65e3c3ae9ada6d4cb5850cc9"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"licenseDetail": [
|
"licenseDetail": [
|
||||||
@@ -40,7 +40,7 @@
|
|||||||
"SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
|
"SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
|
||||||
],
|
],
|
||||||
"isOnlyProductionDependency": true,
|
"isOnlyProductionDependency": true,
|
||||||
"version": "78.0.3904.130"
|
"version": "83.0.4103.122"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"component": {
|
"component": {
|
||||||
@@ -48,11 +48,11 @@
|
|||||||
"git": {
|
"git": {
|
||||||
"name": "nodejs",
|
"name": "nodejs",
|
||||||
"repositoryUrl": "https://github.com/nodejs/node",
|
"repositoryUrl": "https://github.com/nodejs/node",
|
||||||
"commitHash": "787378879acfb212ed4ff824bf9f767a24a5cb43a"
|
"commitHash": "9622fed3fb2cffcea9efff6c8cb4cc2def99d75d"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"isOnlyProductionDependency": true,
|
"isOnlyProductionDependency": true,
|
||||||
"version": "12.8.1"
|
"version": "12.14.1"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"component": {
|
"component": {
|
||||||
@@ -60,12 +60,12 @@
|
|||||||
"git": {
|
"git": {
|
||||||
"name": "electron",
|
"name": "electron",
|
||||||
"repositoryUrl": "https://github.com/electron/electron",
|
"repositoryUrl": "https://github.com/electron/electron",
|
||||||
"commitHash": "5f93e889020d279d5a9cd1ecab080ab467312447"
|
"commitHash": "fb03807cd21915ddc3aa2521ba4f5ba14597bd7e"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"isOnlyProductionDependency": true,
|
"isOnlyProductionDependency": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"version": "7.3.2"
|
"version": "9.3.0"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"component": {
|
"component": {
|
||||||
|
|||||||
@@ -102,7 +102,7 @@
|
|||||||
"mocha-junit-reporter": "^1.17.0",
|
"mocha-junit-reporter": "^1.17.0",
|
||||||
"mocha-multi-reporters": "^1.1.7",
|
"mocha-multi-reporters": "^1.1.7",
|
||||||
"should": "^13.2.3",
|
"should": "^13.2.3",
|
||||||
"vscodetestcover": "^1.0.9"
|
"vscodetestcover": "^1.1.0"
|
||||||
},
|
},
|
||||||
"__metadata": {
|
"__metadata": {
|
||||||
"id": "41",
|
"id": "41",
|
||||||
|
|||||||
@@ -986,10 +986,10 @@ vscode-nls@^3.2.1:
|
|||||||
resolved "https://registry.yarnpkg.com/vscode-nls/-/vscode-nls-3.2.5.tgz#25520c1955108036dec607c85e00a522f247f1a4"
|
resolved "https://registry.yarnpkg.com/vscode-nls/-/vscode-nls-3.2.5.tgz#25520c1955108036dec607c85e00a522f247f1a4"
|
||||||
integrity sha512-ITtoh3V4AkWXMmp3TB97vsMaHRgHhsSFPsUdzlueSL+dRZbSNTZeOmdQv60kjCV306ghPxhDeoNUEm3+EZMuyw==
|
integrity sha512-ITtoh3V4AkWXMmp3TB97vsMaHRgHhsSFPsUdzlueSL+dRZbSNTZeOmdQv60kjCV306ghPxhDeoNUEm3+EZMuyw==
|
||||||
|
|
||||||
vscodetestcover@^1.0.9:
|
vscodetestcover@^1.1.0:
|
||||||
version "1.0.9"
|
version "1.1.0"
|
||||||
resolved "https://registry.yarnpkg.com/vscodetestcover/-/vscodetestcover-1.0.9.tgz#0191f403dd59ba1153fc57979e281e992ce63731"
|
resolved "https://registry.yarnpkg.com/vscodetestcover/-/vscodetestcover-1.1.0.tgz#ea2bc2fb0c54ca4084057883e7e1614a20533e14"
|
||||||
integrity sha512-8z2961KF9Tuz5XdHAC6RMV3CrzAoUcfIK7wLYjLIXD4dbHIT7ceZMhoxToW1olyi3pFnThlS4lRXtx8Q5iyMMQ==
|
integrity sha512-b/5mYqWC4yPxPUM1G8MD8ZnRt7eYd1IxAg/vdTE6JiNZlpGtxkDv91eXbF4TbQVlOPoqTzfhpY5GxbZbHVv+DQ==
|
||||||
dependencies:
|
dependencies:
|
||||||
decache "^4.4.0"
|
decache "^4.4.0"
|
||||||
glob "^7.1.2"
|
glob "^7.1.2"
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
"name": "agent",
|
"name": "agent",
|
||||||
"displayName": "SQL Server Agent",
|
"displayName": "SQL Server Agent",
|
||||||
"description": "Manage and troubleshoot SQL Server Agent jobs",
|
"description": "Manage and troubleshoot SQL Server Agent jobs",
|
||||||
"version": "0.48.0",
|
"version": "0.49.0",
|
||||||
"publisher": "Microsoft",
|
"publisher": "Microsoft",
|
||||||
"preview": true,
|
"preview": true,
|
||||||
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/main/LICENSE.txt",
|
"license": "https://raw.githubusercontent.com/Microsoft/azuredatastudio/main/LICENSE.txt",
|
||||||
@@ -90,7 +90,7 @@
|
|||||||
"mocha-multi-reporters": "^1.1.7",
|
"mocha-multi-reporters": "^1.1.7",
|
||||||
"should": "^13.2.1",
|
"should": "^13.2.1",
|
||||||
"typemoq": "^2.1.0",
|
"typemoq": "^2.1.0",
|
||||||
"vscodetestcover": "^1.0.9"
|
"vscodetestcover": "^1.1.0"
|
||||||
},
|
},
|
||||||
"__metadata": {
|
"__metadata": {
|
||||||
"id": "10",
|
"id": "10",
|
||||||
|
|||||||
@@ -769,10 +769,10 @@ vscode-nls@^3.2.1:
|
|||||||
resolved "https://registry.yarnpkg.com/vscode-nls/-/vscode-nls-3.2.5.tgz#25520c1955108036dec607c85e00a522f247f1a4"
|
resolved "https://registry.yarnpkg.com/vscode-nls/-/vscode-nls-3.2.5.tgz#25520c1955108036dec607c85e00a522f247f1a4"
|
||||||
integrity sha512-ITtoh3V4AkWXMmp3TB97vsMaHRgHhsSFPsUdzlueSL+dRZbSNTZeOmdQv60kjCV306ghPxhDeoNUEm3+EZMuyw==
|
integrity sha512-ITtoh3V4AkWXMmp3TB97vsMaHRgHhsSFPsUdzlueSL+dRZbSNTZeOmdQv60kjCV306ghPxhDeoNUEm3+EZMuyw==
|
||||||
|
|
||||||
vscodetestcover@^1.0.9:
|
vscodetestcover@^1.1.0:
|
||||||
version "1.0.9"
|
version "1.1.0"
|
||||||
resolved "https://registry.yarnpkg.com/vscodetestcover/-/vscodetestcover-1.0.9.tgz#0191f403dd59ba1153fc57979e281e992ce63731"
|
resolved "https://registry.yarnpkg.com/vscodetestcover/-/vscodetestcover-1.1.0.tgz#ea2bc2fb0c54ca4084057883e7e1614a20533e14"
|
||||||
integrity sha512-8z2961KF9Tuz5XdHAC6RMV3CrzAoUcfIK7wLYjLIXD4dbHIT7ceZMhoxToW1olyi3pFnThlS4lRXtx8Q5iyMMQ==
|
integrity sha512-b/5mYqWC4yPxPUM1G8MD8ZnRt7eYd1IxAg/vdTE6JiNZlpGtxkDv91eXbF4TbQVlOPoqTzfhpY5GxbZbHVv+DQ==
|
||||||
dependencies:
|
dependencies:
|
||||||
decache "^4.4.0"
|
decache "^4.4.0"
|
||||||
glob "^7.1.2"
|
glob "^7.1.2"
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
Welcome to Microsoft Azure Arc Extension for Azure Data Studio!
|
Welcome to Microsoft Azure Arc Extension for Azure Data Studio!
|
||||||
|
|
||||||
**This extension is only applicable to customers in the Azure Arc data services private preview.**
|
**This extension is only applicable to customers in the Azure Arc data services public preview.**
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
|
|||||||
3
extensions/arc/images/discard.svg
Normal file
3
extensions/arc/images/discard.svg
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<path d="M8.7 7.9L15.8 15L15 15.8L7.9 8.7L0.8 15.8L0 15L7.1 7.9L0 0.8L0.8 0L7.9 7.1L15 0L15.8 0.8L8.7 7.9Z" fill="#0078D4"/>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 228 B |
3
extensions/arc/images/information.svg
Normal file
3
extensions/arc/images/information.svg
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 2048 2048" width="16" height="16">
|
||||||
|
<path d="M960 1920q-133 0-255-34t-230-96-194-150-150-195-97-229T0 960q0-133 34-255t96-230 150-194 195-150 229-97T960 0q133 0 255 34t230 96 194 150 150 195 97 229 34 256q0 133-34 255t-96 230-150 194-195 150-229 97-256 34zm0-1792q-115 0-221 30t-198 84-169 130-130 168-84 199-30 221q0 114 30 220t84 199 130 169 168 130 199 84 221 30q114 0 220-30t199-84 169-130 130-168 84-199 30-221q0-114-30-220t-84-199-130-169-168-130-199-84-221-30zm-64 640h128v640H896V768zm0-256h128v128H896V512z" />
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 581 B |
3
extensions/arc/images/save.svg
Normal file
3
extensions/arc/images/save.svg
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
<svg width="16" height="14" viewBox="0 0 16 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||||
|
<path d="M14 0H14.4L14.7 0.2L14.9 0.5C14.9524 0.570883 14.9885 0.652432 15.0058 0.738849C15.023 0.825265 15.0211 0.914429 15 1V14H2.8L0.999997 12.2V1C0.985033 0.85904 1.02046 0.717335 1.1 0.6L1.3 0.3L1.6 0.1H14V0ZM14 1H13V7H3V1H2V11.8L3.2 13H4V9H11V13H14V1ZM4 6H12V1H4V6ZM10 10H5V13H6V11H7V13H10V10Z" fill="#0078D4"/>
|
||||||
|
</svg>
|
||||||
|
After Width: | Height: | Size: 421 B |
@@ -8,5 +8,5 @@
|
|||||||
not_numbered: true
|
not_numbered: true
|
||||||
expand_sections: true
|
expand_sections: true
|
||||||
sections:
|
sections:
|
||||||
- title: TSG100 - The Azure Arc Postgres troubleshooter
|
- title: TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter
|
||||||
url: postgres/tsg100-troubleshoot-postgres
|
url: postgres/tsg100-troubleshoot-postgres
|
||||||
|
|||||||
@@ -3,5 +3,5 @@
|
|||||||
- This chapter contains notebooks for troubleshooting Postgres on Azure Arc
|
- This chapter contains notebooks for troubleshooting Postgres on Azure Arc
|
||||||
|
|
||||||
## Notebooks in this Chapter
|
## Notebooks in this Chapter
|
||||||
- [TSG100 - The Azure Arc Postgres troubleshooter](tsg100-troubleshoot-postgres.ipynb)
|
- [TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter](tsg100-troubleshoot-postgres.ipynb)
|
||||||
|
|
||||||
|
|||||||
@@ -3,5 +3,5 @@
|
|||||||
not_numbered: true
|
not_numbered: true
|
||||||
expand_sections: true
|
expand_sections: true
|
||||||
sections:
|
sections:
|
||||||
- title: TSG100 - The Azure Arc Postgres troubleshooter
|
- title: TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter
|
||||||
url: postgres/tsg100-troubleshoot-postgres
|
url: postgres/tsg100-troubleshoot-postgres
|
||||||
|
|||||||
@@ -4,13 +4,14 @@
|
|||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"source": [
|
"source": [
|
||||||
"TSG100 - The Azure Arc Postgres troubleshooter\n",
|
"TSG100 - The Azure Arc enabled PostgreSQL Hyperscale troubleshooter\n",
|
||||||
"==============================================\n",
|
"===================================================================\n",
|
||||||
"\n",
|
"\n",
|
||||||
"Description\n",
|
"Description\n",
|
||||||
"-----------\n",
|
"-----------\n",
|
||||||
"\n",
|
"\n",
|
||||||
"Follow these steps to troubleshoot an Azure Arc Postgres Server.\n",
|
"Follow these steps to troubleshoot an Azure Arc enabled PostgreSQL\n",
|
||||||
|
"Hyperscale Server.\n",
|
||||||
"\n",
|
"\n",
|
||||||
"Steps\n",
|
"Steps\n",
|
||||||
"-----\n",
|
"-----\n",
|
||||||
@@ -34,6 +35,7 @@
|
|||||||
"# the user will be prompted to select a server.\n",
|
"# the user will be prompted to select a server.\n",
|
||||||
"namespace = os.environ.get('POSTGRES_SERVER_NAMESPACE')\n",
|
"namespace = os.environ.get('POSTGRES_SERVER_NAMESPACE')\n",
|
||||||
"name = os.environ.get('POSTGRES_SERVER_NAME')\n",
|
"name = os.environ.get('POSTGRES_SERVER_NAME')\n",
|
||||||
|
"version = os.environ.get('POSTGRES_SERVER_VERSION')\n",
|
||||||
"\n",
|
"\n",
|
||||||
"tail_lines = 50"
|
"tail_lines = 50"
|
||||||
]
|
]
|
||||||
@@ -143,7 +145,7 @@
|
|||||||
" if cmd.startswith(\"kubectl \") and \"AZDATA_OPENSHIFT\" in os.environ:\n",
|
" if cmd.startswith(\"kubectl \") and \"AZDATA_OPENSHIFT\" in os.environ:\n",
|
||||||
" cmd_actual[0] = cmd_actual[0].replace(\"kubectl\", \"oc\")\n",
|
" cmd_actual[0] = cmd_actual[0].replace(\"kubectl\", \"oc\")\n",
|
||||||
"\n",
|
"\n",
|
||||||
" # To aid supportabilty, determine which binary file will actually be executed on the machine\n",
|
" # To aid supportability, determine which binary file will actually be executed on the machine\n",
|
||||||
" #\n",
|
" #\n",
|
||||||
" which_binary = None\n",
|
" which_binary = None\n",
|
||||||
"\n",
|
"\n",
|
||||||
@@ -400,11 +402,11 @@
|
|||||||
"import math\n",
|
"import math\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# If a server was provided, get it\n",
|
"# If a server was provided, get it\n",
|
||||||
"if namespace and name:\n",
|
"if namespace and name and version:\n",
|
||||||
" server = json.loads(run(f'kubectl get dbs -n {namespace} {name} -o json', return_output=True))\n",
|
" server = json.loads(run(f'kubectl get postgresql-{version} -n {namespace} {name} -o json', return_output=True))\n",
|
||||||
"else:\n",
|
"else:\n",
|
||||||
" # Otherwise prompt the user to select a server\n",
|
" # Otherwise prompt the user to select a server\n",
|
||||||
" servers = json.loads(run(f'kubectl get dbs --all-namespaces -o json', return_output=True))['items']\n",
|
" servers = json.loads(run(f'kubectl get postgresqls --all-namespaces -o json', return_output=True))['items']\n",
|
||||||
" if not servers:\n",
|
" if not servers:\n",
|
||||||
" raise Exception('No Postgres servers found')\n",
|
" raise Exception('No Postgres servers found')\n",
|
||||||
"\n",
|
"\n",
|
||||||
@@ -425,6 +427,7 @@
|
|||||||
" server = servers[i-1]\n",
|
" server = servers[i-1]\n",
|
||||||
" namespace = server['metadata']['namespace']\n",
|
" namespace = server['metadata']['namespace']\n",
|
||||||
" name = server['metadata']['name']\n",
|
" name = server['metadata']['name']\n",
|
||||||
|
" version = server['kind'][len('postgresql-'):]\n",
|
||||||
" break\n",
|
" break\n",
|
||||||
"\n",
|
"\n",
|
||||||
"display(Markdown(f'#### Got server {namespace}.{name}'))"
|
"display(Markdown(f'#### Got server {namespace}.{name}'))"
|
||||||
@@ -446,10 +449,10 @@
|
|||||||
"uid = server['metadata']['uid']\n",
|
"uid = server['metadata']['uid']\n",
|
||||||
"\n",
|
"\n",
|
||||||
"display(Markdown(f'#### Server summary'))\n",
|
"display(Markdown(f'#### Server summary'))\n",
|
||||||
"run(f'kubectl get dbs -n {namespace} {name}')\n",
|
"run(f'kubectl get postgresql-{version} -n {namespace} {name}')\n",
|
||||||
"\n",
|
"\n",
|
||||||
"display(Markdown(f'#### Resource summary'))\n",
|
"display(Markdown(f'#### Resource summary'))\n",
|
||||||
"run(f'kubectl get pods,pvc,svc,ep -n {namespace} -l dusky.microsoft.com/serviceId={uid}')"
|
"run(f'kubectl get sts,pods,pvc,svc,ep -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid}')"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -466,7 +469,7 @@
|
|||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"display(Markdown(f'#### Troubleshooting server {namespace}.{name}'))\n",
|
"display(Markdown(f'#### Troubleshooting server {namespace}.{name}'))\n",
|
||||||
"run(f'kubectl describe dbs -n {namespace} {name}')"
|
"run(f'kubectl describe postgresql-{version} -n {namespace} {name}')"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -482,7 +485,7 @@
|
|||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"pods = json.loads(run(f'kubectl get pods -n {namespace} -l dusky.microsoft.com/serviceId={uid} -o json', return_output=True))['items']\n",
|
"pods = json.loads(run(f'kubectl get pods -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid} -o json', return_output=True))['items']\n",
|
||||||
"\n",
|
"\n",
|
||||||
"# Summarize and describe each pod\n",
|
"# Summarize and describe each pod\n",
|
||||||
"for pod in pods:\n",
|
"for pod in pods:\n",
|
||||||
@@ -529,8 +532,7 @@
|
|||||||
" con_restarts = con_status.get('restartCount', 0)\n",
|
" con_restarts = con_status.get('restartCount', 0)\n",
|
||||||
"\n",
|
"\n",
|
||||||
" display(Markdown(f'#### Troubleshooting container {namespace}.{pod_name}/{con_name} ({i+1}/{len(cons)})\\n'\n",
|
" display(Markdown(f'#### Troubleshooting container {namespace}.{pod_name}/{con_name} ({i+1}/{len(cons)})\\n'\n",
|
||||||
" f'#### {\"S\" if con_started else \"Not s\"}tarted and '\n",
|
" f'#### {\"R\" if con_ready else \"Not r\"}eady with {con_restarts} restarts'))\n",
|
||||||
" f'{\"\" if con_ready else \"not \"}ready with {con_restarts} restarts'))\n",
|
|
||||||
"\n",
|
"\n",
|
||||||
" run(f'kubectl logs -n {namespace} {pod_name} {con_name} --tail {tail_lines}')\n",
|
" run(f'kubectl logs -n {namespace} {pod_name} {con_name} --tail {tail_lines}')\n",
|
||||||
"\n",
|
"\n",
|
||||||
@@ -554,7 +556,7 @@
|
|||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"display(Markdown(f'#### Troubleshooting PersistentVolumeClaims'))\n",
|
"display(Markdown(f'#### Troubleshooting PersistentVolumeClaims'))\n",
|
||||||
"run(f'kubectl describe pvc -n {namespace} -l dusky.microsoft.com/serviceId={uid}')"
|
"run(f'kubectl describe pvc -n {namespace} -l postgresqls.arcdata.microsoft.com/cluster-id={uid}')"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -47,7 +47,7 @@
|
|||||||
"|Tools|Description|Installation|\n",
|
"|Tools|Description|Installation|\n",
|
||||||
"|---|---|---|\n",
|
"|---|---|---|\n",
|
||||||
"|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n",
|
"|kubectl | Command-line tool for monitoring the underlying Kubernetes cluster | [Installation](https://kubernetes.io/docs/tasks/tools/install-kubectl/#install-kubectl-binary-using-native-package-management) |\n",
|
||||||
"|azdata | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://github.com/microsoft/Azure-data-services-on-Azure-Arc/blob/master/scenarios/001-install-client-tools.md) |"
|
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "714582b9-10ee-409e-ab12-15a4825c9471"
|
"azdata_cell_guid": "714582b9-10ee-409e-ab12-15a4825c9471"
|
||||||
@@ -65,13 +65,7 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"source": [
|
"source": [
|
||||||
"import pandas,sys,os,json,html,getpass,time, tempfile\n",
|
"import sys,os,json,html,getpass,time, tempfile\n",
|
||||||
"pandas_version = pandas.__version__.split('.')\n",
|
|
||||||
"pandas_major = int(pandas_version[0])\n",
|
|
||||||
"pandas_minor = int(pandas_version[1])\n",
|
|
||||||
"pandas_patch = int(pandas_version[2])\n",
|
|
||||||
"if not (pandas_major > 0 or (pandas_major == 0 and pandas_minor > 24) or (pandas_major == 0 and pandas_minor == 24 and pandas_patch >= 2)):\n",
|
|
||||||
" sys.exit('Please upgrade the Notebook dependency before you can proceed, you can do it by running the \"Reinstall Notebook dependencies\" command in command palette (View menu -> Command Palette…).')\n",
|
|
||||||
"def run_command(command):\n",
|
"def run_command(command):\n",
|
||||||
" print(\"Executing: \" + command)\n",
|
" print(\"Executing: \" + command)\n",
|
||||||
" !{command}\n",
|
" !{command}\n",
|
||||||
@@ -90,7 +84,7 @@
|
|||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"### **Set variables**\n",
|
"### **Set variables**\n",
|
||||||
"Generated by Azure Data Studio using the values collected in the Azure Arc Data controller create wizard"
|
"Generated by Azure Data Studio using the values collected in the 'Create Azure Arc data controller' wizard."
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "4b266b2d-bd1b-4565-92c9-3fc146cdce6d"
|
"azdata_cell_guid": "4b266b2d-bd1b-4565-92c9-3fc146cdce6d"
|
||||||
@@ -129,18 +123,22 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"source": [
|
"source": [
|
||||||
"if \"AZDATA_NB_VAR_ARC_DOCKER_PASSWORD\" in os.environ:\n",
|
|
||||||
" arc_docker_password = os.environ[\"AZDATA_NB_VAR_ARC_DOCKER_PASSWORD\"]\n",
|
|
||||||
"if \"AZDATA_NB_VAR_ARC_ADMIN_PASSWORD\" in os.environ:\n",
|
"if \"AZDATA_NB_VAR_ARC_ADMIN_PASSWORD\" in os.environ:\n",
|
||||||
" arc_admin_password = os.environ[\"AZDATA_NB_VAR_ARC_ADMIN_PASSWORD\"]\n",
|
" arc_admin_password = os.environ[\"AZDATA_NB_VAR_ARC_ADMIN_PASSWORD\"]\n",
|
||||||
"else:\n",
|
"else:\n",
|
||||||
" if arc_admin_password == \"\":\n",
|
" if arc_admin_password == \"\":\n",
|
||||||
" arc_admin_password = getpass.getpass(prompt = 'Azure Arc Data controller password')\n",
|
" arc_admin_password = getpass.getpass(prompt = 'Azure Arc Data Controller password')\n",
|
||||||
" if arc_admin_password == \"\":\n",
|
" if arc_admin_password == \"\":\n",
|
||||||
" sys.exit(f'Password is required.')\n",
|
" sys.exit(f'Password is required.')\n",
|
||||||
" confirm_password = getpass.getpass(prompt = 'Confirm password')\n",
|
" confirm_password = getpass.getpass(prompt = 'Confirm password')\n",
|
||||||
" if arc_admin_password != confirm_password:\n",
|
" if arc_admin_password != confirm_password:\n",
|
||||||
" sys.exit(f'Passwords do not match.')"
|
" sys.exit(f'Passwords do not match.')\n",
|
||||||
|
"\n",
|
||||||
|
"os.environ[\"SPN_CLIENT_ID\"] = sp_client_id\n",
|
||||||
|
"os.environ[\"SPN_TENANT_ID\"] = sp_tenant_id\n",
|
||||||
|
"if \"AZDATA_NB_VAR_SP_CLIENT_SECRET\" in os.environ:\n",
|
||||||
|
" os.environ[\"SPN_CLIENT_SECRET\"] = os.environ[\"AZDATA_NB_VAR_SP_CLIENT_SECRET\"]\n",
|
||||||
|
"os.environ[\"SPN_AUTHORITY\"] = \"https://login.microsoftonline.com\""
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "e7e10828-6cae-45af-8c2f-1484b6d4f9ac",
|
"azdata_cell_guid": "e7e10828-6cae-45af-8c2f-1484b6d4f9ac",
|
||||||
@@ -175,7 +173,7 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"### **Create Azure Arc Data controller**"
|
"### **Create Azure Arc Data Controller**"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "efe78cd3-ed73-4c9b-b586-fdd6c07dd37f"
|
"azdata_cell_guid": "efe78cd3-ed73-4c9b-b586-fdd6c07dd37f"
|
||||||
@@ -184,16 +182,14 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"source": [
|
"source": [
|
||||||
"print (f'Creating Azure Arc controller: {arc_data_controller_name} using configuration {arc_cluster_context}')\n",
|
"print (f'Creating Azure Arc Data Controller: {arc_data_controller_name} using configuration {arc_cluster_context}')\n",
|
||||||
"os.environ[\"ACCEPT_EULA\"] = 'yes'\n",
|
"os.environ[\"ACCEPT_EULA\"] = 'yes'\n",
|
||||||
"os.environ[\"AZDATA_USERNAME\"] = arc_admin_username\n",
|
"os.environ[\"AZDATA_USERNAME\"] = arc_admin_username\n",
|
||||||
"os.environ[\"AZDATA_PASSWORD\"] = arc_admin_password\n",
|
"os.environ[\"AZDATA_PASSWORD\"] = arc_admin_password\n",
|
||||||
"os.environ[\"DOCKER_USERNAME\"] = arc_docker_username\n",
|
|
||||||
"os.environ[\"DOCKER_PASSWORD\"] = arc_docker_password\n",
|
|
||||||
"if os.name == 'nt':\n",
|
"if os.name == 'nt':\n",
|
||||||
" print(f'If you don\\'t see output produced by azdata, you can run the following command in a terminal window to check the deployment status:\\n\\t {os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -A')\n",
|
" print(f'If you don\\'t see output produced by azdata, you can run the following command in a terminal window to check the deployment status:\\n\\t {os.environ[\"AZDATA_NB_VAR_KUBECTL\"]} get pods -n {arc_data_controller_namespace}')\n",
|
||||||
"run_command(f'azdata arc dc create --connectivity-mode {arc_data_controller_connectivity_mode} -n {arc_data_controller_name} -ns {arc_data_controller_namespace} -s {arc_subscription} -g {arc_resource_group} -l {arc_data_controller_location} -p {arc_profile}')\n",
|
"run_command(f'azdata arc dc create --connectivity-mode {arc_data_controller_connectivity_mode} -n {arc_data_controller_name} -ns {arc_data_controller_namespace} -s {arc_subscription} -g {arc_resource_group} -l {arc_data_controller_location} -sc {arc_data_controller_storage_class} --profile-name {arc_profile}')\n",
|
||||||
"print(f'Azure Arc Data controller cluster: {arc_data_controller_name} created.') "
|
"print(f'Azure Arc Data Controller: {arc_data_controller_name} created.') "
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "373947a1-90b9-49ee-86f4-17a4c7d4ca76",
|
"azdata_cell_guid": "373947a1-90b9-49ee-86f4-17a4c7d4ca76",
|
||||||
@@ -205,7 +201,7 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"### **Setting context to created Azure Arc Data controller**"
|
"### **Setting context to created Azure Arc Data Controller**"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "a3ddc701-811d-4058-b3fb-b7295fcf50ae"
|
"azdata_cell_guid": "a3ddc701-811d-4058-b3fb-b7295fcf50ae"
|
||||||
@@ -214,7 +210,7 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"source": [
|
"source": [
|
||||||
"# Setting context to data controller.\n",
|
"# Setting context to Data Controller.\n",
|
||||||
"#\n",
|
"#\n",
|
||||||
"run_command(f'kubectl config set-context --current --namespace {arc_data_controller_namespace}')"
|
"run_command(f'kubectl config set-context --current --namespace {arc_data_controller_namespace}')"
|
||||||
],
|
],
|
||||||
@@ -227,7 +223,7 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"### **Login to the data controller.**\n"
|
"### **Login to the Data Controller.**\n"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "9376b2ab-0edf-478f-9e3c-5ff46ae3501a"
|
"azdata_cell_guid": "9376b2ab-0edf-478f-9e3c-5ff46ae3501a"
|
||||||
@@ -236,9 +232,9 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"source": [
|
"source": [
|
||||||
"# Login to the data controller.\n",
|
"# Login to the Data Controller.\n",
|
||||||
"#\n",
|
"#\n",
|
||||||
"run_command(f'azdata login -n {arc_data_controller_namespace}')"
|
"run_command(f'azdata login --namespace {arc_data_controller_namespace}')"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "9aed0c5a-2c8a-4ad7-becb-60281923a196"
|
"azdata_cell_guid": "9aed0c5a-2c8a-4ad7-becb-60281923a196"
|
||||||
@@ -25,12 +25,12 @@
|
|||||||
"source": [
|
"source": [
|
||||||
"\n",
|
"\n",
|
||||||
" \n",
|
" \n",
|
||||||
"## Deploy a PostgreSQL server group on an existing Azure Arc data cluster\n",
|
"## Create a PostgreSQL Hyperscale - Azure Arc on an existing Azure Arc Data Controller\n",
|
||||||
" \n",
|
" \n",
|
||||||
"This notebook walks through the process of deploying a PostgreSQL server group on an existing Azure Arc data cluster.\n",
|
"This notebook walks through the process of creating a PostgreSQL Hyperscale - Azure Arc on an existing Azure Arc Data Controller.\n",
|
||||||
" \n",
|
" \n",
|
||||||
"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
|
"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
|
||||||
"* Make sure you have the target Azure Arc data cluster already created.\n",
|
"* Make sure you have the target Azure Arc Data Controller already created.\n",
|
||||||
"\n",
|
"\n",
|
||||||
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run All\" button to run the notebook</font></span>"
|
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run All\" button to run the notebook</font></span>"
|
||||||
],
|
],
|
||||||
@@ -41,7 +41,21 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"### **Check prerequisites**"
|
"### **Prerequisites** \n",
|
||||||
|
"Ensure the following tools are installed and added to PATH before proceeding.\n",
|
||||||
|
" \n",
|
||||||
|
"|Tools|Description|Installation|\n",
|
||||||
|
"|---|---|---|\n",
|
||||||
|
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"azdata_cell_guid": "20fe3985-a01e-461c-bce0-235f7606cc3c"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"source": [
|
||||||
|
"### **Setup and Check Prerequisites**"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "68531b91-ddce-47d7-a1d8-2ddc3d17f3e7"
|
"azdata_cell_guid": "68531b91-ddce-47d7-a1d8-2ddc3d17f3e7"
|
||||||
@@ -75,48 +89,20 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"#### **Get required and optional parameters for the PostgreSQL server group**"
|
"### **Set variables**\n",
|
||||||
|
"\n",
|
||||||
|
"#### \n",
|
||||||
|
"\n",
|
||||||
|
"Generated by Azure Data Studio using the values collected in the 'Deploy PostgreSQL Hyperscale - Azure Arc instance' wizard"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"source": [
|
|
||||||
"# Required Values\n",
|
|
||||||
"server_group_name = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAME\"]\n",
|
|
||||||
"server_group_namespace = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_NAMESPACE\"]\n",
|
|
||||||
"server_group_workers = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_WORKERS\"]\n",
|
|
||||||
"server_group_service_type = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_SERVICE_TYPE\"]\n",
|
|
||||||
"server_group_data_size = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_DATA_SIZE\"]\n",
|
|
||||||
"\n",
|
|
||||||
"# Optional Values\n",
|
|
||||||
"server_group_data_class = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_DATA_CLASS\")\n",
|
|
||||||
"server_group_port = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PORT\")\n",
|
|
||||||
"server_group_extensions = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_EXTENSIONS\")\n",
|
|
||||||
"server_group_cpu_min = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CPU_MIN\")\n",
|
|
||||||
"server_group_cpu_max = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_CPU_MAX\")\n",
|
|
||||||
"server_group_memory_min = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_MIN\")\n",
|
|
||||||
"server_group_memory_max = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_MEMORY_MAX\")\n",
|
|
||||||
"server_group_backup_classes = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_CLASSES\")\n",
|
|
||||||
"server_group_backup_sizes = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_SIZES\")\n",
|
|
||||||
"server_group_backup_full_interval = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_FULL_INTERVAL\")\n",
|
|
||||||
"server_group_backup_delta_interval = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_DELTA_INTERVAL\")\n",
|
|
||||||
"server_group_backup_retention_min = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_RETENTION_MIN\")\n",
|
|
||||||
"server_group_backup_retention_max = os.environ.get(\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_BACKUP_RETENTION_MAX\")"
|
|
||||||
],
|
|
||||||
"metadata": {
|
|
||||||
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
|
|
||||||
"tags": []
|
|
||||||
},
|
|
||||||
"outputs": [],
|
|
||||||
"execution_count": null
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"### **Installing PostgreSQL server group**"
|
"### **Creating the PostgreSQL Hyperscale - Azure Arc instance**"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
|
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
|
||||||
@@ -125,22 +111,37 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"source": [
|
"source": [
|
||||||
"print (f'Creating a PostgreSQL server group on Azure Arc')\n",
|
"# Login to the data controller.\n",
|
||||||
|
"#\n",
|
||||||
|
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
|
||||||
|
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
|
||||||
|
"out=run_command()"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"azdata_cell_guid": "71366399-5963-4e24-b2f2-6bb5bffba4ec"
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"execution_count": null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"print (f'Creating the PostgreSQL Hyperscale - Azure Arc instance')\n",
|
||||||
"\n",
|
"\n",
|
||||||
"data_class_option = f' --dataClass \"{server_group_data_class}\"' if server_group_data_class else \"\"\n",
|
"workers_option = f' -w {postgres_server_group_workers}' if postgres_server_group_workers else \"\"\n",
|
||||||
"port_option = f' --port \"{server_group_port}\"' if server_group_port else \"\"\n",
|
"port_option = f' --port \"{postgres_server_group_port}\"' if postgres_server_group_port else \"\"\n",
|
||||||
"extensions_option = f' --extensions \"{server_group_extensions}\"' if server_group_extensions else \"\"\n",
|
"engine_version_option = f' -ev {postgres_server_group_engine_version}' if postgres_server_group_engine_version else \"\"\n",
|
||||||
"cpu_min_option = f' --minCpu \"{server_group_cpu_min}\"' if server_group_cpu_min else \"\"\n",
|
"extensions_option = f' --extensions \"{postgres_server_group_extensions}\"' if postgres_server_group_extensions else \"\"\n",
|
||||||
"cpu_max_option = f' --maxCpu \"{server_group_cpu_max}\"' if server_group_cpu_max else \"\"\n",
|
"volume_size_data_option = f' -vsd {postgres_server_group_volume_size_data}Gi' if postgres_server_group_volume_size_data else \"\"\n",
|
||||||
"memory_min_option = f' --minMemoryMb \"{server_group_memory_min}\"' if server_group_memory_min else \"\"\n",
|
"volume_size_logs_option = f' -vsl {postgres_server_group_volume_size_logs}Gi' if postgres_server_group_volume_size_logs else \"\"\n",
|
||||||
"memory_max_option = f' --maxMemoryMb \"{server_group_memory_max}\"' if server_group_memory_max else \"\"\n",
|
"volume_size_backups_option = f' -vsb {postgres_server_group_volume_size_backups}Gi' if postgres_server_group_volume_size_backups else \"\"\n",
|
||||||
"backup_classes_option = f' --backupClasses \"{server_group_backup_classes}\"' if server_group_backup_classes else \"\"\n",
|
"cores_request_option = f' -cr \"{postgres_server_group_cores_request}\"' if postgres_server_group_cores_request else \"\"\n",
|
||||||
"backup_sizes_option = f' --backupSizesMb \"{server_group_backup_sizes}\"' if server_group_backup_sizes else \"\"\n",
|
"cores_limit_option = f' -cl \"{postgres_server_group_cores_limit}\"' if postgres_server_group_cores_limit else \"\"\n",
|
||||||
"backup_full_interval_option = f' --fullBackupInterval \"{server_group_backup_full_interval}\"' if server_group_backup_full_interval else \"\"\n",
|
"memory_request_option = f' -mr \"{postgres_server_group_memory_request}Gi\"' if postgres_server_group_memory_request else \"\"\n",
|
||||||
"backup_delta_interval_option = f' --deltaBackupInterval \"{server_group_backup_delta_interval}\"' if server_group_backup_delta_interval else \"\"\n",
|
"memory_limit_option = f' -ml \"{postgres_server_group_memory_limit}Gi\"' if postgres_server_group_memory_limit else \"\"\n",
|
||||||
"backup_retention_min_option = f' --retentionMin \"{server_group_backup_retention_min}\"' if server_group_backup_retention_min else \"\"\n",
|
"\n",
|
||||||
"backup_retention_max_option = f' --retentionMax \"{server_group_backup_retention_max}\"' if server_group_backup_retention_max else \"\"\n",
|
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_POSTGRES_SERVER_GROUP_PASSWORD\"]\n",
|
||||||
"cmd = f'azdata postgres server create --name {server_group_name} --namespace {server_group_namespace} --workers {server_group_workers} --serviceType {server_group_service_type} --dataSizeMb {server_group_data_size}{data_class_option}{port_option}{extensions_option}{cpu_min_option}{cpu_max_option}{memory_min_option}{memory_max_option}{backup_classes_option}{backup_sizes_option}{backup_full_interval_option}{backup_delta_interval_option}{backup_retention_min_option}{backup_retention_max_option}'\n",
|
"cmd = f'azdata arc postgres server create -n {postgres_server_group_name} -scd {postgres_storage_class_data} -scl {postgres_storage_class_logs} -scb {postgres_storage_class_backups}{workers_option}{port_option}{engine_version_option}{extensions_option}{volume_size_data_option}{volume_size_logs_option}{volume_size_backups_option}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
|
||||||
"out=run_command()"
|
"out=run_command()"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
@@ -150,4 +151,4 @@
|
|||||||
"execution_count": null
|
"execution_count": null
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
@@ -25,12 +25,12 @@
|
|||||||
"source": [
|
"source": [
|
||||||
"\n",
|
"\n",
|
||||||
" \n",
|
" \n",
|
||||||
"## Deploy Azure SQL managed instance on an existing Azure Arc data cluster\n",
|
"## Create SQL managed instance - Azure Arc on an existing Azure Arc Data Controller\n",
|
||||||
" \n",
|
" \n",
|
||||||
"This notebook walks through the process of deploying a <a href=\"https://docs.microsoft.com/azure/sql-database/sql-database-managed-instance\">Azure SQL managed instance</a> on an existing Azure Arc data cluster.\n",
|
"This notebook walks through the process of creating a <a href=\"https://docs.microsoft.com/azure/sql-database/sql-database-managed-instance\">SQL managed instance - Azure Arc</a> on an existing Azure Arc Data Controller.\n",
|
||||||
" \n",
|
" \n",
|
||||||
"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
|
"* Follow the instructions in the **Prerequisites** cell to install the tools if not already installed.\n",
|
||||||
"* Make sure you have the target Azure Arc data cluster already created.\n",
|
"* Make sure you have the target Azure Arc Data Controller already created.\n",
|
||||||
"\n",
|
"\n",
|
||||||
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run All\" button to run the notebook</font></span>"
|
"<span style=\"color:red\"><font size=\"3\">Please press the \"Run All\" button to run the notebook</font></span>"
|
||||||
],
|
],
|
||||||
@@ -41,7 +41,21 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"### **Check prerequisites**"
|
"### **Prerequisites** \n",
|
||||||
|
"Ensure the following tools are installed and added to PATH before proceeding.\n",
|
||||||
|
" \n",
|
||||||
|
"|Tools|Description|Installation|\n",
|
||||||
|
"|---|---|---|\n",
|
||||||
|
"|Azure Data CLI (azdata) | Command-line tool for installing and managing resources in an Azure Arc cluster |[Installation](https://docs.microsoft.com/sql/azdata/install/deploy-install-azdata) |"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"azdata_cell_guid": "d1c8258e-9efd-4380-a48c-cd675423ed2f"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"source": [
|
||||||
|
"### **Setup and Check Prerequisites**"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "68531b91-ddce-47d7-a1d8-2ddc3d17f3e7"
|
"azdata_cell_guid": "68531b91-ddce-47d7-a1d8-2ddc3d17f3e7"
|
||||||
@@ -75,42 +89,20 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"#### **Ensure SQL instance name, username, password, subscription id and resource group name**"
|
"### **Set variables**\n",
|
||||||
|
"\n",
|
||||||
|
"#### \n",
|
||||||
|
"\n",
|
||||||
|
"Generated by Azure Data Studio using the values collected in the 'Deploy Azure SQL managed instance - Azure Arc' wizard"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
"azdata_cell_guid": "68ec0760-27d1-4ded-9a9f-89077c40b8bb"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"source": [
|
|
||||||
"# Required Values\n",
|
|
||||||
"env_var = \"AZDATA_NB_VAR_SQL_INSTANCE_NAME\" in os.environ\n",
|
|
||||||
"if env_var:\n",
|
|
||||||
" mssql_instance_name = os.environ[\"AZDATA_NB_VAR_SQL_INSTANCE_NAME\"]\n",
|
|
||||||
"else:\n",
|
|
||||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_INSTANCE_NAME was not defined. Exiting\\n')\n",
|
|
||||||
"env_var = \"AZDATA_NB_VAR_SQL_PASSWORD\" in os.environ\n",
|
|
||||||
"if env_var:\n",
|
|
||||||
" mssql_password = os.environ[\"AZDATA_NB_VAR_SQL_PASSWORD\"]\n",
|
|
||||||
"else:\n",
|
|
||||||
" sys.exit(f'environment variable: AZDATA_NB_VAR_SQL_PASSWORD was not defined. Exiting\\n') \n",
|
|
||||||
"\n",
|
|
||||||
"# Optional Values\n",
|
|
||||||
"subscription = os.environ[\"AZDATA_NB_VAR_ARC_SUBSCRIPTION\"] \n",
|
|
||||||
"resource_group_name = os.environ[\"AZDATA_NB_VAR_ARC_RESOURCE_GROUP_NAME\"]\n"
|
|
||||||
],
|
|
||||||
"metadata": {
|
|
||||||
"azdata_cell_guid": "53769960-e1f8-4477-b4cf-3ab1ea34348b",
|
|
||||||
"tags": []
|
|
||||||
},
|
|
||||||
"outputs": [],
|
|
||||||
"execution_count": null
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
"source": [
|
"source": [
|
||||||
"### **Installing Managed SQL Instance**"
|
"### **Creating the SQL managed instance - Azure Arc instance**"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
|
"azdata_cell_guid": "90b0e162-2987-463f-9ce6-12dda1267189"
|
||||||
@@ -119,12 +111,31 @@
|
|||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"source": [
|
"source": [
|
||||||
"print (f'Creating Managed SQL Server instance on Azure Arc')\n",
|
"# Login to the data controller.\n",
|
||||||
|
"#\n",
|
||||||
|
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_CONTROLLER_PASSWORD\"]\n",
|
||||||
|
"cmd = f'azdata login -e {controller_endpoint} -u {controller_username}'\n",
|
||||||
|
"out=run_command()"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"azdata_cell_guid": "1437c536-17e8-4a7f-80c1-aa43ad02686c"
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"execution_count": null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"source": [
|
||||||
|
"print (f'Creating the SQL managed instance - Azure Arc instance')\n",
|
||||||
"\n",
|
"\n",
|
||||||
"os.environ[\"MSSQL_SA_PASSWORD\"] = mssql_password\n",
|
"cores_request_option = f' -cr \"{sql_cores_request}\"' if sql_cores_request else \"\"\n",
|
||||||
"subscription_option = f' -s \"{subscription}\"' if subscription else \"\"\n",
|
"cores_limit_option = f' -cl \"{sql_cores_limit}\"' if sql_cores_limit else \"\"\n",
|
||||||
"resource_group_option = f' -r \"{resource_group_name}\"' if resource_group_name else \"\"\n",
|
"memory_request_option = f' -mr \"{sql_memory_request}Gi\"' if sql_memory_request else \"\"\n",
|
||||||
"cmd = f'azdata sql instance create -n {mssql_instance_name}{subscription_option}{resource_group_option}'\n",
|
"memory_limit_option = f' -ml \"{sql_memory_limit}Gi\"' if sql_memory_limit else \"\"\n",
|
||||||
|
"\n",
|
||||||
|
"os.environ[\"AZDATA_USERNAME\"] = sql_username\n",
|
||||||
|
"os.environ[\"AZDATA_PASSWORD\"] = os.environ[\"AZDATA_NB_VAR_SQL_PASSWORD\"]\n",
|
||||||
|
"cmd = f'azdata arc sql mi create -n {sql_instance_name} -scd {sql_storage_class_data} -scl {sql_storage_class_logs}{cores_request_option}{cores_limit_option}{memory_request_option}{memory_limit_option}'\n",
|
||||||
"out=run_command()"
|
"out=run_command()"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
@@ -134,4 +145,4 @@
|
|||||||
"execution_count": null
|
"execution_count": null
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -2,131 +2,154 @@
|
|||||||
"arc.displayName": "Azure Arc",
|
"arc.displayName": "Azure Arc",
|
||||||
"arc.description": "Support for Azure Arc",
|
"arc.description": "Support for Azure Arc",
|
||||||
"arc.configuration.title": "Azure Arc",
|
"arc.configuration.title": "Azure Arc",
|
||||||
"arc.ignoreSslVerification.desc" : "Ignore SSL verification errors against the controller endpoint if true",
|
|
||||||
"arc.manageMiaa": "Manage MIAA",
|
|
||||||
"arc.managePostgres": "Manage Postgres",
|
|
||||||
"arc.manageArcController": "Manage Arc Controller",
|
|
||||||
"arc.view.title" : "Azure Arc Controllers",
|
"arc.view.title" : "Azure Arc Controllers",
|
||||||
"arc.view.welcome.connect" : "No Azure Arc controllers registered. [Learn More](https://azure.microsoft.com/services/azure-arc/)\n[Connect Controller](command:arc.connectToController)",
|
"arc.view.welcome.connect" : "No Azure Arc controllers registered. [Learn More](https://azure.microsoft.com/services/azure-arc/)\n[Connect Controller](command:arc.connectToController)",
|
||||||
"arc.view.welcome.loading" : "Loading controllers...",
|
"arc.view.welcome.loading" : "Loading controllers...",
|
||||||
"command.createController.title" : "Create New Controller",
|
"command.createController.title" : "Create New Azure Arc Controller",
|
||||||
"command.connectToController.title": "Connect to Existing Controller",
|
"command.connectToController.title": "Connect to Existing Azure Arc Controller",
|
||||||
"command.removeController.title": "Remove Controller",
|
"command.removeController.title": "Remove Controller",
|
||||||
"command.refresh.title": "Refresh",
|
"command.refresh.title": "Refresh",
|
||||||
|
"command.editConnection.title": "Edit Connection",
|
||||||
"arc.openDashboard": "Manage",
|
"arc.openDashboard": "Manage",
|
||||||
|
|
||||||
"resource.type.azure.arc.display.name": "Azure Arc data controller",
|
"resource.type.azure.arc.display.name": "Azure Arc data controller (preview)",
|
||||||
"resource.type.azure.arc.description": "Creates an Azure Arc data controller",
|
"resource.type.azure.arc.description": "Creates an Azure Arc data controller",
|
||||||
|
"arc.data.controller.new.wizard.title": "Create Azure Arc data controller",
|
||||||
"arc.control.plane.new.wizard.title": "Create Azure Arc data controller",
|
"arc.data.controller.cluster.environment.title": "What is your target existing Kubernetes cluster environment?",
|
||||||
"arc.control.plane.cluster.environment.title": "What is your target existing Kubernetes cluster environment?",
|
"arc.data.controller.select.cluster.title": "Select from existing Kubernetes clusters",
|
||||||
"arc.control.plane.select.cluster.title": "Select from installed existing Kubernetes clusters",
|
"arc.data.controller.kube.cluster.context": "Cluster context",
|
||||||
"arc.control.plane.kube.cluster.context": "Cluster context",
|
"arc.data.controller.cluster.config.profile.title": "Choose the config profile",
|
||||||
"arc.control.plane.container.registry.title": "Container registry details",
|
"arc.data.controller.cluster.config.profile": "Config profile",
|
||||||
"arc.control.plane.container.registry.name": "Container registry login",
|
"arc.data.controller.create.azureconfig.title": "Azure and Connectivity Configuration",
|
||||||
"arc.control.plane.container.registry.password": "Container registry password",
|
"arc.data.controller.connectivitymode.description": "Select the connectivity mode for the controller.",
|
||||||
"arc.control.plane.cluster.config.profile.title": "Choose the config profile",
|
"arc.data.controller.create.controllerconfig.title": "Controller Configuration",
|
||||||
"arc.control.plane.cluster.config.profile": "Config profile",
|
"arc.data.controller.project.details.title": "Azure details",
|
||||||
"arc.control.plane.data.controller.create.title": "Provide details to create Azure Arc data controller and register it with Azure",
|
"arc.data.controller.project.details.description": "Select the subscription to manage deployed resources and costs. Use resource groups like folders to organize and manage all your resources.",
|
||||||
"arc.control.plane.project.details.title": "Project details",
|
"arc.data.controller.details.title": "Data controller details",
|
||||||
"arc.control.plane.project.details.description": "Select the subscription to manage deployed resources and costs. Use resource groups like folders to organize and manage all your resources.",
|
"arc.data.controller.details.description": "Provide a namespace, name and storage class for your Azure Arc data controller. This name will be used to identify your Arc instance for remote management and monitoring.",
|
||||||
"arc.control.plane.data.controller.details.title": "Data controller details",
|
"arc.data.controller.namespace": "Data controller namespace",
|
||||||
"arc.control.plane.data.controller.details.description": "Provide an Azure region and a name for your Azure Arc data controller. This name will be used to identify your Arc location for remote management and monitoring.",
|
"arc.data.controller.namespace.validation.description": "Namespace must consist of lower case alphanumeric characters or '-', start/end with an alphanumeric character, and be 63 characters or fewer in length.",
|
||||||
"arc.control.plane.arc.data.controller.connectivity.mode": "Data controller connectivity mode",
|
"arc.data.controller.name": "Data controller name",
|
||||||
"arc.control.plane.arc.data.controller.namespace": "Data controller namespace",
|
"arc.data.controller.name.validation.description": "Name must consist of lower case alphanumeric characters, '-' or '.', start/end with an alphanumeric character and be 253 characters or less in length.",
|
||||||
"arc.control.plane.arc.data.controller.namespace.validation.description": "Data controller namespace (lower case letters, digits and - only)",
|
"arc.data.controller.location": "Location",
|
||||||
"arc.control.plane.arc.data.controller.name": "Data controller name",
|
"arc.data.controller.admin.account.title": "Administrator account",
|
||||||
"arc.control.plane.arc.data.controller.name.validation.description": "Data controller name (lower case letters, digits and - only)",
|
"arc.data.controller.admin.account.name": "Data controller login",
|
||||||
"arc.control.plane.arc.data.controller.location": "Location",
|
"arc.data.controller.admin.account.password": "Password",
|
||||||
"arc.control.plane.admin.account.title": "Administrator account",
|
"arc.data.controller.admin.account.confirm.password": "Confirm password",
|
||||||
"arc.control.plane.admin.account.name": "Data controller login",
|
"arc.data.controller.connectivitymode": "Connectivity Mode",
|
||||||
"arc.control.plane.admin.account.password": "Password",
|
"arc.data.controller.direct": "Direct",
|
||||||
"arc.control.plane.admin.account.confirm.password": "Confirm password",
|
"arc.data.controller.indirect": "Indirect",
|
||||||
"arc.control.plane.data.controller.create.summary.title": "Review your configuration",
|
"arc.data.controller.serviceprincipal.description": "When deploying a controller in direct connected mode a Service Principal is required for uploading metrics to Azure. {0} about how to create this Service Principal and assign it the correct roles.",
|
||||||
"arc.control.plane.summary.arc.data.controller": "Azure Arc data controller",
|
"arc.data.controller.spclientid": "Service Principal Client ID",
|
||||||
"arc.control.plane.summary.estimated.cost.per.month": "Estimated cost per month",
|
"arc.data.controller.spclientid.description": "The Application (client) ID of the created Service Principal",
|
||||||
"arc.control.plane.summary.arc.by.microsoft" : "by Microsoft",
|
"arc.data.controller.spclientid.validation.description": "The client ID must be a GUID in the format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
|
||||||
"arc.control.plane.summary.free" : "Free",
|
"arc.data.controller.spclientsecret": "Service Principal Client Secret",
|
||||||
"arc.control.plane.summary.arc.terms.of.use" : "Terms of use",
|
"arc.data.controller.spclientsecret.description": "The password generated during creation of the Service Principal",
|
||||||
"arc.control.plane.summary.arc.terms.separator" : "|",
|
"arc.data.controller.sptenantid": "Service Principal Tenant ID",
|
||||||
"arc.control.plane.summary.arc.terms.privacy.policy" : "Privacy policy",
|
"arc.data.controller.sptenantid.description": "The Tenant ID of the Service Principal. This must be the same as the Tenant ID of the subscription selected to create this controller for.",
|
||||||
"arc.control.plane.summary.terms" : "Terms",
|
"arc.data.controller.sptenantid.validation.description": "The tenant ID must be a GUID in the format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
|
||||||
"arc.control.plane.summary.terms.description": "By clicking 'Script to notebook', I (a) agree to the legal terms and privacy statement(s) associated with the Marketplace offering(s) listed above; (b) authorize Microsoft to bill my current payment method for the fees associated with the offering(s), with the same billing frequency as my Azure subscription; and (c) agree that Microsoft may share my contact, usage and transactional information with the provider(s) of the offering(s) for support, billing and other transactional activities. Microsoft does not provide rights for third-party offerings. For additional details see {0}.",
|
"arc.data.controller.create.summary.title": "Review your configuration",
|
||||||
"arc.control.plane.summary.terms.link.text": "Azure Marketplace Terms",
|
"arc.data.controller.summary.arc.data.controller": "Azure Arc data controller",
|
||||||
"arc.control.plane.summary.kubernetes": "Kubernetes",
|
"arc.data.controller.summary.estimated.cost.per.month": "Estimated cost per month",
|
||||||
"arc.control.plane.summary.kube.config.file.path": "Kube config file path",
|
"arc.data.controller.summary.arc.by.microsoft" : "by Microsoft",
|
||||||
"arc.control.plane.summary.cluster.context": "Cluster context",
|
"arc.data.controller.summary.free" : "Free",
|
||||||
"arc.control.plane.summary.profile": "Config profile",
|
"arc.data.controller.summary.arc.terms.of.use" : "Terms of use",
|
||||||
"arc.control.plane.summary.username": "Username",
|
"arc.data.controller.summary.arc.terms.separator" : "|",
|
||||||
"arc.control.plane.summary.docker.username": "Docker username",
|
"arc.data.controller.summary.arc.terms.privacy.policy" : "Privacy policy",
|
||||||
"arc.control.plane.summary.azure": "Azure",
|
"arc.data.controller.summary.terms" : "Terms",
|
||||||
"arc.control.plane.summary.subscription": "Subscription",
|
"arc.data.controller.summary.terms.description": "By clicking 'Script to notebook', I (a) agree to the legal terms and privacy statement(s) associated with the Marketplace offering(s) listed above; (b) authorize Microsoft to bill my current payment method for the fees associated with the offering(s), with the same billing frequency as my Azure subscription; and (c) agree that Microsoft may share my contact, usage and transactional information with the provider(s) of the offering(s) for support, billing and other transactional activities. Microsoft does not provide rights for third-party offerings. For additional details see {0}.",
|
||||||
"arc.control.plane.summary.resource.group": "Resource group",
|
"arc.data.controller.summary.terms.link.text": "Azure Marketplace Terms",
|
||||||
"arc.control.plane.summary.data.controller.connectivity.mode": "Data controller connectivity mode",
|
"arc.data.controller.summary.kubernetes": "Kubernetes",
|
||||||
"arc.control.plane.summary.data.controller.name": "Data controller name",
|
"arc.data.controller.summary.kube.config.file.path": "Kube config file path",
|
||||||
"arc.control.plane.summary.data.controller.namespace": "Data controller namespace",
|
"arc.data.controller.summary.cluster.context": "Cluster context",
|
||||||
"arc.control.plane.summary.location": "Location",
|
"arc.data.controller.summary.profile": "Config profile",
|
||||||
"arc.control.plane.arc.data.controller.agreement": "I accept {0} and {1}.",
|
"arc.data.controller.summary.username": "Username",
|
||||||
|
"arc.data.controller.summary.azure": "Azure",
|
||||||
|
"arc.data.controller.summary.subscription": "Subscription",
|
||||||
|
"arc.data.controller.summary.resource.group": "Resource group",
|
||||||
|
"arc.data.controller.summary.data.controller.name": "Data controller name",
|
||||||
|
"arc.data.controller.summary.data.controller.namespace": "Data controller namespace",
|
||||||
|
"arc.data.controller.summary.controller": "Controller",
|
||||||
|
"arc.data.controller.summary.location": "Location",
|
||||||
|
"arc.data.controller.agreement": "I accept {0} and {1}.",
|
||||||
|
"arc.data.controller.readmore": "Read more",
|
||||||
"microsoft.agreement.privacy.statement":"Microsoft Privacy Statement",
|
"microsoft.agreement.privacy.statement":"Microsoft Privacy Statement",
|
||||||
"arc.agreement.azdata.eula":"azdata license terms",
|
"deploy.script.action":"Script to notebook",
|
||||||
"deploy.arc.control.plane.action":"Script to notebook",
|
"deploy.done.action":"Deploy",
|
||||||
|
|
||||||
|
|
||||||
"resource.type.arc.sql.display.name": "Azure SQL managed instance - Azure Arc (preview)",
|
"resource.type.arc.sql.display.name": "Azure SQL managed instance - Azure Arc (preview)",
|
||||||
"resource.type.arc.postgres.display.name": "PostgreSQL server groups - Azure Arc (preview)",
|
"resource.type.arc.postgres.display.name": "PostgreSQL Hyperscale server groups - Azure Arc (preview)",
|
||||||
"resource.type.arc.sql.description": "Managed SQL Instance service for app developers in a customer-managed environment",
|
"resource.type.arc.sql.description": "Managed SQL Instance service for app developers in a customer-managed environment",
|
||||||
"resource.type.arc.postgres.description": "Deploy PostgreSQL server groups into an Azure Arc environment",
|
"resource.type.arc.postgres.description": "Deploy PostgreSQL Hyperscale server groups into an Azure Arc environment",
|
||||||
"resource.type.picker.display.name": "Resource Type",
|
"arc.controller": "Target Azure Arc Controller",
|
||||||
"sql.managed.instance.display.name": "Azure SQL managed instance - Azure Arc",
|
|
||||||
"postgres.server.group.display.name": "PostgreSQL server groups - Azure Arc",
|
|
||||||
"arc.sql.new.dialog.title": "Deploy Azure SQL managed instance - Azure Arc (preview)",
|
"arc.sql.wizard.title": "Deploy Azure SQL managed instance - Azure Arc (preview)",
|
||||||
"arc.sql.settings.section.title": "SQL Connection information",
|
"arc.sql.wizard.page1.title": "Provide Azure SQL managed instance parameters",
|
||||||
|
"arc.sql.connection.settings.section.title": "SQL Connection information",
|
||||||
|
"arc.sql.instance.settings.section.title": "SQL Instance settings",
|
||||||
"arc.azure.section.title": "Azure information",
|
"arc.azure.section.title": "Azure information",
|
||||||
"arc.sql.instance.name": "Instance name (lower case letters and digits only)",
|
"arc.sql.instance.name": "Instance name",
|
||||||
"arc.sql.username": "Username",
|
"arc.sql.username": "Username",
|
||||||
"arc.sql.password": "Password",
|
"arc.sql.invalid.username": "sa username is disabled, please choose another username",
|
||||||
|
"arc.sql.invalid.instance.name": "Instance name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 13 characters or fewer in length.",
|
||||||
|
"arc.storage-class.dc.label": "Storage Class",
|
||||||
|
"arc.sql.storage-class.dc.description": "The storage class to be used for all data and logs persistent volumes for all data controller pods that require them.",
|
||||||
|
"arc.storage-class.data.label": "Storage Class (Data)",
|
||||||
|
"arc.sql.storage-class.data.description": "The storage class to be used for data (.mdf)",
|
||||||
|
"arc.postgres.storage-class.data.description": "The storage class to be used for data persistent volumes",
|
||||||
|
"arc.storage-class.logs.label": "Storage Class (Logs)",
|
||||||
|
"arc.sql.storage-class.logs.description": "The storage class to be used for logs (/var/log)",
|
||||||
|
"arc.postgres.storage-class.logs.description": "The storage class to be used for logs persistent volumes",
|
||||||
|
"arc.storage-class.backups.label": "Storage Class (Backups)",
|
||||||
|
"arc.cores-limit.label": "Cores Limit",
|
||||||
|
"arc.sql.cores-limit.description": "The cores limit of the managed instance as an integer.",
|
||||||
|
"arc.cores-request.label": "Cores Request",
|
||||||
|
"arc.sql.cores-request.description": "The request for cores of the managed instance as an integer.",
|
||||||
|
"arc.memory-limit.label": "Memory Limit",
|
||||||
|
"arc.sql.memory-limit.description": "The limit of the capacity of the managed instance as an integer.",
|
||||||
|
"arc.memory-request.label": "Memory Request",
|
||||||
|
"arc.sql.memory-request.description": "The request for the capacity of the managed instance as an integer amount of memory in GBs.",
|
||||||
|
"arc.postgres.storage-class.backups.description": "The storage class to be used for backup persistent volumes",
|
||||||
|
"arc.password": "Password",
|
||||||
"arc.confirm.password": "Confirm password",
|
"arc.confirm.password": "Confirm password",
|
||||||
"arc.azure.account": "Azure account",
|
"arc.azure.account": "Azure account",
|
||||||
"arc.azure.subscription": "Azure subscription",
|
"arc.azure.subscription": "Azure subscription",
|
||||||
"arc.azure.resource.group": "Azure resource group",
|
"arc.azure.resource.group": "Azure resource group",
|
||||||
"arc.azure.location": "Azure location",
|
"arc.azure.location": "Azure location",
|
||||||
"arc.postgres.new.dialog.title": "Deploy a PostgreSQL server group on Azure Arc (preview)",
|
"arc.postgres.wizard.title": "Deploy an Azure Arc enabled PostgreSQL Hyperscale server group (Preview)",
|
||||||
"arc.postgres.settings.section.title": "PostgreSQL server group settings",
|
"arc.postgres.wizard.page1.title": "Provide Azure enabled PostgreSQL Hyperscale server group parameters",
|
||||||
"arc.postgres.settings.backups.title": "PostgreSQL server group backup settings",
|
"arc.postgres.settings.section.title": "General settings",
|
||||||
"arc.postgres.settings.scheduling.title": "PostgreSQL server group scheduling settings",
|
"arc.postgres.settings.resource.title": "Resource settings",
|
||||||
|
"arc.postgres.settings.storage.title": "Storage settings",
|
||||||
"arc.postgres.server.group.name": "Server group name",
|
"arc.postgres.server.group.name": "Server group name",
|
||||||
"arc.postgres.server.group.name.validation.description": "Server group name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 10 characters or fewer in length.",
|
"arc.postgres.server.group.name.validation.description": "Server group name must consist of lower case alphanumeric characters or '-', start with a letter, end with an alphanumeric character, and be 12 characters or fewer in length.",
|
||||||
"arc.postgres.server.group.namespace": "Kubernetes namespace",
|
"arc.postgres.server.group.workers.label": "Number of workers",
|
||||||
"arc.postgres.server.group.workers": "Number of workers",
|
"arc.postgres.server.group.workers.description": "The number of worker nodes to provision in a sharded cluster, or zero (the default) for single-node Postgres.",
|
||||||
"arc.postgres.server.group.service.type": "Kubernetes service type",
|
|
||||||
"arc.postgres.server.group.data.size": "Data volume size (MB)",
|
|
||||||
"arc.postgres.server.group.data.size.description": "The number of megabytes (per node) that will be requested for the PostgreSQL server group's data volumes.",
|
|
||||||
"arc.postgres.server.group.data.class": "Data volume storage class",
|
|
||||||
"arc.postgres.server.group.data.class.description": "The Kubernetes storage class to use for the PostgreSQL server group's data volumes, or empty to use the default storage class.",
|
|
||||||
"arc.postgres.server.group.port": "Port",
|
"arc.postgres.server.group.port": "Port",
|
||||||
"arc.postgres.server.group.extensions": "PostgreSQL extensions",
|
"arc.postgres.server.group.engine.version": "Engine Version",
|
||||||
"arc.postgres.server.group.extensions.description": "A comma-separated list of the PostgreSQL extensions that should be added. Supported values: pg_cron, postgis, postgis_raster, postgis_topology.",
|
"arc.postgres.server.group.extensions.label": "Extensions",
|
||||||
"arc.postgres.server.group.extensions.validation.description": "Supported PostgreSQL extensions: pg_cron, postgis, postgis_raster, postgis_topology.",
|
"arc.postgres.server.group.extensions.description": "A comma-separated list of the Postgres extensions that should be loaded on startup. Please refer to the postgres documentation for supported values.",
|
||||||
"arc.postgres.server.group.cpu.min": "Min CPU cores (per node) to reserve",
|
"arc.postgres.server.group.volume.size.data.label": "Volume Size GB (Data)",
|
||||||
"arc.postgres.server.group.cpu.max": "Max CPU cores (per node) to allow",
|
"arc.postgres.server.group.volume.size.data.description": "The size of the storage volume to be used for data in GB.",
|
||||||
"arc.postgres.server.group.memory.min": "Min memory MB (per node) to reserve",
|
"arc.postgres.server.group.volume.size.logs.label": "Volume Size GB (Logs)",
|
||||||
"arc.postgres.server.group.memory.max": "Max memory MB (per node) to allow",
|
"arc.postgres.server.group.volume.size.logs.description": "The size of the storage volume to be used for logs in GB.",
|
||||||
"arc.postgres.server.group.backup.classes": "Backup volume storage classes",
|
"arc.postgres.server.group.volume.size.backups.label": "Volume Size GB (Backups)",
|
||||||
"arc.postgres.server.group.backup.classes.description": "A comma-separated list of existing Kubernetes storage classes to use for the PostgreSQL server group's backup volumes, one per backup tier. If provided, backup volume sizes must also be provided.",
|
"arc.postgres.server.group.volume.size.backups.description": "The size of the storage volume to be used for backups in GB.",
|
||||||
"arc.postgres.server.group.backup.sizes": "Backup volume sizes (MB)",
|
"arc.postgres.server.group.cores.request.label": "CPU request (cores per node)",
|
||||||
"arc.postgres.server.group.backup.sizes.description": "A comma-separated list of the number of megabytes (per node) that will be requested for the PostgreSQL server group's backup volumes, one per backup tier. If specified, backups will be enabled. In this configuration a separate backup volume is used for each node.",
|
"arc.postgres.server.group.cores.request.description": "The minimum number of CPU cores that must be available per node to schedule the service. Fractional cores are supported.",
|
||||||
"arc.postgres.server.group.backup.claims": "Backup volume claims",
|
"arc.postgres.server.group.cores.limit.label": "CPU limit (cores per node)",
|
||||||
"arc.postgres.server.group.backup.claims.description": "A comma-separated list of existing Kubernetes persistent volume claims (in the same namespace) to use for the PostgreSQL server group's backups, one per backup tier. If specified, backups will be enabled. In this configuration the backup volumes are shared across all nodes.",
|
"arc.postgres.server.group.cores.limit.description": "The maximum number of CPU cores for the Postgres instance that can be used per node. Fractional cores are supported.",
|
||||||
"arc.postgres.server.group.backup.full.interval": "Minutes between full backups",
|
"arc.postgres.server.group.memory.request.label": "Memory request (GB per node)",
|
||||||
"arc.postgres.server.group.backup.delta.interval": "Minutes between delta backups",
|
"arc.postgres.server.group.memory.request.description": "The memory request of the Postgres instance per node in GB.",
|
||||||
"arc.postgres.server.group.backup.retention.min": "Minimum trim settings",
|
"arc.postgres.server.group.memory.limit.label": "Memory limit (GB per node)",
|
||||||
"arc.postgres.server.group.backup.retention.min.description": "A list of trim settings that specifies the minimum number of days/size/counts of backups to preserve per tier. Each trim setting contains 1 or more trim values separated by commas and each tier is separated by a semicolon. Possible trim values include '7d', '10GB', or '50'.",
|
"arc.postgres.server.group.memory.limit.description": "The memory limit of the Postgres instance per node in GB.",
|
||||||
"arc.postgres.server.group.backup.retention.min.validation.description": "Minimum trim settings must contain 1 or more trim values separated by commas with each tier separated by a semicolon.",
|
"arc.agreement": "I accept {0} and {1}.",
|
||||||
"arc.postgres.server.group.backup.retention.max": "Maximum trim settings",
|
"arc.agreement.sql.terms.conditions": "Azure SQL managed instance - Azure Arc terms and conditions",
|
||||||
"arc.postgres.server.group.backup.retention.max.description": "A list of trim settings that specifies the maximum number of days/size/counts of backups to preserve per tier. Each trim setting contains 1 or more trim values separated by commas and each tier is separated by a semicolon. Possible trim values include '7d', '10GB', or '50'.",
|
"arc.agreement.postgres.terms.conditions": "Azure Arc enabled PostgreSQL Hyperscale terms and conditions",
|
||||||
"arc.postgres.server.group.backup.retention.max.validation.description": "Maximum trim settings must contain 1 or more trim values separated by commas with each tier separated by a semicolon.",
|
"should.be.integer": "Value must be an integer",
|
||||||
"arc.agreement": "I accept {0}, {1} and {2}.",
|
"requested.cores.less.than.or.equal.to.cores.limit": "Requested cores must be less than or equal to cores limit",
|
||||||
"arc.agreement.sql.terms.conditions":"Azure SQL managed instance - Azure Arc terms and conditions",
|
"cores.limit.greater.than.or.equal.to.requested.cores": "Cores limit must be greater than or equal to requested cores",
|
||||||
"arc.agreement.postgres.terms.conditions":"PostgreSQL server groups - Azure Arc terms and conditions",
|
"requested.memory.less.than.or.equal.to.memory.limit": "Requested memory must be less than or equal to memory limit",
|
||||||
"arc.deploy.action":"Deploy"
|
"memory.limit.greater.than.or.equal.to.requested.memory": "Memory limit must be greater than or equal to requested memory"
|
||||||
}
|
}
|
||||||
|
|||||||
41
extensions/arc/src/common/api.ts
Normal file
41
extensions/arc/src/common/api.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
import * as arc from 'arc';
|
||||||
|
import { PasswordToControllerDialog } from '../ui/dialogs/connectControllerDialog';
|
||||||
|
import { AzureArcTreeDataProvider } from '../ui/tree/azureArcTreeDataProvider';
|
||||||
|
import { ControllerTreeNode } from '../ui/tree/controllerTreeNode';
|
||||||
|
import { UserCancelledError } from './utils';
|
||||||
|
|
||||||
|
export function arcApi(treeDataProvider: AzureArcTreeDataProvider): arc.IExtension {
|
||||||
|
return {
|
||||||
|
getRegisteredDataControllers: () => getRegisteredDataControllers(treeDataProvider),
|
||||||
|
getControllerPassword: (controllerInfo: arc.ControllerInfo) => getControllerPassword(treeDataProvider, controllerInfo),
|
||||||
|
reacquireControllerPassword: (controllerInfo: arc.ControllerInfo) => reacquireControllerPassword(treeDataProvider, controllerInfo)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
export async function reacquireControllerPassword(treeDataProvider: AzureArcTreeDataProvider, controllerInfo: arc.ControllerInfo): Promise<string> {
|
||||||
|
const dialog = new PasswordToControllerDialog(treeDataProvider);
|
||||||
|
dialog.showDialog(controllerInfo);
|
||||||
|
const model = await dialog.waitForClose();
|
||||||
|
if (!model) {
|
||||||
|
throw new UserCancelledError();
|
||||||
|
}
|
||||||
|
return model.password;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getControllerPassword(treeDataProvider: AzureArcTreeDataProvider, controllerInfo: arc.ControllerInfo): Promise<string> {
|
||||||
|
return await treeDataProvider.getPassword(controllerInfo);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getRegisteredDataControllers(treeDataProvider: AzureArcTreeDataProvider): Promise<arc.DataController[]> {
|
||||||
|
return (await treeDataProvider.getChildren())
|
||||||
|
.filter(node => node instanceof ControllerTreeNode)
|
||||||
|
.map(node => ({
|
||||||
|
label: (node as ControllerTreeNode).model.label,
|
||||||
|
info: (node as ControllerTreeNode).model.info
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
83
extensions/arc/src/common/cacheManager.ts
Normal file
83
extensions/arc/src/common/cacheManager.ts
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
import { Deferred } from './promise';
|
||||||
|
|
||||||
|
const enum Status {
|
||||||
|
notStarted,
|
||||||
|
inProgress,
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
interface State<T> {
|
||||||
|
entry?: T,
|
||||||
|
error?: Error,
|
||||||
|
status: Status,
|
||||||
|
id: number,
|
||||||
|
pendingOperation: Deferred<void>
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An implementation of Cache Manager which ensures that only one call to populate cache miss is pending at a given time.
|
||||||
|
* All remaining calls for retrieval are awaited until the one in progress finishes and then all awaited calls are resolved with the value
|
||||||
|
* from the cache.
|
||||||
|
*/
|
||||||
|
export class CacheManager<K, T> {
|
||||||
|
private _cache = new Map<K, State<T>>();
|
||||||
|
private _id = 0;
|
||||||
|
|
||||||
|
public async getCacheEntry(key: K, retrieveEntry: (key: K) => Promise<T>): Promise<T> {
|
||||||
|
const cacheHit: State<T> | undefined = this._cache.get(key);
|
||||||
|
// each branch either throws or returns the password.
|
||||||
|
if (cacheHit === undefined) {
|
||||||
|
// populate a new state entry and add it to the cache
|
||||||
|
const state: State<T> = {
|
||||||
|
status: Status.notStarted,
|
||||||
|
id: this._id++,
|
||||||
|
pendingOperation: new Deferred<void>()
|
||||||
|
};
|
||||||
|
this._cache.set(key, state);
|
||||||
|
// now that we have the state entry initialized, retry to fetch the cacheEntry
|
||||||
|
let returnValue: T = await this.getCacheEntry(key, retrieveEntry);
|
||||||
|
await state.pendingOperation;
|
||||||
|
return returnValue!;
|
||||||
|
} else {
|
||||||
|
switch (cacheHit.status) {
|
||||||
|
case Status.notStarted: {
|
||||||
|
cacheHit.status = Status.inProgress;
|
||||||
|
// retrieve and populate the missed cache hit.
|
||||||
|
try {
|
||||||
|
cacheHit.entry = await retrieveEntry(key);
|
||||||
|
} catch (error) {
|
||||||
|
cacheHit.error = error;
|
||||||
|
} finally {
|
||||||
|
cacheHit.status = Status.done;
|
||||||
|
// we do not reject here even in error case because we do not want our awaits on pendingOperation to throw
|
||||||
|
// We track our own error state and when all done we throw if an error had happened. This results
|
||||||
|
// in the rejection of the promised returned by this method.
|
||||||
|
cacheHit.pendingOperation.resolve();
|
||||||
|
}
|
||||||
|
return await this.getCacheEntry(key, retrieveEntry);
|
||||||
|
}
|
||||||
|
|
||||||
|
case Status.inProgress: {
|
||||||
|
await cacheHit.pendingOperation;
|
||||||
|
return await this.getCacheEntry(key, retrieveEntry);
|
||||||
|
}
|
||||||
|
|
||||||
|
case Status.done: {
|
||||||
|
if (cacheHit.error !== undefined) {
|
||||||
|
await cacheHit.pendingOperation;
|
||||||
|
throw cacheHit.error;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
await cacheHit.pendingOperation;
|
||||||
|
return cacheHit.entry!;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
39
extensions/arc/src/common/kubeUtils.ts
Normal file
39
extensions/arc/src/common/kubeUtils.ts
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
import * as os from 'os';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as yamljs from 'yamljs';
|
||||||
|
import * as loc from '../localizedConstants';
|
||||||
|
import { throwUnless } from './utils';
|
||||||
|
export interface KubeClusterContext {
|
||||||
|
name: string;
|
||||||
|
isCurrentContext: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getKubeConfigClusterContexts(configFile: string): Promise<KubeClusterContext[]> {
|
||||||
|
const config: any = yamljs.load(configFile);
|
||||||
|
const rawContexts = <any[]>config['contexts'];
|
||||||
|
throwUnless(rawContexts && rawContexts.length, loc.noContextFound(configFile));
|
||||||
|
const currentContext = <string>config['current-context'];
|
||||||
|
throwUnless(currentContext, loc.noCurrentContextFound(configFile));
|
||||||
|
const contexts: KubeClusterContext[] = [];
|
||||||
|
rawContexts.forEach(rawContext => {
|
||||||
|
const name = <string>rawContext['name'];
|
||||||
|
throwUnless(name, loc.noNameInContext(configFile));
|
||||||
|
if (name) {
|
||||||
|
contexts.push({
|
||||||
|
name: name,
|
||||||
|
isCurrentContext: name === currentContext
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return Promise.resolve(contexts);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getDefaultKubeConfigPath(): string {
|
||||||
|
return path.join(os.homedir(), '.kube', 'config');
|
||||||
|
}
|
||||||
|
|
||||||
@@ -3,10 +3,11 @@
|
|||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
import { ResourceType } from 'arc';
|
||||||
|
import * as azurecore from 'azurecore';
|
||||||
import * as vscode from 'vscode';
|
import * as vscode from 'vscode';
|
||||||
import * as azurecore from '../../../azurecore/src/azurecore';
|
import { ConnectionMode, IconPath, IconPathHelper } from '../constants';
|
||||||
import * as loc from '../localizedConstants';
|
import * as loc from '../localizedConstants';
|
||||||
import { IconPathHelper, IconPath, ResourceType, ConnectionMode } from '../constants';
|
|
||||||
|
|
||||||
export class UserCancelledError extends Error { }
|
export class UserCancelledError extends Error { }
|
||||||
|
|
||||||
@@ -66,7 +67,7 @@ export function getResourceTypeIcon(resourceType: string | undefined): IconPath
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the text to display for known connection modes
|
* Returns the text to display for known connection modes
|
||||||
* @param connectionMode The string repsenting the connection mode
|
* @param connectionMode The string representing the connection mode
|
||||||
*/
|
*/
|
||||||
export function getConnectionModeDisplayText(connectionMode: string | undefined): string {
|
export function getConnectionModeDisplayText(connectionMode: string | undefined): string {
|
||||||
connectionMode = connectionMode ?? '';
|
connectionMode = connectionMode ?? '';
|
||||||
@@ -147,16 +148,15 @@ async function promptInputBox(title: string, options: vscode.InputBoxOptions): P
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Opens an input box prompting the user to enter in the name of a resource to delete
|
* Opens an input box prompting the user to enter in the name of an instance to delete
|
||||||
* @param namespace The namespace of the resource to delete
|
* @param name The name of the instance to delete
|
||||||
* @param name The name of the resource to delete
|
|
||||||
* @returns Promise resolving to true if the user confirmed the name, false if the input box was closed for any other reason
|
* @returns Promise resolving to true if the user confirmed the name, false if the input box was closed for any other reason
|
||||||
*/
|
*/
|
||||||
export async function promptForResourceDeletion(namespace: string, name: string): Promise<boolean> {
|
export async function promptForInstanceDeletion(name: string): Promise<boolean> {
|
||||||
const title = loc.resourceDeletionWarning(namespace, name);
|
const title = loc.instanceDeletionWarning(name);
|
||||||
const options: vscode.InputBoxOptions = {
|
const options: vscode.InputBoxOptions = {
|
||||||
placeHolder: name,
|
placeHolder: name,
|
||||||
validateInput: input => input !== name ? loc.invalidResourceDeletionName(name) : ''
|
validateInput: input => input !== name ? loc.invalidInstanceDeletionName(name) : ''
|
||||||
};
|
};
|
||||||
|
|
||||||
return await promptInputBox(title, options) !== undefined;
|
return await promptInputBox(title, options) !== undefined;
|
||||||
@@ -189,36 +189,111 @@ export async function promptAndConfirmPassword(validate: (input: string) => stri
|
|||||||
/**
|
/**
|
||||||
* Gets the message to display for a given error object that may be a variety of types.
|
* Gets the message to display for a given error object that may be a variety of types.
|
||||||
* @param error The error object
|
* @param error The error object
|
||||||
|
* @param useMessageWithLink Whether to use the messageWithLink - if available
|
||||||
*/
|
*/
|
||||||
export function getErrorMessage(error: any): string {
|
export function getErrorMessage(error: any, useMessageWithLink: boolean = false): string {
|
||||||
if (error.body?.reason) {
|
if (useMessageWithLink && error.messageWithLink) {
|
||||||
// For HTTP Errors with a body pull out the reason message since that's usually the most helpful
|
return error.messageWithLink;
|
||||||
return error.body.reason;
|
|
||||||
} else if (error.message) {
|
|
||||||
if (error.response?.statusMessage) {
|
|
||||||
// Some Http errors just have a status message as additional detail, but it's not enough on its
|
|
||||||
// own to be useful so append to the message as well
|
|
||||||
return `${error.message} (${error.response.statusMessage})`;
|
|
||||||
}
|
|
||||||
return error.message;
|
|
||||||
} else {
|
|
||||||
return error;
|
|
||||||
}
|
}
|
||||||
|
return error.message ?? error;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Parses an instance name from the controller. An instance name will either be just its name
|
* Parses an address into its separate ip and port values. Address must be in the form <ip>:<port>
|
||||||
* e.g. myinstance or namespace_name e.g. mynamespace_my-instance.
|
* @param address The address to parse
|
||||||
* @param instanceName The instance name in one of the formats described
|
|
||||||
*/
|
*/
|
||||||
export function parseInstanceName(instanceName: string | undefined): string {
|
export function parseIpAndPort(address: string): { ip: string, port: string } {
|
||||||
instanceName = instanceName ?? '';
|
const sections = address.split(':');
|
||||||
const parts: string[] = instanceName.split('_');
|
if (sections.length !== 2) {
|
||||||
if (parts.length === 2) {
|
throw new Error(`Invalid address format for ${address}. Address must be in the form <ip>:<port>`);
|
||||||
instanceName = parts[1];
|
|
||||||
}
|
}
|
||||||
else if (parts.length > 2) {
|
return {
|
||||||
throw new Error(`Cannot parse resource '${instanceName}'. Acceptable formats are 'namespace_name' or 'name'.`);
|
ip: sections[0],
|
||||||
}
|
port: sections[1]
|
||||||
return instanceName;
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createCredentialId(controllerId: string, resourceType: string, instanceName: string): string {
|
||||||
|
return `${controllerId}::${resourceType}::${instanceName}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculates the gibibyte (GiB) conversion of a quantity that could currently be represented by a range
|
||||||
|
* of SI suffixes (E, P, T, G, M, K, m) or their power-of-two equivalents (Ei, Pi, Ti, Gi, Mi, Ki)
|
||||||
|
* @param value The string of a quantity to be converted
|
||||||
|
* @returns String of GiB conversion
|
||||||
|
*/
|
||||||
|
export function convertToGibibyteString(value: string): string {
|
||||||
|
if (!value) {
|
||||||
|
throw new Error(`Value provided is not a valid Kubernetes resource quantity`);
|
||||||
|
}
|
||||||
|
|
||||||
|
let base10ToBase2Multiplier;
|
||||||
|
let floatValue = parseFloat(value);
|
||||||
|
let splitValue = value.split(String(floatValue));
|
||||||
|
let unit = splitValue[1];
|
||||||
|
|
||||||
|
if (unit === 'K') {
|
||||||
|
base10ToBase2Multiplier = 1000 / 1024;
|
||||||
|
floatValue = (floatValue * base10ToBase2Multiplier) / Math.pow(1024, 2);
|
||||||
|
} else if (unit === 'M') {
|
||||||
|
base10ToBase2Multiplier = Math.pow(1000, 2) / Math.pow(1024, 2);
|
||||||
|
floatValue = (floatValue * base10ToBase2Multiplier) / 1024;
|
||||||
|
} else if (unit === 'G') {
|
||||||
|
base10ToBase2Multiplier = Math.pow(1000, 3) / Math.pow(1024, 3);
|
||||||
|
floatValue = floatValue * base10ToBase2Multiplier;
|
||||||
|
} else if (unit === 'T') {
|
||||||
|
base10ToBase2Multiplier = Math.pow(1000, 4) / Math.pow(1024, 4);
|
||||||
|
floatValue = (floatValue * base10ToBase2Multiplier) * 1024;
|
||||||
|
} else if (unit === 'P') {
|
||||||
|
base10ToBase2Multiplier = Math.pow(1000, 5) / Math.pow(1024, 5);
|
||||||
|
floatValue = (floatValue * base10ToBase2Multiplier) * Math.pow(1024, 2);
|
||||||
|
} else if (unit === 'E') {
|
||||||
|
base10ToBase2Multiplier = Math.pow(1000, 6) / Math.pow(1024, 6);
|
||||||
|
floatValue = (floatValue * base10ToBase2Multiplier) * Math.pow(1024, 3);
|
||||||
|
} else if (unit === 'm') {
|
||||||
|
floatValue = (floatValue / 1000) / Math.pow(1024, 3);
|
||||||
|
} else if (unit === '') {
|
||||||
|
floatValue = floatValue / Math.pow(1024, 3);
|
||||||
|
} else if (unit === 'Ki') {
|
||||||
|
floatValue = floatValue / Math.pow(1024, 2);
|
||||||
|
} else if (unit === 'Mi') {
|
||||||
|
floatValue = floatValue / 1024;
|
||||||
|
} else if (unit === 'Gi') {
|
||||||
|
floatValue = floatValue;
|
||||||
|
} else if (unit === 'Ti') {
|
||||||
|
floatValue = floatValue * 1024;
|
||||||
|
} else if (unit === 'Pi') {
|
||||||
|
floatValue = floatValue * Math.pow(1024, 2);
|
||||||
|
} else if (unit === 'Ei') {
|
||||||
|
floatValue = floatValue * Math.pow(1024, 3);
|
||||||
|
} else {
|
||||||
|
throw new Error(`${value} is not a valid Kubernetes resource quantity`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return String(floatValue);
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Throws an Error with given {@link message} unless {@link condition} is true.
|
||||||
|
* This also tells the typescript compiler that the condition is 'truthy' in the remainder of the scope
|
||||||
|
* where this function was called.
|
||||||
|
*
|
||||||
|
* @param condition
|
||||||
|
* @param message
|
||||||
|
*/
|
||||||
|
export function throwUnless(condition: any, message?: string): asserts condition {
|
||||||
|
if (!condition) {
|
||||||
|
throw new Error(message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function tryExecuteAction<T>(action: () => T | PromiseLike<T>): Promise<{ result: T | undefined, error: any }> {
|
||||||
|
let error: any, result: T | undefined;
|
||||||
|
try {
|
||||||
|
result = await action();
|
||||||
|
} catch (e) {
|
||||||
|
error = e;
|
||||||
|
}
|
||||||
|
return { result, error };
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,6 +7,11 @@ import * as vscode from 'vscode';
|
|||||||
|
|
||||||
export const refreshActionId = 'arc.refresh';
|
export const refreshActionId = 'arc.refresh';
|
||||||
|
|
||||||
|
export const credentialNamespace = 'arcCredentials';
|
||||||
|
|
||||||
|
export const controllerTroubleshootDocsUrl = 'https://aka.ms/arc-data-tsg';
|
||||||
|
export const miaaTroubleshootDocsUrl = 'https://aka.ms/miaa-tsg';
|
||||||
|
|
||||||
export interface IconPath {
|
export interface IconPath {
|
||||||
dark: string;
|
dark: string;
|
||||||
light: string;
|
light: string;
|
||||||
@@ -35,7 +40,10 @@ export class IconPathHelper {
|
|||||||
public static controller: IconPath;
|
public static controller: IconPath;
|
||||||
public static health: IconPath;
|
public static health: IconPath;
|
||||||
public static success: IconPath;
|
public static success: IconPath;
|
||||||
|
public static save: IconPath;
|
||||||
|
public static discard: IconPath;
|
||||||
public static fail: IconPath;
|
public static fail: IconPath;
|
||||||
|
public static information: IconPath;
|
||||||
|
|
||||||
public static setExtensionContext(context: vscode.ExtensionContext) {
|
public static setExtensionContext(context: vscode.ExtensionContext) {
|
||||||
IconPathHelper.context = context;
|
IconPathHelper.context = context;
|
||||||
@@ -111,18 +119,25 @@ export class IconPathHelper {
|
|||||||
light: context.asAbsolutePath('images/success.svg'),
|
light: context.asAbsolutePath('images/success.svg'),
|
||||||
dark: context.asAbsolutePath('images/success.svg'),
|
dark: context.asAbsolutePath('images/success.svg'),
|
||||||
};
|
};
|
||||||
|
IconPathHelper.save = {
|
||||||
|
light: context.asAbsolutePath('images/save.svg'),
|
||||||
|
dark: context.asAbsolutePath('images/save.svg'),
|
||||||
|
};
|
||||||
|
IconPathHelper.discard = {
|
||||||
|
light: context.asAbsolutePath('images/discard.svg'),
|
||||||
|
dark: context.asAbsolutePath('images/discard.svg'),
|
||||||
|
};
|
||||||
IconPathHelper.fail = {
|
IconPathHelper.fail = {
|
||||||
light: context.asAbsolutePath('images/fail.svg'),
|
light: context.asAbsolutePath('images/fail.svg'),
|
||||||
dark: context.asAbsolutePath('images/fail.svg'),
|
dark: context.asAbsolutePath('images/fail.svg'),
|
||||||
};
|
};
|
||||||
|
IconPathHelper.information = {
|
||||||
|
light: context.asAbsolutePath('images/information.svg'),
|
||||||
|
dark: context.asAbsolutePath('images/information.svg'),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const enum ResourceType {
|
|
||||||
dataControllers = 'dataControllers',
|
|
||||||
postgresInstances = 'postgresInstances',
|
|
||||||
sqlManagedInstances = 'sqlManagedInstances'
|
|
||||||
}
|
|
||||||
|
|
||||||
export const enum Endpoints {
|
export const enum Endpoints {
|
||||||
mgmtproxy = 'mgmtproxy',
|
mgmtproxy = 'mgmtproxy',
|
||||||
|
|||||||
@@ -1,20 +0,0 @@
|
|||||||
# Updating the Swagger generated clients
|
|
||||||
|
|
||||||
The TypeScript clients used to communicate with the controller are generated from the controller's Swagger specification. To update the clients:
|
|
||||||
|
|
||||||
1. Get the Swagger specification from a running controller, and save it locally:
|
|
||||||
* `https://<controller_ip>:30080/api/<api_name>/swagger.json`
|
|
||||||
|
|
||||||
2. Generate the clients:
|
|
||||||
* At the time of writing, [editor.swagger.io](https://editor.swagger.io) does not support typescript-node client generation from OpenAPI 3.x specifications. So we'll use [openapi-generator.tech](https://openapi-generator.tech) instead.
|
|
||||||
|
|
||||||
* Run openapi-generator:
|
|
||||||
* Either by [installing it](https://openapi-generator.tech/docs/installation) (requires Java) and running:
|
|
||||||
* `openapi-generator generate -i swagger.json -g typescript-node -o out --additional-properties supportsES6=true`
|
|
||||||
|
|
||||||
* Or by running the Docker image (works in Linux or PowerShell):
|
|
||||||
* `docker run --rm -v ${PWD}:/local openapitools/openapi-generator-cli generate -i /local/swagger.json -g typescript-node -o /local/out --additional-properties supportsES6=true`
|
|
||||||
|
|
||||||
3. Copy the generated clients (api.ts, api/, model/) to ./generated/<api_name>.
|
|
||||||
|
|
||||||
4. The generated clients contain some unused imports, which prevent the code from compiling. VS Code has an "Organize Imports" command (Shift + Alt + O) that fixes this, but only for a single file. To organize imports for all files in a folder, you can use the [Folder Source Actions extension](https://marketplace.visualstudio.com/items?itemName=bierner.folder-source-actions), followed by File -> Save All.
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
import * as request from 'request';
|
|
||||||
import * as vscode from 'vscode';
|
|
||||||
|
|
||||||
export interface Authentication {
|
|
||||||
applyToRequest(requestOptions: request.Options): Promise<void> | void;
|
|
||||||
}
|
|
||||||
|
|
||||||
class SslAuth implements Authentication {
|
|
||||||
constructor() { }
|
|
||||||
|
|
||||||
applyToRequest(requestOptions: request.Options): void {
|
|
||||||
requestOptions['agentOptions'] = {
|
|
||||||
rejectUnauthorized: !getIgnoreSslVerificationConfigSetting()
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export class BasicAuth extends SslAuth implements Authentication {
|
|
||||||
constructor(public username: string, public password: string) {
|
|
||||||
super();
|
|
||||||
}
|
|
||||||
|
|
||||||
applyToRequest(requestOptions: request.Options): void {
|
|
||||||
super.applyToRequest(requestOptions);
|
|
||||||
requestOptions.auth = {
|
|
||||||
username: this.username, password: this.password
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Retrieves the current setting for whether to ignore SSL verification errors */
|
|
||||||
export function getIgnoreSslVerificationConfigSetting(): boolean {
|
|
||||||
const arcConfigSectionName = 'arc';
|
|
||||||
const ignoreSslConfigName = 'ignoreSslVerification';
|
|
||||||
|
|
||||||
try {
|
|
||||||
const config = vscode.workspace.getConfiguration(arcConfigSectionName);
|
|
||||||
return config.get<boolean>(ignoreSslConfigName, true);
|
|
||||||
} catch (error) {
|
|
||||||
console.error(`Unexpected error retrieving ${arcConfigSectionName}.${ignoreSslConfigName} setting : ${error}`);
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
// This is the entrypoint for the package
|
|
||||||
export * from './api/apis';
|
|
||||||
export * from './model/models';
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
export * from './databaseRouterApi';
|
|
||||||
export * from './databaseValidateRouterApi';
|
|
||||||
export * from './logsRouterApi';
|
|
||||||
export * from './metricRouterApi';
|
|
||||||
export * from './operatorRouterApi';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as http from 'http';
|
|
||||||
import { DatabaseRouterApi } from './databaseRouterApi';
|
|
||||||
import { DatabaseValidateRouterApi } from './databaseValidateRouterApi';
|
|
||||||
import { LogsRouterApi } from './logsRouterApi';
|
|
||||||
import { MetricRouterApi } from './metricRouterApi';
|
|
||||||
import { OperatorRouterApi } from './operatorRouterApi';
|
|
||||||
|
|
||||||
export class HttpError extends Error {
|
|
||||||
constructor (public response: http.IncomingMessage, public body: any, public statusCode?: number) {
|
|
||||||
super('HTTP request failed');
|
|
||||||
this.name = 'HttpError';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface RequestDetailedFile {
|
|
||||||
value: Buffer;
|
|
||||||
options?: {
|
|
||||||
filename?: string;
|
|
||||||
contentType?: string;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export type RequestFile = string | Buffer | fs.ReadStream | RequestDetailedFile;
|
|
||||||
|
|
||||||
export const APIS = [DatabaseRouterApi, DatabaseValidateRouterApi, LogsRouterApi, MetricRouterApi, OperatorRouterApi];
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,260 +0,0 @@
|
|||||||
/**
|
|
||||||
* Dusky API
|
|
||||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
|
||||||
*
|
|
||||||
* The version of the OpenAPI document: v1
|
|
||||||
*
|
|
||||||
*
|
|
||||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
|
||||||
* https://openapi-generator.tech
|
|
||||||
* Do not edit the class manually.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import localVarRequest = require('request');
|
|
||||||
import http = require('http');
|
|
||||||
|
|
||||||
/* tslint:disable:no-unused-locals */
|
|
||||||
import { DuskyObjectModelsDatabaseService } from '../model/duskyObjectModelsDatabaseService';
|
|
||||||
import { DuskyObjectModelsDuskyValidationResult } from '../model/duskyObjectModelsDuskyValidationResult';
|
|
||||||
import { Authentication, HttpBasicAuth, HttpBearerAuth, Interceptor, ObjectSerializer, VoidAuth } from '../model/models';
|
|
||||||
import { HttpError } from './apis';
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
let defaultBasePath = 'https://10.135.16.138:30080';
|
|
||||||
|
|
||||||
// ===============================================
|
|
||||||
// This file is autogenerated - Please do not edit
|
|
||||||
// ===============================================
|
|
||||||
|
|
||||||
export enum DatabaseValidateRouterApiApiKeys {
|
|
||||||
}
|
|
||||||
|
|
||||||
export class DatabaseValidateRouterApi {
|
|
||||||
protected _basePath = defaultBasePath;
|
|
||||||
protected _defaultHeaders : any = {};
|
|
||||||
protected _useQuerystring : boolean = false;
|
|
||||||
|
|
||||||
protected authentications = {
|
|
||||||
'default': <Authentication>new VoidAuth(),
|
|
||||||
'BasicAuth': new HttpBasicAuth(),
|
|
||||||
'BearerAuth': new HttpBearerAuth(),
|
|
||||||
}
|
|
||||||
|
|
||||||
protected interceptors: Interceptor[] = [];
|
|
||||||
|
|
||||||
constructor(basePath?: string);
|
|
||||||
constructor(username: string, password: string, basePath?: string);
|
|
||||||
constructor(basePathOrUsername: string, password?: string, basePath?: string) {
|
|
||||||
if (password) {
|
|
||||||
this.username = basePathOrUsername;
|
|
||||||
this.password = password
|
|
||||||
if (basePath) {
|
|
||||||
this.basePath = basePath;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (basePathOrUsername) {
|
|
||||||
this.basePath = basePathOrUsername
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
set useQuerystring(value: boolean) {
|
|
||||||
this._useQuerystring = value;
|
|
||||||
}
|
|
||||||
|
|
||||||
set basePath(basePath: string) {
|
|
||||||
this._basePath = basePath;
|
|
||||||
}
|
|
||||||
|
|
||||||
set defaultHeaders(defaultHeaders: any) {
|
|
||||||
this._defaultHeaders = defaultHeaders;
|
|
||||||
}
|
|
||||||
|
|
||||||
get defaultHeaders() {
|
|
||||||
return this._defaultHeaders;
|
|
||||||
}
|
|
||||||
|
|
||||||
get basePath() {
|
|
||||||
return this._basePath;
|
|
||||||
}
|
|
||||||
|
|
||||||
public setDefaultAuthentication(auth: Authentication) {
|
|
||||||
this.authentications.default = auth;
|
|
||||||
}
|
|
||||||
|
|
||||||
public setApiKey(key: DatabaseValidateRouterApiApiKeys, value: string) {
|
|
||||||
(this.authentications as any)[DatabaseValidateRouterApiApiKeys[key]].apiKey = value;
|
|
||||||
}
|
|
||||||
|
|
||||||
set username(username: string) {
|
|
||||||
this.authentications.BasicAuth.username = username;
|
|
||||||
}
|
|
||||||
|
|
||||||
set password(password: string) {
|
|
||||||
this.authentications.BasicAuth.password = password;
|
|
||||||
}
|
|
||||||
|
|
||||||
set accessToken(accessToken: string | (() => string)) {
|
|
||||||
this.authentications.BearerAuth.accessToken = accessToken;
|
|
||||||
}
|
|
||||||
|
|
||||||
public addInterceptor(interceptor: Interceptor) {
|
|
||||||
this.interceptors.push(interceptor);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
* @summary Validate database service creation.
|
|
||||||
* @param duskyObjectModelsDatabaseService
|
|
||||||
*/
|
|
||||||
public async validateCreateDatabaseService (duskyObjectModelsDatabaseService?: DuskyObjectModelsDatabaseService, options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsDuskyValidationResult; }> {
|
|
||||||
const localVarPath = this.basePath + '/dusky/databases/validate';
|
|
||||||
let localVarQueryParameters: any = {};
|
|
||||||
let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
|
|
||||||
const produces = ['application/json'];
|
|
||||||
// give precedence to 'application/json'
|
|
||||||
if (produces.indexOf('application/json') >= 0) {
|
|
||||||
localVarHeaderParams.Accept = 'application/json';
|
|
||||||
} else {
|
|
||||||
localVarHeaderParams.Accept = produces.join(',');
|
|
||||||
}
|
|
||||||
let localVarFormParams: any = {};
|
|
||||||
|
|
||||||
(<any>Object).assign(localVarHeaderParams, options.headers);
|
|
||||||
|
|
||||||
let localVarUseFormData = false;
|
|
||||||
|
|
||||||
let localVarRequestOptions: localVarRequest.Options = {
|
|
||||||
method: 'POST',
|
|
||||||
qs: localVarQueryParameters,
|
|
||||||
headers: localVarHeaderParams,
|
|
||||||
uri: localVarPath,
|
|
||||||
useQuerystring: this._useQuerystring,
|
|
||||||
json: true,
|
|
||||||
body: ObjectSerializer.serialize(duskyObjectModelsDatabaseService, "DuskyObjectModelsDatabaseService")
|
|
||||||
};
|
|
||||||
|
|
||||||
let authenticationPromise = Promise.resolve();
|
|
||||||
if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
|
|
||||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
|
|
||||||
}
|
|
||||||
if (this.authentications.BearerAuth.accessToken) {
|
|
||||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
|
|
||||||
}
|
|
||||||
authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));
|
|
||||||
|
|
||||||
let interceptorPromise = authenticationPromise;
|
|
||||||
for (const interceptor of this.interceptors) {
|
|
||||||
interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
|
|
||||||
}
|
|
||||||
|
|
||||||
return interceptorPromise.then(() => {
|
|
||||||
if (Object.keys(localVarFormParams).length) {
|
|
||||||
if (localVarUseFormData) {
|
|
||||||
(<any>localVarRequestOptions).formData = localVarFormParams;
|
|
||||||
} else {
|
|
||||||
localVarRequestOptions.form = localVarFormParams;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return new Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsDuskyValidationResult; }>((resolve, reject) => {
|
|
||||||
localVarRequest(localVarRequestOptions, (error, response, body) => {
|
|
||||||
if (error) {
|
|
||||||
reject(error);
|
|
||||||
} else {
|
|
||||||
body = ObjectSerializer.deserialize(body, "DuskyObjectModelsDuskyValidationResult");
|
|
||||||
if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
|
|
||||||
resolve({ response: response, body: body });
|
|
||||||
} else {
|
|
||||||
reject(new HttpError(response, body, response.statusCode));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
*
|
|
||||||
* @summary Validate database service update.
|
|
||||||
* @param ns The namespace of the database service.
|
|
||||||
* @param name The name of the database service to update.
|
|
||||||
* @param duskyObjectModelsDatabaseService
|
|
||||||
*/
|
|
||||||
public async validateUpdateDatabaseService (ns: string, name: string, duskyObjectModelsDatabaseService?: DuskyObjectModelsDatabaseService, options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsDuskyValidationResult; }> {
|
|
||||||
const localVarPath = this.basePath + '/dusky/databases/validate/{ns}/{name}'
|
|
||||||
.replace('{' + 'ns' + '}', encodeURIComponent(String(ns)))
|
|
||||||
.replace('{' + 'name' + '}', encodeURIComponent(String(name)));
|
|
||||||
let localVarQueryParameters: any = {};
|
|
||||||
let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
|
|
||||||
const produces = ['application/json'];
|
|
||||||
// give precedence to 'application/json'
|
|
||||||
if (produces.indexOf('application/json') >= 0) {
|
|
||||||
localVarHeaderParams.Accept = 'application/json';
|
|
||||||
} else {
|
|
||||||
localVarHeaderParams.Accept = produces.join(',');
|
|
||||||
}
|
|
||||||
let localVarFormParams: any = {};
|
|
||||||
|
|
||||||
// verify required parameter 'ns' is not null or undefined
|
|
||||||
if (ns === null || ns === undefined) {
|
|
||||||
throw new Error('Required parameter ns was null or undefined when calling validateUpdateDatabaseService.');
|
|
||||||
}
|
|
||||||
|
|
||||||
// verify required parameter 'name' is not null or undefined
|
|
||||||
if (name === null || name === undefined) {
|
|
||||||
throw new Error('Required parameter name was null or undefined when calling validateUpdateDatabaseService.');
|
|
||||||
}
|
|
||||||
|
|
||||||
(<any>Object).assign(localVarHeaderParams, options.headers);
|
|
||||||
|
|
||||||
let localVarUseFormData = false;
|
|
||||||
|
|
||||||
let localVarRequestOptions: localVarRequest.Options = {
|
|
||||||
method: 'POST',
|
|
||||||
qs: localVarQueryParameters,
|
|
||||||
headers: localVarHeaderParams,
|
|
||||||
uri: localVarPath,
|
|
||||||
useQuerystring: this._useQuerystring,
|
|
||||||
json: true,
|
|
||||||
body: ObjectSerializer.serialize(duskyObjectModelsDatabaseService, "DuskyObjectModelsDatabaseService")
|
|
||||||
};
|
|
||||||
|
|
||||||
let authenticationPromise = Promise.resolve();
|
|
||||||
if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
|
|
||||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
|
|
||||||
}
|
|
||||||
if (this.authentications.BearerAuth.accessToken) {
|
|
||||||
authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
|
|
||||||
}
|
|
||||||
authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));
|
|
||||||
|
|
||||||
let interceptorPromise = authenticationPromise;
|
|
||||||
for (const interceptor of this.interceptors) {
|
|
||||||
interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
|
|
||||||
}
|
|
||||||
|
|
||||||
return interceptorPromise.then(() => {
|
|
||||||
if (Object.keys(localVarFormParams).length) {
|
|
||||||
if (localVarUseFormData) {
|
|
||||||
(<any>localVarRequestOptions).formData = localVarFormParams;
|
|
||||||
} else {
|
|
||||||
localVarRequestOptions.form = localVarFormParams;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return new Promise<{ response: http.IncomingMessage; body: DuskyObjectModelsDuskyValidationResult; }>((resolve, reject) => {
|
|
||||||
localVarRequest(localVarRequestOptions, (error, response, body) => {
|
|
||||||
if (error) {
|
|
||||||
reject(error);
|
|
||||||
} else {
|
|
||||||
body = ObjectSerializer.deserialize(body, "DuskyObjectModelsDuskyValidationResult");
|
|
||||||
if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
|
|
||||||
resolve({ response: response, body: body });
|
|
||||||
} else {
|
|
||||||
reject(new HttpError(response, body, response.statusCode));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,175 +0,0 @@
|
|||||||
/**
|
|
||||||
* Dusky API
|
|
||||||
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
|
|
||||||
*
|
|
||||||
* The version of the OpenAPI document: v1
|
|
||||||
*
|
|
||||||
*
|
|
||||||
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
|
|
||||||
* https://openapi-generator.tech
|
|
||||||
* Do not edit the class manually.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import localVarRequest = require('request');
|
|
||||||
import http = require('http');
|
|
||||||
|
|
||||||
/* tslint:disable:no-unused-locals */
|
|
||||||
import { LogsRequest } from '../model/logsRequest';
|
|
||||||
import { Authentication, HttpBasicAuth, HttpBearerAuth, Interceptor, ObjectSerializer, VoidAuth } from '../model/models';
|
|
||||||
import { HttpError } from './apis';
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
let defaultBasePath = 'https://10.135.16.138:30080';
|
|
||||||
|
|
||||||
// ===============================================
|
|
||||||
// This file is autogenerated - Please do not edit
|
|
||||||
// ===============================================
|
|
||||||
|
|
||||||
// API-key security schemes usable with LogsRouterApi.setApiKey().
// Intentionally empty: this API declares no apiKey-type schemes
// (only BasicAuth and BearerAuth — see the `authentications` map below).
export enum LogsRouterApiApiKeys {
}
|
|
||||||
|
|
||||||
/**
 * Auto-generated client for the logs router of the Dusky API.
 *
 * Supports HTTP Basic and Bearer authentication plus arbitrary request
 * interceptors; every request method builds a `request` options object,
 * funnels it through the auth/interceptor promise chain, then issues the
 * HTTP call.
 */
export class LogsRouterApi {
    // Base URL for all requests; defaults to the generator-baked endpoint.
    protected _basePath = defaultBasePath;
    // Headers merged into every request before per-call headers.
    protected _defaultHeaders : any = {};
    // When true, `request` serializes query params with the legacy
    // querystring module instead of qs.
    protected _useQuerystring : boolean = false;

    // Available authentication handlers. `default` (VoidAuth) is a no-op
    // unless replaced via setDefaultAuthentication().
    protected authentications = {
        'default': <Authentication>new VoidAuth(),
        'BasicAuth': new HttpBasicAuth(),
        'BearerAuth': new HttpBearerAuth(),
    }

    // Interceptors run in registration order after authentication,
    // each receiving the mutable request options.
    protected interceptors: Interceptor[] = [];

    constructor(basePath?: string);
    constructor(username: string, password: string, basePath?: string);
    // Overloaded: (basePath?) or (username, password, basePath?).
    // The presence of `password` disambiguates which overload was used.
    constructor(basePathOrUsername: string, password?: string, basePath?: string) {
        if (password) {
            // (username, password[, basePath]) form — routes through the
            // `username`/`password` setters into the BasicAuth handler.
            this.username = basePathOrUsername;
            this.password = password
            if (basePath) {
                this.basePath = basePath;
            }
        } else {
            // (basePath?) form.
            if (basePathOrUsername) {
                this.basePath = basePathOrUsername
            }
        }
    }

    set useQuerystring(value: boolean) {
        this._useQuerystring = value;
    }

    set basePath(basePath: string) {
        this._basePath = basePath;
    }

    set defaultHeaders(defaultHeaders: any) {
        this._defaultHeaders = defaultHeaders;
    }

    get defaultHeaders() {
        return this._defaultHeaders;
    }

    get basePath() {
        return this._basePath;
    }

    /**
     * Replaces the `default` authentication handler applied to every request.
     */
    public setDefaultAuthentication(auth: Authentication) {
        this.authentications.default = auth;
    }

    /**
     * Sets an API key on the named handler. Effectively unreachable here
     * since LogsRouterApiApiKeys has no members.
     */
    public setApiKey(key: LogsRouterApiApiKeys, value: string) {
        (this.authentications as any)[LogsRouterApiApiKeys[key]].apiKey = value;
    }

    set username(username: string) {
        this.authentications.BasicAuth.username = username;
    }

    set password(password: string) {
        this.authentications.BasicAuth.password = password;
    }

    // Accepts a static token or a provider function evaluated per request.
    set accessToken(accessToken: string | (() => string)) {
        this.authentications.BearerAuth.accessToken = accessToken;
    }

    /**
     * Registers an interceptor invoked (in order) on every request's
     * options after authentication has been applied.
     */
    public addInterceptor(interceptor: Interceptor) {
        this.interceptors.push(interceptor);
    }

    /**
     * POST /api/v1/logs
     * @summary Gets logs from Elasticsearch.
     * @param logsRequest optional query payload, serialized as the JSON body
     * @param options extra per-call headers (merged over defaults)
     * @returns promise of the raw response plus the deserialized body;
     *          rejects with the transport error or an HttpError for any
     *          non-2xx status.
     */
    public async apiV1LogsPost (logsRequest?: LogsRequest, options: {headers: {[name: string]: string}} = {headers: {}}) : Promise<{ response: http.IncomingMessage; body: object; }> {
        const localVarPath = this.basePath + '/api/v1/logs';
        let localVarQueryParameters: any = {};
        let localVarHeaderParams: any = (<any>Object).assign({}, this._defaultHeaders);
        const produces = ['application/json'];
        // give precedence to 'application/json'
        if (produces.indexOf('application/json') >= 0) {
            localVarHeaderParams.Accept = 'application/json';
        } else {
            localVarHeaderParams.Accept = produces.join(',');
        }
        // No form parameters for this endpoint; kept (empty) for the
        // generator's uniform form-vs-body handling below.
        let localVarFormParams: any = {};

        // Per-call headers override defaults of the same name.
        (<any>Object).assign(localVarHeaderParams, options.headers);

        let localVarUseFormData = false;

        let localVarRequestOptions: localVarRequest.Options = {
            method: 'POST',
            qs: localVarQueryParameters,
            headers: localVarHeaderParams,
            uri: localVarPath,
            useQuerystring: this._useQuerystring,
            json: true,
            body: ObjectSerializer.serialize(logsRequest, "LogsRequest")
        };

        // Apply auth handlers sequentially: Basic (if configured), then
        // Bearer (if configured), then the default handler — each may
        // mutate localVarRequestOptions.
        let authenticationPromise = Promise.resolve();
        if (this.authentications.BasicAuth.username && this.authentications.BasicAuth.password) {
            authenticationPromise = authenticationPromise.then(() => this.authentications.BasicAuth.applyToRequest(localVarRequestOptions));
        }
        if (this.authentications.BearerAuth.accessToken) {
            authenticationPromise = authenticationPromise.then(() => this.authentications.BearerAuth.applyToRequest(localVarRequestOptions));
        }
        authenticationPromise = authenticationPromise.then(() => this.authentications.default.applyToRequest(localVarRequestOptions));

        // Interceptors run after auth, in registration order.
        let interceptorPromise = authenticationPromise;
        for (const interceptor of this.interceptors) {
            interceptorPromise = interceptorPromise.then(() => interceptor(localVarRequestOptions));
        }

        return interceptorPromise.then(() => {
            // Attach form params only if any exist (never the case for this
            // endpoint, but preserved from the generator template).
            if (Object.keys(localVarFormParams).length) {
                if (localVarUseFormData) {
                    (<any>localVarRequestOptions).formData = localVarFormParams;
                } else {
                    localVarRequestOptions.form = localVarFormParams;
                }
            }
            // Adapt the callback-style `request` API into a Promise.
            return new Promise<{ response: http.IncomingMessage; body: object; }>((resolve, reject) => {
                localVarRequest(localVarRequestOptions, (error, response, body) => {
                    if (error) {
                        reject(error);
                    } else {
                        body = ObjectSerializer.deserialize(body, "object");
                        if (response.statusCode && response.statusCode >= 200 && response.statusCode <= 299) {
                            resolve({ response: response, body: body });
                        } else {
                            // Non-2xx (or missing status) surfaces as HttpError.
                            reject(new HttpError(response, body, response.statusCode));
                        }
                    }
                });
            });
        });
    }
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user