mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-25 11:01:36 -05:00
Compare commits
546 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4a45ba7cf2 | ||
|
|
3b23809846 | ||
|
|
0ff54a11df | ||
|
|
ce547fa4b6 | ||
|
|
d47b5f1afb | ||
|
|
66ef175501 | ||
|
|
f92db4ae76 | ||
|
|
c520c009a5 | ||
|
|
fd148e557b | ||
|
|
b0d3d06b5d | ||
|
|
2fbfb2ad61 | ||
|
|
4b12216c07 | ||
|
|
98dc71a08b | ||
|
|
05f8bb94f3 | ||
|
|
de027df955 | ||
|
|
4a4888891b | ||
|
|
0b2d353cd2 | ||
|
|
2dd4232a40 | ||
|
|
dc0ccba767 | ||
|
|
a4bccb6e6c | ||
|
|
f1b9931116 | ||
|
|
0f229b3444 | ||
|
|
1a97516cb6 | ||
|
|
9ee941eb12 | ||
|
|
512c2d3ddc | ||
|
|
57f5e04590 | ||
|
|
fddfb72e10 | ||
|
|
90ac21ccfb | ||
|
|
310d651df8 | ||
|
|
63c461dca8 | ||
|
|
6c40f52f35 | ||
|
|
013ce71166 | ||
|
|
3a4caa64d2 | ||
|
|
aeabb325f6 | ||
|
|
b8da94f9ef | ||
|
|
e6f356accc | ||
|
|
991b9af198 | ||
|
|
09a4dfa5b0 | ||
|
|
8ed463b4d7 | ||
|
|
3b0cf9db50 | ||
|
|
9f761c44c4 | ||
|
|
c6308b77df | ||
|
|
33baaa475d | ||
|
|
35983659b1 | ||
|
|
2b103a79c3 | ||
|
|
99b5c5ce8c | ||
|
|
84d85ba43e | ||
|
|
7b709b37cd | ||
|
|
3d7edd2d6a | ||
|
|
cf97ced7f1 | ||
|
|
0060b0e27b | ||
|
|
47b249a7b1 | ||
|
|
c4ef48dcd8 | ||
|
|
366fe7c162 | ||
|
|
d189805bcc | ||
|
|
aef4474a08 | ||
|
|
5e34982fd9 | ||
|
|
43158a60e3 | ||
|
|
e3d672cea1 | ||
|
|
0567141bc4 | ||
|
|
6985d95300 | ||
|
|
e2dd257fa9 | ||
|
|
da7585eb44 | ||
|
|
2f8d00af56 | ||
|
|
7cdb21cca5 | ||
|
|
b35e78a07f | ||
|
|
052cb54199 | ||
|
|
42b1a10fec | ||
|
|
d28d77dbfc | ||
|
|
aadf2ae081 | ||
|
|
760cf01022 | ||
|
|
97978cbe81 | ||
|
|
466193adbe | ||
|
|
56ad631478 | ||
|
|
919cc732b7 | ||
|
|
244e27c2de | ||
|
|
0a181a1ba8 | ||
|
|
045dc3e558 | ||
|
|
30393a1f1b | ||
|
|
748bb53173 | ||
|
|
373a519f25 | ||
|
|
3af2b4a13d | ||
|
|
91676afd0d | ||
|
|
914fe8fc29 | ||
|
|
65cc61fdbd | ||
|
|
83af84774a | ||
|
|
b3e9428898 | ||
|
|
2427cbe3c6 | ||
|
|
179678b495 | ||
|
|
a7c1bcaf93 | ||
|
|
3362462142 | ||
|
|
87cc568493 | ||
|
|
8a67f87090 | ||
|
|
47151435e7 | ||
|
|
30dffdf696 | ||
|
|
68a22421f7 | ||
|
|
d6fd64c5eb | ||
|
|
245ae5b9ee | ||
|
|
788c84a1ee | ||
|
|
aafe0876bb | ||
|
|
e3c7e06983 | ||
|
|
251d250523 | ||
|
|
f4d0bdc784 | ||
|
|
0bbbb91adf | ||
|
|
39f65b1881 | ||
|
|
0e9d956ee5 | ||
|
|
01671b118d | ||
|
|
ef3d2e7d99 | ||
|
|
1b073c6748 | ||
|
|
daa897936b | ||
|
|
eaf9757565 | ||
|
|
d1d858090c | ||
|
|
c178b6327a | ||
|
|
c32c09e1a7 | ||
|
|
25e237fa35 | ||
|
|
118d03c151 | ||
|
|
27c86e3c45 | ||
|
|
07ad50670e | ||
|
|
8afd420971 | ||
|
|
5ff102d531 | ||
|
|
63a65f5821 | ||
|
|
2d8e0d648a | ||
|
|
7a35d4aeeb | ||
|
|
35207a1e04 | ||
|
|
7c14ec2b6d | ||
|
|
f8da3cc32a | ||
|
|
f01e9e2fc0 | ||
|
|
b2c203eaef | ||
|
|
24349885d3 | ||
|
|
2e9eff7ffc | ||
|
|
6a08af4d9a | ||
|
|
a0f56890b5 | ||
|
|
70fc6bd43d | ||
|
|
c60bcc0d0d | ||
|
|
c3c6d8ee8c | ||
|
|
9a71846e22 | ||
|
|
e4db31b334 | ||
|
|
63dc94009e | ||
|
|
77d397ce18 | ||
|
|
6aeea8f1df | ||
|
|
fdb426cda5 | ||
|
|
f72a252fb0 | ||
|
|
6fcfa93329 | ||
|
|
c4ce3bef8d | ||
|
|
9fda448303 | ||
|
|
01aefe7b9f | ||
|
|
cb8c4b80d0 | ||
|
|
668ab43865 | ||
|
|
be0edf9606 | ||
|
|
151522013f | ||
|
|
66c62fcce3 | ||
|
|
699648ff6d | ||
|
|
7968d51172 | ||
|
|
2e8d9c50d4 | ||
|
|
880cfc3b59 | ||
|
|
c2d45fa01f | ||
|
|
c0dd781d77 | ||
|
|
df5ed2c889 | ||
|
|
107023c7d0 | ||
|
|
ce4fa98691 | ||
|
|
b2a9074a25 | ||
|
|
c082b572d5 | ||
|
|
0509f8f0c3 | ||
|
|
88d28b7d51 | ||
|
|
df177ec779 | ||
|
|
64d432c8e2 | ||
|
|
380457122c | ||
|
|
7d6d8dbe96 | ||
|
|
f2dcfacc8c | ||
|
|
fd954ddcb2 | ||
|
|
c5c7ca019d | ||
|
|
6d4608dd8b | ||
|
|
f390c4cbc2 | ||
|
|
d031211693 | ||
|
|
c1f4c50177 | ||
|
|
87633faaa4 | ||
|
|
dd4e87ed41 | ||
|
|
b472539646 | ||
|
|
b2a2a48ed6 | ||
|
|
a3a06b92e8 | ||
|
|
3b0fff63d4 | ||
|
|
2bc6a881bd | ||
|
|
efa82650f8 | ||
|
|
d1892b514f | ||
|
|
32da4219a9 | ||
|
|
fb16924f93 | ||
|
|
94b99c7862 | ||
|
|
78d905a217 | ||
|
|
be3d966cf0 | ||
|
|
ba6359e1ff | ||
|
|
43cf19e316 | ||
|
|
1eb03404ad | ||
|
|
a322c5be9d | ||
|
|
d663ec6129 | ||
|
|
88fd0cae3b | ||
|
|
3e2bf7b9fa | ||
|
|
3ce6f9e78c | ||
|
|
efaf39f96a | ||
|
|
7c82d0291e | ||
|
|
86c2547b22 | ||
|
|
4a51cb2020 | ||
|
|
260fdac944 | ||
|
|
85082dee75 | ||
|
|
a9b338b5c4 | ||
|
|
f92aa1ead8 | ||
|
|
d4e367e4f9 | ||
|
|
57df7e706f | ||
|
|
b58c19684f | ||
|
|
d0c7028d97 | ||
|
|
dc471faa7a | ||
|
|
4aaa7eae29 | ||
|
|
1000e97091 | ||
|
|
9b31e7beac | ||
|
|
f0b158edda | ||
|
|
9f29efba85 | ||
|
|
de0719d91a | ||
|
|
b9041b0afe | ||
|
|
adb0ec3cab | ||
|
|
3eefc70cbe | ||
|
|
a7c6a98ad9 | ||
|
|
87f5f4edfc | ||
|
|
08d6b71929 | ||
|
|
aba576dd2f | ||
|
|
2c2e2bb984 | ||
|
|
07567d2514 | ||
|
|
9cbcf9e2c6 | ||
|
|
e0cad0231d | ||
|
|
379c60dd27 | ||
|
|
eaba5679d4 | ||
|
|
fdaf29ccb4 | ||
|
|
fa21781df2 | ||
|
|
f978331e7b | ||
|
|
8aedeab9d4 | ||
|
|
669987ccf3 | ||
|
|
2984244377 | ||
|
|
fbb17c047c | ||
|
|
36e1aeb43d | ||
|
|
62d86163c9 | ||
|
|
5171c37b6a | ||
|
|
05f1f84872 | ||
|
|
3d8fb454c0 | ||
|
|
430e2c9916 | ||
|
|
e026b743ab | ||
|
|
c170fdfc68 | ||
|
|
d0ecb292f7 | ||
|
|
7d3fa81d3a | ||
|
|
c1e365bdc8 | ||
|
|
c6f8f6ffd0 | ||
|
|
43e9f6da8c | ||
|
|
5fb7f9e452 | ||
|
|
64ea6c569a | ||
|
|
efdf38dbc4 | ||
|
|
1fe6939f92 | ||
|
|
8332fc6b1d | ||
|
|
7669ad0180 | ||
|
|
e6fd98caa8 | ||
|
|
814f5bea3a | ||
|
|
2e81300f9e | ||
|
|
eed495ef64 | ||
|
|
105636dda6 | ||
|
|
f4ef8d1374 | ||
|
|
01a594ea83 | ||
|
|
f0f6aee89b | ||
|
|
36286f7df6 | ||
|
|
ed5a2ee90d | ||
|
|
0483c5c8b2 | ||
|
|
7f614b0fde | ||
|
|
7c5de73fde | ||
|
|
e499fd9e80 | ||
|
|
a8e4d01ff0 | ||
|
|
ba0261eb98 | ||
|
|
dbf15745d0 | ||
|
|
6e4c7a1075 | ||
|
|
c85ad7b7d5 | ||
|
|
b705e1ca61 | ||
|
|
bc1bf60a98 | ||
|
|
6837b4b801 | ||
|
|
f127b7d94a | ||
|
|
d1d9795965 | ||
|
|
83300acb38 | ||
|
|
24b45a9baf | ||
|
|
4e67aa86d8 | ||
|
|
0dbde9e9b1 | ||
|
|
712633fadd | ||
|
|
7a419426ad | ||
|
|
f17689319c | ||
|
|
ee0896ea5d | ||
|
|
acea03ea61 | ||
|
|
d88c49702a | ||
|
|
5e29c936d7 | ||
|
|
0474a5ca9e | ||
|
|
d09ba43d15 | ||
|
|
164b100421 | ||
|
|
2d2c568609 | ||
|
|
ff6e377477 | ||
|
|
63e97caa94 | ||
|
|
eed792f3db | ||
|
|
3847271e67 | ||
|
|
f53a06a403 | ||
|
|
78a144b5ca | ||
|
|
3f047ae15a | ||
|
|
6df69f525c | ||
|
|
669623a228 | ||
|
|
96efba004d | ||
|
|
dc0eb133f9 | ||
|
|
ac2198c7d3 | ||
|
|
49519232ba | ||
|
|
8c9cc03c89 | ||
|
|
c4830d9efb | ||
|
|
5923793f0c | ||
|
|
126a0383ce | ||
|
|
c70d5b957e | ||
|
|
245156a66c | ||
|
|
ebb02de1c4 | ||
|
|
f662d480e7 | ||
|
|
49dbce5171 | ||
|
|
b3d8e522f7 | ||
|
|
4c421e0a38 | ||
|
|
4275fe89a7 | ||
|
|
9c3d88b64a | ||
|
|
0bf574c227 | ||
|
|
5059c94adc | ||
|
|
2a74ad4190 | ||
|
|
b45f3e7218 | ||
|
|
0ff8786885 | ||
|
|
1e81b6f054 | ||
|
|
785839baac | ||
|
|
d03fbbc066 | ||
|
|
06da33bb3b | ||
|
|
202036ca47 | ||
|
|
8530bf214e | ||
|
|
8faf115329 | ||
|
|
1bd1c64b08 | ||
|
|
68e0f86120 | ||
|
|
43183c90a1 | ||
|
|
b497063482 | ||
|
|
63d4cc0e80 | ||
|
|
351a55121d | ||
|
|
f8583f53c5 | ||
|
|
39aefa7e29 | ||
|
|
a4ffa64918 | ||
|
|
181cad5b75 | ||
|
|
4fd890e651 | ||
|
|
b5411f0f6f | ||
|
|
5496e9ac33 | ||
|
|
5a428b83ae | ||
|
|
1308878650 | ||
|
|
c85e164836 | ||
|
|
c72a194bc3 | ||
|
|
c131c252cd | ||
|
|
72c0d30517 | ||
|
|
ba87c8e245 | ||
|
|
11f5ddf062 | ||
|
|
8faa0cf0e2 | ||
|
|
f7bf914bcb | ||
|
|
d5f52ba53d | ||
|
|
f059612bd0 | ||
|
|
52c21a7885 | ||
|
|
743fd9f97c | ||
|
|
e7f614ebf7 | ||
|
|
bd1633c04c | ||
|
|
5524a3659c | ||
|
|
2785538afb | ||
|
|
0438e9cd41 | ||
|
|
601a4aaed1 | ||
|
|
a0f46fec65 | ||
|
|
a7311764be | ||
|
|
7fc69cc4d5 | ||
|
|
32a5ec3cd0 | ||
|
|
b4ab73a636 | ||
|
|
625d4bc4bf | ||
|
|
45d664fea2 | ||
|
|
cf05dc0016 | ||
|
|
5c17529e40 | ||
|
|
f5e38482c3 | ||
|
|
e3dc417df4 | ||
|
|
affe3a838b | ||
|
|
9fc2cff654 | ||
|
|
32ba55b7ed | ||
|
|
8b383294f7 | ||
|
|
2b8ae507aa | ||
|
|
f7fc145b0c | ||
|
|
6baf2ee982 | ||
|
|
ba20cdb885 | ||
|
|
d8433ddbf4 | ||
|
|
1c1da18f45 | ||
|
|
862c91d7d3 | ||
|
|
313d5b026e | ||
|
|
263f72523f | ||
|
|
fd55651637 | ||
|
|
67a0585dc0 | ||
|
|
29efd0a0e2 | ||
|
|
f1bd5e09ce | ||
|
|
6c5d35eaae | ||
|
|
e206eb81a3 | ||
|
|
679e3d1393 | ||
|
|
0045193262 | ||
|
|
27823e9900 | ||
|
|
8f202d91b6 | ||
|
|
7ba0e49673 | ||
|
|
c69c303a2b | ||
|
|
74cfac39d2 | ||
|
|
922348b846 | ||
|
|
1a32db1343 | ||
|
|
37823db9c9 | ||
|
|
c5893a488f | ||
|
|
3f0ea580f5 | ||
|
|
9a8c9041df | ||
|
|
f8c4d332bb | ||
|
|
64cee64516 | ||
|
|
d942799f9d | ||
|
|
6078e9f459 | ||
|
|
14ce88023e | ||
|
|
073f9e052b | ||
|
|
9ba4057a3c | ||
|
|
7c68506975 | ||
|
|
48b7d96999 | ||
|
|
9ac1f16cea | ||
|
|
19e25f04b1 | ||
|
|
9f77c74b9f | ||
|
|
f894dad38b | ||
|
|
ba3ab201c8 | ||
|
|
24b77a6a40 | ||
|
|
e8ba89750b | ||
|
|
225788a121 | ||
|
|
b1a9d6baa1 | ||
|
|
5bc5816a25 | ||
|
|
30462dfb25 | ||
|
|
ca07671a68 | ||
|
|
91d51a43cd | ||
|
|
38a6fc4d60 | ||
|
|
41bac47cbd | ||
|
|
a6644333c0 | ||
|
|
7cf7ca5d15 | ||
|
|
0e10e3e3ca | ||
|
|
667cf760f0 | ||
|
|
e72ba26411 | ||
|
|
7ce791d826 | ||
|
|
66c1fdc457 | ||
|
|
6e814d9ff0 | ||
|
|
ec057ba8c1 | ||
|
|
3f4e7f8c36 | ||
|
|
8ddb6ca7e8 | ||
|
|
aa3b4f0806 | ||
|
|
1538adf5ed | ||
|
|
31e9c21c06 | ||
|
|
10ad6f7119 | ||
|
|
aee24b202d | ||
|
|
7ccbe61a1a | ||
|
|
dac1bdafd7 | ||
|
|
5fa53c3519 | ||
|
|
a9e8885fe6 | ||
|
|
189fe762a9 | ||
|
|
c3e0958f2d | ||
|
|
edc319a90c | ||
|
|
ff1a642157 | ||
|
|
7c6368b4b6 | ||
|
|
768bf47aec | ||
|
|
33ff661c6f | ||
|
|
1caef2dc6e | ||
|
|
3eef3f2a0b | ||
|
|
0d2710341a | ||
|
|
a1652a5ecb | ||
|
|
8c6be27254 | ||
|
|
ae7e69381a | ||
|
|
0a3ed1a63a | ||
|
|
478a2bf64b | ||
|
|
e639a94dda | ||
|
|
f7a723d98d | ||
|
|
a180a6bc47 | ||
|
|
3a3d7f5271 | ||
|
|
e21f56d719 | ||
|
|
4707c1601c | ||
|
|
c636e24d03 | ||
|
|
3634110974 | ||
|
|
6c6a0506b2 | ||
|
|
00361e52a2 | ||
|
|
1e2cb1cdf9 | ||
|
|
ec2f2b19fb | ||
|
|
1dc4a437eb | ||
|
|
49562239a0 | ||
|
|
a35f8ffc52 | ||
|
|
999acca745 | ||
|
|
90b90afeab | ||
|
|
adbf40859f | ||
|
|
3cc8d29821 | ||
|
|
248e5aa40d | ||
|
|
0e1a8ab8ea | ||
|
|
22dcf7777c | ||
|
|
207312bd96 | ||
|
|
acfa5e1d1b | ||
|
|
38edcae32c | ||
|
|
03f31a865a | ||
|
|
d92c1d5ca8 | ||
|
|
2eab870ca2 | ||
|
|
b8dbd3f0dc | ||
|
|
06e7d4d489 | ||
|
|
d08182416b | ||
|
|
aeda95bb70 | ||
|
|
4c2f6eafe0 | ||
|
|
488ccea731 | ||
|
|
35832e83da | ||
|
|
efd752aafe | ||
|
|
df296939a8 | ||
|
|
d96835c083 | ||
|
|
331f8115dc | ||
|
|
d871efc079 | ||
|
|
2ec6a689d3 | ||
|
|
cae598d36c | ||
|
|
7c3a7e2646 | ||
|
|
3cb2f552a6 | ||
|
|
fdcb97c7f7 | ||
|
|
1231be124a | ||
|
|
be1ff8e37b | ||
|
|
4c039f7a88 | ||
|
|
3315214435 | ||
|
|
e4e703151f | ||
|
|
5bd1552a59 | ||
|
|
a8f3d97f59 | ||
|
|
24cd2106a1 | ||
|
|
6c23007a79 | ||
|
|
ebe24046de | ||
|
|
c9850fe59f | ||
|
|
db3a0ef7ac | ||
|
|
6a7e1dfd27 | ||
|
|
902b5dc1fa | ||
|
|
b8509d8b7c | ||
|
|
b413de153c | ||
|
|
ddc6b2fdd8 | ||
|
|
eea8c61b6e | ||
|
|
922d1287ac | ||
|
|
87b8f1a1aa | ||
|
|
05f23fdd5e | ||
|
|
69a990469d | ||
|
|
6506c9ca1f | ||
|
|
5374129350 | ||
|
|
c8ef4ee47a | ||
|
|
7256bff094 | ||
|
|
95704dc45e | ||
|
|
75dd19bb82 | ||
|
|
ecd4bd6864 | ||
|
|
a2d348ccc4 | ||
|
|
d91660b66f | ||
|
|
a61462a2c0 | ||
|
|
d6d2e7dc08 | ||
|
|
d8b693341e |
@@ -32,17 +32,17 @@ Next: **[Try it out!](#try-it)**
|
|||||||
|
|
||||||
## Quick start - GitHub Codespaces
|
## Quick start - GitHub Codespaces
|
||||||
|
|
||||||
> **IMPORTANT:** The current free user beta for GitHub Codespaces uses a "Basic" sized codespace which does not have enough RAM to run a full build of VS Code and will be considerably slower during codespace start and running VS Code. You'll soon be able to use a "Standard" sized codespace (4-core, 8GB) that will be better suited for this purpose (along with even larger sizes should you need it).
|
> **IMPORTANT:** You need to use a "Standard" sized codespace or larger (4-core, 8GB) since VS Code needs 6GB of RAM to compile. This is now the default for GitHub Codespaces, but do not downgrade to "Basic" unless you do not intend to compile.
|
||||||
|
|
||||||
1. From the [microsoft/vscode GitHub repository](https://github.com/microsoft/vscode), click on the **Code** dropdown, select **Open with Codespaces**, and the **New codespace**
|
1. From the [microsoft/vscode GitHub repository](https://github.com/microsoft/vscode), click on the **Code** dropdown, select **Open with Codespaces**, and the **New codespace**
|
||||||
|
|
||||||
> Note that you will not see these options if you are not in the beta yet.
|
> Note that you will not see these options if you are not in the beta yet.
|
||||||
|
|
||||||
2. After the codespace is up and running in your browser, press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> and select **View: Show Remote Explorer**.
|
2. After the codespace is up and running in your browser, press <kbd>F1</kbd> and select **Ports: Focus on Ports View**.
|
||||||
|
|
||||||
3. You should see port `6080` under **Forwarded Ports**. Select the line and click on the globe icon to open it in a browser tab.
|
3. You should see port `6080` under **Forwarded Ports**. Select the line and click on the globe icon to open it in a browser tab.
|
||||||
|
|
||||||
> If you do not see port `6080`, press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd>, select **Forward a Port** and enter port `6080`.
|
> If you do not see port `6080`, press <kbd>F1</kbd>, select **Forward a Port** and enter port `6080`.
|
||||||
|
|
||||||
4. In the new tab, you should see noVNC. Click **Connect** and enter `vscode` as the password.
|
4. In the new tab, you should see noVNC. Click **Connect** and enter `vscode` as the password.
|
||||||
|
|
||||||
@@ -58,7 +58,7 @@ You will likely see better performance when accessing the codespace you created
|
|||||||
|
|
||||||
2. Set up [VS Code for use with GitHub Codespaces](https://docs.github.com/github/developing-online-with-codespaces/using-codespaces-in-visual-studio-code)
|
2. Set up [VS Code for use with GitHub Codespaces](https://docs.github.com/github/developing-online-with-codespaces/using-codespaces-in-visual-studio-code)
|
||||||
|
|
||||||
3. After the VS Code is up and running, press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd>, choose **Codespaces: Connect to Codespace**, and select the codespace you created.
|
3. After the VS Code is up and running, press <kbd>F1</kbd>, choose **Codespaces: Connect to Codespace**, and select the codespace you created.
|
||||||
|
|
||||||
4. After you've connected to the codespace, use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
|
4. After you've connected to the codespace, use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.
|
||||||
|
|
||||||
|
|||||||
2
.devcontainer/cache/build-cache-image.sh
vendored
2
.devcontainer/cache/build-cache-image.sh
vendored
@@ -8,7 +8,7 @@ set -e
|
|||||||
|
|
||||||
SCRIPT_PATH="$(cd "$(dirname $0)" && pwd)"
|
SCRIPT_PATH="$(cd "$(dirname $0)" && pwd)"
|
||||||
CONTAINER_IMAGE_REPOSITORY="$1"
|
CONTAINER_IMAGE_REPOSITORY="$1"
|
||||||
BRANCH="${2:-"master"}"
|
BRANCH="${2:-"main"}"
|
||||||
|
|
||||||
if [ "${CONTAINER_IMAGE_REPOSITORY}" = "" ]; then
|
if [ "${CONTAINER_IMAGE_REPOSITORY}" = "" ]; then
|
||||||
echo "Container repository not specified!"
|
echo "Container repository not specified!"
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
"name": "Code - OSS",
|
"name": "Code - OSS",
|
||||||
|
|
||||||
// Image contents: https://github.com/microsoft/vscode-dev-containers/blob/master/repository-containers/images/github.com/microsoft/vscode/.devcontainer/base.Dockerfile
|
// Image contents: https://github.com/microsoft/vscode-dev-containers/blob/master/repository-containers/images/github.com/microsoft/vscode/.devcontainer/base.Dockerfile
|
||||||
"image": "mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:branch-master",
|
"image": "mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:branch-main",
|
||||||
|
|
||||||
"workspaceMount": "source=${localWorkspaceFolder},target=/home/node/workspace/vscode,type=bind,consistency=cached",
|
"workspaceMount": "source=${localWorkspaceFolder},target=/home/node/workspace/vscode,type=bind,consistency=cached",
|
||||||
"workspaceFolder": "/home/node/workspace/vscode",
|
"workspaceFolder": "/home/node/workspace/vscode",
|
||||||
|
|||||||
@@ -12,6 +12,9 @@
|
|||||||
**/vscode-api-tests/testWorkspace2/**
|
**/vscode-api-tests/testWorkspace2/**
|
||||||
**/extensions/**/out/**
|
**/extensions/**/out/**
|
||||||
**/extensions/**/build/**
|
**/extensions/**/build/**
|
||||||
|
**/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts
|
||||||
|
**/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts
|
||||||
**/extensions/markdown-language-features/media/**
|
**/extensions/markdown-language-features/media/**
|
||||||
|
**/extensions/markdown-language-features/notebook-out/**
|
||||||
**/extensions/typescript-basics/test/colorize-fixtures/**
|
**/extensions/typescript-basics/test/colorize-fixtures/**
|
||||||
**/extensions/**/dist/**
|
**/extensions/**/dist/**
|
||||||
|
|||||||
2124
.eslintrc.json
2124
.eslintrc.json
File diff suppressed because it is too large
Load Diff
9
.github/CODEOWNERS
vendored
9
.github/CODEOWNERS
vendored
@@ -3,10 +3,12 @@
|
|||||||
# Syntax can be found here: https://docs.github.com/free-pro-team@latest/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
|
# Syntax can be found here: https://docs.github.com/free-pro-team@latest/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
|
||||||
|
|
||||||
/extensions/admin-tool-ext-win @Charles-Gagnon
|
/extensions/admin-tool-ext-win @Charles-Gagnon
|
||||||
/extensions/arc/ @Charles-Gagnon
|
/extensions/arc/ @Charles-Gagnon @swells @candiceye
|
||||||
/extensions/azdata/ @Charles-Gagnon
|
/extensions/azcli/ @Charles-Gagnon @swells @candiceye
|
||||||
|
/extensions/azdata/ @Charles-Gagnon @swells @candiceye
|
||||||
/extensions/big-data-cluster/ @Charles-Gagnon
|
/extensions/big-data-cluster/ @Charles-Gagnon
|
||||||
/extensions/dacpac/ @kisantia
|
/extensions/dacpac/ @kisantia
|
||||||
|
/extensions/notebook @azure-data-studio-notebook-devs
|
||||||
/extensions/query-history/ @Charles-Gagnon
|
/extensions/query-history/ @Charles-Gagnon
|
||||||
/extensions/resource-deployment/ @Charles-Gagnon
|
/extensions/resource-deployment/ @Charles-Gagnon
|
||||||
/extensions/schema-compare/ @kisantia
|
/extensions/schema-compare/ @kisantia
|
||||||
@@ -14,3 +16,6 @@
|
|||||||
/extensions/mssql/config.json @Charles-Gagnon @alanrenmsft @kburtram
|
/extensions/mssql/config.json @Charles-Gagnon @alanrenmsft @kburtram
|
||||||
|
|
||||||
/src/sql/*.d.ts @alanrenmsft @Charles-Gagnon
|
/src/sql/*.d.ts @alanrenmsft @Charles-Gagnon
|
||||||
|
/src/sql/workbench/browser/modelComponents @Charles-Gagnon @alanrenmsft
|
||||||
|
/src/sql/workbench/api @Charles-Gagnon @alanrenmsft
|
||||||
|
/src/sql/**/notebook @azure-data-studio-notebook-devs
|
||||||
|
|||||||
14
.github/ISSUE_TEMPLATE/bug_report.md
vendored
14
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -8,12 +8,18 @@ assignees: ''
|
|||||||
---
|
---
|
||||||
<!-- ⚠️⚠️ Do Not Delete This! bug_report_template ⚠️⚠️ -->
|
<!-- ⚠️⚠️ Do Not Delete This! bug_report_template ⚠️⚠️ -->
|
||||||
<!-- Please read our Rules of Conduct: https://opensource.microsoft.com/codeofconduct/ -->
|
<!-- Please read our Rules of Conduct: https://opensource.microsoft.com/codeofconduct/ -->
|
||||||
<!-- Please search existing issues to avoid creating duplicates. -->
|
<!-- 🔎 Search existing issues to avoid creating duplicates. -->
|
||||||
<!-- Also please test using the latest insiders build to make sure your issue has not already been fixed. -->
|
<!-- 🧪 Test using the latest Insiders build to see if your issue has already been fixed: https://github.com/Microsoft/azuredatastudio#try-out-the-latest-insiders-build-from-main -->
|
||||||
|
<!-- 💡 Instead of creating your report here, use 'Report Issue' from the 'Help' menu in Azure Data Studio to pre-fill useful information. -->
|
||||||
<!-- Use Help > Report Issue to prefill these. -->
|
|
||||||
- Azure Data Studio Version:
|
- Azure Data Studio Version:
|
||||||
|
- OS Version:
|
||||||
|
|
||||||
Steps to Reproduce:
|
Steps to Reproduce:
|
||||||
|
|
||||||
1.
|
1.
|
||||||
|
2.
|
||||||
|
|
||||||
|
<!-- 🔧 Launch with `azuredatastudio --disable-extensions` to check. -->
|
||||||
|
Does this issue occur when all extensions are disabled?: Yes/No
|
||||||
|
|
||||||
|
<!-- 📣 Issues caused by an extension need to be reported directly to the extension publisher. The 'Help > Report Issue' dialog can assist with this. -->
|
||||||
|
|||||||
35
.github/workflows/build-chat.yml
vendored
35
.github/workflows/build-chat.yml
vendored
@@ -1,35 +0,0 @@
|
|||||||
name: "Build Chat"
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_run:
|
|
||||||
workflows:
|
|
||||||
- CI
|
|
||||||
types:
|
|
||||||
- completed
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
- release/*
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
main:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout Actions
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: "microsoft/vscode-github-triage-actions"
|
|
||||||
path: ./actions
|
|
||||||
- name: Install Actions
|
|
||||||
run: npm install --production --prefix ./actions
|
|
||||||
- name: Install Additional Dependencies
|
|
||||||
# Pulls in a bunch of other packages that arent needed for the rest of the actions
|
|
||||||
run: npm install @azure/storage-blob@12.1.1
|
|
||||||
- name: Build Chat
|
|
||||||
uses: ./actions/build-chat
|
|
||||||
with:
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
slack_token: ${{ secrets.SLACK_TOKEN }}
|
|
||||||
storage_connection_string: ${{ secrets.BUILD_CHAT_STORAGE_CONNECTION_STRING }}
|
|
||||||
workflow_run_url: ${{ github.event.workflow_run.url }}
|
|
||||||
notification_channel: build
|
|
||||||
log_channel: bot-log
|
|
||||||
397
.github/workflows/ci.yml
vendored
397
.github/workflows/ci.yml
vendored
@@ -11,161 +11,304 @@ on:
|
|||||||
- release/*
|
- release/*
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
windows:
|
||||||
|
name: Windows
|
||||||
|
runs-on: windows-latest
|
||||||
|
timeout-minutes: 30
|
||||||
|
env:
|
||||||
|
CHILD_CONCURRENCY: "1"
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2.2.0
|
||||||
|
|
||||||
|
- uses: actions/setup-node@v2
|
||||||
|
with:
|
||||||
|
node-version: 12
|
||||||
|
|
||||||
|
- uses: actions/setup-python@v2
|
||||||
|
with:
|
||||||
|
python-version: "2.x"
|
||||||
|
|
||||||
|
# {{SQL CARBON EDIT}} Skip caching for now
|
||||||
|
# - name: Compute node modules cache key
|
||||||
|
# id: nodeModulesCacheKey
|
||||||
|
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
|
||||||
|
# - name: Cache node_modules archive
|
||||||
|
# id: cacheNodeModules
|
||||||
|
# uses: actions/cache@v2
|
||||||
|
# with:
|
||||||
|
# path: ".build/node_modules_cache"
|
||||||
|
# key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
|
||||||
|
# - name: Extract node_modules archive
|
||||||
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
|
||||||
|
# run: 7z.exe x .build/node_modules_cache/cache.7z -aos
|
||||||
|
# - name: Get yarn cache directory path
|
||||||
|
# id: yarnCacheDirPath
|
||||||
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
|
# run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||||
|
# - name: Cache yarn directory
|
||||||
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
|
# uses: actions/cache@v2
|
||||||
|
# with:
|
||||||
|
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||||
|
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||||
|
# restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||||
|
|
||||||
|
- name: Execute yarn
|
||||||
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
|
||||||
|
env:
|
||||||
|
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||||
|
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||||
|
run: yarn --frozen-lockfile --network-timeout 180000
|
||||||
|
# - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
|
||||||
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
|
# run: |
|
||||||
|
# mkdir -Force .build
|
||||||
|
# node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||||
|
# mkdir -Force .build/node_modules_cache
|
||||||
|
# 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
|
||||||
|
|
||||||
|
- name: Compile and Download
|
||||||
|
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
|
||||||
|
|
||||||
|
- name: Run Unit Tests (Electron)
|
||||||
|
run: .\scripts\test.bat
|
||||||
|
|
||||||
|
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
|
||||||
|
# run: yarn test-browser --browser chromium
|
||||||
|
|
||||||
|
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} disable for now
|
||||||
|
# run: .\scripts\test-integration.bat
|
||||||
|
|
||||||
linux:
|
linux:
|
||||||
|
name: Linux
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 30
|
||||||
env:
|
env:
|
||||||
CHILD_CONCURRENCY: "1"
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2.2.0
|
- uses: actions/checkout@v2.2.0
|
||||||
|
|
||||||
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
|
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
|
||||||
- run: |
|
- name: Setup Build Environment
|
||||||
|
run: |
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
|
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libgbm1 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
|
||||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||||
sudo chmod +x /etc/init.d/xvfb
|
sudo chmod +x /etc/init.d/xvfb
|
||||||
sudo update-rc.d xvfb defaults
|
sudo update-rc.d xvfb defaults
|
||||||
sudo service xvfb start
|
sudo service xvfb start
|
||||||
name: Setup Build Environment
|
|
||||||
- uses: actions/setup-node@v1
|
- uses: actions/setup-node@v2
|
||||||
with:
|
with:
|
||||||
node-version: 12
|
node-version: 12
|
||||||
# TODO: cache node modules
|
# {{SQL CARBON EDIT}} Skip caching for now
|
||||||
# Increase timeout to get around latency issues when fetching certain packages
|
# - name: Compute node modules cache key
|
||||||
- run: |
|
# id: nodeModulesCacheKey
|
||||||
yarn config set network-timeout 300000
|
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
|
||||||
yarn --frozen-lockfile
|
# - name: Cache node modules
|
||||||
name: Install Dependencies
|
# id: cacheNodeModules
|
||||||
- run: yarn electron x64
|
# uses: actions/cache@v2
|
||||||
name: Download Electron
|
# with:
|
||||||
- run: yarn gulp hygiene
|
# path: "**/node_modules"
|
||||||
name: Run Hygiene Checks
|
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||||
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
|
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
|
||||||
name: Run Strict Compile Options
|
# - name: Get yarn cache directory path
|
||||||
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
# id: yarnCacheDirPath
|
||||||
# name: Run Monaco Editor Checks
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
- run: yarn valid-layers-check
|
# run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||||
name: Run Valid Layers Checks
|
# - name: Cache yarn directory
|
||||||
- run: yarn compile
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
name: Compile Sources
|
# uses: actions/cache@v2
|
||||||
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
# with:
|
||||||
# name: Download Built-in Extensions
|
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||||
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests" --coverage --runGlob "**/sql/**/*.test.js"
|
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||||
name: Run Unit Tests (Electron)
|
# restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||||
- run: DISPLAY=:10 ./scripts/test-extensions-unit.sh
|
- name: Execute yarn
|
||||||
name: Run Extension Unit Tests (Electron)
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skip caching for now
|
||||||
# {{SQL CARBON EDIT}} Add coveralls. We merge first to get around issue where parallel builds weren't being combined correctly
|
env:
|
||||||
- run: node test/combineCoverage
|
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||||
name: Combine code coverage files
|
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||||
|
run: yarn --frozen-lockfile --network-timeout 180000
|
||||||
|
|
||||||
|
- name: Compile and Download
|
||||||
|
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions
|
||||||
|
|
||||||
|
- name: Run Unit Tests (Electron)
|
||||||
|
id: electron-unit-tests
|
||||||
|
run: DISPLAY=:10 ./scripts/test.sh --coverage --runGlob "**/sql/**/*.test.js" # {{SQL CARBON EDIT}} Run only our tests with coverage
|
||||||
|
|
||||||
|
- name: Run Extension Unit Tests (Electron)
|
||||||
|
id: electron-extension-unit-tests
|
||||||
|
run: DISPLAY=:10 ./scripts/test-extensions-unit.sh
|
||||||
|
|
||||||
|
# {{SQL CARBON EDIT}} Add coveralls. We merge first to get around issue where parallel builds weren't being combined correctly
|
||||||
|
- name: Combine code coverage files
|
||||||
|
run: node test/combineCoverage
|
||||||
- name: Upload Code Coverage
|
- name: Upload Code Coverage
|
||||||
uses: coverallsapp/github-action@v1.1.1
|
uses: coverallsapp/github-action@v1.1.1
|
||||||
with:
|
with:
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
path-to-lcov: "test/coverage/lcov.info"
|
path-to-lcov: "test/coverage/lcov.info"
|
||||||
|
|
||||||
# Fails with cryptic error (e.g. https://github.com/microsoft/vscode/pull/90292/checks?check_run_id=433681926#step:13:9)
|
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} Skip for now
|
||||||
# - run: DISPLAY=:10 yarn test-browser --browser chromium
|
# id: browser-unit-tests
|
||||||
# name: Run Unit Tests (Browser)
|
# run: DISPLAY=:10 yarn test-browser --browser chromium
|
||||||
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
|
|
||||||
# name: Run Integration Tests (Electron)
|
|
||||||
|
|
||||||
windows:
|
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} Skip for now
|
||||||
runs-on: windows-2016
|
# id: electron-integration-tests
|
||||||
env:
|
# run: DISPLAY=:10 ./scripts/test-integration.sh
|
||||||
CHILD_CONCURRENCY: "1"
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2.2.0
|
|
||||||
- uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: 12
|
|
||||||
- uses: actions/setup-python@v1
|
|
||||||
with:
|
|
||||||
python-version: "2.x"
|
|
||||||
# Increase timeout to get around latency issues when fetching certain packages
|
|
||||||
- run: |
|
|
||||||
yarn config set network-timeout 300000
|
|
||||||
yarn --frozen-lockfile
|
|
||||||
name: Install Dependencies
|
|
||||||
- run: yarn electron
|
|
||||||
name: Download Electron
|
|
||||||
- run: yarn gulp hygiene
|
|
||||||
name: Run Hygiene Checks
|
|
||||||
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
|
|
||||||
name: Run Strict Compile Options
|
|
||||||
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
|
||||||
# name: Run Monaco Editor Checks
|
|
||||||
- run: yarn valid-layers-check
|
|
||||||
name: Run Valid Layers Checks
|
|
||||||
- run: yarn compile
|
|
||||||
name: Compile Sources
|
|
||||||
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
|
||||||
# name: Download Built-in Extensions
|
|
||||||
- run: .\scripts\test.bat --tfs "Unit Tests"
|
|
||||||
name: Run Unit Tests (Electron)
|
|
||||||
# - run: yarn test-browser --browser chromium {{SQL CARBON EDIT}} disable for now @TODO @anthonydresser
|
|
||||||
# name: Run Unit Tests (Browser)
|
|
||||||
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
|
|
||||||
# name: Run Integration Tests (Electron)
|
|
||||||
|
|
||||||
darwin:
|
darwin:
|
||||||
|
name: macOS
|
||||||
runs-on: macos-latest
|
runs-on: macos-latest
|
||||||
|
timeout-minutes: 30
|
||||||
env:
|
env:
|
||||||
CHILD_CONCURRENCY: "1"
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2.2.0
|
- uses: actions/checkout@v2.2.0
|
||||||
- uses: actions/setup-node@v1
|
|
||||||
|
- uses: actions/setup-node@v2
|
||||||
with:
|
with:
|
||||||
node-version: 12
|
node-version: 12
|
||||||
# Increase timeout to get around latency issues when fetching certain packages
|
|
||||||
- run: |
|
|
||||||
yarn config set network-timeout 300000
|
|
||||||
yarn --frozen-lockfile
|
|
||||||
name: Install Dependencies
|
|
||||||
- run: yarn electron x64
|
|
||||||
name: Download Electron
|
|
||||||
- run: yarn gulp hygiene
|
|
||||||
name: Run Hygiene Checks
|
|
||||||
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
|
|
||||||
name: Run Strict Compile Options
|
|
||||||
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
|
||||||
# name: Run Monaco Editor Checks
|
|
||||||
- run: yarn valid-layers-check
|
|
||||||
name: Run Valid Layers Checks
|
|
||||||
- run: yarn compile
|
|
||||||
name: Compile Sources
|
|
||||||
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
|
||||||
# name: Download Built-in Extensions
|
|
||||||
- run: ./scripts/test.sh --tfs "Unit Tests"
|
|
||||||
name: Run Unit Tests (Electron)
|
|
||||||
# - run: yarn test-browser --browser chromium --browser webkit
|
|
||||||
# name: Run Unit Tests (Browser)
|
|
||||||
# - run: ./scripts/test-integration.sh --tfs "Integration Tests"
|
|
||||||
# name: Run Integration Tests (Electron)
|
|
||||||
|
|
||||||
# monaco:
|
# {{SQL CARBON EDIT}} Skip caching for now
|
||||||
# runs-on: ubuntu-latest
|
# - name: Compute node modules cache key
|
||||||
# env:
|
# id: nodeModulesCacheKey
|
||||||
# CHILD_CONCURRENCY: "1"
|
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
|
||||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
# - name: Cache node modules
|
||||||
# steps:
|
# id: cacheNodeModules
|
||||||
# - uses: actions/checkout@v2.2.0
|
# uses: actions/cache@v2
|
||||||
# # TODO: rename azure-pipelines/linux/xvfb.init to github-actions
|
# with:
|
||||||
# - run: |
|
# path: "**/node_modules"
|
||||||
# sudo apt-get update
|
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||||
# sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libgbm1
|
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
|
||||||
# sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
# - name: Get yarn cache directory path
|
||||||
# sudo chmod +x /etc/init.d/xvfb
|
# id: yarnCacheDirPath
|
||||||
# sudo update-rc.d xvfb defaults
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
# sudo service xvfb start
|
# run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||||
# name: Setup Build Environment
|
# - name: Cache yarn directory
|
||||||
# - uses: actions/setup-node@v1
|
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
# with:
|
# uses: actions/cache@v2
|
||||||
# node-version: 10
|
# with:
|
||||||
# - run: yarn --frozen-lockfile
|
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||||
# name: Install Dependencies
|
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||||
# - run: yarn monaco-compile-check
|
# restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||||
# name: Run Monaco Editor Checks
|
- name: Execute yarn
|
||||||
# - run: yarn gulp editor-esm-bundle
|
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
# name: Editor Distro & ESM Bundle
|
env:
|
||||||
|
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||||
|
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||||
|
run: yarn --frozen-lockfile --network-timeout 180000
|
||||||
|
|
||||||
|
- name: Compile and Download
|
||||||
|
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions
|
||||||
|
|
||||||
|
# This is required for keytar unittests, otherwise we hit
|
||||||
|
# https://github.com/atom/node-keytar/issues/76
|
||||||
|
- name: Create temporary keychain
|
||||||
|
run: |
|
||||||
|
security create-keychain -p pwd $RUNNER_TEMP/buildagent.keychain
|
||||||
|
security default-keychain -s $RUNNER_TEMP/buildagent.keychain
|
||||||
|
security unlock-keychain -p pwd $RUNNER_TEMP/buildagent.keychain
|
||||||
|
|
||||||
|
- name: Run Unit Tests (Electron)
|
||||||
|
run: DISPLAY=:10 ./scripts/test.sh
|
||||||
|
|
||||||
|
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} Skip for now
|
||||||
|
# run: DISPLAY=:10 yarn test-browser --browser chromium
|
||||||
|
|
||||||
|
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} Skip for now
|
||||||
|
# run: DISPLAY=:10 ./scripts/test-integration.sh
|
||||||
|
|
||||||
|
hygiene:
|
||||||
|
name: Hygiene and Layering
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 30
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
|
- uses: actions/setup-node@v2
|
||||||
|
with:
|
||||||
|
node-version: 12
|
||||||
|
|
||||||
|
- name: Compute node modules cache key
|
||||||
|
id: nodeModulesCacheKey
|
||||||
|
run: echo "::set-output name=value::$(node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js)"
|
||||||
|
- name: Cache node modules
|
||||||
|
id: cacheNodeModules
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: "**/node_modules"
|
||||||
|
key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||||
|
restore-keys: ${{ runner.os }}-cacheNodeModules13-
|
||||||
|
- name: Get yarn cache directory path
|
||||||
|
id: yarnCacheDirPath
|
||||||
|
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
|
run: echo "::set-output name=dir::$(yarn cache dir)"
|
||||||
|
- name: Cache yarn directory
|
||||||
|
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ steps.yarnCacheDirPath.outputs.dir }}
|
||||||
|
key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
|
||||||
|
restore-keys: ${{ runner.os }}-yarnCacheDir-
|
||||||
|
- name: Setup Build Environment # {{SQL CARBON EDIT}} Add step to install required packages if we need to run yarn
|
||||||
|
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y libkrb5-dev
|
||||||
|
- name: Execute yarn
|
||||||
|
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
|
||||||
|
env:
|
||||||
|
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||||
|
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||||
|
run: yarn --frozen-lockfile --network-timeout 180000
|
||||||
|
|
||||||
|
- name: Run Hygiene Checks
|
||||||
|
run: yarn gulp hygiene
|
||||||
|
|
||||||
|
- name: Run Valid Layers Checks
|
||||||
|
run: yarn valid-layers-check
|
||||||
|
|
||||||
|
- name: Run Strict Compile Options # {{SQL CARBON EDIT}} add step
|
||||||
|
run: yarn strict-vscode
|
||||||
|
|
||||||
|
# - name: Run Monaco Editor Checks {{SQL CARBON EDIT}} Remove Monaco checks
|
||||||
|
# run: yarn monaco-compile-check
|
||||||
|
|
||||||
|
- name: Run Trusted Types Checks
|
||||||
|
run: yarn tsec-compile-check
|
||||||
|
|
||||||
|
# - name: Editor Distro & ESM Bundle {{SQL CARBON EDIT}} Remove Monaco checks
|
||||||
|
# run: yarn gulp editor-esm-bundle
|
||||||
|
|
||||||
|
# - name: Typings validation prep {{SQL CARBON EDIT}} Remove Monaco checks
|
||||||
|
# run: |
|
||||||
|
# mkdir typings-test
|
||||||
|
|
||||||
|
# - name: Typings validation {{SQL CARBON EDIT}} Remove Monaco checks
|
||||||
|
# working-directory: ./typings-test
|
||||||
|
# run: |
|
||||||
|
# yarn init -yp
|
||||||
|
# ../node_modules/.bin/tsc --init
|
||||||
|
# echo "import '../out-monaco-editor-core';" > a.ts
|
||||||
|
# ../node_modules/.bin/tsc --noEmit
|
||||||
|
|
||||||
|
# - name: Webpack Editor {{SQL CARBON EDIT}} Remove Monaco checks
|
||||||
|
# working-directory: ./test/monaco
|
||||||
|
# run: yarn run bundle
|
||||||
|
|
||||||
|
# - name: Compile Editor Tests {{SQL CARBON EDIT}} Remove Monaco checks
|
||||||
|
# working-directory: ./test/monaco
|
||||||
|
# run: yarn run compile
|
||||||
|
|
||||||
|
# - name: Download Playwright {{SQL CARBON EDIT}} Remove Monaco checks
|
||||||
|
# run: yarn playwright-install
|
||||||
|
|
||||||
|
# - name: Run Editor Tests {{SQL CARBON EDIT}} Remove Monaco checks
|
||||||
|
# timeout-minutes: 5
|
||||||
|
# working-directory: ./test/monaco
|
||||||
|
# run: yarn test
|
||||||
|
|||||||
44
.github/workflows/codeql.yml
vendored
44
.github/workflows/codeql.yml
vendored
@@ -1,44 +0,0 @@
|
|||||||
name: "Code Scanning - Action"
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 * * 0"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
CodeQL-Build:
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
|
|
||||||
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
|
||||||
- name: Initialize CodeQL
|
|
||||||
uses: github/codeql-action/init@v1
|
|
||||||
# Override language selection by uncommenting this and choosing your languages
|
|
||||||
# with:
|
|
||||||
# languages: go, javascript, csharp, python, cpp, java
|
|
||||||
|
|
||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
|
||||||
# If this step fails, then you should remove it and run the build manually (see below).
|
|
||||||
- name: Autobuild
|
|
||||||
uses: github/codeql-action/autobuild@v1
|
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
|
||||||
# 📚 https://git.io/JvXDl
|
|
||||||
|
|
||||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
|
||||||
# and modify them (or add more) to build your code if your project
|
|
||||||
# uses a compiled language
|
|
||||||
|
|
||||||
#- run: |
|
|
||||||
# make bootstrap
|
|
||||||
# make release
|
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
|
||||||
uses: github/codeql-action/analyze@v1
|
|
||||||
50
.github/workflows/deep-classifier-runner.yml
vendored
50
.github/workflows/deep-classifier-runner.yml
vendored
@@ -1,50 +0,0 @@
|
|||||||
name: "Deep Classifier: Runner"
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: 0 * * * *
|
|
||||||
repository_dispatch:
|
|
||||||
types: [trigger-deep-classifier-runner]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
main:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout Actions
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: "microsoft/vscode-github-triage-actions"
|
|
||||||
ref: v42
|
|
||||||
path: ./actions
|
|
||||||
- name: Install Actions
|
|
||||||
run: npm install --production --prefix ./actions
|
|
||||||
- name: Install Additional Dependencies
|
|
||||||
# Pulls in a bunch of other packages that arent needed for the rest of the actions
|
|
||||||
run: npm install @azure/storage-blob@12.1.1
|
|
||||||
- name: "Run Classifier: Scraper"
|
|
||||||
uses: ./actions/classifier-deep/apply/fetch-sources
|
|
||||||
with:
|
|
||||||
# slightly overlapping to protect against issues slipping through the cracks if a run is delayed
|
|
||||||
from: 80
|
|
||||||
until: 5
|
|
||||||
configPath: classifier
|
|
||||||
blobContainerName: vscode-issue-classifier
|
|
||||||
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
|
|
||||||
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
|
|
||||||
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
|
|
||||||
- name: Set up Python 3.7
|
|
||||||
uses: actions/setup-python@v1
|
|
||||||
with:
|
|
||||||
python-version: 3.7
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
pip install --upgrade numpy scipy scikit-learn joblib nltk simpletransformers torch torchvision
|
|
||||||
- name: "Run Classifier: Generator"
|
|
||||||
run: python ./actions/classifier-deep/apply/generate-labels/main.py
|
|
||||||
- name: "Run Classifier: Labeler"
|
|
||||||
uses: ./actions/classifier-deep/apply/apply-labels
|
|
||||||
with:
|
|
||||||
configPath: classifier
|
|
||||||
allowLabels: "needs more info|new release"
|
|
||||||
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
|
|
||||||
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
|
|
||||||
27
.github/workflows/deep-classifier-scraper.yml
vendored
27
.github/workflows/deep-classifier-scraper.yml
vendored
@@ -1,27 +0,0 @@
|
|||||||
name: "Deep Classifier: Scraper"
|
|
||||||
on:
|
|
||||||
repository_dispatch:
|
|
||||||
types: [trigger-deep-classifier-scraper]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
main:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout Actions
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: "microsoft/vscode-github-triage-actions"
|
|
||||||
ref: v42
|
|
||||||
path: ./actions
|
|
||||||
- name: Install Actions
|
|
||||||
run: npm install --production --prefix ./actions
|
|
||||||
- name: Install Additional Dependencies
|
|
||||||
# Pulls in a bunch of other packages that arent needed for the rest of the actions
|
|
||||||
run: npm install @azure/storage-blob@12.1.1
|
|
||||||
- name: "Run Classifier: Scraper"
|
|
||||||
uses: ./actions/classifier-deep/train/fetch-issues
|
|
||||||
with:
|
|
||||||
blobContainerName: vscode-issue-classifier
|
|
||||||
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
|
|
||||||
token: ${{secrets.ISSUE_SCRAPER_TOKEN}}
|
|
||||||
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
|
|
||||||
40
.github/workflows/devcontainer-cache.yml
vendored
40
.github/workflows/devcontainer-cache.yml
vendored
@@ -1,40 +0,0 @@
|
|||||||
name: VS Code Repo Dev Container Cache Image Generation
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
# Currently doing this for master, but could be done for PRs as well
|
|
||||||
branches:
|
|
||||||
- "master"
|
|
||||||
|
|
||||||
# Only updates to these files result in changes to installed packages, so skip otherwise
|
|
||||||
paths:
|
|
||||||
- "**/package-lock.json"
|
|
||||||
- "**/yarn.lock"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
devcontainer:
|
|
||||||
name: Generate cache image
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
id: checkout
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
|
|
||||||
- name: Azure CLI login
|
|
||||||
id: az_login
|
|
||||||
uses: azure/login@v1
|
|
||||||
with:
|
|
||||||
creds: ${{ secrets.AZ_ACR_CREDS }}
|
|
||||||
|
|
||||||
- name: Build and push
|
|
||||||
id: build_and_push
|
|
||||||
run: |
|
|
||||||
set -e
|
|
||||||
|
|
||||||
ACR_REGISTRY_NAME=$(echo ${{ secrets.CONTAINER_IMAGE_REGISTRY }} | grep -oP '(.+)(?=\.azurecr\.io)')
|
|
||||||
az acr login --name $ACR_REGISTRY_NAME
|
|
||||||
|
|
||||||
GIT_BRANCH=$(echo "${{ github.ref }}" | grep -oP 'refs/(heads|tags)/\K(.+)')
|
|
||||||
if [ "$GIT_BRANCH" == "" ]; then GIT_BRANCH=master; fi
|
|
||||||
|
|
||||||
.devcontainer/cache/build-cache-image.sh "${{ secrets.CONTAINER_IMAGE_REGISTRY }}/public/vscode/devcontainers/repos/microsoft/vscode" "${GIT_BRANCH}"
|
|
||||||
27
.github/workflows/latest-release-monitor.yml
vendored
27
.github/workflows/latest-release-monitor.yml
vendored
@@ -1,27 +0,0 @@
|
|||||||
name: Latest Release Monitor
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: 0/5 * * * *
|
|
||||||
repository_dispatch:
|
|
||||||
types: [trigger-latest-release-monitor]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
main:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout Actions
|
|
||||||
uses: actions/checkout@v2
|
|
||||||
with:
|
|
||||||
repository: "microsoft/vscode-github-triage-actions"
|
|
||||||
path: ./actions
|
|
||||||
ref: v42
|
|
||||||
- name: Install Actions
|
|
||||||
run: npm install --production --prefix ./actions
|
|
||||||
- name: Install Storage Module
|
|
||||||
run: npm install @azure/storage-blob@12.1.1
|
|
||||||
- name: Run Latest Release Monitor
|
|
||||||
uses: ./actions/latest-release-monitor
|
|
||||||
with:
|
|
||||||
storageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
|
|
||||||
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
|
|
||||||
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
|
|
||||||
141
.vscode/launch.json
vendored
141
.vscode/launch.json
vendored
@@ -66,10 +66,147 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "chrome",
|
"type": "extensionHost",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "VS Code Emmet Tests",
|
||||||
|
"runtimeExecutable": "${execPath}",
|
||||||
|
"args": [
|
||||||
|
"${workspaceFolder}/extensions/emmet/test-fixtures",
|
||||||
|
"--extensionDevelopmentPath=${workspaceFolder}/extensions/emmet",
|
||||||
|
"--extensionTestsPath=${workspaceFolder}/extensions/emmet/out/test"
|
||||||
|
],
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
],
|
||||||
|
"presentation": {
|
||||||
|
"group": "5_tests",
|
||||||
|
"order": 6
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "extensionHost",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "VS Code Git Tests",
|
||||||
|
"runtimeExecutable": "${execPath}",
|
||||||
|
"args": [
|
||||||
|
"/tmp/my4g9l",
|
||||||
|
"--extensionDevelopmentPath=${workspaceFolder}/extensions/git",
|
||||||
|
"--extensionTestsPath=${workspaceFolder}/extensions/git/out/test"
|
||||||
|
],
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/extensions/git/out/**/*.js"
|
||||||
|
],
|
||||||
|
"presentation": {
|
||||||
|
"group": "5_tests",
|
||||||
|
"order": 6
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "extensionHost",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "VS Code API Tests (single folder)",
|
||||||
|
"runtimeExecutable": "${execPath}",
|
||||||
|
"args": [
|
||||||
|
// "${workspaceFolder}", // Uncomment for running out of sources.
|
||||||
|
"${workspaceFolder}/extensions/vscode-api-tests/testWorkspace",
|
||||||
|
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-api-tests",
|
||||||
|
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-api-tests/out/singlefolder-tests",
|
||||||
|
"--disable-extensions"
|
||||||
|
],
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
],
|
||||||
|
"presentation": {
|
||||||
|
"group": "5_tests",
|
||||||
|
"order": 3
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "extensionHost",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "VS Code API Tests (workspace)",
|
||||||
|
"runtimeExecutable": "${execPath}",
|
||||||
|
"args": [
|
||||||
|
"${workspaceFolder}/extensions/vscode-api-tests/testworkspace.code-workspace",
|
||||||
|
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-api-tests",
|
||||||
|
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-api-tests/out/workspace-tests"
|
||||||
|
],
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
],
|
||||||
|
"presentation": {
|
||||||
|
"group": "5_tests",
|
||||||
|
"order": 4
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "extensionHost",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "VS Code Tokenizer Tests",
|
||||||
|
"runtimeExecutable": "${execPath}",
|
||||||
|
"args": [
|
||||||
|
"${workspaceFolder}/extensions/vscode-colorize-tests/test",
|
||||||
|
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-colorize-tests",
|
||||||
|
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-colorize-tests/out"
|
||||||
|
],
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
],
|
||||||
|
"presentation": {
|
||||||
|
"group": "5_tests",
|
||||||
|
"order": 5
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "extensionHost",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "VS Code Notebook Tests",
|
||||||
|
"runtimeExecutable": "${execPath}",
|
||||||
|
"args": [
|
||||||
|
"${workspaceFolder}/extensions/vscode-notebook-tests/test",
|
||||||
|
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-notebook-tests",
|
||||||
|
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-notebook-tests/out"
|
||||||
|
],
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
],
|
||||||
|
"presentation": {
|
||||||
|
"group": "5_tests",
|
||||||
|
"order": 6
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "extensionHost",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "VS Code Custom Editor Tests",
|
||||||
|
"runtimeExecutable": "${execPath}",
|
||||||
|
"args": [
|
||||||
|
"${workspaceFolder}/extensions/vscode-custom-editor-tests/test-workspace",
|
||||||
|
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-custom-editor-tests",
|
||||||
|
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-custom-editor-tests/out/test"
|
||||||
|
],
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
],
|
||||||
|
"presentation": {
|
||||||
|
"group": "5_tests",
|
||||||
|
"order": 6
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "pwa-chrome",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"name": "Attach to azuredatastudio",
|
"name": "Attach to azuredatastudio",
|
||||||
"port": 9222
|
"browserAttachLocation": "workspace",
|
||||||
|
"port": 9222,
|
||||||
|
"trace": true,
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
],
|
||||||
|
"resolveSourceMapLocations": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
],
|
||||||
|
"perScriptSourcemaps": "yes"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "pwa-chrome",
|
"type": "pwa-chrome",
|
||||||
|
|||||||
2
.vscode/notebooks/api.github-issues
vendored
2
.vscode/notebooks/api.github-issues
vendored
@@ -8,7 +8,7 @@
|
|||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"February 2021\"",
|
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"April 2021\"",
|
||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
63
.vscode/notebooks/endgame.github-issues
vendored
63
.vscode/notebooks/endgame.github-issues
vendored
@@ -2,127 +2,106 @@
|
|||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "#### Macros",
|
"value": "#### Macros"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper\n\n$MILESTONE=milestone:\"January 2021\"",
|
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"April 2021\""
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Preparation",
|
"value": "# Preparation"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Open Pull Requests on the Milestone",
|
"value": "## Open Pull Requests on the Milestone"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:pr is:open",
|
"value": "$REPOS $MILESTONE is:pr is:open"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Open Issues on the Milestone",
|
"value": "## Open Issues on the Milestone"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:issue is:open -label:iteration-plan -label:endgame-plan -label:testplan-item",
|
"value": "$REPOS $MILESTONE is:issue is:open -label:iteration-plan -label:endgame-plan -label:testplan-item"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Feature Requests Missing Labels",
|
"value": "## Feature Requests Missing Labels"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate",
|
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Testing",
|
"value": "# Testing"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Test Plan Items",
|
"value": "## Test Plan Items"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:issue is:open label:testplan-item",
|
"value": "$REPOS $MILESTONE is:issue is:open label:testplan-item"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Verification Needed",
|
"value": "## Verification Needed"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed -label:verified",
|
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed -label:verified"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Verification",
|
"value": "# Verification"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Verifiable Fixes",
|
"value": "## Verifiable Fixes"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified -label:unreleased",
|
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified -label:unreleased"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Unreleased Fixes",
|
"value": "## Unreleased Fixes"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified label:unreleased",
|
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified label:unreleased"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Candidates",
|
"value": "# Candidates"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:open label:candidate",
|
"value": "$REPOS $MILESTONE is:open label:candidate"
|
||||||
"editable": true
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
12
.vscode/notebooks/grooming.github-issues
vendored
12
.vscode/notebooks/grooming.github-issues
vendored
@@ -3,24 +3,28 @@
|
|||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "### Categorizing Issues\n\nEach issue must have a type label. Most type labels are grey, some are yellow. Bugs are grey with a touch of red.",
|
"value": "### Categorizing Issues\n\nEach issue must have a type label. Most type labels are grey, some are yellow. Bugs are grey with a touch of red.",
|
||||||
"editable": true
|
"editable": true,
|
||||||
|
"outputs": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "repo:microsoft/vscode is:open is:issue assignee:@me -label:\"needs more info\" -label:bug -label:feature-request -label:under-discussion -label:debt -label:*question -label:upstream -label:electron -label:engineering -label:plan-item ",
|
"value": "repo:microsoft/vscode is:open is:issue assignee:@me -label:\"needs more info\" -label:bug -label:feature-request -label:under-discussion -label:debt -label:*question -label:upstream -label:electron -label:engineering -label:plan-item ",
|
||||||
"editable": true
|
"editable": true,
|
||||||
|
"outputs": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "### Feature Areas\n\nEach issue should be assigned to a feature area",
|
"value": "### Feature Areas\n\nEach issue should be assigned to a feature area",
|
||||||
"editable": true
|
"editable": true,
|
||||||
|
"outputs": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "repo:microsoft/vscode is:open is:issue assignee:@me -label:L10N -label:VIM -label:api -label:api-finalization -label:api-proposal -label:authentication -label:breadcrumbs -label:callhierarchy -label:code-lens -label:color-palette -label:comments -label:config -label:context-keys -label:css-less-scss -label:custom-editors -label:debug -label:debug-console -label:dialogs -label:diff-editor -label:dropdown -label:editor -label:editor-RTL -label:editor-autoclosing -label:editor-autoindent -label:editor-bracket-matching -label:editor-clipboard -label:editor-code-actions -label:editor-color-picker -label:editor-columnselect -label:editor-commands -label:editor-comments -label:editor-contrib -label:editor-core -label:editor-drag-and-drop -label:editor-error-widget -label:editor-find -label:editor-folding -label:editor-highlight -label:editor-hover -label:editor-indent-detection -label:editor-indent-guides -label:editor-input -label:editor-input-IME -label:editor-insets -label:editor-minimap -label:editor-multicursor -label:editor-parameter-hints -label:editor-render-whitespace -label:editor-rendering -label:editor-scrollbar -label:editor-symbols -label:editor-synced-region -label:editor-textbuffer -label:editor-theming -label:editor-wordnav -label:editor-wrapping -label:emmet -label:error-list -label:explorer-custom -label:extension-host -label:extension-recommendations -label:extensions -label:extensions-development -label:file-decorations -label:file-encoding -label:file-explorer -label:file-glob -label:file-guess-encoding -label:file-io -label:file-watcher -label:font-rendering -label:formatting -label:git -label:github -label:gpu -label:grammar -label:grid-view -label:html -label:i18n -label:icon-brand -label:icons-product -label:install-update -label:integrated-terminal -label:integrated-terminal-conpty -label:integrated-terminal-links -label:integrated-terminal-rendering -label:integrated-terminal-winpty -label:intellisense-config -label:ipc 
-label:issue-bot -label:issue-reporter -label:javascript -label:json -label:keybindings -label:keybindings-editor -label:keyboard-layout -label:label-provider -label:languages-basic -label:languages-diagnostics -label:languages-guessing -label:layout -label:lcd-text-rendering -label:list -label:log -label:markdown -label:marketplace -label:menus -label:merge-conflict -label:notebook -label:outline -label:output -label:perf -label:perf-bloat -label:perf-startup -label:php -label:portable-mode -label:proxy -label:quick-pick -label:references-viewlet -label:release-notes -label:remote -label:remote-explorer -label:rename -label:sandbox -label:scm -label:screencast-mode -label:search -label:search-api -label:search-editor -label:search-replace -label:semantic-tokens -label:settings-editor -label:settings-sync -label:settings-sync-server -label:shared-process -label:simple-file-dialog -label:smart-select -label:snap -label:snippets -label:splitview -label:suggest -label:sync-error-handling -label:tasks -label:telemetry -label:themes -label:timeline -label:timeline-git -label:titlebar -label:tokenization -label:touch/pointer -label:trackpad/scroll -label:tree -label:typescript -label:undo-redo -label:uri -label:ux -label:variable-resolving -label:vscode-build -label:vscode-website -label:web -label:webview -label:workbench-actions -label:workbench-cli -label:workbench-diagnostics -label:workbench-dnd -label:workbench-editor-grid -label:workbench-editors -label:workbench-electron -label:workbench-feedback -label:workbench-history -label:workbench-hot-exit -label:workbench-hover -label:workbench-launch -label:workbench-link -label:workbench-multiroot -label:workbench-notifications -label:workbench-os-integration -label:workbench-rapid-render -label:workbench-run-as-admin -label:workbench-state -label:workbench-status -label:workbench-tabs -label:workbench-touchbar -label:workbench-views -label:workbench-welcome -label:workbench-window -label:workbench-zen 
-label:workspace-edit -label:workspace-symbols -label:zoom",
|
"value": "repo:microsoft/vscode is:open is:issue assignee:@me -label:L10N -label:VIM -label:api -label:api-finalization -label:api-proposal -label:authentication -label:breadcrumbs -label:callhierarchy -label:code-lens -label:color-palette -label:comments -label:config -label:context-keys -label:css-less-scss -label:custom-editors -label:debug -label:debug-console -label:dialogs -label:diff-editor -label:dropdown -label:editor -label:editor-RTL -label:editor-autoclosing -label:editor-autoindent -label:editor-bracket-matching -label:editor-clipboard -label:editor-code-actions -label:editor-color-picker -label:editor-columnselect -label:editor-commands -label:editor-comments -label:editor-contrib -label:editor-core -label:editor-drag-and-drop -label:editor-error-widget -label:editor-find -label:editor-folding -label:editor-highlight -label:editor-hover -label:editor-indent-detection -label:editor-indent-guides -label:editor-input -label:editor-input-IME -label:editor-insets -label:editor-minimap -label:editor-multicursor -label:editor-parameter-hints -label:editor-render-whitespace -label:editor-rendering -label:editor-scrollbar -label:editor-symbols -label:editor-synced-region -label:editor-textbuffer -label:editor-theming -label:editor-wordnav -label:editor-wrapping -label:emmet -label:error-list -label:explorer-custom -label:extension-host -label:extension-recommendations -label:extensions -label:extensions-development -label:file-decorations -label:file-encoding -label:file-explorer -label:file-glob -label:file-guess-encoding -label:file-io -label:file-watcher -label:font-rendering -label:formatting -label:git -label:github -label:gpu -label:grammar -label:grid-view -label:html -label:i18n -label:icon-brand -label:icons-product -label:install-update -label:integrated-terminal -label:integrated-terminal-conpty -label:integrated-terminal-links -label:integrated-terminal-rendering -label:integrated-terminal-winpty -label:intellisense-config -label:ipc 
-label:issue-bot -label:issue-reporter -label:javascript -label:json -label:keybindings -label:keybindings-editor -label:keyboard-layout -label:label-provider -label:languages-basic -label:languages-diagnostics -label:languages-guessing -label:layout -label:lcd-text-rendering -label:list -label:log -label:markdown -label:marketplace -label:menus -label:merge-conflict -label:notebook -label:outline -label:output -label:perf -label:perf-bloat -label:perf-startup -label:php -label:portable-mode -label:proxy -label:quick-pick -label:references-viewlet -label:release-notes -label:remote -label:remote-explorer -label:rename -label:sandbox -label:scm -label:screencast-mode -label:search -label:search-api -label:search-editor -label:search-replace -label:semantic-tokens -label:settings-editor -label:settings-sync -label:settings-sync-server -label:shared-process -label:simple-file-dialog -label:smart-select -label:snap -label:snippets -label:splitview -label:suggest -label:sync-error-handling -label:tasks -label:telemetry -label:themes -label:timeline -label:timeline-git -label:titlebar -label:tokenization -label:touch/pointer -label:trackpad/scroll -label:tree -label:typescript -label:undo-redo -label:uri -label:ux -label:variable-resolving -label:vscode-build -label:vscode-website -label:web -label:webview -label:workbench-actions -label:workbench-cli -label:workbench-diagnostics -label:workbench-dnd -label:workbench-editor-grid -label:workbench-editors -label:workbench-electron -label:workbench-feedback -label:workbench-history -label:workbench-hot-exit -label:workbench-hover -label:workbench-launch -label:workbench-link -label:workbench-multiroot -label:workbench-notifications -label:workbench-os-integration -label:workbench-rapid-render -label:workbench-run-as-admin -label:workbench-state -label:workbench-status -label:workbench-tabs -label:workbench-touchbar -label:workbench-views -label:workbench-welcome -label:workbench-window -label:workbench-zen 
-label:workspace-edit -label:workspace-symbols -label:zoom",
|
||||||
"editable": true
|
"editable": true,
|
||||||
|
"outputs": []
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
8
.vscode/notebooks/inbox.github-issues
vendored
8
.vscode/notebooks/inbox.github-issues
vendored
@@ -18,9 +18,9 @@
|
|||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 2,
|
||||||
"language": "markdown",
|
"language": "github-issues",
|
||||||
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/master/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/Microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or 
closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
|
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item ",
|
||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -32,7 +32,7 @@
|
|||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/master/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or 
closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
|
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/main/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or 
closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
|
||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
108
.vscode/notebooks/my-endgame.github-issues
vendored
108
.vscode/notebooks/my-endgame.github-issues
vendored
@@ -2,217 +2,181 @@
|
|||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "#### Macros",
|
"value": "#### Macros"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"January 2021\"\n\n$MINE=assignee:@me",
|
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"April 2021\"\n\n$MINE=assignee:@me"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Preparation",
|
"value": "# Preparation"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Open Pull Requests on the Milestone",
|
"value": "## Open Pull Requests on the Milestone"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:pr is:open",
|
"value": "$REPOS $MILESTONE $MINE is:pr is:open"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Open Issues on the Milestone",
|
"value": "## Open Issues on the Milestone"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:iteration-plan -label:endgame-plan -label:testplan-item",
|
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:iteration-plan -label:endgame-plan -label:testplan-item"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Feature Requests Missing Labels",
|
"value": "## Feature Requests Missing Labels"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate",
|
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Test Plan Items",
|
"value": "## Test Plan Items"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE is:issue is:open author:@me label:testplan-item",
|
"value": "$REPOS $MILESTONE is:issue is:open author:@me label:testplan-item"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Verification Needed",
|
"value": "## Verification Needed"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed",
|
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Testing",
|
"value": "# Testing"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Test Plan Items",
|
"value": "## Test Plan Items"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:testplan-item",
|
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:testplan-item"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Verification Needed",
|
"value": "## Verification Needed"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -assignee:@me -label:verified label:feature-request label:verification-needed",
|
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -assignee:@me -label:verified -label:z-author-verified label:feature-request label:verification-needed"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Fixing",
|
"value": "# Fixing"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Open Issues",
|
"value": "## Open Issues"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:endgame-plan",
|
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:endgame-plan -label:testplan-item -label:iteration-plan"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Open Bugs",
|
"value": "## Open Bugs"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:bug",
|
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:bug"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Verification",
|
"value": "# Verification"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## My Issues (verification-steps-needed)",
|
"value": "## My Issues (verification-steps-needed)"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-steps-needed",
|
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-steps-needed"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## My Issues (verification-found)",
|
"value": "## My Issues (verification-found)"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-found",
|
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-found"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Issues filed by me",
|
"value": "## Issues filed by me"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
|
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Issues filed from outside team",
|
"value": "## Issues filed from outside team"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15",
|
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "## Issues filed by others",
|
"value": "## Issues filed by others"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
|
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
"language": "markdown",
|
"language": "markdown",
|
||||||
"value": "# Release Notes",
|
"value": "# Release Notes"
|
||||||
"editable": true
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "repo:microsoft/vscode $MILESTONE is:issue is:closed label:feature-request -label:on-release-notes",
|
"value": "repo:microsoft/vscode $MILESTONE $MINE is:issue is:closed label:feature-request -label:on-release-notes"
|
||||||
"editable": true
|
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
8
.vscode/notebooks/my-work.github-issues
vendored
8
.vscode/notebooks/my-work.github-issues
vendored
@@ -8,7 +8,7 @@
|
|||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"February 2021\"",
|
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"April 2021\"",
|
||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -21,7 +21,7 @@
|
|||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$repos $milestone assignee:@me is:open",
|
"value": "$repos $milestone assignee:@me is:open",
|
||||||
"editable": false
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
@@ -81,7 +81,7 @@
|
|||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$repos assignee:@me is:open milestone:\"Backlog Candidates\"",
|
"value": "$repos assignee:@me is:open milestone:\"Backlog Candidates\"",
|
||||||
"editable": false
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"kind": 1,
|
"kind": 1,
|
||||||
@@ -111,6 +111,6 @@
|
|||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$repos assignee:@me is:open label:\"needs more info\"",
|
"value": "$repos assignee:@me is:open label:\"needs more info\"",
|
||||||
"editable": false
|
"editable": true
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
2
.vscode/notebooks/papercuts.github-issues
vendored
2
.vscode/notebooks/papercuts.github-issues
vendored
@@ -41,4 +41,4 @@
|
|||||||
"value": "repo:microsoft/vscode is:open assignee:@me label:\"papercut :drop_of_blood:\"",
|
"value": "repo:microsoft/vscode is:open assignee:@me label:\"papercut :drop_of_blood:\"",
|
||||||
"editable": true
|
"editable": true
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
4
.vscode/notebooks/verification.github-issues
vendored
4
.vscode/notebooks/verification.github-issues
vendored
@@ -14,7 +14,7 @@
|
|||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"January 2021\"",
|
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"March 2021\"",
|
||||||
"editable": true
|
"editable": true
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -38,7 +38,7 @@
|
|||||||
{
|
{
|
||||||
"kind": 2,
|
"kind": 2,
|
||||||
"language": "github-issues",
|
"language": "github-issues",
|
||||||
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand",
|
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh",
|
||||||
"editable": false
|
"editable": false
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
4
.vscode/settings.json
vendored
4
.vscode/settings.json
vendored
@@ -79,5 +79,7 @@
|
|||||||
"[typescript]": {
|
"[typescript]": {
|
||||||
"editor.defaultFormatter": "vscode.typescript-language-features"
|
"editor.defaultFormatter": "vscode.typescript-language-features"
|
||||||
},
|
},
|
||||||
"typescript.tsc.autoDetect": "off"
|
"typescript.tsc.autoDetect": "off",
|
||||||
|
"notebook.experimental.useMarkdownRenderer": true,
|
||||||
|
"testing.autoRun.mode": "rerun",
|
||||||
}
|
}
|
||||||
|
|||||||
76
.vscode/tasks.json
vendored
76
.vscode/tasks.json
vendored
@@ -4,10 +4,11 @@
|
|||||||
{
|
{
|
||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "watch-clientd",
|
"script": "watch-clientd",
|
||||||
"label": "Build VS Code Core",
|
"label": "Core - Build",
|
||||||
"isBackground": true,
|
"isBackground": true,
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never"
|
"reveal": "never",
|
||||||
|
"group": "buildWatchers"
|
||||||
},
|
},
|
||||||
"problemMatcher": {
|
"problemMatcher": {
|
||||||
"owner": "typescript",
|
"owner": "typescript",
|
||||||
@@ -30,10 +31,11 @@
|
|||||||
{
|
{
|
||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "watch-extensionsd",
|
"script": "watch-extensionsd",
|
||||||
"label": "Build VS Code Extensions",
|
"label": "Ext - Build",
|
||||||
"isBackground": true,
|
"isBackground": true,
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never"
|
"reveal": "never",
|
||||||
|
"group": "buildWatchers"
|
||||||
},
|
},
|
||||||
"problemMatcher": {
|
"problemMatcher": {
|
||||||
"owner": "typescript",
|
"owner": "typescript",
|
||||||
@@ -54,10 +56,38 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"label": "Build VS Code",
|
"type": "npm",
|
||||||
|
"script": "watch-extension-mediad",
|
||||||
|
"label": "Ext Media - Build",
|
||||||
|
"isBackground": true,
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "never",
|
||||||
|
"group": "buildWatchers"
|
||||||
|
},
|
||||||
|
"problemMatcher": {
|
||||||
|
"owner": "typescript",
|
||||||
|
"applyTo": "closedDocuments",
|
||||||
|
"fileLocation": [
|
||||||
|
"absolute"
|
||||||
|
],
|
||||||
|
"pattern": {
|
||||||
|
"regexp": "Error: ([^(]+)\\((\\d+|\\d+,\\d+|\\d+,\\d+,\\d+,\\d+)\\): (.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"location": 2,
|
||||||
|
"message": 3
|
||||||
|
},
|
||||||
|
"background": {
|
||||||
|
"beginsPattern": "Starting compilation",
|
||||||
|
"endsPattern": "Finished compilation"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "VS Code - Build",
|
||||||
"dependsOn": [
|
"dependsOn": [
|
||||||
"Build VS Code Core",
|
"Core - Build",
|
||||||
"Build VS Code Extensions"
|
"Ext - Build",
|
||||||
|
"Ext Media - Build",
|
||||||
],
|
],
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "build",
|
"kind": "build",
|
||||||
@@ -68,28 +98,42 @@
|
|||||||
{
|
{
|
||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "kill-watch-clientd",
|
"script": "kill-watch-clientd",
|
||||||
"label": "Kill Build VS Code Core",
|
"label": "Kill Core - Build",
|
||||||
"group": "build",
|
"group": "build",
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never"
|
"reveal": "never",
|
||||||
|
"group": "buildKillers"
|
||||||
},
|
},
|
||||||
"problemMatcher": "$tsc"
|
"problemMatcher": "$tsc"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "kill-watch-extensionsd",
|
"script": "kill-watch-extensionsd",
|
||||||
"label": "Kill Build VS Code Extensions",
|
"label": "Kill Ext - Build",
|
||||||
"group": "build",
|
"group": "build",
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never"
|
"reveal": "never",
|
||||||
|
"group": "buildKillers"
|
||||||
},
|
},
|
||||||
"problemMatcher": "$tsc"
|
"problemMatcher": "$tsc"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"label": "Kill Build VS Code",
|
"type": "npm",
|
||||||
|
"script": "kill-watch-extension-mediad",
|
||||||
|
"label": "Kill Ext Media - Build",
|
||||||
|
"group": "build",
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "never",
|
||||||
|
"group": "buildKillers"
|
||||||
|
},
|
||||||
|
"problemMatcher": "$tsc"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Kill VS Code - Build",
|
||||||
"dependsOn": [
|
"dependsOn": [
|
||||||
"Kill Build VS Code Core",
|
"Kill Core - Build",
|
||||||
"Kill Build VS Code Extensions"
|
"Kill Ext - Build",
|
||||||
|
"Kill Ext Media - Build",
|
||||||
],
|
],
|
||||||
"group": "build",
|
"group": "build",
|
||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
@@ -111,7 +155,7 @@
|
|||||||
{
|
{
|
||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "watch-webd",
|
"script": "watch-webd",
|
||||||
"label": "Build Web Extensions",
|
"label": "Web Ext - Build",
|
||||||
"group": "build",
|
"group": "build",
|
||||||
"isBackground": true,
|
"isBackground": true,
|
||||||
"presentation": {
|
"presentation": {
|
||||||
@@ -138,7 +182,7 @@
|
|||||||
{
|
{
|
||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "kill-watch-webd",
|
"script": "kill-watch-webd",
|
||||||
"label": "Kill Build Web Extensions",
|
"label": "Kill Web Ext - Build",
|
||||||
"group": "build",
|
"group": "build",
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never"
|
"reveal": "never"
|
||||||
|
|||||||
2
.yarnrc
2
.yarnrc
@@ -1,3 +1,3 @@
|
|||||||
disturl "https://electronjs.org/headers"
|
disturl "https://electronjs.org/headers"
|
||||||
target "11.2.2"
|
target "12.0.7"
|
||||||
runtime "electron"
|
runtime "electron"
|
||||||
|
|||||||
36
CHANGELOG.md
36
CHANGELOG.md
@@ -1,5 +1,39 @@
|
|||||||
# Change Log
|
# Change Log
|
||||||
|
|
||||||
|
## Version 1.31.1
|
||||||
|
* Release date: July 29, 2021
|
||||||
|
* Release status: General Availability
|
||||||
|
## Hotfix Release
|
||||||
|
- Fix for [#16436 Database Connection Toolbar Missing](https://github.com/microsoft/azuredatastudio/issues/16436)
|
||||||
|
|
||||||
|
## Version 1.31.0
|
||||||
|
* Release date: July 21, 2021
|
||||||
|
* Release status: General Availability
|
||||||
|
* New Notebook Features:
|
||||||
|
* WYSIWYG link improvements
|
||||||
|
* Extension Updates:
|
||||||
|
* Import
|
||||||
|
* SandDance
|
||||||
|
* SQL Database Projects
|
||||||
|
* Bug Fixes
|
||||||
|
* Accessibility bug fixes
|
||||||
|
|
||||||
|
## Version 1.30.0
|
||||||
|
* Release date: June 17, 2021
|
||||||
|
* Release status: General Availability
|
||||||
|
* New Notebook Features:
|
||||||
|
* Show book's notebook TOC title in pinned notebooks view
|
||||||
|
* Add new book icon
|
||||||
|
* Update Python to 3.8.10
|
||||||
|
* Query Editor Features:
|
||||||
|
* Added filtering/sorting feature for query result grid in query editor and notebook, the feature can be invoked from the column headers. Note that this feature is only available when you enable the preview features
|
||||||
|
* Added a status bar item to show summary of the selected cells if there are multiple numeric values
|
||||||
|
* Extension Updates:
|
||||||
|
* SQL Database Projects
|
||||||
|
* Machine Learning
|
||||||
|
* Bug Fixes
|
||||||
|
* Fix WYSIWYG Table cell adding new line in table cell
|
||||||
|
|
||||||
## Version 1.29.0
|
## Version 1.29.0
|
||||||
* Release date: May 19, 2021
|
* Release date: May 19, 2021
|
||||||
* Release status: General Availability
|
* Release status: General Availability
|
||||||
@@ -587,7 +621,7 @@ The May release is focused on stabilization and bug fixes leading up to the Buil
|
|||||||
|
|
||||||
* Announcing **Redgate SQL Search** extension available in Extension Manager
|
* Announcing **Redgate SQL Search** extension available in Extension Manager
|
||||||
* Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!**
|
* Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!**
|
||||||
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement)
|
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/privacystatement)
|
||||||
* Extension Manager has improved Marketplace experience to easily discover community extensions
|
* Extension Manager has improved Marketplace experience to easily discover community extensions
|
||||||
* SQL Agent extension Jobs and Job History view improvement
|
* SQL Agent extension Jobs and Job History view improvement
|
||||||
* Updates for **whoisactive** and **Server Reports** extensions
|
* Updates for **whoisactive** and **Server Reports** extensions
|
||||||
|
|||||||
16
README.md
16
README.md
@@ -65,7 +65,7 @@ This project has adopted the [Microsoft Open Source Code of Conduct](https://ope
|
|||||||
Azure Data Studio is localized into 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). The language packs are available in the Extension Manager marketplace. Simply, search for the specific language using the extension marketplace and install. Once you install the selected language, Azure Data Studio will prompt you to restart with the new language.
|
Azure Data Studio is localized into 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). The language packs are available in the Extension Manager marketplace. Simply, search for the specific language using the extension marketplace and install. Once you install the selected language, Azure Data Studio will prompt you to restart with the new language.
|
||||||
|
|
||||||
## Privacy Statement
|
## Privacy Statement
|
||||||
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
|
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/privacystatement) describes the privacy statement of this software.
|
||||||
|
|
||||||
## Contributions and "Thank You"
|
## Contributions and "Thank You"
|
||||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||||
@@ -131,10 +131,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.
|
|||||||
|
|
||||||
Licensed under the [Source EULA](LICENSE.txt).
|
Licensed under the [Source EULA](LICENSE.txt).
|
||||||
|
|
||||||
[win-user]: https://go.microsoft.com/fwlink/?linkid=2163435
|
[win-user]: https://go.microsoft.com/fwlink/?linkid=2168181
|
||||||
[win-system]: https://go.microsoft.com/fwlink/?linkid=2163531
|
[win-system]: https://go.microsoft.com/fwlink/?linkid=2168180
|
||||||
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2163529
|
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2168436
|
||||||
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2163528
|
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2168435
|
||||||
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2163530
|
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2168338
|
||||||
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2163437
|
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2168271
|
||||||
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2163436
|
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2168339
|
||||||
|
|||||||
41
SECURITY.md
Normal file
41
SECURITY.md
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.5 BLOCK -->
|
||||||
|
|
||||||
|
## Security
|
||||||
|
|
||||||
|
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
|
||||||
|
|
||||||
|
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below.
|
||||||
|
|
||||||
|
## Reporting Security Issues
|
||||||
|
|
||||||
|
**Please do not report security vulnerabilities through public GitHub issues.**
|
||||||
|
|
||||||
|
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).
|
||||||
|
|
||||||
|
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).
|
||||||
|
|
||||||
|
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
|
||||||
|
|
||||||
|
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
|
||||||
|
|
||||||
|
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
|
||||||
|
* Full paths of source file(s) related to the manifestation of the issue
|
||||||
|
* The location of the affected source code (tag/branch/commit or direct URL)
|
||||||
|
* Any special configuration required to reproduce the issue
|
||||||
|
* Step-by-step instructions to reproduce the issue
|
||||||
|
* Proof-of-concept or exploit code (if possible)
|
||||||
|
* Impact of the issue, including how an attacker might exploit the issue
|
||||||
|
|
||||||
|
This information will help us triage your report more quickly.
|
||||||
|
|
||||||
|
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.
|
||||||
|
|
||||||
|
## Preferred Languages
|
||||||
|
|
||||||
|
We prefer all communications to be in English.
|
||||||
|
|
||||||
|
## Policy
|
||||||
|
|
||||||
|
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).
|
||||||
|
|
||||||
|
<!-- END MICROSOFT SECURITY.MD BLOCK -->
|
||||||
@@ -12,7 +12,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
|||||||
angular2-grid: https://github.com/BTMorton/angular2-grid
|
angular2-grid: https://github.com/BTMorton/angular2-grid
|
||||||
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
|
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
|
||||||
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
|
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
|
||||||
axios: https://github.com/axios/axios
|
axios: https://github.com/axios/axios
|
||||||
bootstrap: https://github.com/twbs/bootstrap
|
bootstrap: https://github.com/twbs/bootstrap
|
||||||
chart.js: https://github.com/Timer/chartjs
|
chart.js: https://github.com/Timer/chartjs
|
||||||
chokidar: https://github.com/paulmillr/chokidar
|
chokidar: https://github.com/paulmillr/chokidar
|
||||||
@@ -30,6 +30,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
|||||||
getmac: https://github.com/bevry/getmac
|
getmac: https://github.com/bevry/getmac
|
||||||
graceful-fs: https://github.com/isaacs/node-graceful-fs
|
graceful-fs: https://github.com/isaacs/node-graceful-fs
|
||||||
gridstack: https://github.com/gridstack/gridstack.js
|
gridstack: https://github.com/gridstack/gridstack.js
|
||||||
|
html-to-image: https://github.com/bubkoo/html-to-image
|
||||||
html-query-plan: https://github.com/JustinPealing/html-query-plan
|
html-query-plan: https://github.com/JustinPealing/html-query-plan
|
||||||
http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
|
http-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
|
||||||
https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
|
https-proxy-agent: https://github.com/TooTallNate/node-https-proxy-agent
|
||||||
@@ -520,6 +521,32 @@ SOFTWARE.
|
|||||||
=========================================
|
=========================================
|
||||||
END OF gridstack NOTICES AND INFORMATION
|
END OF gridstack NOTICES AND INFORMATION
|
||||||
|
|
||||||
|
%% html-to-image NOTICES AND INFORMATION BEGIN HERE
|
||||||
|
=========================================
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2017 W.Y.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
=========================================
|
||||||
|
END OF html-to-image NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% html-query-plan NOTICES AND INFORMATION BEGIN HERE
|
%% html-query-plan NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
2021-04-07T00:04:17.775Z
|
2021-04-07T03:52:18.011Z
|
||||||
|
|||||||
@@ -104,13 +104,11 @@ kerberos/build/**
|
|||||||
|
|
||||||
# END SQL Modules
|
# END SQL Modules
|
||||||
|
|
||||||
vscode-nsfw/binding.gyp
|
nsfw/binding.gyp
|
||||||
vscode-nsfw/build/**
|
nsfw/build/**
|
||||||
vscode-nsfw/src/**
|
nsfw/src/**
|
||||||
vscode-nsfw/openpa/**
|
nsfw/includes/**
|
||||||
vscode-nsfw/includes/**
|
!nsfw/build/Release/*.node
|
||||||
!vscode-nsfw/build/Release/*.node
|
|
||||||
!vscode-nsfw/**/*.a
|
|
||||||
|
|
||||||
vsda/build/**
|
vsda/build/**
|
||||||
vsda/ci/**
|
vsda/ci/**
|
||||||
|
|||||||
@@ -83,7 +83,7 @@ async function main() {
|
|||||||
console.log('Asset:', JSON.stringify(asset, null, ' '));
|
console.log('Asset:', JSON.stringify(asset, null, ' '));
|
||||||
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||||
const scripts = client.database('builds').container(quality).scripts;
|
const scripts = client.database('builds').container(quality).scripts;
|
||||||
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
|
await (0, retry_1.retry)(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
|
||||||
}
|
}
|
||||||
main().then(() => {
|
main().then(() => {
|
||||||
console.log('Asset successfully created');
|
console.log('Asset successfully created');
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ async function main() {
|
|||||||
};
|
};
|
||||||
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||||
const scripts = client.database('builds').container(quality).scripts;
|
const scripts = client.database('builds').container(quality).scripts;
|
||||||
await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
|
await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
|
||||||
}
|
}
|
||||||
main().then(() => {
|
main().then(() => {
|
||||||
console.log('Build successfully created');
|
console.log('Build successfully created');
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ async function publish(commit, files) {
|
|||||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||||
await assertContainer(blobService, commit);
|
await assertContainer(blobService, commit);
|
||||||
for (const file of files) {
|
for (const file of files) {
|
||||||
const blobName = path_1.basename(file);
|
const blobName = (0, path_1.basename)(file);
|
||||||
const blobExists = await doesBlobExist(blobService, commit, blobName);
|
const blobExists = await doesBlobExist(blobService, commit, blobName);
|
||||||
if (blobExists) {
|
if (blobExists) {
|
||||||
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
|
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
|
||||||
@@ -58,7 +58,7 @@ function main() {
|
|||||||
}
|
}
|
||||||
const opts = minimist(process.argv.slice(2));
|
const opts = minimist(process.argv.slice(2));
|
||||||
const [directory] = opts._;
|
const [directory] = opts._;
|
||||||
const files = fileNames.map(fileName => path_1.join(directory, fileName));
|
const files = fileNames.map(fileName => (0, path_1.join)(directory, fileName));
|
||||||
publish(commit, files).catch(err => {
|
publish(commit, files).catch(err => {
|
||||||
console.error(err);
|
console.error(err);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ async function main() {
|
|||||||
}
|
}
|
||||||
console.log(`Releasing build ${commit}...`);
|
console.log(`Releasing build ${commit}...`);
|
||||||
const scripts = client.database('builds').container(quality).scripts;
|
const scripts = client.database('builds').container(quality).scripts;
|
||||||
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
|
await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
|
||||||
}
|
}
|
||||||
main().then(() => {
|
main().then(() => {
|
||||||
console.log('Build successfully released');
|
console.log('Build successfully released');
|
||||||
|
|||||||
@@ -62,7 +62,7 @@ async function sync(commit, quality) {
|
|||||||
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
|
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
|
||||||
log(` Updating build in DB...`);
|
log(` Updating build in DB...`);
|
||||||
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
||||||
await retry_1.retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
|
await (0, retry_1.retry)(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
|
||||||
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
|
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
|
||||||
log(` Done ✔️`);
|
log(` Done ✔️`);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,5 +8,11 @@
|
|||||||
<true/>
|
<true/>
|
||||||
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
|
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
|
||||||
<true/>
|
<true/>
|
||||||
|
<key>com.apple.security.device.audio-input</key>
|
||||||
|
<true/>
|
||||||
|
<key>com.apple.security.device.camera</key>
|
||||||
|
<true/>
|
||||||
|
<key>com.apple.security.automation.apple-events</key>
|
||||||
|
<true/>
|
||||||
</dict>
|
</dict>
|
||||||
</plist>
|
</plist>
|
||||||
|
|||||||
129
build/azure-pipelines/darwin/product-build-darwin-sign.yml
Normal file
129
build/azure-pipelines/darwin/product-build-darwin-sign.yml
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
steps:
|
||||||
|
- task: NodeTool@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: "14.x"
|
||||||
|
|
||||||
|
- task: AzureKeyVault@1
|
||||||
|
displayName: "Azure Key Vault: Get Secrets"
|
||||||
|
inputs:
|
||||||
|
azureSubscription: "vscode-builds-subscription"
|
||||||
|
KeyVaultName: vscode
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
cat << EOF > ~/.netrc
|
||||||
|
machine github.com
|
||||||
|
login vscode
|
||||||
|
password $(github-distro-mixin-password)
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git config user.email "vscode@microsoft.com"
|
||||||
|
git config user.name "VSCode"
|
||||||
|
displayName: Prepare tooling
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||||
|
displayName: Merge distro
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
pushd build \
|
||||||
|
&& yarn \
|
||||||
|
&& npm install -g typescript \
|
||||||
|
&& tsc azure-pipelines/common/createAsset.ts \
|
||||||
|
&& popd
|
||||||
|
displayName: Restore modules for just build folder and compile it
|
||||||
|
|
||||||
|
- download: current
|
||||||
|
artifact: vscode-darwin-$(VSCODE_ARCH)
|
||||||
|
displayName: Download $(VSCODE_ARCH) artifact
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
unzip $(Pipeline.Workspace)/vscode-darwin-$(VSCODE_ARCH)/VSCode-darwin-$(VSCODE_ARCH).zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||||
|
mv $(Pipeline.Workspace)/vscode-darwin-$(VSCODE_ARCH)/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
|
||||||
|
displayName: Unzip & move
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: "ESRP CodeSign"
|
||||||
|
FolderPath: "$(agent.builddirectory)"
|
||||||
|
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-401337-Apple",
|
||||||
|
"operationSetCode": "MacAppDeveloperSign",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "Hardening",
|
||||||
|
"parameterValue": "--options=runtime"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "sign",
|
||||||
|
"toolVersion": "1.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 60
|
||||||
|
displayName: Codesign
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||||
|
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||||
|
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
|
||||||
|
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
|
||||||
|
displayName: Export bundle identifier
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: "ESRP CodeSign"
|
||||||
|
FolderPath: "$(agent.builddirectory)"
|
||||||
|
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-401337-Apple",
|
||||||
|
"operationSetCode": "MacAppNotarize",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "BundleId",
|
||||||
|
"parameterValue": "$(BundleIdentifier)"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "sign",
|
||||||
|
"toolVersion": "1.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 60
|
||||||
|
displayName: Notarization
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
||||||
|
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||||
|
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
|
||||||
|
displayName: Verify start after signing (export configuration)
|
||||||
|
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# For legacy purposes, arch for x64 is just 'darwin'
|
||||||
|
case $VSCODE_ARCH in
|
||||||
|
x64) ASSET_ID="darwin" ;;
|
||||||
|
arm64) ASSET_ID="darwin-arm64" ;;
|
||||||
|
universal) ASSET_ID="darwin-universal" ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
|
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||||
|
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||||
|
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||||
|
node build/azure-pipelines/common/createAsset.js \
|
||||||
|
"$ASSET_ID" \
|
||||||
|
archive \
|
||||||
|
"VSCode-$ASSET_ID.zip" \
|
||||||
|
../VSCode-darwin-$(VSCODE_ARCH).zip
|
||||||
|
displayName: Publish Clients
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: "Azure Key Vault: Get Secrets"
|
displayName: "Azure Key Vault: Get Secrets"
|
||||||
@@ -22,6 +22,8 @@ steps:
|
|||||||
displayName: Extract compilation output
|
displayName: Extract compilation output
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||||
|
|
||||||
|
# Set up the credentials to retrieve distro repo and setup git persona
|
||||||
|
# to create a merge commit for when we merge distro into oss
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
cat << EOF > ~/.netrc
|
cat << EOF > ~/.netrc
|
||||||
@@ -71,6 +73,7 @@ steps:
|
|||||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
|
set -e
|
||||||
npx https://aka.ms/enablesecurefeed standAlone
|
npx https://aka.ms/enablesecurefeed standAlone
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 5
|
||||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||||
@@ -80,6 +83,7 @@ steps:
|
|||||||
set -e
|
set -e
|
||||||
export npm_config_arch=$(VSCODE_ARCH)
|
export npm_config_arch=$(VSCODE_ARCH)
|
||||||
export npm_config_node_gyp=$(which node-gyp)
|
export npm_config_node_gyp=$(which node-gyp)
|
||||||
|
export npm_config_build_from_source=true
|
||||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
||||||
|
|
||||||
for i in {1..3}; do # try 3 times, for Terrapin
|
for i in {1..3}; do # try 3 times, for Terrapin
|
||||||
@@ -104,30 +108,7 @@ steps:
|
|||||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
displayName: Create node_modules archive
|
displayName: Create node_modules archive
|
||||||
|
|
||||||
- script: |
|
# This script brings in the right resources (images, icons, etc) based on the quality (insiders, stable, exploration)
|
||||||
set -e
|
|
||||||
export npm_config_arch=$(VSCODE_ARCH)
|
|
||||||
export npm_config_node_gyp=$(which node-gyp)
|
|
||||||
export npm_config_build_from_source=true
|
|
||||||
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
|
|
||||||
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
|
|
||||||
yarn electron-rebuild
|
|
||||||
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
|
|
||||||
cd ./node_modules/keytar
|
|
||||||
node-gyp rebuild
|
|
||||||
displayName: Rebuild native modules for ARM64
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
|
|
||||||
|
|
||||||
- download: current
|
|
||||||
artifact: vscode-darwin-x64
|
|
||||||
displayName: Download x64 artifact
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
|
||||||
|
|
||||||
- download: current
|
|
||||||
artifact: vscode-darwin-arm64
|
|
||||||
displayName: Download arm64 artifact
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
node build/azure-pipelines/mixin
|
node build/azure-pipelines/mixin
|
||||||
@@ -137,7 +118,7 @@ steps:
|
|||||||
set -e
|
set -e
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
|
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
|
||||||
displayName: Build
|
displayName: Build client
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
@@ -149,14 +130,6 @@ steps:
|
|||||||
displayName: Build Server
|
displayName: Build Server
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
unzip $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip -d $(agent.builddirectory)/vscode-x64
|
|
||||||
unzip $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/vscode-arm64
|
|
||||||
DEBUG=* node build/darwin/create-universal-app.js
|
|
||||||
displayName: Create Universal App
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
@@ -164,6 +137,29 @@ steps:
|
|||||||
displayName: Download Electron and Playwright
|
displayName: Download Electron and Playwright
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
|
- download: current
|
||||||
|
artifact: vscode-darwin-x64
|
||||||
|
displayName: Download x64 artifact
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||||
|
|
||||||
|
- download: current
|
||||||
|
artifact: vscode-darwin-arm64
|
||||||
|
displayName: Download arm64 artifact
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
cp $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin-x64.zip
|
||||||
|
cp $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip $(agent.builddirectory)/VSCode-darwin-arm64.zip
|
||||||
|
unzip $(agent.builddirectory)/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64
|
||||||
|
unzip $(agent.builddirectory)/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64
|
||||||
|
DEBUG=* node build/darwin/create-universal-app.js
|
||||||
|
displayName: Create Universal App
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
|
||||||
|
|
||||||
|
# Setting hardened entitlements is a requirement for:
|
||||||
|
# * Apple notarization
|
||||||
|
# * Running tests on Big Sur (because Big Sur has additional security precautions)
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
|
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
|
||||||
@@ -172,22 +168,21 @@ steps:
|
|||||||
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
|
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
|
||||||
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
|
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
|
||||||
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
|
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
|
||||||
VSCODE_ARCH="$(VSCODE_ARCH)" DEBUG=electron-osx-sign* node build/darwin/sign.js
|
VSCODE_ARCH=$(VSCODE_ARCH) DEBUG=electron-osx-sign* node build/darwin/sign.js
|
||||||
displayName: Set Hardened Entitlements
|
displayName: Set Hardened Entitlements
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
./scripts/test.sh --build --tfs "Unit Tests"
|
./scripts/test.sh --build --tfs "Unit Tests"
|
||||||
displayName: Run unit tests (Electron)
|
displayName: Run unit tests (Electron)
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 7
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
|
||||||
displayName: Run unit tests (Browser)
|
displayName: Run unit tests (Browser)
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 7
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
@@ -215,7 +210,7 @@ steps:
|
|||||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
|
||||||
./resources/server/test/test-web-integration.sh --browser webkit
|
./resources/server/test/test-web-integration.sh --browser webkit
|
||||||
displayName: Run integration tests (Browser)
|
displayName: Run integration tests (Browser)
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 10
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
@@ -226,7 +221,7 @@ steps:
|
|||||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
||||||
./resources/server/test/test-remote-integration.sh
|
./resources/server/test/test-remote-integration.sh
|
||||||
displayName: Run remote integration tests (Electron)
|
displayName: Run remote integration tests (Electron)
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 7
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
@@ -283,91 +278,19 @@ steps:
|
|||||||
displayName: Archive build
|
displayName: Archive build
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||||
|
|
||||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
|
||||||
inputs:
|
|
||||||
ConnectedServiceName: "ESRP CodeSign"
|
|
||||||
FolderPath: "$(agent.builddirectory)"
|
|
||||||
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
|
|
||||||
signConfigType: inlineSignParams
|
|
||||||
inlineOperation: |
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"keyCode": "CP-401337-Apple",
|
|
||||||
"operationSetCode": "MacAppDeveloperSign",
|
|
||||||
"parameters": [
|
|
||||||
{
|
|
||||||
"parameterName": "Hardening",
|
|
||||||
"parameterValue": "--options=runtime"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"toolName": "sign",
|
|
||||||
"toolVersion": "1.0"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
SessionTimeout: 60
|
|
||||||
displayName: Codesign
|
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip "*.pkg"
|
|
||||||
displayName: Clean
|
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
|
||||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
|
||||||
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
|
|
||||||
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
|
|
||||||
displayName: Export bundle identifier
|
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
|
||||||
|
|
||||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
|
||||||
inputs:
|
|
||||||
ConnectedServiceName: "ESRP CodeSign"
|
|
||||||
FolderPath: "$(agent.builddirectory)"
|
|
||||||
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
|
|
||||||
signConfigType: inlineSignParams
|
|
||||||
inlineOperation: |
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"keyCode": "CP-401337-Apple",
|
|
||||||
"operationSetCode": "MacAppNotarize",
|
|
||||||
"parameters": [
|
|
||||||
{
|
|
||||||
"parameterName": "BundleId",
|
|
||||||
"parameterValue": "$(BundleIdentifier)"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"toolName": "sign",
|
|
||||||
"toolVersion": "1.0"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
SessionTimeout: 60
|
|
||||||
displayName: Notarization
|
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
|
|
||||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
|
||||||
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
|
|
||||||
displayName: Verify start after signing (export configuration)
|
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||||
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
VSCODE_ARCH="$(VSCODE_ARCH)" ./build/azure-pipelines/darwin/publish-server.sh
|
||||||
./build/azure-pipelines/darwin/publish.sh
|
displayName: Publish Servers
|
||||||
displayName: Publish
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
|
||||||
|
|
||||||
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
|
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
|
||||||
artifact: vscode-darwin-$(VSCODE_ARCH)
|
artifact: vscode-darwin-$(VSCODE_ARCH)
|
||||||
displayName: Publish archive
|
displayName: Publish client archive
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||||
|
|
||||||
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
|
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
|
||||||
@@ -385,9 +308,5 @@ steps:
|
|||||||
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
VSCODE_ARCH="$(VSCODE_ARCH)" \
|
||||||
yarn gulp upload-vscode-configuration
|
yarn gulp upload-vscode-configuration
|
||||||
displayName: Upload configuration (for Bing settings search)
|
displayName: Upload configuration (for Bing settings search)
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
|
||||||
continueOnError: true
|
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
|
||||||
displayName: "Component Detection"
|
|
||||||
continueOnError: true
|
continueOnError: true
|
||||||
|
|||||||
@@ -1,20 +1,6 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
# Publish DEB
|
|
||||||
case $VSCODE_ARCH in
|
|
||||||
x64) ASSET_ID="darwin" ;;
|
|
||||||
arm64) ASSET_ID="darwin-arm64" ;;
|
|
||||||
universal) ASSET_ID="darwin-universal" ;;
|
|
||||||
esac
|
|
||||||
|
|
||||||
# publish the build
|
|
||||||
node build/azure-pipelines/common/createAsset.js \
|
|
||||||
"$ASSET_ID" \
|
|
||||||
archive \
|
|
||||||
"VSCode-$ASSET_ID.zip" \
|
|
||||||
../VSCode-darwin-$VSCODE_ARCH.zip
|
|
||||||
|
|
||||||
if [ "$VSCODE_ARCH" == "x64" ]; then
|
if [ "$VSCODE_ARCH" == "x64" ]; then
|
||||||
# package Remote Extension Host
|
# package Remote Extension Host
|
||||||
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
|
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
|
||||||
@@ -3,39 +3,39 @@ pool:
|
|||||||
|
|
||||||
trigger:
|
trigger:
|
||||||
branches:
|
branches:
|
||||||
include: ['main', 'release/*']
|
include: ["main", "release/*"]
|
||||||
pr:
|
pr:
|
||||||
branches:
|
branches:
|
||||||
include: ['main', 'release/*']
|
include: ["main", "release/*"]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: "Azure Key Vault: Get Secrets"
|
||||||
inputs:
|
inputs:
|
||||||
azureSubscription: 'azuredatastudio-adointegration'
|
azureSubscription: "vscode-builds-subscription"
|
||||||
KeyVaultName: ado-secrets
|
KeyVaultName: vscode
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
cat << EOF > ~/.netrc
|
cat << EOF > ~/.netrc
|
||||||
machine github.com
|
machine github.com
|
||||||
login azuredatastudio
|
login vscode
|
||||||
password $(github-distro-mixin-password)
|
password $(github-distro-mixin-password)
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
git config user.email "sqltools@service.microsoft.com"
|
git config user.email "vscode@microsoft.com"
|
||||||
git config user.name "AzureDataStudio"
|
git config user.name "VSCode"
|
||||||
|
|
||||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
|
||||||
git fetch distro
|
git fetch distro
|
||||||
|
|
||||||
# Push main branch into oss/master
|
# Push main branch into oss/main
|
||||||
git push distro origin/main:refs/heads/oss/master
|
git push distro origin/main:refs/heads/oss/main
|
||||||
|
|
||||||
# Push every release branch into oss/release
|
# Push every release branch into oss/release
|
||||||
git for-each-ref --format="%(refname:short)" refs/remotes/origin/release/* | sed 's/^origin\/\(.*\)$/\0:refs\/heads\/oss\/\1/' | xargs git push distro
|
git for-each-ref --format="%(refname:short)" refs/remotes/origin/release/* | sed 's/^origin\/\(.*\)$/\0:refs\/heads\/oss\/\1/' | xargs git push distro
|
||||||
|
|||||||
@@ -1,10 +1,14 @@
|
|||||||
#Download base image ubuntu 18.04
|
#Download base image ubuntu 21.04
|
||||||
FROM ubuntu:18.04
|
FROM ubuntu:21.04
|
||||||
|
ENV TZ=America/Los_Angeles
|
||||||
|
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||||
|
|
||||||
# Update Software repository
|
# Update Software repository
|
||||||
RUN apt-get update
|
RUN apt-get update && apt-get upgrade -y
|
||||||
|
|
||||||
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0
|
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
|
||||||
|
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
|
||||||
|
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++ python3-dev python3-pip
|
||||||
|
|
||||||
ADD ./ /opt/ads-server
|
ADD ./ /opt/ads-server
|
||||||
|
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ pr:
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: "Azure Key Vault: Get Secrets"
|
displayName: "Azure Key Vault: Get Secrets"
|
||||||
@@ -31,10 +31,10 @@ steps:
|
|||||||
git config user.email "vscode@microsoft.com"
|
git config user.email "vscode@microsoft.com"
|
||||||
git config user.name "VSCode"
|
git config user.name "VSCode"
|
||||||
|
|
||||||
git checkout origin/electron-11.x.y
|
git checkout origin/electron-12.x.y
|
||||||
git merge origin/master
|
git merge origin/main
|
||||||
|
|
||||||
# Push master branch into exploration branch
|
# Push main branch into exploration branch
|
||||||
git push origin HEAD:electron-11.x.y
|
git push origin HEAD:electron-12.x.y
|
||||||
|
|
||||||
displayName: Sync & Merge Exploration
|
displayName: Sync & Merge Exploration
|
||||||
|
|||||||
@@ -14,22 +14,4 @@ TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
|
|||||||
rm -rf $ROOT/code-*.tar.*
|
rm -rf $ROOT/code-*.tar.*
|
||||||
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
||||||
|
|
||||||
# # Publish Remote Extension Host
|
|
||||||
# LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
|
|
||||||
# SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
|
|
||||||
# SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
|
|
||||||
# SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
|
|
||||||
|
|
||||||
# rm -rf $ROOT/azuredatastudio-server-*.tar.*
|
|
||||||
# (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
|
||||||
|
|
||||||
# Publish Remote Extension Host (Web)
|
|
||||||
LEGACY_SERVER_BUILD_NAME_WEB="azuredatastudio-reh-web-$PLATFORM_LINUX"
|
|
||||||
SERVER_BUILD_NAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web"
|
|
||||||
SERVER_TARBALL_FILENAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web.tar.gz"
|
|
||||||
SERVER_TARBALL_PATH_WEB="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME_WEB"
|
|
||||||
|
|
||||||
rm -rf $ROOT/azuredatastudio-server-*.tar.*
|
|
||||||
(cd $ROOT && mv vscode-reh-web-linux-x64 $SERVER_BUILD_NAME_WEB && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH_WEB $SERVER_BUILD_NAME_WEB)
|
|
||||||
|
|
||||||
node build/azure-pipelines/common/copyArtifacts.js
|
node build/azure-pipelines/common/copyArtifacts.js
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -69,6 +69,7 @@ steps:
|
|||||||
displayName: Extract node_modules cache
|
displayName: Extract node_modules cache
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
|
set -e
|
||||||
npx https://aka.ms/enablesecurefeed standAlone
|
npx https://aka.ms/enablesecurefeed standAlone
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 5
|
||||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||||
@@ -132,7 +133,3 @@ steps:
|
|||||||
artifact: vscode-server-linux-alpine-web
|
artifact: vscode-server-linux-alpine-web
|
||||||
displayName: Publish web server archive
|
displayName: Publish web server archive
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
|
||||||
displayName: "Component Detection"
|
|
||||||
continueOnError: true
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -61,12 +61,6 @@ steps:
|
|||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
npm install -g node-gyp@latest
|
|
||||||
node-gyp --version
|
|
||||||
displayName: Update node-gyp
|
|
||||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['VSCODE_ARCH'], 'x64'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
npx https://aka.ms/enablesecurefeed standAlone
|
npx https://aka.ms/enablesecurefeed standAlone
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 5
|
||||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||||
@@ -85,7 +79,6 @@ steps:
|
|||||||
if [ "$VSCODE_ARCH" == "x64" ]; then
|
if [ "$VSCODE_ARCH" == "x64" ]; then
|
||||||
export VSCODE_REMOTE_CC=$(which gcc-4.8)
|
export VSCODE_REMOTE_CC=$(which gcc-4.8)
|
||||||
export VSCODE_REMOTE_CXX=$(which g++-4.8)
|
export VSCODE_REMOTE_CXX=$(which g++-4.8)
|
||||||
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
for i in {1..3}; do # try 3 times, for Terrapin
|
for i in {1..3}; do # try 3 times, for Terrapin
|
||||||
@@ -96,10 +89,6 @@ steps:
|
|||||||
fi
|
fi
|
||||||
echo "Yarn failed $i, trying again..."
|
echo "Yarn failed $i, trying again..."
|
||||||
done
|
done
|
||||||
|
|
||||||
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
|
|
||||||
cd ./node_modules/keytar
|
|
||||||
npx node-gyp rebuild
|
|
||||||
env:
|
env:
|
||||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||||
@@ -144,14 +133,14 @@ steps:
|
|||||||
set -e
|
set -e
|
||||||
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
||||||
displayName: Run unit tests (Electron)
|
displayName: Run unit tests (Electron)
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 7
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
|
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
|
||||||
displayName: Run unit tests (Browser)
|
displayName: Run unit tests (Browser)
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 7
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
@@ -180,7 +169,7 @@ steps:
|
|||||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
|
||||||
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
|
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
|
||||||
displayName: Run integration tests (Browser)
|
displayName: Run integration tests (Browser)
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 10
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
@@ -192,7 +181,7 @@ steps:
|
|||||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
|
||||||
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
|
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
|
||||||
displayName: Run remote integration tests (Electron)
|
displayName: Run remote integration tests (Electron)
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 7
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- task: PublishPipelineArtifact@0
|
- task: PublishPipelineArtifact@0
|
||||||
@@ -286,7 +275,3 @@ steps:
|
|||||||
artifactName: "snap-$(VSCODE_ARCH)"
|
artifactName: "snap-$(VSCODE_ARCH)"
|
||||||
targetPath: .build/linux/snap-tarball
|
targetPath: .build/linux/snap-tarball
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
|
||||||
displayName: "Component Detection"
|
|
||||||
continueOnError: true
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -48,7 +48,7 @@ steps:
|
|||||||
x64) SNAPCRAFT_TARGET_ARGS="" ;;
|
x64) SNAPCRAFT_TARGET_ARGS="" ;;
|
||||||
*) SNAPCRAFT_TARGET_ARGS="--target-arch $(VSCODE_ARCH)" ;;
|
*) SNAPCRAFT_TARGET_ARGS="--target-arch $(VSCODE_ARCH)" ;;
|
||||||
esac
|
esac
|
||||||
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap $SNAPCRAFT_TARGET_ARGS --output "$SNAP_PATH")
|
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft prime $SNAPCRAFT_TARGET_ARGS && snap pack prime --compression=lzo --filename="$SNAP_PATH")
|
||||||
|
|
||||||
# Publish snap package
|
# Publish snap package
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||||
|
|||||||
@@ -94,8 +94,6 @@ steps:
|
|||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn gulp vscode-linux-x64-min-ci
|
yarn gulp vscode-linux-x64-min-ci
|
||||||
yarn gulp vscode-web-min-ci
|
|
||||||
yarn gulp vscode-reh-web-linux-x64-min
|
|
||||||
displayName: Build
|
displayName: Build
|
||||||
env:
|
env:
|
||||||
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
@@ -173,7 +171,7 @@ steps:
|
|||||||
done
|
done
|
||||||
displayName: Archive Logs
|
displayName: Archive Logs
|
||||||
continueOnError: true
|
continueOnError: true
|
||||||
condition: succeededOrFailed()
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
@@ -224,19 +222,6 @@ steps:
|
|||||||
displayName: 'Signing Extensions and Langpacks'
|
displayName: 'Signing Extensions and Langpacks'
|
||||||
condition: and(succeeded(), eq(variables['signed'], true))
|
condition: and(succeeded(), eq(variables['signed'], true))
|
||||||
|
|
||||||
# - script: |
|
|
||||||
# set -e
|
|
||||||
# cd ./extensions/mssql/node_modules/@microsoft/ads-kerberos
|
|
||||||
# # npx node-gyp rebuild
|
|
||||||
# yarn install
|
|
||||||
# displayName: Recompile native node modules
|
|
||||||
|
|
||||||
# - script: |
|
|
||||||
# set -e
|
|
||||||
# VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
|
||||||
# yarn gulp vscode-reh-web-linux-x64-min
|
|
||||||
# displayName: Build web server
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
./build/azure-pipelines/linux/createDrop.sh
|
./build/azure-pipelines/linux/createDrop.sh
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ schedules:
|
|||||||
displayName: Mon-Fri at 7:00
|
displayName: Mon-Fri at 7:00
|
||||||
branches:
|
branches:
|
||||||
include:
|
include:
|
||||||
- master
|
- main
|
||||||
|
|
||||||
parameters:
|
parameters:
|
||||||
- name: VSCODE_QUALITY
|
- name: VSCODE_QUALITY
|
||||||
@@ -254,6 +254,15 @@ stages:
|
|||||||
VSCODE_ARCH: x64
|
VSCODE_ARCH: x64
|
||||||
steps:
|
steps:
|
||||||
- template: darwin/product-build-darwin.yml
|
- template: darwin/product-build-darwin.yml
|
||||||
|
- ${{ if ne(variables['VSCODE_PUBLISH'], 'false') }}:
|
||||||
|
- job: macOSSign
|
||||||
|
dependsOn:
|
||||||
|
- macOS
|
||||||
|
timeoutInMinutes: 90
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: x64
|
||||||
|
steps:
|
||||||
|
- template: darwin/product-build-darwin-sign.yml
|
||||||
|
|
||||||
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}:
|
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}:
|
||||||
- job: macOSARM64
|
- job: macOSARM64
|
||||||
@@ -262,6 +271,15 @@ stages:
|
|||||||
VSCODE_ARCH: arm64
|
VSCODE_ARCH: arm64
|
||||||
steps:
|
steps:
|
||||||
- template: darwin/product-build-darwin.yml
|
- template: darwin/product-build-darwin.yml
|
||||||
|
- ${{ if ne(variables['VSCODE_PUBLISH'], 'false') }}:
|
||||||
|
- job: macOSARM64Sign
|
||||||
|
dependsOn:
|
||||||
|
- macOSARM64
|
||||||
|
timeoutInMinutes: 90
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: arm64
|
||||||
|
steps:
|
||||||
|
- template: darwin/product-build-darwin-sign.yml
|
||||||
|
|
||||||
- ${{ if eq(variables['VSCODE_BUILD_MACOS_UNIVERSAL'], true) }}:
|
- ${{ if eq(variables['VSCODE_BUILD_MACOS_UNIVERSAL'], true) }}:
|
||||||
- job: macOSUniversal
|
- job: macOSUniversal
|
||||||
@@ -273,6 +291,15 @@ stages:
|
|||||||
VSCODE_ARCH: universal
|
VSCODE_ARCH: universal
|
||||||
steps:
|
steps:
|
||||||
- template: darwin/product-build-darwin.yml
|
- template: darwin/product-build-darwin.yml
|
||||||
|
- ${{ if ne(variables['VSCODE_PUBLISH'], 'false') }}:
|
||||||
|
- job: macOSUniversalSign
|
||||||
|
dependsOn:
|
||||||
|
- macOSUniversal
|
||||||
|
timeoutInMinutes: 90
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: universal
|
||||||
|
steps:
|
||||||
|
- template: darwin/product-build-darwin-sign.yml
|
||||||
|
|
||||||
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
|
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
|
||||||
- stage: Mooncake
|
- stage: Mooncake
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -30,39 +30,41 @@ steps:
|
|||||||
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
|
||||||
displayName: Merge distro
|
displayName: Merge distro
|
||||||
|
|
||||||
- script: |
|
|
||||||
npx https://aka.ms/enablesecurefeed standAlone
|
|
||||||
displayName: Switch to Terrapin packages
|
|
||||||
timeoutInMinutes: 5
|
|
||||||
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
mkdir -p .build
|
mkdir -p .build
|
||||||
echo -n $(VSCODE_ARCH) > .build/arch
|
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
|
||||||
echo -n $ENABLE_TERRAPIN > .build/terrapin
|
|
||||||
displayName: Prepare yarn cache flags
|
displayName: Prepare yarn cache flags
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
# using `genericNodeModules` instead of `nodeModules` here to avoid sharing the cache with builds running inside containers
|
||||||
|
- task: Cache@2
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
key: 'genericNodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
path: .build/node_modules_cache
|
||||||
vstsFeed: "npm-vscode"
|
cacheHitVar: NODE_MODULES_RESTORED
|
||||||
|
displayName: Restore node_modules cache
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
export npm_config_arch=$(NPM_ARCH)
|
tar -xzf .build/node_modules_cache/cache.tgz
|
||||||
|
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
|
displayName: Extract node_modules cache
|
||||||
|
|
||||||
if [ -z "$CC" ] || [ -z "$CXX" ]; then
|
- script: |
|
||||||
export CC=$(which gcc-5)
|
set -e
|
||||||
export CXX=$(which g++-5)
|
npx https://aka.ms/enablesecurefeed standAlone
|
||||||
fi
|
timeoutInMinutes: 5
|
||||||
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||||
|
displayName: Switch to Terrapin packages
|
||||||
|
|
||||||
if [ "$VSCODE_ARCH" == "x64" ]; then
|
- script: |
|
||||||
export VSCODE_REMOTE_CC=$(which gcc-4.8)
|
set -e
|
||||||
export VSCODE_REMOTE_CXX=$(which g++-4.8)
|
sudo apt update -y
|
||||||
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
|
sudo apt install -y build-essential pkg-config libx11-dev libx11-xcb-dev libxkbfile-dev libsecret-1-dev libnotify-bin
|
||||||
fi
|
displayName: Install build tools
|
||||||
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
for i in {1..3}; do # try 3 times, for Terrapin
|
for i in {1..3}; do # try 3 times, for Terrapin
|
||||||
yarn --frozen-lockfile && break
|
yarn --frozen-lockfile && break
|
||||||
if [ $i -eq 3 ]; then
|
if [ $i -eq 3 ]; then
|
||||||
@@ -75,14 +77,15 @@ steps:
|
|||||||
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
|
||||||
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
|
||||||
displayName: Install dependencies
|
displayName: Install dependencies
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- script: |
|
||||||
inputs:
|
set -e
|
||||||
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
mkdir -p .build/node_modules_cache
|
||||||
vstsFeed: "npm-vscode"
|
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
|
displayName: Create node_modules archive
|
||||||
|
|
||||||
# Mixin must run before optimize, because the CSS loader will inline small SVGs
|
# Mixin must run before optimize, because the CSS loader will inline small SVGs
|
||||||
- script: |
|
- script: |
|
||||||
@@ -92,12 +95,8 @@ steps:
|
|||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn gulp compile-build
|
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check
|
||||||
yarn gulp compile-extensions-build
|
displayName: Compile & Hygiene
|
||||||
yarn gulp minify-vscode
|
|
||||||
yarn gulp vscode-reh-linux-x64-min
|
|
||||||
yarn gulp vscode-reh-web-linux-x64-min
|
|
||||||
displayName: Compile
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
@@ -106,6 +105,19 @@ steps:
|
|||||||
displayName: Upload sourcemaps
|
displayName: Upload sourcemaps
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -
|
||||||
|
./build/azure-pipelines/common/extract-telemetry.sh
|
||||||
|
displayName: Extract Telemetry
|
||||||
|
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
|
||||||
|
./build/azure-pipelines/common/publish-webview.sh
|
||||||
|
displayName: Publish Webview
|
||||||
|
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
VERSION=`node -p "require(\"./package.json\").version"`
|
VERSION=`node -p "require(\"./package.json\").version"`
|
||||||
@@ -125,3 +137,15 @@ steps:
|
|||||||
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||||
artifactName: Compilation
|
artifactName: Compilation
|
||||||
displayName: Publish compilation artifact
|
displayName: Publish compilation artifact
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||||
|
yarn download-builtin-extensions-cg
|
||||||
|
displayName: Built-in extensions component details
|
||||||
|
|
||||||
|
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||||
|
displayName: "Component Detection"
|
||||||
|
inputs:
|
||||||
|
sourceScanPath: $(Build.SourcesDirectory)
|
||||||
|
continueOnError: true
|
||||||
|
|||||||
@@ -9,27 +9,27 @@ pr: none
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.x"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
- bash: |
|
# - bash: |
|
||||||
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
# TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
||||||
CHANNEL="G1C14HJ2F"
|
# CHANNEL="G1C14HJ2F"
|
||||||
|
|
||||||
if [ "$TAG_VERSION" == "1.999.0" ]; then
|
# if [ "$TAG_VERSION" == "1.999.0" ]; then
|
||||||
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
|
# MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
|
||||||
|
|
||||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
# curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
||||||
-H 'Content-type: application/json; charset=utf-8' \
|
# -H 'Content-type: application/json; charset=utf-8' \
|
||||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
|
# --data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
|
||||||
https://slack.com/api/chat.postMessage
|
# https://slack.com/api/chat.postMessage
|
||||||
|
|
||||||
exit 1
|
# exit 1
|
||||||
fi
|
# fi
|
||||||
displayName: Check 1.999.0 tag
|
# displayName: Check 1.999.0 tag
|
||||||
|
|
||||||
- bash: |
|
- bash: |
|
||||||
# Install build dependencies
|
# Install build dependencies
|
||||||
@@ -37,47 +37,54 @@ steps:
|
|||||||
node build/azure-pipelines/publish-types/check-version.js
|
node build/azure-pipelines/publish-types/check-version.js
|
||||||
displayName: Check version
|
displayName: Check version
|
||||||
|
|
||||||
|
# {{SQL CARBON EDIT}} Modify to fit our own scenario - specifically currently we need to use a fork of the repo since we don't
|
||||||
|
# have an account with push access to DT
|
||||||
- bash: |
|
- bash: |
|
||||||
git config --global user.email "vscode@microsoft.com"
|
git config --global user.email "azuredatastudio@microsoft.com"
|
||||||
git config --global user.name "VSCode"
|
git config --global user.name "Azure Data Studio"
|
||||||
|
|
||||||
git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1
|
git clone https://$(GITHUB_TOKEN)@$(REPO) --depth=1
|
||||||
node build/azure-pipelines/publish-types/update-types.js
|
node build/azure-pipelines/publish-types/update-types.js
|
||||||
|
|
||||||
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
||||||
|
|
||||||
cd DefinitelyTyped
|
cd DefinitelyTyped
|
||||||
|
|
||||||
|
# Sync up to latest from the DT repo
|
||||||
|
git remote add upstream https://github.com/DefinitelyTyped/DefinitelyTyped.git
|
||||||
|
git merge upstream/master
|
||||||
|
git push origin
|
||||||
|
|
||||||
git diff --color | cat
|
git diff --color | cat
|
||||||
git add -A
|
git add -A
|
||||||
git status
|
git status
|
||||||
git checkout -b "vscode-types-$TAG_VERSION"
|
git checkout -b "azdata-types-$TAG_VERSION"
|
||||||
git commit -m "VS Code $TAG_VERSION Extension API"
|
git commit -m "Azure Data Studio $TAG_VERSION Extension API"
|
||||||
git push origin "vscode-types-$TAG_VERSION"
|
git push origin "azdata-types-$TAG_VERSION"
|
||||||
|
|
||||||
displayName: Push update to DefinitelyTyped
|
displayName: Push update to DefinitelyTyped
|
||||||
|
|
||||||
- bash: |
|
# - bash: |
|
||||||
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
# TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
||||||
CHANNEL="G1C14HJ2F"
|
# CHANNEL="G1C14HJ2F"
|
||||||
|
|
||||||
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
|
# MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame champion, please open this link, examine changes and create a PR:"
|
||||||
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
|
# LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
|
||||||
MESSAGE2="[@eamodio, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
|
# MESSAGE2="[@eamodio, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
|
||||||
|
|
||||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
# curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
||||||
-H 'Content-type: application/json; charset=utf-8' \
|
# -H 'Content-type: application/json; charset=utf-8' \
|
||||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
|
# --data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
|
||||||
https://slack.com/api/chat.postMessage
|
# https://slack.com/api/chat.postMessage
|
||||||
|
|
||||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
# curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
||||||
-H 'Content-type: application/json; charset=utf-8' \
|
# -H 'Content-type: application/json; charset=utf-8' \
|
||||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \
|
# --data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \
|
||||||
https://slack.com/api/chat.postMessage
|
# https://slack.com/api/chat.postMessage
|
||||||
|
|
||||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
# curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
||||||
-H 'Content-type: application/json; charset=utf-8' \
|
# -H 'Content-type: application/json; charset=utf-8' \
|
||||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \
|
# --data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \
|
||||||
https://slack.com/api/chat.postMessage
|
# https://slack.com/api/chat.postMessage
|
||||||
|
|
||||||
displayName: Send message on Slack
|
# displayName: Send message on Slack
|
||||||
|
|||||||
@@ -13,11 +13,11 @@ try {
|
|||||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||||
.toString()
|
.toString()
|
||||||
.trim();
|
.trim();
|
||||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
const dtsUri = `https://raw.githubusercontent.com/microsoft/azuredatastudio/${tag}/src/sql/azdata.d.ts`; // {{SQL CARBON EDIT}} Use our typings
|
||||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/azdata/index.d.ts'); // {{SQL CARBON EDIT}} Use our typings
|
||||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
||||||
updateDTSFile(outPath, tag);
|
updateDTSFile(outPath, tag);
|
||||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
console.log(`Done updating azdata.d.ts at ${outPath}`); // {{SQL CARBON EDIT}} Use our typings
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
console.error(err);
|
console.error(err);
|
||||||
@@ -51,21 +51,25 @@ function getNewFileContent(content, tag) {
|
|||||||
function getNewFileHeader(tag) {
|
function getNewFileHeader(tag) {
|
||||||
const [major, minor] = tag.split('.');
|
const [major, minor] = tag.split('.');
|
||||||
const shorttag = `${major}.${minor}`;
|
const shorttag = `${major}.${minor}`;
|
||||||
|
// {{SQL CARBON EDIT}} Use our own header
|
||||||
const header = [
|
const header = [
|
||||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
`// Type definitions for Azure Data Studio ${shorttag}`,
|
||||||
`// Project: https://github.com/microsoft/vscode`,
|
`// Project: https://github.com/microsoft/azuredatastudio`,
|
||||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
|
`// Definitions by: Charles Gagnon <https://github.com/Charles-Gagnon>`,
|
||||||
|
`// Alan Ren: <https://github.com/alanrenmsft>`,
|
||||||
|
`// Karl Burtram: <https://github.com/kburtram>`,
|
||||||
|
`// Ken Van Hyning: <https://github.com/kenvanhyning>`,
|
||||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
||||||
``,
|
``,
|
||||||
`/*---------------------------------------------------------------------------------------------`,
|
`/*---------------------------------------------------------------------------------------------`,
|
||||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||||
` * Licensed under the Source EULA.`,
|
` * Licensed under the MIT License.`,
|
||||||
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
` * See https://github.com/Microsoft/azuredatastudio/blob/main/LICENSE.txt for license information.`,
|
||||||
` *--------------------------------------------------------------------------------------------*/`,
|
` *--------------------------------------------------------------------------------------------*/`,
|
||||||
``,
|
``,
|
||||||
`/**`,
|
`/**`,
|
||||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
` * Type Definition for Azure Data Studio ${shorttag} Extension API`,
|
||||||
` * See https://code.visualstudio.com/api for more information`,
|
` * See https://docs.microsoft.com/sql/azure-data-studio/extensibility-apis for more information`,
|
||||||
` */`
|
` */`
|
||||||
].join('\n');
|
].join('\n');
|
||||||
return header;
|
return header;
|
||||||
|
|||||||
@@ -16,13 +16,13 @@ try {
|
|||||||
.toString()
|
.toString()
|
||||||
.trim();
|
.trim();
|
||||||
|
|
||||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
const dtsUri = `https://raw.githubusercontent.com/microsoft/azuredatastudio/${tag}/src/sql/azdata.d.ts`; // {{SQL CARBON EDIT}} Use our typings
|
||||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/azdata/index.d.ts'); // {{SQL CARBON EDIT}} Use our typings
|
||||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
||||||
|
|
||||||
updateDTSFile(outPath, tag);
|
updateDTSFile(outPath, tag);
|
||||||
|
|
||||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
console.log(`Done updating azdata.d.ts at ${outPath}`); // {{SQL CARBON EDIT}} Use our typings
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.error(err);
|
console.error(err);
|
||||||
console.error('Failed to update types');
|
console.error('Failed to update types');
|
||||||
@@ -63,21 +63,25 @@ function getNewFileHeader(tag: string) {
|
|||||||
const [major, minor] = tag.split('.');
|
const [major, minor] = tag.split('.');
|
||||||
const shorttag = `${major}.${minor}`;
|
const shorttag = `${major}.${minor}`;
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}} Use our own header
|
||||||
const header = [
|
const header = [
|
||||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
`// Type definitions for Azure Data Studio ${shorttag}`,
|
||||||
`// Project: https://github.com/microsoft/vscode`,
|
`// Project: https://github.com/microsoft/azuredatastudio`,
|
||||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
|
`// Definitions by: Charles Gagnon <https://github.com/Charles-Gagnon>`,
|
||||||
|
`// Alan Ren: <https://github.com/alanrenmsft>`,
|
||||||
|
`// Karl Burtram: <https://github.com/kburtram>`,
|
||||||
|
`// Ken Van Hyning: <https://github.com/kenvanhyning>`,
|
||||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
||||||
``,
|
``,
|
||||||
`/*---------------------------------------------------------------------------------------------`,
|
`/*---------------------------------------------------------------------------------------------`,
|
||||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||||
` * Licensed under the Source EULA.`,
|
` * Licensed under the MIT License.`,
|
||||||
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
` * See https://github.com/Microsoft/azuredatastudio/blob/main/LICENSE.txt for license information.`,
|
||||||
` *--------------------------------------------------------------------------------------------*/`,
|
` *--------------------------------------------------------------------------------------------*/`,
|
||||||
``,
|
``,
|
||||||
`/**`,
|
`/**`,
|
||||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
` * Type Definition for Azure Data Studio ${shorttag} Extension API`,
|
||||||
` * See https://code.visualstudio.com/api for more information`,
|
` * See https://docs.microsoft.com/sql/azure-data-studio/extensibility-apis for more information`,
|
||||||
` */`
|
` */`
|
||||||
].join('\n');
|
].join('\n');
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.x"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ jobs:
|
|||||||
echo "##vso[build.addbuildtag]$(VSCODE_QUALITY)"
|
echo "##vso[build.addbuildtag]$(VSCODE_QUALITY)"
|
||||||
displayName: Add Quality Build Tag
|
displayName: Add Quality Build Tag
|
||||||
- template: sql-product-compile.yml
|
- template: sql-product-compile.yml
|
||||||
timeoutInMinutes: 90
|
timeoutInMinutes: 120
|
||||||
|
|
||||||
- job: macOS
|
- job: macOS
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
|
||||||
@@ -47,31 +47,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- template: linux/sql-product-build-linux.yml
|
- template: linux/sql-product-build-linux.yml
|
||||||
parameters:
|
parameters:
|
||||||
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
|
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azcli", "azdata", "azurecore", "cms", "dacpac", "data-workspace", "import", "machine-learning", "notebook", "resource-deployment", "schema-compare", "sql-database-projects"]
|
||||||
timeoutInMinutes: 90
|
|
||||||
|
|
||||||
- job: LinuxWeb
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-18.04'
|
|
||||||
container: linux-x64
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: x64
|
|
||||||
dependsOn:
|
|
||||||
- Compile
|
|
||||||
steps:
|
|
||||||
- template: web/sql-product-build-web.yml
|
|
||||||
timeoutInMinutes: 90
|
|
||||||
|
|
||||||
- job: Docker
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-18.04'
|
|
||||||
container: linux-x64
|
|
||||||
dependsOn:
|
|
||||||
- Linux
|
|
||||||
steps:
|
|
||||||
- template: docker/sql-product-build-docker.yml
|
|
||||||
timeoutInMinutes: 90
|
timeoutInMinutes: 90
|
||||||
|
|
||||||
- job: Windows
|
- job: Windows
|
||||||
@@ -102,10 +78,8 @@ jobs:
|
|||||||
dependsOn:
|
dependsOn:
|
||||||
- macOS
|
- macOS
|
||||||
- Linux
|
- Linux
|
||||||
# - Docker
|
|
||||||
- Windows
|
- Windows
|
||||||
- Windows_Test
|
- Windows_Test
|
||||||
- LinuxWeb
|
|
||||||
- macOS_Signing
|
- macOS_Signing
|
||||||
steps:
|
steps:
|
||||||
- template: sql-release.yml
|
- template: sql-release.yml
|
||||||
|
|||||||
@@ -79,21 +79,8 @@ steps:
|
|||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn sqllint
|
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check sqllint strict-vscode
|
||||||
yarn gulp hygiene
|
displayName: Compile & Hygiene
|
||||||
yarn strict-vscode
|
|
||||||
yarn valid-layers-check
|
|
||||||
displayName: Run hygiene, eslint
|
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
yarn gulp compile-build
|
|
||||||
yarn gulp compile-extensions-build
|
|
||||||
yarn gulp minify-vscode
|
|
||||||
yarn gulp vscode-reh-linux-x64-min
|
|
||||||
yarn gulp vscode-reh-web-linux-x64-min
|
|
||||||
displayName: Compile
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
|
|||||||
29
build/azure-pipelines/sql-web-build.yml
Normal file
29
build/azure-pipelines/sql-web-build.yml
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
resources:
|
||||||
|
containers:
|
||||||
|
- container: linux-x64
|
||||||
|
image: sqltoolscontainers.azurecr.io/web-build-image:1
|
||||||
|
endpoint: ContainerRegistry
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
- job: LinuxWeb
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-18.04'
|
||||||
|
container: linux-x64
|
||||||
|
variables:
|
||||||
|
VSCODE_ARCH: x64
|
||||||
|
steps:
|
||||||
|
- template: web/sql-product-build-web.yml
|
||||||
|
timeoutInMinutes: 90
|
||||||
|
|
||||||
|
- job: Docker
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-18.04'
|
||||||
|
container: linux-x64
|
||||||
|
dependsOn:
|
||||||
|
- LinuxWeb
|
||||||
|
steps:
|
||||||
|
- template: docker/sql-product-build-docker.yml
|
||||||
|
timeoutInMinutes: 90
|
||||||
|
|
||||||
|
trigger: none
|
||||||
|
pr: none
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
@@ -2,56 +2,54 @@
|
|||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
'use strict';
|
'use strict';
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const path = require('path');
|
const path = require("path");
|
||||||
const es = require('event-stream');
|
const es = require("event-stream");
|
||||||
|
const vfs = require("vinyl-fs");
|
||||||
|
const util = require("../lib/util");
|
||||||
|
// @ts-ignore
|
||||||
|
const deps = require("../lib/dependencies");
|
||||||
const azure = require('gulp-azure-storage');
|
const azure = require('gulp-azure-storage');
|
||||||
const vfs = require('vinyl-fs');
|
|
||||||
const util = require('../lib/util');
|
|
||||||
const root = path.dirname(path.dirname(__dirname));
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
const commit = util.getVersion(root);
|
const commit = util.getVersion(root);
|
||||||
|
|
||||||
// optionally allow to pass in explicit base/maps to upload
|
// optionally allow to pass in explicit base/maps to upload
|
||||||
const [, , base, maps] = process.argv;
|
const [, , base, maps] = process.argv;
|
||||||
|
function src(base, maps = `${base}/**/*.map`) {
|
||||||
const fetch = function (base, maps = `${base}/**/*.map`) {
|
return vfs.src(maps, { base })
|
||||||
return vfs.src(maps, { base })
|
.pipe(es.mapSync((f) => {
|
||||||
.pipe(es.mapSync(f => {
|
f.path = `${f.base}/core/${f.relative}`;
|
||||||
f.path = `${f.base}/core/${f.relative}`;
|
return f;
|
||||||
return f;
|
}));
|
||||||
}));
|
}
|
||||||
};
|
function main() {
|
||||||
|
const sources = [];
|
||||||
function main() {
|
// vscode client maps (default)
|
||||||
const sources = [];
|
if (!base) {
|
||||||
|
const vs = src('out-vscode-min'); // client source-maps only
|
||||||
// vscode client maps (default)
|
sources.push(vs);
|
||||||
if (!base) {
|
const productionDependencies = deps.getProductionDependencies(root);
|
||||||
const vs = fetch('out-vscode-min'); // client source-maps only
|
const productionDependenciesSrc = productionDependencies.map(d => path.relative(root, d.path)).map(d => `./${d}/**/*.map`);
|
||||||
sources.push(vs);
|
const nodeModules = vfs.src(productionDependenciesSrc, { base: '.' })
|
||||||
|
.pipe(util.cleanNodeModules(path.join(root, 'build', '.moduleignore')));
|
||||||
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
|
sources.push(nodeModules);
|
||||||
sources.push(extensionsOut);
|
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
|
||||||
}
|
sources.push(extensionsOut);
|
||||||
|
}
|
||||||
// specific client base/maps
|
// specific client base/maps
|
||||||
else {
|
else {
|
||||||
sources.push(fetch(base, maps));
|
sources.push(src(base, maps));
|
||||||
}
|
}
|
||||||
|
return es.merge(...sources)
|
||||||
return es.merge(...sources)
|
.pipe(es.through(function (data) {
|
||||||
.pipe(es.through(function (data) {
|
console.log('Uploading Sourcemap', data.relative); // debug
|
||||||
console.log('Uploading Sourcemap', data.relative); // debug
|
this.emit('data', data);
|
||||||
this.emit('data', data);
|
}))
|
||||||
}))
|
.pipe(azure.upload({
|
||||||
.pipe(azure.upload({
|
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||||
account: process.env.AZURE_STORAGE_ACCOUNT,
|
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||||
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
container: 'sourcemaps',
|
||||||
container: 'sourcemaps',
|
prefix: commit + '/'
|
||||||
prefix: commit + '/'
|
}));
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
main();
|
main();
|
||||||
|
|||||||
67
build/azure-pipelines/upload-sourcemaps.ts
Normal file
67
build/azure-pipelines/upload-sourcemaps.ts
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as es from 'event-stream';
|
||||||
|
import * as Vinyl from 'vinyl';
|
||||||
|
import * as vfs from 'vinyl-fs';
|
||||||
|
import * as util from '../lib/util';
|
||||||
|
// @ts-ignore
|
||||||
|
import * as deps from '../lib/dependencies';
|
||||||
|
const azure = require('gulp-azure-storage');
|
||||||
|
|
||||||
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
|
const commit = util.getVersion(root);
|
||||||
|
|
||||||
|
// optionally allow to pass in explicit base/maps to upload
|
||||||
|
const [, , base, maps] = process.argv;
|
||||||
|
|
||||||
|
function src(base: string, maps = `${base}/**/*.map`) {
|
||||||
|
return vfs.src(maps, { base })
|
||||||
|
.pipe(es.mapSync((f: Vinyl) => {
|
||||||
|
f.path = `${f.base}/core/${f.relative}`;
|
||||||
|
return f;
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
function main() {
|
||||||
|
const sources = [];
|
||||||
|
|
||||||
|
// vscode client maps (default)
|
||||||
|
if (!base) {
|
||||||
|
const vs = src('out-vscode-min'); // client source-maps only
|
||||||
|
sources.push(vs);
|
||||||
|
|
||||||
|
const productionDependencies: { name: string, path: string, version: string }[] = deps.getProductionDependencies(root);
|
||||||
|
const productionDependenciesSrc = productionDependencies.map(d => path.relative(root, d.path)).map(d => `./${d}/**/*.map`);
|
||||||
|
const nodeModules = vfs.src(productionDependenciesSrc, { base: '.' })
|
||||||
|
.pipe(util.cleanNodeModules(path.join(root, 'build', '.moduleignore')));
|
||||||
|
sources.push(nodeModules);
|
||||||
|
|
||||||
|
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
|
||||||
|
sources.push(extensionsOut);
|
||||||
|
}
|
||||||
|
|
||||||
|
// specific client base/maps
|
||||||
|
else {
|
||||||
|
sources.push(src(base, maps));
|
||||||
|
}
|
||||||
|
|
||||||
|
return es.merge(...sources)
|
||||||
|
.pipe(es.through(function (data: Vinyl) {
|
||||||
|
console.log('Uploading Sourcemap', data.relative); // debug
|
||||||
|
this.emit('data', data);
|
||||||
|
}))
|
||||||
|
.pipe(azure.upload({
|
||||||
|
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||||
|
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||||
|
container: 'sourcemaps',
|
||||||
|
prefix: commit + '/'
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
||||||
24
build/azure-pipelines/web/build/Dockerfile
Normal file
24
build/azure-pipelines/web/build/Dockerfile
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
#Download base image ubuntu 21.04
|
||||||
|
FROM ubuntu:21.04
|
||||||
|
ENV TZ=America/Los_Angeles
|
||||||
|
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
|
||||||
|
|
||||||
|
# Update Software repository
|
||||||
|
RUN apt-get update && apt-get upgrade -y
|
||||||
|
|
||||||
|
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
|
||||||
|
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
|
||||||
|
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++ python
|
||||||
|
|
||||||
|
#docker
|
||||||
|
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||||
|
RUN apt-key fingerprint 0EBFCD88
|
||||||
|
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
|
||||||
|
RUN apt-get update
|
||||||
|
RUN apt-get -y install docker-ce docker-ce-cli containerd.io
|
||||||
|
|
||||||
|
# This image needs to be built on a linux host; some weird stuff happens and the xvfb service won't start
|
||||||
|
# if built on a windows host.
|
||||||
|
ADD ./xvfb.init /etc/init.d/xvfb
|
||||||
|
RUN chmod +x /etc/init.d/xvfb
|
||||||
|
RUN update-rc.d xvfb defaults
|
||||||
53
build/azure-pipelines/web/build/xvfb.init
Normal file
53
build/azure-pipelines/web/build/xvfb.init
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# /etc/rc.d/init.d/xvfbd
|
||||||
|
#
|
||||||
|
# chkconfig: 345 95 28
|
||||||
|
# description: Starts/Stops X Virtual Framebuffer server
|
||||||
|
# processname: Xvfb
|
||||||
|
#
|
||||||
|
### BEGIN INIT INFO
|
||||||
|
# Provides: xvfb
|
||||||
|
# Required-Start: $remote_fs $syslog
|
||||||
|
# Required-Stop: $remote_fs $syslog
|
||||||
|
# Default-Start: 2 3 4 5
|
||||||
|
# Default-Stop: 0 1 6
|
||||||
|
# Short-Description: Start xvfb at boot time
|
||||||
|
# Description: Enable xvfb provided by daemon.
|
||||||
|
### END INIT INFO
|
||||||
|
|
||||||
|
[ "${NETWORKING}" = "no" ] && exit 0
|
||||||
|
|
||||||
|
PROG="/usr/bin/Xvfb"
|
||||||
|
PROG_OPTIONS=":10 -ac -screen 0 1024x768x24"
|
||||||
|
PROG_OUTPUT="/tmp/Xvfb.out"
|
||||||
|
|
||||||
|
case "$1" in
|
||||||
|
start)
|
||||||
|
echo "Starting : X Virtual Frame Buffer "
|
||||||
|
$PROG $PROG_OPTIONS>>$PROG_OUTPUT 2>&1 &
|
||||||
|
disown -ar
|
||||||
|
;;
|
||||||
|
stop)
|
||||||
|
echo "Shutting down : X Virtual Frame Buffer"
|
||||||
|
killproc $PROG
|
||||||
|
RETVAL=$?
|
||||||
|
[ $RETVAL -eq 0 ] && /bin/rm -f /var/lock/subsys/Xvfb
|
||||||
|
/var/run/Xvfb.pid
|
||||||
|
echo
|
||||||
|
;;
|
||||||
|
restart|reload)
|
||||||
|
$0 stop
|
||||||
|
$0 start
|
||||||
|
RETVAL=$?
|
||||||
|
;;
|
||||||
|
status)
|
||||||
|
status Xvfb
|
||||||
|
RETVAL=$?
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo $"Usage: $0 (start|stop|restart|reload|status)"
|
||||||
|
exit 1
|
||||||
|
esac
|
||||||
|
|
||||||
|
exit $RETVAL
|
||||||
35
build/azure-pipelines/web/createDrop.sh
Executable file
35
build/azure-pipelines/web/createDrop.sh
Executable file
@@ -0,0 +1,35 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -e
|
||||||
|
REPO="$(pwd)"
|
||||||
|
ROOT="$REPO/.."
|
||||||
|
|
||||||
|
# Publish tarball
|
||||||
|
mkdir -p $REPO/.build/linux/{archive,server}
|
||||||
|
PLATFORM_LINUX="linux-x64"
|
||||||
|
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
|
||||||
|
BUILD="$ROOT/$BUILDNAME"
|
||||||
|
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
|
||||||
|
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
|
||||||
|
|
||||||
|
rm -rf $ROOT/code-*.tar.*
|
||||||
|
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
||||||
|
|
||||||
|
# # Publish Remote Extension Host
|
||||||
|
# LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
|
||||||
|
# SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
|
||||||
|
# SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
|
||||||
|
# SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
|
||||||
|
|
||||||
|
# rm -rf $ROOT/azuredatastudio-server-*.tar.*
|
||||||
|
# (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||||
|
|
||||||
|
# Publish Remote Extension Host (Web)
|
||||||
|
LEGACY_SERVER_BUILD_NAME_WEB="azuredatastudio-reh-web-$PLATFORM_LINUX"
|
||||||
|
SERVER_BUILD_NAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web"
|
||||||
|
SERVER_TARBALL_FILENAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web.tar.gz"
|
||||||
|
SERVER_TARBALL_PATH_WEB="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME_WEB"
|
||||||
|
|
||||||
|
rm -rf $ROOT/azuredatastudio-server-*.tar.*
|
||||||
|
(cd $ROOT && mv vscode-reh-web-linux-x64 $SERVER_BUILD_NAME_WEB && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH_WEB $SERVER_BUILD_NAME_WEB)
|
||||||
|
|
||||||
|
node build/azure-pipelines/common/copyArtifacts.js
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -60,6 +60,7 @@ steps:
|
|||||||
displayName: Extract node_modules cache
|
displayName: Extract node_modules cache
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
|
set -e
|
||||||
npx https://aka.ms/enablesecurefeed standAlone
|
npx https://aka.ms/enablesecurefeed standAlone
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 5
|
||||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
@@ -13,17 +12,6 @@ steps:
|
|||||||
inputs:
|
inputs:
|
||||||
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||||
KeyVaultName: ado-secrets
|
KeyVaultName: ado-secrets
|
||||||
SecretsFilter: 'github-distro-mixin-password'
|
|
||||||
|
|
||||||
- task: DownloadPipelineArtifact@2
|
|
||||||
inputs:
|
|
||||||
artifact: Compilation
|
|
||||||
displayName: Download compilation output
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
tar -xzf $(Pipeline.Workspace)/compilation.tar.gz
|
|
||||||
displayName: Extract compilation output
|
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
@@ -44,33 +32,46 @@ steps:
|
|||||||
git merge $(node -p "require('./package.json').distro")
|
git merge $(node -p "require('./package.json').distro")
|
||||||
displayName: Merge distro
|
displayName: Merge distro
|
||||||
|
|
||||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
- script: |
|
||||||
# displayName: Restore Cache - Node Modules
|
mkdir -p .build
|
||||||
# inputs:
|
node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
|
||||||
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
displayName: Prepare yarn cache key
|
||||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
|
||||||
# vstsFeed: 'npm-vscode'
|
- task: Cache@2
|
||||||
|
inputs:
|
||||||
|
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
|
||||||
|
path: .build/node_modules_cache
|
||||||
|
cacheHitVar: NODE_MODULES_RESTORED
|
||||||
|
displayName: Restore Cache - Node Modules
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
tar -xzf .build/node_modules_cache/cache.tgz
|
||||||
|
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
|
displayName: Extract node_modules archive
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||||
displayName: Install dependencies
|
displayName: Install dependencies
|
||||||
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['NODE_MODULES_RESTORED'], 'true'))
|
||||||
|
|
||||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- script: |
|
||||||
# displayName: Save Cache - Node Modules
|
set -e
|
||||||
# inputs:
|
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
|
||||||
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
mkdir -p .build/node_modules_cache
|
||||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
|
||||||
# vstsFeed: 'npm-vscode'
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
displayName: Create node_modules archive
|
||||||
|
|
||||||
# - script: |
|
- script: |
|
||||||
# set -e
|
set -e
|
||||||
# yarn postinstall
|
yarn postinstall
|
||||||
# displayName: Run postinstall scripts
|
displayName: Run postinstall scripts
|
||||||
# condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), eq(variables['NODE_MODULES_RESTORED'], 'true'))
|
||||||
|
|
||||||
|
# Mixin must run before optimize, because the CSS loader will
|
||||||
|
# inline small SVGs
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
node build/azure-pipelines/mixin
|
node build/azure-pipelines/mixin
|
||||||
@@ -78,25 +79,128 @@ steps:
|
|||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
yarn sqllint
|
||||||
yarn gulp vscode-web-min-ci
|
yarn gulp hygiene
|
||||||
displayName: Build
|
yarn strict-vscode
|
||||||
|
yarn valid-layers-check
|
||||||
|
displayName: Run hygiene, eslint
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
# upload only the workbench.web.api.js source maps because
|
|
||||||
# we just compiled these bits in the previous step and the
|
|
||||||
# general task to upload source maps has already been run
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
AZURE_STORAGE_ACCOUNT="$(sourcemap-storage-account)" \
|
yarn gulp compile-build
|
||||||
AZURE_STORAGE_ACCESS_KEY="$(sourcemap-storage-key)" \
|
yarn gulp compile-extensions-build
|
||||||
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
|
yarn gulp minify-vscode
|
||||||
displayName: Upload sourcemaps (Web)
|
yarn gulp vscode-linux-x64-min-ci
|
||||||
|
yarn gulp vscode-web-min-ci
|
||||||
|
yarn gulp vscode-reh-linux-x64-min
|
||||||
|
yarn gulp vscode-reh-web-linux-x64-yarnrc-extensions
|
||||||
|
yarn gulp vscode-reh-web-linux-x64-min
|
||||||
|
displayName: Compile
|
||||||
|
|
||||||
# - script: |
|
# - script: |
|
||||||
# set -e
|
# set -e
|
||||||
# AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
# AZURE_STORAGE_ACCOUNT="$(sourcemap-storage-account)" \
|
||||||
# AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
# AZURE_STORAGE_ACCESS_KEY="$(sourcemap-storage-key)" \
|
||||||
# VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
# node build/azure-pipelines/upload-sourcemaps
|
||||||
# ./build/azure-pipelines/web/publish.sh
|
# displayName: Upload sourcemaps
|
||||||
# displayName: Publish
|
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
|
||||||
|
VERSION=$(node -p "require(\"./package.json\").version")
|
||||||
|
|
||||||
|
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$BUILD_SOURCEVERSION\" }" > ".build/version.json"
|
||||||
|
|
||||||
|
node build/azure-pipelines/common/copyArtifacts.js
|
||||||
|
displayName: Write Version Information
|
||||||
|
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
displayName: 'Publish Artifact: drop'
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
|
||||||
|
displayName: Compress compilation artifact
|
||||||
|
|
||||||
|
- task: PublishPipelineArtifact@1
|
||||||
|
inputs:
|
||||||
|
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
|
||||||
|
artifactName: Compilation
|
||||||
|
displayName: Publish compilation artifact
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp package-rebuild-extensions
|
||||||
|
yarn gulp compile-extensions
|
||||||
|
yarn gulp package-external-extensions
|
||||||
|
displayName: Package External extensions
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp package-langpacks
|
||||||
|
displayName: Package Langpacks
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp vscode-linux-x64-build-deb
|
||||||
|
displayName: Build Deb
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp vscode-linux-x64-build-rpm
|
||||||
|
displayName: Build Rpm
|
||||||
|
|
||||||
|
- task: UseDotNet@2
|
||||||
|
displayName: 'Install .NET Core sdk for signing'
|
||||||
|
inputs:
|
||||||
|
packageType: sdk
|
||||||
|
version: 2.1.x
|
||||||
|
installationPath: $(Agent.ToolsDirectory)/dotnet
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: 'Code Signing'
|
||||||
|
FolderPath: '$(Build.SourcesDirectory)/.build'
|
||||||
|
Pattern: 'extensions/*.vsix,langpacks/*.vsix'
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-233016",
|
||||||
|
"operationSetCode": "OpcSign",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "FileDigest",
|
||||||
|
"parameterValue": "/fd \"SHA256\""
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "sign",
|
||||||
|
"toolVersion": "1.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"keyCode": "CP-233016",
|
||||||
|
"operationSetCode": "OpcVerify",
|
||||||
|
"parameters": [],
|
||||||
|
"toolName": "sign",
|
||||||
|
"toolVersion": "1.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 120
|
||||||
|
displayName: 'Signing Extensions and Langpacks'
|
||||||
|
condition: and(succeeded(), eq(variables['signed'], true))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
./build/azure-pipelines/web/createDrop.sh
|
||||||
|
displayName: Create Drop
|
||||||
|
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
displayName: 'Publish Artifact: drop'
|
||||||
|
continueOnError: true
|
||||||
|
condition: succeededOrFailed()
|
||||||
|
|
||||||
|
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||||
|
displayName: 'Component Detection'
|
||||||
|
inputs:
|
||||||
|
failOnAlert: true
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "12.18.3"
|
versionSpec: "14.x"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
@@ -65,8 +65,10 @@ steps:
|
|||||||
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
|
||||||
displayName: Extract node_modules cache
|
displayName: Extract node_modules cache
|
||||||
|
|
||||||
- script: |
|
- powershell: |
|
||||||
npx https://aka.ms/enablesecurefeed standAlone
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { npx https://aka.ms/enablesecurefeed standAlone }
|
||||||
timeoutInMinutes: 5
|
timeoutInMinutes: 5
|
||||||
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
|
||||||
displayName: Switch to Terrapin packages
|
displayName: Switch to Terrapin packages
|
||||||
@@ -76,6 +78,7 @@ steps:
|
|||||||
. build/azure-pipelines/win32/retry.ps1
|
. build/azure-pipelines/win32/retry.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
$env:npm_config_arch="$(VSCODE_ARCH)"
|
$env:npm_config_arch="$(VSCODE_ARCH)"
|
||||||
|
$env:npm_config_build_from_source="true"
|
||||||
$env:CHILD_CONCURRENCY="1"
|
$env:CHILD_CONCURRENCY="1"
|
||||||
retry { exec { yarn --frozen-lockfile } }
|
retry { exec { yarn --frozen-lockfile } }
|
||||||
env:
|
env:
|
||||||
@@ -177,7 +180,7 @@ steps:
|
|||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
|
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
|
||||||
displayName: Run integration tests (Browser)
|
displayName: Run integration tests (Browser)
|
||||||
timeoutInMinutes: 7
|
timeoutInMinutes: 10
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||||
|
|
||||||
- powershell: |
|
- powershell: |
|
||||||
@@ -320,7 +323,3 @@ steps:
|
|||||||
artifact: vscode-server-win32-$(VSCODE_ARCH)-web
|
artifact: vscode-server-win32-$(VSCODE_ARCH)-web
|
||||||
displayName: Publish web server archive
|
displayName: Publish web server archive
|
||||||
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
|
||||||
displayName: "Component Detection"
|
|
||||||
continueOnError: true
|
|
||||||
|
|||||||
@@ -23,7 +23,17 @@ ipcMain.handle('pickdir', async () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
app.once('ready', () => {
|
app.once('ready', () => {
|
||||||
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true, enableWebSQL: false, nativeWindowOpen: true } });
|
window = new BrowserWindow({
|
||||||
|
width: 800,
|
||||||
|
height: 600,
|
||||||
|
webPreferences: {
|
||||||
|
nodeIntegration: true,
|
||||||
|
contextIsolation: false,
|
||||||
|
webviewTag: true,
|
||||||
|
enableWebSQL: false,
|
||||||
|
nativeWindowOpen: true
|
||||||
|
}
|
||||||
|
});
|
||||||
window.setMenuBarVisibility(false);
|
window.setMenuBarVisibility(false);
|
||||||
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
|
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
|
||||||
// window.webContents.openDevTools();
|
// window.webContents.openDevTools();
|
||||||
|
|||||||
@@ -16,14 +16,14 @@ async function main() {
|
|||||||
throw new Error('$AGENT_BUILDDIRECTORY not set');
|
throw new Error('$AGENT_BUILDDIRECTORY not set');
|
||||||
}
|
}
|
||||||
const appName = product.nameLong + '.app';
|
const appName = product.nameLong + '.app';
|
||||||
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
|
const x64AppPath = path.join(buildDir, 'VSCode-darwin-x64', appName);
|
||||||
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
|
const arm64AppPath = path.join(buildDir, 'VSCode-darwin-arm64', appName);
|
||||||
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||||
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||||
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
|
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
|
||||||
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
|
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
|
||||||
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
|
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
|
||||||
await vscode_universal_1.makeUniversalApp({
|
await (0, vscode_universal_1.makeUniversalApp)({
|
||||||
x64AppPath,
|
x64AppPath,
|
||||||
arm64AppPath,
|
arm64AppPath,
|
||||||
x64AsarPath,
|
x64AsarPath,
|
||||||
@@ -33,6 +33,7 @@ async function main() {
|
|||||||
'Credits.rtf',
|
'Credits.rtf',
|
||||||
'CodeResources',
|
'CodeResources',
|
||||||
'fsevents.node',
|
'fsevents.node',
|
||||||
|
'Info.plist',
|
||||||
'.npmrc'
|
'.npmrc'
|
||||||
],
|
],
|
||||||
outAppPath,
|
outAppPath,
|
||||||
|
|||||||
@@ -20,8 +20,8 @@ async function main() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const appName = product.nameLong + '.app';
|
const appName = product.nameLong + '.app';
|
||||||
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
|
const x64AppPath = path.join(buildDir, 'VSCode-darwin-x64', appName);
|
||||||
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
|
const arm64AppPath = path.join(buildDir, 'VSCode-darwin-arm64', appName);
|
||||||
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||||
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
|
||||||
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
|
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
|
||||||
@@ -38,6 +38,7 @@ async function main() {
|
|||||||
'Credits.rtf',
|
'Credits.rtf',
|
||||||
'CodeResources',
|
'CodeResources',
|
||||||
'fsevents.node',
|
'fsevents.node',
|
||||||
|
'Info.plist', // TODO@deepak1556: regressed with 11.4.2 internal builds
|
||||||
'.npmrc'
|
'.npmrc'
|
||||||
],
|
],
|
||||||
outAppPath,
|
outAppPath,
|
||||||
|
|||||||
@@ -5,7 +5,9 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const codesign = require("electron-osx-sign");
|
const codesign = require("electron-osx-sign");
|
||||||
|
const fs = require("fs-extra");
|
||||||
const path = require("path");
|
const path = require("path");
|
||||||
|
const plist = require("plist");
|
||||||
const util = require("../lib/util");
|
const util = require("../lib/util");
|
||||||
const product = require("../../product.json");
|
const product = require("../../product.json");
|
||||||
async function main() {
|
async function main() {
|
||||||
@@ -25,6 +27,7 @@ async function main() {
|
|||||||
const helperAppBaseName = product.nameShort;
|
const helperAppBaseName = product.nameShort;
|
||||||
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
|
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
|
||||||
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
|
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
|
||||||
|
const infoPlistPath = path.resolve(appRoot, appName, 'Contents', 'Info.plist');
|
||||||
const defaultOpts = {
|
const defaultOpts = {
|
||||||
app: path.join(appRoot, appName),
|
app: path.join(appRoot, appName),
|
||||||
platform: 'darwin',
|
platform: 'darwin',
|
||||||
@@ -46,6 +49,14 @@ async function main() {
|
|||||||
} });
|
} });
|
||||||
const gpuHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, gpuHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist') });
|
const gpuHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, gpuHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist') });
|
||||||
const rendererHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, rendererHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist') });
|
const rendererHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, rendererHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist') });
|
||||||
|
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
|
||||||
|
let infoPlistJson = plist.parse(infoPlistString);
|
||||||
|
Object.assign(infoPlistJson, {
|
||||||
|
NSAppleEventsUsageDescription: 'An application in Visual Studio Code wants to use AppleScript.',
|
||||||
|
NSMicrophoneUsageDescription: 'An application in Visual Studio Code wants to use the Microphone.',
|
||||||
|
NSCameraUsageDescription: 'An application in Visual Studio Code wants to use the Camera.'
|
||||||
|
});
|
||||||
|
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
|
||||||
await codesign.signAsync(gpuHelperOpts);
|
await codesign.signAsync(gpuHelperOpts);
|
||||||
await codesign.signAsync(rendererHelperOpts);
|
await codesign.signAsync(rendererHelperOpts);
|
||||||
await codesign.signAsync(appOpts);
|
await codesign.signAsync(appOpts);
|
||||||
|
|||||||
@@ -6,7 +6,9 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
import * as codesign from 'electron-osx-sign';
|
import * as codesign from 'electron-osx-sign';
|
||||||
|
import * as fs from 'fs-extra';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
|
import * as plist from 'plist';
|
||||||
import * as util from '../lib/util';
|
import * as util from '../lib/util';
|
||||||
import * as product from '../../product.json';
|
import * as product from '../../product.json';
|
||||||
|
|
||||||
@@ -30,6 +32,7 @@ async function main(): Promise<void> {
|
|||||||
const helperAppBaseName = product.nameShort;
|
const helperAppBaseName = product.nameShort;
|
||||||
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
|
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
|
||||||
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
|
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
|
||||||
|
const infoPlistPath = path.resolve(appRoot, appName, 'Contents', 'Info.plist');
|
||||||
|
|
||||||
const defaultOpts: codesign.SignOptions = {
|
const defaultOpts: codesign.SignOptions = {
|
||||||
app: path.join(appRoot, appName),
|
app: path.join(appRoot, appName),
|
||||||
@@ -68,6 +71,15 @@ async function main(): Promise<void> {
|
|||||||
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'),
|
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
|
||||||
|
let infoPlistJson = plist.parse(infoPlistString);
|
||||||
|
Object.assign(infoPlistJson, {
|
||||||
|
NSAppleEventsUsageDescription: 'An application in Visual Studio Code wants to use AppleScript.',
|
||||||
|
NSMicrophoneUsageDescription: 'An application in Visual Studio Code wants to use the Microphone.',
|
||||||
|
NSCameraUsageDescription: 'An application in Visual Studio Code wants to use the Camera.'
|
||||||
|
});
|
||||||
|
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
|
||||||
|
|
||||||
await codesign.signAsync(gpuHelperOpts);
|
await codesign.signAsync(gpuHelperOpts);
|
||||||
await codesign.signAsync(rendererHelperOpts);
|
await codesign.signAsync(rendererHelperOpts);
|
||||||
await codesign.signAsync(appOpts as any);
|
await codesign.signAsync(appOpts as any);
|
||||||
|
|||||||
@@ -51,8 +51,10 @@ module.exports.indentationFilter = [
|
|||||||
'!test/monaco/out/**',
|
'!test/monaco/out/**',
|
||||||
'!test/smoke/out/**',
|
'!test/smoke/out/**',
|
||||||
'!extensions/typescript-language-features/test-workspace/**',
|
'!extensions/typescript-language-features/test-workspace/**',
|
||||||
|
'!extensions/notebook-markdown-extensions/notebook-out/**',
|
||||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||||
|
'!extensions/vscode-custom-editor-tests/test-workspace/**',
|
||||||
'!build/monaco/**',
|
'!build/monaco/**',
|
||||||
'!build/win32/**',
|
'!build/win32/**',
|
||||||
|
|
||||||
@@ -86,6 +88,8 @@ module.exports.indentationFilter = [
|
|||||||
'!**/*.Dockerfile',
|
'!**/*.Dockerfile',
|
||||||
'!**/*.dockerfile',
|
'!**/*.dockerfile',
|
||||||
'!extensions/markdown-language-features/media/*.js',
|
'!extensions/markdown-language-features/media/*.js',
|
||||||
|
'!extensions/markdown-language-features/notebook-out/*.js',
|
||||||
|
'!extensions/notebook-markdown-extensions/notebook-out/*.js',
|
||||||
'!extensions/simple-browser/media/*.js',
|
'!extensions/simple-browser/media/*.js',
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -115,6 +119,7 @@ module.exports.copyrightFilter = [
|
|||||||
'!resources/completions/**',
|
'!resources/completions/**',
|
||||||
'!extensions/configuration-editing/build/inline-allOf.ts',
|
'!extensions/configuration-editing/build/inline-allOf.ts',
|
||||||
'!extensions/markdown-language-features/media/highlight.css',
|
'!extensions/markdown-language-features/media/highlight.css',
|
||||||
|
'!extensions/notebook-markdown-extensions/notebook-out/**',
|
||||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||||
'!extensions/*/server/bin/*',
|
'!extensions/*/server/bin/*',
|
||||||
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ require('events').EventEmitter.defaultMaxListeners = 100;
|
|||||||
|
|
||||||
const gulp = require('gulp');
|
const gulp = require('gulp');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
|
const child_process = require('child_process');
|
||||||
const nodeUtil = require('util');
|
const nodeUtil = require('util');
|
||||||
const es = require('event-stream');
|
const es = require('event-stream');
|
||||||
const filter = require('gulp-filter');
|
const filter = require('gulp-filter');
|
||||||
@@ -24,26 +25,55 @@ const ansiColors = require('ansi-colors');
|
|||||||
const ext = require('./lib/extensions');
|
const ext = require('./lib/extensions');
|
||||||
|
|
||||||
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
const sqlLocalizedExtensions = [
|
|
||||||
'admin-tool-ext-win',
|
|
||||||
'agent',
|
|
||||||
'cms',
|
|
||||||
'dacpac',
|
|
||||||
'import',
|
|
||||||
'machine-learning',
|
|
||||||
'profiler',
|
|
||||||
'schema-compare',
|
|
||||||
'server-report',
|
|
||||||
'sql-assessment',
|
|
||||||
'sql-database-projects'
|
|
||||||
];
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}} - TODO: Import needs to be updated to work with langpacks.
|
||||||
|
const sqlLocalizedExtensions = [
|
||||||
|
'import',
|
||||||
|
];
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}} Not doing this for us right now
|
||||||
|
// To save 250ms for each gulp startup, we are caching the result here
|
||||||
const compilations = glob.sync('**/tsconfig.json', {
|
const compilations = glob.sync('**/tsconfig.json', {
|
||||||
cwd: extensionsPath,
|
cwd: extensionsPath,
|
||||||
ignore: ['**/out/**', '**/node_modules/**']
|
ignore: ['**/out/**', '**/node_modules/**']
|
||||||
});
|
});
|
||||||
|
// const compilations = [
|
||||||
|
// 'configuration-editing/build/tsconfig.json',
|
||||||
|
// 'configuration-editing/tsconfig.json',
|
||||||
|
// 'css-language-features/client/tsconfig.json',
|
||||||
|
// 'css-language-features/server/tsconfig.json',
|
||||||
|
// 'debug-auto-launch/tsconfig.json',
|
||||||
|
// 'debug-server-ready/tsconfig.json',
|
||||||
|
// 'emmet/tsconfig.json',
|
||||||
|
// 'extension-editing/tsconfig.json',
|
||||||
|
// 'git/tsconfig.json',
|
||||||
|
// 'github-authentication/tsconfig.json',
|
||||||
|
// 'github/tsconfig.json',
|
||||||
|
// 'grunt/tsconfig.json',
|
||||||
|
// 'gulp/tsconfig.json',
|
||||||
|
// 'html-language-features/client/tsconfig.json',
|
||||||
|
// 'html-language-features/server/tsconfig.json',
|
||||||
|
// 'image-preview/tsconfig.json',
|
||||||
|
// 'jake/tsconfig.json',
|
||||||
|
// 'json-language-features/client/tsconfig.json',
|
||||||
|
// 'json-language-features/server/tsconfig.json',
|
||||||
|
// 'markdown-language-features/preview-src/tsconfig.json',
|
||||||
|
// 'markdown-language-features/tsconfig.json',
|
||||||
|
// 'merge-conflict/tsconfig.json',
|
||||||
|
// 'microsoft-authentication/tsconfig.json',
|
||||||
|
// 'npm/tsconfig.json',
|
||||||
|
// 'php-language-features/tsconfig.json',
|
||||||
|
// 'search-result/tsconfig.json',
|
||||||
|
// 'simple-browser/tsconfig.json',
|
||||||
|
// 'testing-editor-contributions/tsconfig.json',
|
||||||
|
// 'typescript-language-features/test-workspace/tsconfig.json',
|
||||||
|
// 'typescript-language-features/tsconfig.json',
|
||||||
|
// 'vscode-api-tests/tsconfig.json',
|
||||||
|
// 'vscode-colorize-tests/tsconfig.json',
|
||||||
|
// 'vscode-custom-editor-tests/tsconfig.json',
|
||||||
|
// 'vscode-notebook-tests/tsconfig.json',
|
||||||
|
// 'vscode-test-resolver/tsconfig.json'
|
||||||
|
// ];
|
||||||
|
|
||||||
const getBaseUrl = out => `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}/${out}`;
|
const getBaseUrl = out => `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}/${out}`;
|
||||||
|
|
||||||
@@ -175,7 +205,50 @@ exports.watchExtensionsTask = watchExtensionsTask;
|
|||||||
const compileExtensionsBuildLegacyTask = task.define('compile-extensions-build-legacy', task.parallel(...tasks.map(t => t.compileBuildTask)));
|
const compileExtensionsBuildLegacyTask = task.define('compile-extensions-build-legacy', task.parallel(...tasks.map(t => t.compileBuildTask)));
|
||||||
gulp.task(compileExtensionsBuildLegacyTask);
|
gulp.task(compileExtensionsBuildLegacyTask);
|
||||||
|
|
||||||
// Azure Pipelines
|
//#region Extension media
|
||||||
|
|
||||||
|
// Additional projects to webpack. These typically build code for webviews
|
||||||
|
const webpackMediaConfigFiles = [
|
||||||
|
'markdown-language-features/webpack.config.js',
|
||||||
|
'simple-browser/webpack.config.js',
|
||||||
|
];
|
||||||
|
|
||||||
|
// Additional projects to run esbuild on. These typically build code for webviews
|
||||||
|
const esbuildMediaScripts = [
|
||||||
|
'markdown-language-features/esbuild.js',
|
||||||
|
'notebook-markdown-extensions/esbuild.js',
|
||||||
|
];
|
||||||
|
|
||||||
|
const compileExtensionMediaTask = task.define('compile-extension-media', () => buildExtensionMedia(false));
|
||||||
|
gulp.task(compileExtensionMediaTask);
|
||||||
|
exports.compileExtensionMediaTask = compileExtensionMediaTask;
|
||||||
|
|
||||||
|
const watchExtensionMedia = task.define('watch-extension-media', () => buildExtensionMedia(true));
|
||||||
|
gulp.task(watchExtensionMedia);
|
||||||
|
exports.watchExtensionMedia = watchExtensionMedia;
|
||||||
|
|
||||||
|
const compileExtensionMediaBuildTask = task.define('compile-extension-media-build', () => buildExtensionMedia(false, '.build/extensions'));
|
||||||
|
gulp.task(compileExtensionMediaBuildTask);
|
||||||
|
|
||||||
|
async function buildExtensionMedia(isWatch, outputRoot) {
|
||||||
|
const webpackConfigLocations = webpackMediaConfigFiles.map(p => {
|
||||||
|
return {
|
||||||
|
configPath: path.join(extensionsPath, p),
|
||||||
|
outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
|
||||||
|
};
|
||||||
|
});
|
||||||
|
return Promise.all([
|
||||||
|
webpackExtensions('webpacking extension media', isWatch, webpackConfigLocations),
|
||||||
|
esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({
|
||||||
|
script: path.join(extensionsPath, p),
|
||||||
|
outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
|
||||||
|
}))),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
//#endregion
|
||||||
|
|
||||||
|
//#region Azure Pipelines
|
||||||
|
|
||||||
const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimraf('.build/extensions'));
|
const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimraf('.build/extensions'));
|
||||||
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
|
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
|
||||||
@@ -185,10 +258,55 @@ const compileExtensionsBuildTask = task.define('compile-extensions-build', task.
|
|||||||
));
|
));
|
||||||
|
|
||||||
gulp.task(compileExtensionsBuildTask);
|
gulp.task(compileExtensionsBuildTask);
|
||||||
gulp.task(task.define('extensions-ci', task.series(compileExtensionsBuildTask)));
|
gulp.task(task.define('extensions-ci', task.series(compileExtensionsBuildTask, compileExtensionMediaBuildTask)));
|
||||||
|
|
||||||
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
||||||
|
|
||||||
|
//#endregion
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
//#region XLF Creation
|
||||||
|
|
||||||
|
//Get every extension in 'extensions' to create XLF files.
|
||||||
|
const exportCompilations = glob.sync('**/package.json', {
|
||||||
|
cwd: extensionsPath,
|
||||||
|
ignore: ['**/out/**', '**/node_modules/**', 'package.json']
|
||||||
|
});
|
||||||
|
|
||||||
|
//Run the localization packaging task on all extensions in ADS.
|
||||||
|
const exportTasks = exportCompilations.map(function (packageFile) {
|
||||||
|
const locFunc = require('./lib/locFunc');
|
||||||
|
const relativeDirname = path.dirname(packageFile);
|
||||||
|
|
||||||
|
const extensionName = relativeDirname.replace(/\//g, '-');
|
||||||
|
const packageTask = task.define(`localization-package-extension:${extensionName}`, task.series(() => {
|
||||||
|
return locFunc.packageSingleExtensionStream(extensionName)
|
||||||
|
.pipe(gulp.dest('.build'));
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Tasks
|
||||||
|
gulp.task(packageTask);
|
||||||
|
|
||||||
|
return { packageTask };
|
||||||
|
});
|
||||||
|
|
||||||
|
const packageLocalizationExtensionsTask = task.define('package-localization-extensions-task', task.series(...exportTasks.map(t => t.packageTask)));
|
||||||
|
gulp.task(packageLocalizationExtensionsTask);
|
||||||
|
|
||||||
|
//Builds all ADS extensions including external/excluded extensions (only for creating XLF files, not for compiling extensions for shipping)
|
||||||
|
const compileLocalizationExtensionsBuildTask = task.define('compile-localization-extensions-build', task.series(
|
||||||
|
cleanExtensionsBuildTask,
|
||||||
|
compileExtensionsTask,
|
||||||
|
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream(false).pipe(gulp.dest('.build'))),
|
||||||
|
packageLocalizationExtensionsTask,
|
||||||
|
));
|
||||||
|
|
||||||
|
gulp.task(compileLocalizationExtensionsBuildTask);
|
||||||
|
exports.compileLocalizationExtensionsBuildTask = compileLocalizationExtensionsBuildTask;
|
||||||
|
|
||||||
|
//#endregion
|
||||||
|
// {{SQL CARBON EDIT}} end
|
||||||
|
|
||||||
const compileWebExtensionsTask = task.define('compile-web', () => buildWebExtensions(false));
|
const compileWebExtensionsTask = task.define('compile-web', () => buildWebExtensions(false));
|
||||||
gulp.task(compileWebExtensionsTask);
|
gulp.task(compileWebExtensionsTask);
|
||||||
exports.compileWebExtensionsTask = compileWebExtensionsTask;
|
exports.compileWebExtensionsTask = compileWebExtensionsTask;
|
||||||
@@ -198,23 +316,39 @@ gulp.task(watchWebExtensionsTask);
|
|||||||
exports.watchWebExtensionsTask = watchWebExtensionsTask;
|
exports.watchWebExtensionsTask = watchWebExtensionsTask;
|
||||||
|
|
||||||
async function buildWebExtensions(isWatch) {
|
async function buildWebExtensions(isWatch) {
|
||||||
const webpack = require('webpack');
|
|
||||||
|
|
||||||
const webpackConfigLocations = await nodeUtil.promisify(glob)(
|
const webpackConfigLocations = await nodeUtil.promisify(glob)(
|
||||||
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
|
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
|
||||||
{ ignore: ['**/node_modules'] }
|
{ ignore: ['**/node_modules'] }
|
||||||
);
|
);
|
||||||
|
return webpackExtensions('packaging web extension', isWatch, webpackConfigLocations.map(configPath => ({ configPath })));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {string} taskName
|
||||||
|
* @param {boolean} isWatch
|
||||||
|
* @param {{ configPath: string, outputRoot?: boolean}} webpackConfigLocations
|
||||||
|
*/
|
||||||
|
async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
|
||||||
|
const webpack = require('webpack');
|
||||||
|
|
||||||
const webpackConfigs = [];
|
const webpackConfigs = [];
|
||||||
|
|
||||||
for (const webpackConfigPath of webpackConfigLocations) {
|
for (const { configPath, outputRoot } of webpackConfigLocations) {
|
||||||
const configOrFnOrArray = require(webpackConfigPath);
|
const configOrFnOrArray = require(configPath);
|
||||||
function addConfig(configOrFn) {
|
function addConfig(configOrFn) {
|
||||||
|
let config;
|
||||||
if (typeof configOrFn === 'function') {
|
if (typeof configOrFn === 'function') {
|
||||||
webpackConfigs.push(configOrFn({}, {}));
|
config = configOrFn({}, {});
|
||||||
|
webpackConfigs.push(config);
|
||||||
} else {
|
} else {
|
||||||
webpackConfigs.push(configOrFn);
|
config = configOrFn;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (outputRoot) {
|
||||||
|
config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path));
|
||||||
|
}
|
||||||
|
|
||||||
|
webpackConfigs.push(configOrFn);
|
||||||
}
|
}
|
||||||
addConfig(configOrFnOrArray);
|
addConfig(configOrFnOrArray);
|
||||||
}
|
}
|
||||||
@@ -225,7 +359,7 @@ async function buildWebExtensions(isWatch) {
|
|||||||
if (outputPath) {
|
if (outputPath) {
|
||||||
const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
|
const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
|
||||||
const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
|
const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
|
||||||
fancyLog(`Finished ${ansiColors.green('packaging web extension')} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
|
fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
|
||||||
}
|
}
|
||||||
if (Array.isArray(stats.errors)) {
|
if (Array.isArray(stats.errors)) {
|
||||||
stats.errors.forEach(error => {
|
stats.errors.forEach(error => {
|
||||||
@@ -263,4 +397,44 @@ async function buildWebExtensions(isWatch) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {string} taskName
|
||||||
|
* @param {boolean} isWatch
|
||||||
|
* @param {{ script: string, outputRoot?: string }}} scripts
|
||||||
|
*/
|
||||||
|
async function esbuildExtensions(taskName, isWatch, scripts) {
|
||||||
|
function reporter(/** @type {string} */ stdError, /** @type {string} */script) {
|
||||||
|
const matches = (stdError || '').match(/\> (.+): error: (.+)?/g);
|
||||||
|
fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`);
|
||||||
|
for (const match of matches || []) {
|
||||||
|
fancyLog.error(match);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const tasks = scripts.map(({ script, outputRoot }) => {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const args = [script];
|
||||||
|
if (isWatch) {
|
||||||
|
args.push('--watch');
|
||||||
|
}
|
||||||
|
if (outputRoot) {
|
||||||
|
args.push('--outputRoot', outputRoot);
|
||||||
|
}
|
||||||
|
const proc = child_process.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
|
||||||
|
if (error) {
|
||||||
|
return reject(error);
|
||||||
|
}
|
||||||
|
reporter(stderr, script);
|
||||||
|
if (stderr) {
|
||||||
|
return reject();
|
||||||
|
}
|
||||||
|
return resolve();
|
||||||
|
});
|
||||||
|
|
||||||
|
proc.stdout.on('data', (data) => {
|
||||||
|
fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
return Promise.all(tasks);
|
||||||
|
}
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ const util = require('./lib/util');
|
|||||||
const task = require('./lib/task');
|
const task = require('./lib/task');
|
||||||
const compilation = require('./lib/compilation');
|
const compilation = require('./lib/compilation');
|
||||||
const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpfile.editor');
|
const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpfile.editor');
|
||||||
const { compileExtensionsTask, watchExtensionsTask } = require('./gulpfile.extensions');
|
const { compileExtensionsTask, watchExtensionsTask, compileExtensionMediaTask } = require('./gulpfile.extensions');
|
||||||
|
|
||||||
// Fast compile for development time
|
// Fast compile for development time
|
||||||
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false)));
|
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false)));
|
||||||
@@ -23,7 +23,7 @@ const watchClientTask = task.define('watch-client', task.series(util.rimraf('out
|
|||||||
gulp.task(watchClientTask);
|
gulp.task(watchClientTask);
|
||||||
|
|
||||||
// All
|
// All
|
||||||
const compileTask = task.define('compile', task.parallel(monacoTypecheckTask, compileClientTask, compileExtensionsTask));
|
const compileTask = task.define('compile', task.parallel(monacoTypecheckTask, compileClientTask, compileExtensionsTask, compileExtensionMediaTask));
|
||||||
gulp.task(compileTask);
|
gulp.task(compileTask);
|
||||||
|
|
||||||
gulp.task(task.define('watch', task.parallel(/* monacoTypecheckWatchTask, */ watchClientTask, watchExtensionsTask)));
|
gulp.task(task.define('watch', task.parallel(/* monacoTypecheckWatchTask, */ watchClientTask, watchExtensionsTask)));
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ const { compileBuildTask } = require('./gulpfile.compile');
|
|||||||
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
|
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
|
||||||
const { vscodeWebEntryPoints, vscodeWebResourceIncludes, createVSCodeWebFileContentMapper } = require('./gulpfile.vscode.web');
|
const { vscodeWebEntryPoints, vscodeWebResourceIncludes, createVSCodeWebFileContentMapper } = require('./gulpfile.vscode.web');
|
||||||
const cp = require('child_process');
|
const cp = require('child_process');
|
||||||
|
const { rollupAngular } = require('./lib/rollup');
|
||||||
|
|
||||||
const REPO_ROOT = path.dirname(__dirname);
|
const REPO_ROOT = path.dirname(__dirname);
|
||||||
const commit = util.getVersion(REPO_ROOT);
|
const commit = util.getVersion(REPO_ROOT);
|
||||||
@@ -114,6 +115,10 @@ const serverEntryPoints = [
|
|||||||
{
|
{
|
||||||
name: 'vs/platform/files/node/watcher/nsfw/watcherApp',
|
name: 'vs/platform/files/node/watcher/nsfw/watcherApp',
|
||||||
exclude: ['vs/css', 'vs/nls']
|
exclude: ['vs/css', 'vs/nls']
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'vs/platform/terminal/node/ptyHostMain',
|
||||||
|
exclude: ['vs/css', 'vs/nls']
|
||||||
}
|
}
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -437,8 +442,46 @@ function packagePkgTask(platform, arch, pkgTarget) {
|
|||||||
const sourceFolderName = `out-vscode-${type}${dashed(minified)}`;
|
const sourceFolderName = `out-vscode-${type}${dashed(minified)}`;
|
||||||
const destinationFolderName = `vscode-${type}${dashed(platform)}${dashed(arch)}`;
|
const destinationFolderName = `vscode-${type}${dashed(platform)}${dashed(arch)}`;
|
||||||
|
|
||||||
|
const rollupAngularTask = task.define(`vscode-web-${type}${dashed(platform)}${dashed(arch)}-angular-rollup`, () => {
|
||||||
|
return rollupAngular(REMOTE_FOLDER);
|
||||||
|
});
|
||||||
|
gulp.task(rollupAngularTask);
|
||||||
|
|
||||||
|
// rebuild extensions that contain native npm modules or have conditional webpack rules
|
||||||
|
// when building with the web .yarnrc settings (e.g. runtime=node, etc.)
|
||||||
|
// this is needed to have correct module set published with desired ABI
|
||||||
|
const rebuildExtensions = ['big-data-cluster', 'mssql', 'notebook'];
|
||||||
|
const EXTENSIONS = path.join(REPO_ROOT, 'extensions');
|
||||||
|
function exec(cmdLine, cwd) {
|
||||||
|
console.log(cmdLine);
|
||||||
|
cp.execSync(cmdLine, { stdio: 'inherit', cwd: cwd });
|
||||||
|
}
|
||||||
|
const tasks = [];
|
||||||
|
rebuildExtensions.forEach(scope => {
|
||||||
|
const root = path.join(EXTENSIONS, scope);
|
||||||
|
tasks.push(
|
||||||
|
() => gulp.src(path.join(REMOTE_FOLDER, '.yarnrc')).pipe(gulp.dest(root)),
|
||||||
|
util.rimraf(path.join(root, 'node_modules')),
|
||||||
|
() => exec('yarn', root)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
const yarnrcExtensions = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}-yarnrc-extensions`, task.series(...tasks));
|
||||||
|
gulp.task(yarnrcExtensions);
|
||||||
|
|
||||||
|
const cleanupExtensions = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}-cleanup-extensions`, () => {
|
||||||
|
return Promise.all(rebuildExtensions.map(scope => {
|
||||||
|
const root = path.join(EXTENSIONS, scope);
|
||||||
|
return util.rimraf(path.join(root, '.yarnrc'))();
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
gulp.task(cleanupExtensions);
|
||||||
|
|
||||||
const serverTaskCI = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
|
const serverTaskCI = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
|
||||||
gulp.task(`node-${platform}-${platform === 'darwin' ? 'x64' : arch}`),
|
gulp.task(`node-${platform}-${platform === 'darwin' ? 'x64' : arch}`),
|
||||||
|
yarnrcExtensions,
|
||||||
|
compileExtensionsBuildTask,
|
||||||
|
cleanupExtensions,
|
||||||
|
rollupAngularTask,
|
||||||
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
|
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
|
||||||
packageTask(type, platform, arch, sourceFolderName, destinationFolderName)
|
packageTask(type, platform, arch, sourceFolderName, destinationFolderName)
|
||||||
));
|
));
|
||||||
|
|||||||
@@ -15,6 +15,8 @@ const task = require('./lib/task');
|
|||||||
const glob = require('glob');
|
const glob = require('glob');
|
||||||
const vsce = require('vsce');
|
const vsce = require('vsce');
|
||||||
const mkdirp = require('mkdirp');
|
const mkdirp = require('mkdirp');
|
||||||
|
const rename = require('gulp-rename');
|
||||||
|
const fs = require('fs');
|
||||||
|
|
||||||
gulp.task('fmt', () => formatStagedFiles());
|
gulp.task('fmt', () => formatStagedFiles());
|
||||||
const formatFiles = (some) => {
|
const formatFiles = (some) => {
|
||||||
@@ -94,12 +96,14 @@ const root = path.dirname(__dirname);
|
|||||||
|
|
||||||
gulp.task('package-external-extensions', task.series(
|
gulp.task('package-external-extensions', task.series(
|
||||||
task.define('bundle-external-extensions-build', () => ext.packageExternalExtensionsStream().pipe(gulp.dest('.build/external'))),
|
task.define('bundle-external-extensions-build', () => ext.packageExternalExtensionsStream().pipe(gulp.dest('.build/external'))),
|
||||||
task.define('create-external-extension-vsix-build', () => {
|
task.define('create-external-extension-vsix-build', async () => {
|
||||||
const vsixes = glob.sync('.build/external/extensions/*/package.json').map(manifestPath => {
|
const vsixes = glob.sync('.build/external/extensions/*/package.json').map(manifestPath => {
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
const extensionName = path.basename(extensionPath);
|
const extensionName = path.basename(extensionPath);
|
||||||
return { name: extensionName, path: extensionPath };
|
return { name: extensionName, path: extensionPath };
|
||||||
}).map(element => {
|
})
|
||||||
|
.filter(element => ext.vscodeExternalExtensions.indexOf(element.name) === -1) // VS Code external extensions are bundled into ADS so no need to create a normal VSIX for them
|
||||||
|
.map(element => {
|
||||||
const pkgJson = require(path.join(element.path, 'package.json'));
|
const pkgJson = require(path.join(element.path, 'package.json'));
|
||||||
const vsixDirectory = path.join(root, '.build', 'extensions');
|
const vsixDirectory = path.join(root, '.build', 'extensions');
|
||||||
mkdirp.sync(vsixDirectory);
|
mkdirp.sync(vsixDirectory);
|
||||||
@@ -111,8 +115,46 @@ gulp.task('package-external-extensions', task.series(
|
|||||||
useYarn: true
|
useYarn: true
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
// Wait for all the initial VSIXes to be completed before making the VS Code ones since we'll be overwriting
|
||||||
|
// values in the package.json for those.
|
||||||
|
await Promise.all(vsixes);
|
||||||
|
|
||||||
return Promise.all(vsixes);
|
// Go through and find the extensions which build separate versions of themselves for VS Code.
|
||||||
|
// This is currently a pretty simplistic process, essentially just replacing certain values in
|
||||||
|
// the package.json. It doesn't handle more complex tasks such as replacing localized strings.
|
||||||
|
const vscodeVsixes = glob.sync('.build/external/extensions/*/package.vscode.json')
|
||||||
|
.map(async vscodeManifestRelativePath => {
|
||||||
|
const vscodeManifestFullPath = path.join(root, vscodeManifestRelativePath);
|
||||||
|
const packageDir = path.dirname(vscodeManifestFullPath);
|
||||||
|
const packageManifestPath = path.join(packageDir, 'package.json');
|
||||||
|
const json = require('gulp-json-editor');
|
||||||
|
const packageJsonStream = gulp.src(packageManifestPath) // Create stream for the original package.json
|
||||||
|
.pipe(json(data => { // And now use gulp-json-editor to modify the contents
|
||||||
|
const updateData = JSON.parse(fs.readFileSync(vscodeManifestFullPath)); // Read in the set of values to replace from package.vscode.json
|
||||||
|
Object.keys(updateData).forEach(key => {
|
||||||
|
data[key] = updateData[key];
|
||||||
|
});
|
||||||
|
// Remove ADS-only menus. This is a subset of the menus listed in https://github.com/microsoft/azuredatastudio/blob/main/src/vs/workbench/api/common/menusExtensionPoint.ts
|
||||||
|
// More can be added to the list as needed.
|
||||||
|
['objectExplorer/item/context', 'dataExplorer/context', 'dashboard/toolbar'].forEach(menu => {
|
||||||
|
delete data.contributes.menus[menu];
|
||||||
|
});
|
||||||
|
return data;
|
||||||
|
}, { beautify: false }))
|
||||||
|
.pipe(gulp.dest(packageDir));
|
||||||
|
await new Promise(resolve => packageJsonStream.on('finish', resolve)); // Wait for the files to finish being updated before packaging
|
||||||
|
const pkgJson = JSON.parse(fs.readFileSync(packageManifestPath));
|
||||||
|
const vsixDirectory = path.join(root, '.build', 'extensions');
|
||||||
|
const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
||||||
|
console.info('Creating vsix for ' + packageDir + ' result:' + packagePath);
|
||||||
|
return vsce.createVSIX({
|
||||||
|
cwd: packageDir,
|
||||||
|
packagePath: packagePath,
|
||||||
|
useYarn: true
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
return Promise.all(vscodeVsixes);
|
||||||
})
|
})
|
||||||
));
|
));
|
||||||
|
|
||||||
@@ -145,3 +187,9 @@ gulp.task('package-rebuild-extensions', task.series(
|
|||||||
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
|
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
|
||||||
task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
|
task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
|
||||||
));
|
));
|
||||||
|
|
||||||
|
gulp.task('update-langpacks', task.series(
|
||||||
|
task.define('rename-vscode-packs', () => loc.renameVscodeLangpacks()),
|
||||||
|
task.define('refresh-langpack-resources', () => loc.refreshLangpacks())
|
||||||
|
));
|
||||||
|
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ const { getProductionDependencies } = require('./lib/dependencies');
|
|||||||
const { config } = require('./lib/electron');
|
const { config } = require('./lib/electron');
|
||||||
const createAsar = require('./lib/asar').createAsar;
|
const createAsar = require('./lib/asar').createAsar;
|
||||||
const { compileBuildTask } = require('./gulpfile.compile');
|
const { compileBuildTask } = require('./gulpfile.compile');
|
||||||
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
|
const { compileExtensionsBuildTask, compileLocalizationExtensionsBuildTask } = require('./gulpfile.extensions'); // {{SQL CARBON EDIT}} Must handle localization code.
|
||||||
|
|
||||||
// Build
|
// Build
|
||||||
const vscodeEntryPoints = _.flatten([
|
const vscodeEntryPoints = _.flatten([
|
||||||
@@ -51,7 +51,6 @@ const vscodeResources = [
|
|||||||
'out-build/bootstrap-amd.js',
|
'out-build/bootstrap-amd.js',
|
||||||
'out-build/bootstrap-node.js',
|
'out-build/bootstrap-node.js',
|
||||||
'out-build/bootstrap-window.js',
|
'out-build/bootstrap-window.js',
|
||||||
'out-build/paths.js',
|
|
||||||
'out-build/vs/**/*.{svg,png,html,jpg}',
|
'out-build/vs/**/*.{svg,png,html,jpg}',
|
||||||
'!out-build/vs/code/browser/**/*.html',
|
'!out-build/vs/code/browser/**/*.html',
|
||||||
'!out-build/vs/editor/standalone/**/*.svg',
|
'!out-build/vs/editor/standalone/**/*.svg',
|
||||||
@@ -60,6 +59,7 @@ const vscodeResources = [
|
|||||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
|
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
|
||||||
'out-build/vs/base/browser/ui/codicons/codicon/**',
|
'out-build/vs/base/browser/ui/codicons/codicon/**',
|
||||||
'out-build/vs/base/parts/sandbox/electron-browser/preload.js',
|
'out-build/vs/base/parts/sandbox/electron-browser/preload.js',
|
||||||
|
'out-build/vs/platform/environment/node/userDataPath.js',
|
||||||
'out-build/vs/workbench/browser/media/*-theme.css',
|
'out-build/vs/workbench/browser/media/*-theme.css',
|
||||||
'out-build/vs/workbench/contrib/debug/**/*.json',
|
'out-build/vs/workbench/contrib/debug/**/*.json',
|
||||||
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
|
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
|
||||||
@@ -108,6 +108,50 @@ const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
|
|||||||
));
|
));
|
||||||
gulp.task(optimizeVSCodeTask);
|
gulp.task(optimizeVSCodeTask);
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}} Gulp task that imports any relevant ADS XLF found in vscode-translations-export to resources/xlf/en folder.
|
||||||
|
|
||||||
|
// List of ADS extension XLF files that we want to put into the English resource folder.
|
||||||
|
const extensionsFilter = filter([
|
||||||
|
'**/admin-tool-ext-win.xlf',
|
||||||
|
'**/agent.xlf',
|
||||||
|
'**/arc.xlf',
|
||||||
|
'**/asde-deployment.xlf',
|
||||||
|
'**/azdata.xlf',
|
||||||
|
'**/azurecore.xlf',
|
||||||
|
'**/azurehybridtoolkit.xlf',
|
||||||
|
'**/big-data-cluster.xlf',
|
||||||
|
'**/cms.xlf',
|
||||||
|
'**/dacpac.xlf',
|
||||||
|
'**/data-workspace.xlf',
|
||||||
|
'**/import.xlf',
|
||||||
|
'**/kusto.xlf',
|
||||||
|
'**/machine-learning.xlf',
|
||||||
|
'**/Microsoft.sqlservernotebook.xlf',
|
||||||
|
'**/mssql.xlf',
|
||||||
|
'**/notebook.xlf',
|
||||||
|
'**/profiler.xlf',
|
||||||
|
'**/query-history.xlf',
|
||||||
|
'**/resource-deployment.xlf',
|
||||||
|
'**/schema-compare.xlf',
|
||||||
|
'**/server-report.xlf',
|
||||||
|
'**/sql-assessment.xlf',
|
||||||
|
'**/sql-database-projects.xlf',
|
||||||
|
'**/sql-migration.xlf',
|
||||||
|
'**/xml-language-features.xlf'
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Copy ADS extension XLFs into English resource folder.
|
||||||
|
const importExtensionsTask = task.define('import-extensions-xlfs', function () {
|
||||||
|
return es.merge(
|
||||||
|
gulp.src(`./vscode-translations-export/vscode-extensions/*.xlf`)
|
||||||
|
.pipe(extensionsFilter),
|
||||||
|
gulp.src(`./vscode-translations-export/ads-core/*.xlf`)
|
||||||
|
)
|
||||||
|
.pipe(vfs.dest(`./resources/xlf/en`));
|
||||||
|
});
|
||||||
|
gulp.task(importExtensionsTask);
|
||||||
|
// {{SQL CARBON EDIT}} end
|
||||||
|
|
||||||
const sourceMappingURLBase = `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}`;
|
const sourceMappingURLBase = `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}`;
|
||||||
const minifyVSCodeTask = task.define('minify-vscode', task.series(
|
const minifyVSCodeTask = task.define('minify-vscode', task.series(
|
||||||
optimizeVSCodeTask,
|
optimizeVSCodeTask,
|
||||||
@@ -233,10 +277,12 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
const productionDependencies = getProductionDependencies(root);
|
const productionDependencies = getProductionDependencies(root);
|
||||||
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
|
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}} - fix runtime module load break
|
|
||||||
const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
|
const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
|
||||||
.pipe(filter(['**', '!**/package-lock.json']))
|
.pipe(filter(['**', `!**/${config.version}/**`, '!**/bin/darwin-arm64-87/**', '!**/package-lock.json', '!**/yarn.lock', '!**/*.js.map']))
|
||||||
.pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore')))
|
.pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore')))
|
||||||
|
.pipe(jsFilter)
|
||||||
|
.pipe(util.rewriteSourceMappingURL(sourceMappingURLBase))
|
||||||
|
.pipe(jsFilter.restore)
|
||||||
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*', '**/*.wasm'], 'node_modules.asar'));
|
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*', '**/*.wasm'], 'node_modules.asar'));
|
||||||
|
|
||||||
let all = es.merge(
|
let all = es.merge(
|
||||||
@@ -271,6 +317,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
.pipe(fileLengthFilter.restore)
|
.pipe(fileLengthFilter.restore)
|
||||||
.pipe(util.skipDirectories())
|
.pipe(util.skipDirectories())
|
||||||
.pipe(util.fixWin32DirectoryPermissions())
|
.pipe(util.fixWin32DirectoryPermissions())
|
||||||
|
.pipe(filter(['**', '!**/.github/**'], { dot: true })) // https://github.com/microsoft/vscode/issues/116523
|
||||||
.pipe(electron(_.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, ffmpegChromium: true })))
|
.pipe(electron(_.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, ffmpegChromium: true })))
|
||||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
|
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
|
||||||
|
|
||||||
@@ -434,11 +481,12 @@ gulp.task(task.define(
|
|||||||
)
|
)
|
||||||
));
|
));
|
||||||
|
|
||||||
gulp.task(task.define(
|
// {{SQL CARBON EDIT}} Allow for gulp task to be added to update-english-xlfs.
|
||||||
|
const vscodeTranslationsExport = task.define(
|
||||||
'vscode-translations-export',
|
'vscode-translations-export',
|
||||||
task.series(
|
task.series(
|
||||||
compileBuildTask,
|
compileBuildTask,
|
||||||
compileExtensionsBuildTask,
|
compileLocalizationExtensionsBuildTask, // {{SQL CARBON EDIT}} now include all extensions in ADS, not just a subset. (replaces 'compileExtensionsBuildTask' here).
|
||||||
optimizeVSCodeTask,
|
optimizeVSCodeTask,
|
||||||
function () {
|
function () {
|
||||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||||
@@ -449,10 +497,22 @@ gulp.task(task.define(
|
|||||||
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
||||||
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
||||||
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
||||||
).pipe(vfs.dest('../vscode-translations-export'));
|
).pipe(vfs.dest('./vscode-translations-export')); // {{SQL CARBON EDIT}} move vscode-translations-export into ADS (for safely deleting after use).
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
);
|
||||||
|
gulp.task(vscodeTranslationsExport);
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}} Localization gulp task, runs vscodeTranslationsExport and imports a subset of the generated XLFs into the folder.
|
||||||
|
gulp.task(task.define(
|
||||||
|
'update-english-xlfs',
|
||||||
|
task.series(
|
||||||
|
vscodeTranslationsExport,
|
||||||
|
importExtensionsTask,
|
||||||
|
task.define('delete-vscode-translations-export', util.rimraf('./vscode-translations-export'))
|
||||||
|
)
|
||||||
));
|
));
|
||||||
|
// {{SQL CARBON EDIT}} end
|
||||||
|
|
||||||
gulp.task('vscode-translations-pull', function () {
|
gulp.task('vscode-translations-pull', function () {
|
||||||
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
|
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
|
||||||
@@ -535,7 +595,7 @@ gulp.task(task.define(
|
|||||||
|
|
||||||
if (!shouldSetupSettingsSearch()) {
|
if (!shouldSetupSettingsSearch()) {
|
||||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||||
console.log(`Only runs on master and release branches, not ${branch}`);
|
console.log(`Only runs on main and release branches, not ${branch}`);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -561,21 +621,21 @@ gulp.task(task.define(
|
|||||||
|
|
||||||
function shouldSetupSettingsSearch() {
|
function shouldSetupSettingsSearch() {
|
||||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||||
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
|
return branch && (/\/main$/.test(branch) || branch.indexOf('/release/') >= 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
function getSettingsSearchBuildId(packageJson) {
|
function getSettingsSearchBuildId(packageJson) {
|
||||||
try {
|
try {
|
||||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||||
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
|
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
|
||||||
/\/master$/.test(branch) ? 1 :
|
/\/main$/.test(branch) ? 1 :
|
||||||
2; // Some unexpected branch
|
2; // Some unexpected branch
|
||||||
|
|
||||||
const out = cp.execSync(`git rev-list HEAD --count`);
|
const out = cp.execSync(`git rev-list HEAD --count`);
|
||||||
const count = parseInt(out.toString());
|
const count = parseInt(out.toString());
|
||||||
|
|
||||||
// <version number><commit count><branchId (avoid unlikely conflicts)>
|
// <version number><commit count><branchId (avoid unlikely conflicts)>
|
||||||
// 1.25.1, 1,234,567 commits, master = 1250112345671
|
// 1.25.1, 1,234,567 commits, main = 1250112345671
|
||||||
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
|
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
throw new Error('Could not determine build number: ' + e.toString());
|
throw new Error('Could not determine build number: ' + e.toString());
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ import { Stream } from 'stream';
|
|||||||
|
|
||||||
const mkdirp = require('mkdirp');
|
const mkdirp = require('mkdirp');
|
||||||
|
|
||||||
interface IExtensionDefinition {
|
export interface IExtensionDefinition {
|
||||||
name: string;
|
name: string;
|
||||||
version: string;
|
version: string;
|
||||||
repo: string;
|
repo: string;
|
||||||
|
|||||||
79
build/lib/builtInExtensionsCG.js
Normal file
79
build/lib/builtInExtensionsCG.js
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
"use strict";
|
||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const got_1 = require("got");
|
||||||
|
const fs = require("fs");
|
||||||
|
const path = require("path");
|
||||||
|
const url = require("url");
|
||||||
|
const ansiColors = require("ansi-colors");
|
||||||
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
|
const rootCG = path.join(root, 'extensionsCG');
|
||||||
|
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
|
||||||
|
const builtInExtensions = productjson.builtInExtensions;
|
||||||
|
const webBuiltInExtensions = productjson.webBuiltInExtensions;
|
||||||
|
const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined;
|
||||||
|
const contentBasePath = 'raw.githubusercontent.com';
|
||||||
|
const contentFileNames = ['package.json', 'package-lock.json', 'yarn.lock'];
|
||||||
|
async function downloadExtensionDetails(extension) {
|
||||||
|
var _a, _b, _c;
|
||||||
|
const extensionLabel = `${extension.name}@${extension.version}`;
|
||||||
|
const repository = url.parse(extension.repo).path.substr(1);
|
||||||
|
const repositoryContentBaseUrl = `https://${token ? `${token}@` : ''}${contentBasePath}/${repository}/v${extension.version}`;
|
||||||
|
const promises = [];
|
||||||
|
for (const fileName of contentFileNames) {
|
||||||
|
promises.push(new Promise(resolve => {
|
||||||
|
(0, got_1.default)(`${repositoryContentBaseUrl}/${fileName}`)
|
||||||
|
.then(response => {
|
||||||
|
resolve({ fileName, body: response.rawBody });
|
||||||
|
})
|
||||||
|
.catch(error => {
|
||||||
|
if (error.response.statusCode === 404) {
|
||||||
|
resolve({ fileName, body: undefined });
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
resolve({ fileName, body: null });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
console.log(extensionLabel);
|
||||||
|
const results = await Promise.all(promises);
|
||||||
|
for (const result of results) {
|
||||||
|
if (result.body) {
|
||||||
|
const extensionFolder = path.join(rootCG, extension.name);
|
||||||
|
fs.mkdirSync(extensionFolder, { recursive: true });
|
||||||
|
fs.writeFileSync(path.join(extensionFolder, result.fileName), result.body);
|
||||||
|
console.log(` - ${result.fileName} ${ansiColors.green('✔︎')}`);
|
||||||
|
}
|
||||||
|
else if (result.body === undefined) {
|
||||||
|
console.log(` - ${result.fileName} ${ansiColors.yellow('⚠️')}`);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
console.log(` - ${result.fileName} ${ansiColors.red('🛑')}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Validation
|
||||||
|
if (!((_a = results.find(r => r.fileName === 'package.json')) === null || _a === void 0 ? void 0 : _a.body)) {
|
||||||
|
// throw new Error(`The "package.json" file could not be found for the built-in extension - ${extensionLabel}`);
|
||||||
|
}
|
||||||
|
if (!((_b = results.find(r => r.fileName === 'package-lock.json')) === null || _b === void 0 ? void 0 : _b.body) &&
|
||||||
|
!((_c = results.find(r => r.fileName === 'yarn.lock')) === null || _c === void 0 ? void 0 : _c.body)) {
|
||||||
|
// throw new Error(`The "package-lock.json"/"yarn.lock" could not be found for the built-in extension - ${extensionLabel}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
async function main() {
|
||||||
|
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
|
||||||
|
await downloadExtensionDetails(extension);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
main().then(() => {
|
||||||
|
console.log(`Built-in extensions component data downloaded ${ansiColors.green('✔︎')}`);
|
||||||
|
process.exit(0);
|
||||||
|
}, err => {
|
||||||
|
console.log(`Built-in extensions component data could not be downloaded ${ansiColors.red('🛑')}`);
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
83
build/lib/builtInExtensionsCG.ts
Normal file
83
build/lib/builtInExtensionsCG.ts
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
import got from 'got';
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as url from 'url';
|
||||||
|
import ansiColors = require('ansi-colors');
|
||||||
|
import { IExtensionDefinition } from './builtInExtensions';
|
||||||
|
|
||||||
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
|
const rootCG = path.join(root, 'extensionsCG');
|
||||||
|
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
|
||||||
|
const builtInExtensions = <IExtensionDefinition[]>productjson.builtInExtensions;
|
||||||
|
const webBuiltInExtensions = <IExtensionDefinition[]>productjson.webBuiltInExtensions;
|
||||||
|
const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined;
|
||||||
|
|
||||||
|
const contentBasePath = 'raw.githubusercontent.com';
|
||||||
|
const contentFileNames = ['package.json', 'package-lock.json', 'yarn.lock'];
|
||||||
|
|
||||||
|
async function downloadExtensionDetails(extension: IExtensionDefinition): Promise<void> {
|
||||||
|
const extensionLabel = `${extension.name}@${extension.version}`;
|
||||||
|
const repository = url.parse(extension.repo).path!.substr(1);
|
||||||
|
const repositoryContentBaseUrl = `https://${token ? `${token}@` : ''}${contentBasePath}/${repository}/v${extension.version}`;
|
||||||
|
|
||||||
|
const promises = [];
|
||||||
|
for (const fileName of contentFileNames) {
|
||||||
|
promises.push(new Promise<{ fileName: string, body: Buffer | undefined | null }>(resolve => {
|
||||||
|
got(`${repositoryContentBaseUrl}/${fileName}`)
|
||||||
|
.then(response => {
|
||||||
|
resolve({ fileName, body: response.rawBody });
|
||||||
|
})
|
||||||
|
.catch(error => {
|
||||||
|
if (error.response.statusCode === 404) {
|
||||||
|
resolve({ fileName, body: undefined });
|
||||||
|
} else {
|
||||||
|
resolve({ fileName, body: null });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(extensionLabel);
|
||||||
|
const results = await Promise.all(promises);
|
||||||
|
for (const result of results) {
|
||||||
|
if (result.body) {
|
||||||
|
const extensionFolder = path.join(rootCG, extension.name);
|
||||||
|
fs.mkdirSync(extensionFolder, { recursive: true });
|
||||||
|
fs.writeFileSync(path.join(extensionFolder, result.fileName), result.body);
|
||||||
|
console.log(` - ${result.fileName} ${ansiColors.green('✔︎')}`);
|
||||||
|
} else if (result.body === undefined) {
|
||||||
|
console.log(` - ${result.fileName} ${ansiColors.yellow('⚠️')}`);
|
||||||
|
} else {
|
||||||
|
console.log(` - ${result.fileName} ${ansiColors.red('🛑')}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validation
|
||||||
|
if (!results.find(r => r.fileName === 'package.json')?.body) {
|
||||||
|
// throw new Error(`The "package.json" file could not be found for the built-in extension - ${extensionLabel}`);
|
||||||
|
}
|
||||||
|
if (!results.find(r => r.fileName === 'package-lock.json')?.body &&
|
||||||
|
!results.find(r => r.fileName === 'yarn.lock')?.body) {
|
||||||
|
// throw new Error(`The "package-lock.json"/"yarn.lock" could not be found for the built-in extension - ${extensionLabel}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main(): Promise<void> {
|
||||||
|
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
|
||||||
|
await downloadExtensionDetails(extension);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main().then(() => {
|
||||||
|
console.log(`Built-in extensions component data downloaded ${ansiColors.green('✔︎')}`);
|
||||||
|
process.exit(0);
|
||||||
|
}, err => {
|
||||||
|
console.log(`Built-in extensions component data could not be downloaded ${ansiColors.red('🛑')}`);
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
@@ -17,7 +17,7 @@ const fancyLog = require("fancy-log");
|
|||||||
const ansiColors = require("ansi-colors");
|
const ansiColors = require("ansi-colors");
|
||||||
const os = require("os");
|
const os = require("os");
|
||||||
const watch = require('./watch');
|
const watch = require('./watch');
|
||||||
const reporter = reporter_1.createReporter();
|
const reporter = (0, reporter_1.createReporter)();
|
||||||
function getTypeScriptCompilerOptions(src) {
|
function getTypeScriptCompilerOptions(src) {
|
||||||
const rootDir = path.join(__dirname, `../../${src}`);
|
const rootDir = path.join(__dirname, `../../${src}`);
|
||||||
let options = {};
|
let options = {};
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ exports.config = {
|
|||||||
version: util.getElectronVersion(),
|
version: util.getElectronVersion(),
|
||||||
productAppName: product.nameLong,
|
productAppName: product.nameLong,
|
||||||
companyName: 'Microsoft Corporation',
|
companyName: 'Microsoft Corporation',
|
||||||
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
|
copyright: 'Copyright (C) 2021 Microsoft. All rights reserved',
|
||||||
darwinIcon: 'resources/darwin/code.icns',
|
darwinIcon: 'resources/darwin/code.icns',
|
||||||
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
||||||
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ export const config = {
|
|||||||
version: util.getElectronVersion(),
|
version: util.getElectronVersion(),
|
||||||
productAppName: product.nameLong,
|
productAppName: product.nameLong,
|
||||||
companyName: 'Microsoft Corporation',
|
companyName: 'Microsoft Corporation',
|
||||||
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
|
copyright: 'Copyright (C) 2021 Microsoft. All rights reserved',
|
||||||
darwinIcon: 'resources/darwin/code.icns',
|
darwinIcon: 'resources/darwin/code.icns',
|
||||||
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
||||||
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ module.exports = new class {
|
|||||||
const configs = context.options;
|
const configs = context.options;
|
||||||
for (const config of configs) {
|
for (const config of configs) {
|
||||||
if (minimatch(context.getFilename(), config.target)) {
|
if (minimatch(context.getFilename(), config.target)) {
|
||||||
return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
|
return (0, utils_1.createImportRuleListener)((node, value) => this._checkImport(context, config, node, value));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return {};
|
return {};
|
||||||
@@ -29,7 +29,7 @@ module.exports = new class {
|
|||||||
_checkImport(context, config, node, path) {
|
_checkImport(context, config, node, path) {
|
||||||
// resolve relative paths
|
// resolve relative paths
|
||||||
if (path[0] === '.') {
|
if (path[0] === '.') {
|
||||||
path = path_1.join(context.getFilename(), path);
|
path = (0, path_1.join)(context.getFilename(), path);
|
||||||
}
|
}
|
||||||
let restrictions;
|
let restrictions;
|
||||||
if (typeof config.restrictions === 'string') {
|
if (typeof config.restrictions === 'string') {
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ module.exports = new class {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
create(context) {
|
create(context) {
|
||||||
const fileDirname = path_1.dirname(context.getFilename());
|
const fileDirname = (0, path_1.dirname)(context.getFilename());
|
||||||
const parts = fileDirname.split(/\\|\//);
|
const parts = fileDirname.split(/\\|\//);
|
||||||
const ruleArgs = context.options[0];
|
const ruleArgs = context.options[0];
|
||||||
let config;
|
let config;
|
||||||
@@ -39,11 +39,11 @@ module.exports = new class {
|
|||||||
// nothing
|
// nothing
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
return utils_1.createImportRuleListener((node, path) => {
|
return (0, utils_1.createImportRuleListener)((node, path) => {
|
||||||
if (path[0] === '.') {
|
if (path[0] === '.') {
|
||||||
path = path_1.join(path_1.dirname(context.getFilename()), path);
|
path = (0, path_1.join)((0, path_1.dirname)(context.getFilename()), path);
|
||||||
}
|
}
|
||||||
const parts = path_1.dirname(path).split(/\\|\//);
|
const parts = (0, path_1.dirname)(path).split(/\\|\//);
|
||||||
for (let i = parts.length - 1; i >= 0; i--) {
|
for (let i = parts.length - 1; i >= 0; i--) {
|
||||||
const part = parts[i];
|
const part = parts[i];
|
||||||
if (config.allowed.has(part)) {
|
if (config.allowed.has(part)) {
|
||||||
|
|||||||
@@ -20,10 +20,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
|
|||||||
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|
||||||
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|
||||||
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
|
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
|
||||||
return utils_1.createImportRuleListener((node, path) => {
|
return (0, utils_1.createImportRuleListener)((node, path) => {
|
||||||
// resolve relative paths
|
// resolve relative paths
|
||||||
if (path[0] === '.') {
|
if (path[0] === '.') {
|
||||||
path = path_1.join(context.getFilename(), path);
|
path = (0, path_1.join)(context.getFilename(), path);
|
||||||
}
|
}
|
||||||
if (/vs(\/|\\)nls/.test(path)) {
|
if (/vs(\/|\\)nls/.test(path)) {
|
||||||
context.report({
|
context.report({
|
||||||
|
|||||||
@@ -21,10 +21,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
|
|||||||
// the vs/editor folder is allowed to use the standalone editor
|
// the vs/editor folder is allowed to use the standalone editor
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
return utils_1.createImportRuleListener((node, path) => {
|
return (0, utils_1.createImportRuleListener)((node, path) => {
|
||||||
// resolve relative paths
|
// resolve relative paths
|
||||||
if (path[0] === '.') {
|
if (path[0] === '.') {
|
||||||
path = path_1.join(context.getFilename(), path);
|
path = (0, path_1.join)(context.getFilename(), path);
|
||||||
}
|
}
|
||||||
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
|
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
|
||||||
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)
|
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ module.exports = new (_a = class TranslationRemind {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
create(context) {
|
create(context) {
|
||||||
return utils_1.createImportRuleListener((node, path) => this._checkImport(context, node, path));
|
return (0, utils_1.createImportRuleListener)((node, path) => this._checkImport(context, node, path));
|
||||||
}
|
}
|
||||||
_checkImport(context, node, path) {
|
_checkImport(context, node, path) {
|
||||||
if (path !== TranslationRemind.NLS_MODULE) {
|
if (path !== TranslationRemind.NLS_MODULE) {
|
||||||
@@ -31,7 +31,7 @@ module.exports = new (_a = class TranslationRemind {
|
|||||||
let resourceDefined = false;
|
let resourceDefined = false;
|
||||||
let json;
|
let json;
|
||||||
try {
|
try {
|
||||||
json = fs_1.readFileSync('./build/lib/i18n.resources.json', 'utf8');
|
json = (0, fs_1.readFileSync)('./build/lib/i18n.resources.json', 'utf8');
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
|
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.translatePackageJSON = exports.packageRebuildExtensionsStream = exports.cleanRebuildExtensions = exports.packageExternalExtensionsStream = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
|
exports.translatePackageJSON = exports.packageRebuildExtensionsStream = exports.cleanRebuildExtensions = exports.packageExternalExtensionsStream = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.vscodeExternalExtensions = exports.fromMarketplace = exports.fromLocalNormal = exports.fromLocal = void 0;
|
||||||
const es = require("event-stream");
|
const es = require("event-stream");
|
||||||
const fs = require("fs");
|
const fs = require("fs");
|
||||||
const glob = require("glob");
|
const glob = require("glob");
|
||||||
@@ -71,6 +71,7 @@ function fromLocal(extensionPath, forWeb) {
|
|||||||
}
|
}
|
||||||
return input;
|
return input;
|
||||||
}
|
}
|
||||||
|
exports.fromLocal = fromLocal;
|
||||||
function fromLocalWebpack(extensionPath, webpackConfigFileName) {
|
function fromLocalWebpack(extensionPath, webpackConfigFileName) {
|
||||||
const result = es.through();
|
const result = es.through();
|
||||||
const packagedDependencies = [];
|
const packagedDependencies = [];
|
||||||
@@ -143,7 +144,7 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
|
|||||||
console.error(packagedDependencies);
|
console.error(packagedDependencies);
|
||||||
result.emit('error', err);
|
result.emit('error', err);
|
||||||
});
|
});
|
||||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
|
||||||
}
|
}
|
||||||
function fromLocalNormal(extensionPath) {
|
function fromLocalNormal(extensionPath) {
|
||||||
const result = es.through();
|
const result = es.through();
|
||||||
@@ -161,8 +162,9 @@ function fromLocalNormal(extensionPath) {
|
|||||||
es.readArray(files).pipe(result);
|
es.readArray(files).pipe(result);
|
||||||
})
|
})
|
||||||
.catch(err => result.emit('error', err));
|
.catch(err => result.emit('error', err));
|
||||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
|
||||||
}
|
}
|
||||||
|
exports.fromLocalNormal = fromLocalNormal;
|
||||||
const baseHeaders = {
|
const baseHeaders = {
|
||||||
'X-Market-Client-Id': 'VSCode Build',
|
'X-Market-Client-Id': 'VSCode Build',
|
||||||
'User-Agent': 'VSCode Build',
|
'User-Agent': 'VSCode Build',
|
||||||
@@ -212,8 +214,10 @@ const externalExtensions = [
|
|||||||
'agent',
|
'agent',
|
||||||
'arc',
|
'arc',
|
||||||
'asde-deployment',
|
'asde-deployment',
|
||||||
|
'azcli',
|
||||||
'azdata',
|
'azdata',
|
||||||
'azurehybridtoolkit',
|
'azurehybridtoolkit',
|
||||||
|
'azuremonitor',
|
||||||
'cms',
|
'cms',
|
||||||
'dacpac',
|
'dacpac',
|
||||||
'import',
|
'import',
|
||||||
@@ -228,6 +232,12 @@ const externalExtensions = [
|
|||||||
'sql-database-projects',
|
'sql-database-projects',
|
||||||
'sql-migration'
|
'sql-migration'
|
||||||
];
|
];
|
||||||
|
/**
|
||||||
|
* Extensions that are built into ADS but should be packaged externally as well for VS Code.
|
||||||
|
*/
|
||||||
|
exports.vscodeExternalExtensions = [
|
||||||
|
'data-workspace'
|
||||||
|
];
|
||||||
// extensions that require a rebuild since they have native parts
|
// extensions that require a rebuild since they have native parts
|
||||||
const rebuildExtensions = [
|
const rebuildExtensions = [
|
||||||
'big-data-cluster',
|
'big-data-cluster',
|
||||||
@@ -344,7 +354,7 @@ function packageExternalExtensionsStream() {
|
|||||||
const extensionName = path.basename(extensionPath);
|
const extensionName = path.basename(extensionPath);
|
||||||
return { name: extensionName, path: extensionPath };
|
return { name: extensionName, path: extensionPath };
|
||||||
})
|
})
|
||||||
.filter(({ name }) => externalExtensions.indexOf(name) >= 0);
|
.filter(({ name }) => externalExtensions.indexOf(name) >= 0 || exports.vscodeExternalExtensions.indexOf(name) >= 0);
|
||||||
const builtExtensions = extenalExtensionDescriptions.map(extension => {
|
const builtExtensions = extenalExtensionDescriptions.map(extension => {
|
||||||
return fromLocal(extension.path, false)
|
return fromLocal(extension.path, false)
|
||||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||||
|
|||||||
@@ -54,7 +54,7 @@ function updateExtensionPackageJSON(input: Stream, update: (data: any) => any):
|
|||||||
.pipe(packageJsonFilter.restore);
|
.pipe(packageJsonFilter.restore);
|
||||||
}
|
}
|
||||||
|
|
||||||
function fromLocal(extensionPath: string, forWeb: boolean): Stream {
|
export function fromLocal(extensionPath: string, forWeb: boolean): Stream { // {{SQL CARBON EDIT}} - Needed in locFunc
|
||||||
const webpackConfigFileName = forWeb ? 'extension-browser.webpack.config.js' : 'extension.webpack.config.js';
|
const webpackConfigFileName = forWeb ? 'extension-browser.webpack.config.js' : 'extension.webpack.config.js';
|
||||||
|
|
||||||
const isWebPacked = fs.existsSync(path.join(extensionPath, webpackConfigFileName));
|
const isWebPacked = fs.existsSync(path.join(extensionPath, webpackConfigFileName));
|
||||||
@@ -171,7 +171,7 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string):
|
|||||||
return result.pipe(createStatsStream(path.basename(extensionPath)));
|
return result.pipe(createStatsStream(path.basename(extensionPath)));
|
||||||
}
|
}
|
||||||
|
|
||||||
function fromLocalNormal(extensionPath: string): Stream {
|
export function fromLocalNormal(extensionPath: string): Stream { // {{SQL CARBON EDIT}} - Needed in locFunc
|
||||||
const result = es.through();
|
const result = es.through();
|
||||||
|
|
||||||
const vsce = require('vsce') as typeof import('vsce');
|
const vsce = require('vsce') as typeof import('vsce');
|
||||||
@@ -249,8 +249,10 @@ const externalExtensions = [
|
|||||||
'agent',
|
'agent',
|
||||||
'arc',
|
'arc',
|
||||||
'asde-deployment',
|
'asde-deployment',
|
||||||
|
'azcli',
|
||||||
'azdata',
|
'azdata',
|
||||||
'azurehybridtoolkit',
|
'azurehybridtoolkit',
|
||||||
|
'azuremonitor',
|
||||||
'cms',
|
'cms',
|
||||||
'dacpac',
|
'dacpac',
|
||||||
'import',
|
'import',
|
||||||
@@ -266,6 +268,13 @@ const externalExtensions = [
|
|||||||
'sql-migration'
|
'sql-migration'
|
||||||
];
|
];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extensions that are built into ADS but should be packaged externally as well for VS Code.
|
||||||
|
*/
|
||||||
|
export const vscodeExternalExtensions = [
|
||||||
|
'data-workspace'
|
||||||
|
];
|
||||||
|
|
||||||
// extensions that require a rebuild since they have native parts
|
// extensions that require a rebuild since they have native parts
|
||||||
const rebuildExtensions = [
|
const rebuildExtensions = [
|
||||||
'big-data-cluster',
|
'big-data-cluster',
|
||||||
@@ -423,7 +432,7 @@ export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
|
|||||||
const extensionName = path.basename(extensionPath);
|
const extensionName = path.basename(extensionPath);
|
||||||
return { name: extensionName, path: extensionPath };
|
return { name: extensionName, path: extensionPath };
|
||||||
})
|
})
|
||||||
.filter(({ name }) => externalExtensions.indexOf(name) >= 0);
|
.filter(({ name }) => externalExtensions.indexOf(name) >= 0 || vscodeExternalExtensions.indexOf(name) >= 0);
|
||||||
|
|
||||||
const builtExtensions = extenalExtensionDescriptions.map(extension => {
|
const builtExtensions = extenalExtensionDescriptions.map(extension => {
|
||||||
return fromLocal(extension.path, false)
|
return fromLocal(extension.path, false)
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.prepareIslFiles = exports.prepareI18nPackFiles = exports.pullI18nPackFiles = exports.prepareI18nFiles = exports.pullSetupXlfFiles = exports.pullCoreAndExtensionsXlfFiles = exports.findObsoleteResources = exports.pushXlfFiles = exports.createXlfFilesForIsl = exports.createXlfFilesForExtensions = exports.createXlfFilesForCoreBundle = exports.getResource = exports.processNlsFiles = exports.Limiter = exports.XLF = exports.Line = exports.externalExtensionsWithTranslations = exports.extraLanguages = exports.defaultLanguages = void 0;
|
exports.prepareIslFiles = exports.prepareI18nPackFiles = exports.pullI18nPackFiles = exports.i18nPackVersion = exports.createI18nFile = exports.prepareI18nFiles = exports.pullSetupXlfFiles = exports.pullCoreAndExtensionsXlfFiles = exports.findObsoleteResources = exports.pushXlfFiles = exports.createXlfFilesForIsl = exports.createXlfFilesForExtensions = exports.createXlfFilesForCoreBundle = exports.getResource = exports.processNlsFiles = exports.Limiter = exports.XLF = exports.Line = exports.externalExtensionsWithTranslations = exports.extraLanguages = exports.defaultLanguages = void 0;
|
||||||
const path = require("path");
|
const path = require("path");
|
||||||
const fs = require("fs");
|
const fs = require("fs");
|
||||||
const event_stream_1 = require("event-stream");
|
const event_stream_1 = require("event-stream");
|
||||||
@@ -237,14 +237,14 @@ XLF.parse = function (xlfString) {
|
|||||||
}
|
}
|
||||||
let val = unit.target[0];
|
let val = unit.target[0];
|
||||||
if (typeof val !== 'string') {
|
if (typeof val !== 'string') {
|
||||||
val = val._;
|
// We allow empty source values so support them for translations as well.
|
||||||
|
val = val._ ? val._ : '';
|
||||||
}
|
}
|
||||||
if (key && val) {
|
if (!key) {
|
||||||
messages[key] = decodeEntities(val);
|
reject(new Error(`XLF parsing error: trans-unit ${JSON.stringify(unit, undefined, 0)} defined in file ${originalFilePath} is missing the ID attribute.`));
|
||||||
}
|
return;
|
||||||
else {
|
|
||||||
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
|
|
||||||
}
|
}
|
||||||
|
messages[key] = decodeEntities(val);
|
||||||
});
|
});
|
||||||
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
|
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
|
||||||
}
|
}
|
||||||
@@ -463,7 +463,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
function processNlsFiles(opts) {
|
function processNlsFiles(opts) {
|
||||||
return event_stream_1.through(function (file) {
|
return (0, event_stream_1.through)(function (file) {
|
||||||
let fileName = path.basename(file.path);
|
let fileName = path.basename(file.path);
|
||||||
if (fileName === 'nls.metadata.json') {
|
if (fileName === 'nls.metadata.json') {
|
||||||
let json = null;
|
let json = null;
|
||||||
@@ -484,7 +484,7 @@ function processNlsFiles(opts) {
|
|||||||
exports.processNlsFiles = processNlsFiles;
|
exports.processNlsFiles = processNlsFiles;
|
||||||
const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench', extensionsProject = 'vscode-extensions', setupProject = 'vscode-setup';
|
const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench', extensionsProject = 'vscode-extensions', setupProject = 'vscode-setup';
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const sqlopsProject = 'sqlops-core';
|
const adsProject = 'ads-core';
|
||||||
function getResource(sourceFile) {
|
function getResource(sourceFile) {
|
||||||
let resource;
|
let resource;
|
||||||
if (/^vs\/platform/.test(sourceFile)) {
|
if (/^vs\/platform/.test(sourceFile)) {
|
||||||
@@ -515,19 +515,23 @@ function getResource(sourceFile) {
|
|||||||
}
|
}
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
else if (/^sql/.test(sourceFile)) {
|
else if (/^sql/.test(sourceFile)) {
|
||||||
return { name: 'sql', project: sqlopsProject };
|
return { name: 'sql', project: adsProject };
|
||||||
}
|
}
|
||||||
throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
|
throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
|
||||||
}
|
}
|
||||||
exports.getResource = getResource;
|
exports.getResource = getResource;
|
||||||
function createXlfFilesForCoreBundle() {
|
function createXlfFilesForCoreBundle() {
|
||||||
return event_stream_1.through(function (file) {
|
return (0, event_stream_1.through)(function (file) {
|
||||||
const basename = path.basename(file.path);
|
const basename = path.basename(file.path);
|
||||||
if (basename === 'nls.metadata.json') {
|
if (basename === 'nls.metadata.json') {
|
||||||
if (file.isBuffer()) {
|
if (file.isBuffer()) {
|
||||||
const xlfs = Object.create(null);
|
const xlfs = Object.create(null);
|
||||||
const json = JSON.parse(file.contents.toString('utf8'));
|
const json = JSON.parse(file.contents.toString('utf8'));
|
||||||
for (let coreModule in json.keys) {
|
// {{SQL CARBON EDIT}} - Must sort the keys for easier translation.
|
||||||
|
let sortedKeys = Object.keys(json.keys).sort();
|
||||||
|
for (let i = 0; i < sortedKeys.length; i++) {
|
||||||
|
let coreModule = sortedKeys[i];
|
||||||
|
// {{SQL CARBON EDIT}} - End
|
||||||
const projectResource = getResource(coreModule);
|
const projectResource = getResource(coreModule);
|
||||||
const resource = projectResource.name;
|
const resource = projectResource.name;
|
||||||
const project = projectResource.project;
|
const project = projectResource.project;
|
||||||
@@ -572,7 +576,7 @@ function createXlfFilesForExtensions() {
|
|||||||
let counter = 0;
|
let counter = 0;
|
||||||
let folderStreamEnded = false;
|
let folderStreamEnded = false;
|
||||||
let folderStreamEndEmitted = false;
|
let folderStreamEndEmitted = false;
|
||||||
return event_stream_1.through(function (extensionFolder) {
|
return (0, event_stream_1.through)(function (extensionFolder) {
|
||||||
const folderStream = this;
|
const folderStream = this;
|
||||||
const stat = fs.statSync(extensionFolder.path);
|
const stat = fs.statSync(extensionFolder.path);
|
||||||
if (!stat.isDirectory()) {
|
if (!stat.isDirectory()) {
|
||||||
@@ -590,7 +594,7 @@ function createXlfFilesForExtensions() {
|
|||||||
}
|
}
|
||||||
return _xlf;
|
return _xlf;
|
||||||
}
|
}
|
||||||
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
|
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe((0, event_stream_1.through)(function (file) {
|
||||||
if (file.isBuffer()) {
|
if (file.isBuffer()) {
|
||||||
const buffer = file.contents;
|
const buffer = file.contents;
|
||||||
const basename = path.basename(file.path);
|
const basename = path.basename(file.path);
|
||||||
@@ -649,7 +653,7 @@ function createXlfFilesForExtensions() {
|
|||||||
}
|
}
|
||||||
exports.createXlfFilesForExtensions = createXlfFilesForExtensions;
|
exports.createXlfFilesForExtensions = createXlfFilesForExtensions;
|
||||||
function createXlfFilesForIsl() {
|
function createXlfFilesForIsl() {
|
||||||
return event_stream_1.through(function (file) {
|
return (0, event_stream_1.through)(function (file) {
|
||||||
let projectName, resourceFile;
|
let projectName, resourceFile;
|
||||||
if (path.basename(file.path) === 'Default.isl') {
|
if (path.basename(file.path) === 'Default.isl') {
|
||||||
projectName = setupProject;
|
projectName = setupProject;
|
||||||
@@ -703,7 +707,7 @@ exports.createXlfFilesForIsl = createXlfFilesForIsl;
|
|||||||
function pushXlfFiles(apiHostname, username, password) {
|
function pushXlfFiles(apiHostname, username, password) {
|
||||||
let tryGetPromises = [];
|
let tryGetPromises = [];
|
||||||
let updateCreatePromises = [];
|
let updateCreatePromises = [];
|
||||||
return event_stream_1.through(function (file) {
|
return (0, event_stream_1.through)(function (file) {
|
||||||
const project = path.dirname(file.relative);
|
const project = path.dirname(file.relative);
|
||||||
const fileName = path.basename(file.path);
|
const fileName = path.basename(file.path);
|
||||||
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
|
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
|
||||||
@@ -765,7 +769,7 @@ function getAllResources(project, apiHostname, username, password) {
|
|||||||
function findObsoleteResources(apiHostname, username, password) {
|
function findObsoleteResources(apiHostname, username, password) {
|
||||||
let resourcesByProject = Object.create(null);
|
let resourcesByProject = Object.create(null);
|
||||||
resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
|
resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
|
||||||
return event_stream_1.through(function (file) {
|
return (0, event_stream_1.through)(function (file) {
|
||||||
const project = path.dirname(file.relative);
|
const project = path.dirname(file.relative);
|
||||||
const fileName = path.basename(file.path);
|
const fileName = path.basename(file.path);
|
||||||
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
|
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
|
||||||
@@ -942,7 +946,7 @@ function pullXlfFiles(apiHostname, username, password, language, resources) {
|
|||||||
const credentials = `${username}:${password}`;
|
const credentials = `${username}:${password}`;
|
||||||
let expectedTranslationsCount = resources.length;
|
let expectedTranslationsCount = resources.length;
|
||||||
let translationsRetrieved = 0, called = false;
|
let translationsRetrieved = 0, called = false;
|
||||||
return event_stream_1.readable(function (_count, callback) {
|
return (0, event_stream_1.readable)(function (_count, callback) {
|
||||||
// Mark end of stream when all resources were retrieved
|
// Mark end of stream when all resources were retrieved
|
||||||
if (translationsRetrieved === expectedTranslationsCount) {
|
if (translationsRetrieved === expectedTranslationsCount) {
|
||||||
return this.emit('end');
|
return this.emit('end');
|
||||||
@@ -1000,7 +1004,7 @@ function retrieveResource(language, resource, apiHostname, credentials) {
|
|||||||
}
|
}
|
||||||
function prepareI18nFiles() {
|
function prepareI18nFiles() {
|
||||||
let parsePromises = [];
|
let parsePromises = [];
|
||||||
return event_stream_1.through(function (xlf) {
|
return (0, event_stream_1.through)(function (xlf) {
|
||||||
let stream = this;
|
let stream = this;
|
||||||
let parsePromise = XLF.parse(xlf.contents.toString());
|
let parsePromise = XLF.parse(xlf.contents.toString());
|
||||||
parsePromises.push(parsePromise);
|
parsePromises.push(parsePromise);
|
||||||
@@ -1038,7 +1042,8 @@ function createI18nFile(originalFilePath, messages) {
|
|||||||
contents: Buffer.from(content, 'utf8')
|
contents: Buffer.from(content, 'utf8')
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
const i18nPackVersion = '1.0.0';
|
exports.createI18nFile = createI18nFile;
|
||||||
|
exports.i18nPackVersion = '1.0.0'; // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||||
function pullI18nPackFiles(apiHostname, username, password, language, resultingTranslationPaths) {
|
function pullI18nPackFiles(apiHostname, username, password, language, resultingTranslationPaths) {
|
||||||
return pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language, exports.externalExtensionsWithTranslations)
|
return pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language, exports.externalExtensionsWithTranslations)
|
||||||
.pipe(prepareI18nPackFiles(exports.externalExtensionsWithTranslations, resultingTranslationPaths, language.id === 'ps'));
|
.pipe(prepareI18nPackFiles(exports.externalExtensionsWithTranslations, resultingTranslationPaths, language.id === 'ps'));
|
||||||
@@ -1046,10 +1051,10 @@ function pullI18nPackFiles(apiHostname, username, password, language, resultingT
|
|||||||
exports.pullI18nPackFiles = pullI18nPackFiles;
|
exports.pullI18nPackFiles = pullI18nPackFiles;
|
||||||
function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pseudo = false) {
|
function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pseudo = false) {
|
||||||
let parsePromises = [];
|
let parsePromises = [];
|
||||||
let mainPack = { version: i18nPackVersion, contents: {} };
|
let mainPack = { version: exports.i18nPackVersion, contents: {} };
|
||||||
let extensionsPacks = {};
|
let extensionsPacks = {};
|
||||||
let errors = [];
|
let errors = [];
|
||||||
return event_stream_1.through(function (xlf) {
|
return (0, event_stream_1.through)(function (xlf) {
|
||||||
let project = path.basename(path.dirname(xlf.relative));
|
let project = path.basename(path.dirname(xlf.relative));
|
||||||
let resource = path.basename(xlf.relative, '.xlf');
|
let resource = path.basename(xlf.relative, '.xlf');
|
||||||
let contents = xlf.contents.toString();
|
let contents = xlf.contents.toString();
|
||||||
@@ -1062,7 +1067,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
|
|||||||
if (project === extensionsProject) {
|
if (project === extensionsProject) {
|
||||||
let extPack = extensionsPacks[resource];
|
let extPack = extensionsPacks[resource];
|
||||||
if (!extPack) {
|
if (!extPack) {
|
||||||
extPack = extensionsPacks[resource] = { version: i18nPackVersion, contents: {} };
|
extPack = extensionsPacks[resource] = { version: exports.i18nPackVersion, contents: {} };
|
||||||
}
|
}
|
||||||
const externalId = externalExtensions[resource];
|
const externalId = externalExtensions[resource];
|
||||||
if (!externalId) { // internal extension: remove 'extensions/extensionId/' segnent
|
if (!externalId) { // internal extension: remove 'extensions/extensionId/' segnent
|
||||||
@@ -1110,7 +1115,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
|
|||||||
exports.prepareI18nPackFiles = prepareI18nPackFiles;
|
exports.prepareI18nPackFiles = prepareI18nPackFiles;
|
||||||
function prepareIslFiles(language, innoSetupConfig) {
|
function prepareIslFiles(language, innoSetupConfig) {
|
||||||
let parsePromises = [];
|
let parsePromises = [];
|
||||||
return event_stream_1.through(function (xlf) {
|
return (0, event_stream_1.through)(function (xlf) {
|
||||||
let stream = this;
|
let stream = this;
|
||||||
let parsePromise = XLF.parse(xlf.contents.toString());
|
let parsePromise = XLF.parse(xlf.contents.toString());
|
||||||
parsePromises.push(parsePromise);
|
parsePromises.push(parsePromise);
|
||||||
|
|||||||
@@ -30,10 +30,6 @@
|
|||||||
"name": "vs/workbench/api/common",
|
"name": "vs/workbench/api/common",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"name": "vs/workbench/contrib/backup",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/bulkEdit",
|
"name": "vs/workbench/contrib/bulkEdit",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -218,6 +214,10 @@
|
|||||||
"name": "vs/workbench/contrib/webviewPanel",
|
"name": "vs/workbench/contrib/webviewPanel",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/contrib/workspace",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/workspaces",
|
"name": "vs/workbench/contrib/workspaces",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -254,6 +254,10 @@
|
|||||||
"name": "vs/workbench/services/authToken",
|
"name": "vs/workbench/services/authToken",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/services/backup",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/bulkEdit",
|
"name": "vs/workbench/services/bulkEdit",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -294,6 +298,10 @@
|
|||||||
"name": "vs/workbench/services/files",
|
"name": "vs/workbench/services/files",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/services/history",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/log",
|
"name": "vs/workbench/services/log",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -389,6 +397,10 @@
|
|||||||
{
|
{
|
||||||
"name": "vs/workbench/services/gettingStarted",
|
"name": "vs/workbench/services/gettingStarted",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/services/host",
|
||||||
|
"project": "vscode-workbench"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -64,7 +64,7 @@ export const externalExtensionsWithTranslations = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
interface Map<V> {
|
export interface Map<V> { // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||||
[key: string]: V;
|
[key: string]: V;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -79,7 +79,7 @@ export interface Resource {
|
|||||||
project: string;
|
project: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface ParsedXLF {
|
export interface ParsedXLF { // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||||
messages: Map<string>;
|
messages: Map<string>;
|
||||||
originalFilePath: string;
|
originalFilePath: string;
|
||||||
language: string;
|
language: string;
|
||||||
@@ -339,13 +339,14 @@ export class XLF {
|
|||||||
|
|
||||||
let val = unit.target[0];
|
let val = unit.target[0];
|
||||||
if (typeof val !== 'string') {
|
if (typeof val !== 'string') {
|
||||||
val = val._;
|
// We allow empty source values so support them for translations as well.
|
||||||
|
val = val._ ? val._ : '';
|
||||||
}
|
}
|
||||||
if (key && val) {
|
if (!key) {
|
||||||
messages[key] = decodeEntities(val);
|
reject(new Error(`XLF parsing error: trans-unit ${JSON.stringify(unit, undefined, 0)} defined in file ${originalFilePath} is missing the ID attribute.`));
|
||||||
} else {
|
return;
|
||||||
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
|
|
||||||
}
|
}
|
||||||
|
messages[key] = decodeEntities(val);
|
||||||
});
|
});
|
||||||
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
|
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
|
||||||
}
|
}
|
||||||
@@ -610,7 +611,7 @@ const editorProject: string = 'vscode-editor',
|
|||||||
setupProject: string = 'vscode-setup';
|
setupProject: string = 'vscode-setup';
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const sqlopsProject: string = 'sqlops-core';
|
const adsProject: string = 'ads-core';
|
||||||
|
|
||||||
export function getResource(sourceFile: string): Resource {
|
export function getResource(sourceFile: string): Resource {
|
||||||
let resource: string;
|
let resource: string;
|
||||||
@@ -637,7 +638,7 @@ export function getResource(sourceFile: string): Resource {
|
|||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
else if (/^sql/.test(sourceFile)) {
|
else if (/^sql/.test(sourceFile)) {
|
||||||
return { name: 'sql', project: sqlopsProject };
|
return { name: 'sql', project: adsProject };
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
|
throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
|
||||||
@@ -651,7 +652,11 @@ export function createXlfFilesForCoreBundle(): ThroughStream {
|
|||||||
if (file.isBuffer()) {
|
if (file.isBuffer()) {
|
||||||
const xlfs: Map<XLF> = Object.create(null);
|
const xlfs: Map<XLF> = Object.create(null);
|
||||||
const json: BundledFormat = JSON.parse((file.contents as Buffer).toString('utf8'));
|
const json: BundledFormat = JSON.parse((file.contents as Buffer).toString('utf8'));
|
||||||
for (let coreModule in json.keys) {
|
// {{SQL CARBON EDIT}} - Must sort the keys for easier translation.
|
||||||
|
let sortedKeys = Object.keys(json.keys).sort();
|
||||||
|
for (let i = 0; i < sortedKeys.length; i++) {
|
||||||
|
let coreModule = sortedKeys[i];
|
||||||
|
// {{SQL CARBON EDIT}} - End
|
||||||
const projectResource = getResource(coreModule);
|
const projectResource = getResource(coreModule);
|
||||||
const resource = projectResource.name;
|
const resource = projectResource.name;
|
||||||
const project = projectResource.project;
|
const project = projectResource.project;
|
||||||
@@ -1166,7 +1171,7 @@ export function prepareI18nFiles(): ThroughStream {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function createI18nFile(originalFilePath: string, messages: any): File {
|
export function createI18nFile(originalFilePath: string, messages: any): File { // {{SQL CARBON EDIT}} Needed for locfunc.
|
||||||
let result = Object.create(null);
|
let result = Object.create(null);
|
||||||
result[''] = [
|
result[''] = [
|
||||||
'--------------------------------------------------------------------------------------------',
|
'--------------------------------------------------------------------------------------------',
|
||||||
@@ -1189,14 +1194,14 @@ function createI18nFile(originalFilePath: string, messages: any): File {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
interface I18nPack {
|
export interface I18nPack { // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||||
version: string;
|
version: string;
|
||||||
contents: {
|
contents: {
|
||||||
[path: string]: Map<string>;
|
[path: string]: Map<string>;
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
const i18nPackVersion = '1.0.0';
|
export const i18nPackVersion = '1.0.0'; // {{SQL CARBON EDIT}} Needed in locfunc.
|
||||||
|
|
||||||
export interface TranslationPath {
|
export interface TranslationPath {
|
||||||
id: string;
|
id: string;
|
||||||
|
|||||||
@@ -56,7 +56,9 @@ const CORE_TYPES = [
|
|||||||
const NATIVE_TYPES = [
|
const NATIVE_TYPES = [
|
||||||
'NativeParsedArgs',
|
'NativeParsedArgs',
|
||||||
'INativeEnvironmentService',
|
'INativeEnvironmentService',
|
||||||
'INativeWindowConfiguration'
|
'AbstractNativeEnvironmentService',
|
||||||
|
'INativeWindowConfiguration',
|
||||||
|
'ICommonNativeHostService'
|
||||||
];
|
];
|
||||||
const RULES = [
|
const RULES = [
|
||||||
// Tests: skip
|
// Tests: skip
|
||||||
@@ -79,19 +81,9 @@ const RULES = [
|
|||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
// Common: vs/platform/environment/common/argv.ts
|
// Common: vs/platform/environment/common/*
|
||||||
{
|
{
|
||||||
target: '**/{vs,sql}/platform/environment/common/argv.ts',
|
target: '**/{vs,sql}/platform/environment/common/*.ts',
|
||||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
|
||||||
allowedTypes: CORE_TYPES,
|
|
||||||
disallowedDefinitions: [
|
|
||||||
'lib.dom.d.ts',
|
|
||||||
'@types/node' // no node.js
|
|
||||||
]
|
|
||||||
},
|
|
||||||
// Common: vs/platform/environment/common/environment.ts
|
|
||||||
{
|
|
||||||
target: '**/{vs,sql}/platform/environment/common/environment.ts',
|
|
||||||
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
@@ -109,6 +101,16 @@ const RULES = [
|
|||||||
'@types/node' // no node.js
|
'@types/node' // no node.js
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
// Common: vs/platform/native/common/native.ts
|
||||||
|
{
|
||||||
|
target: '**/vs/platform/native/common/native.ts',
|
||||||
|
disallowedTypes: [ /* Ignore native types that are defined from here */],
|
||||||
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedDefinitions: [
|
||||||
|
'lib.dom.d.ts',
|
||||||
|
'@types/node' // no node.js
|
||||||
|
]
|
||||||
|
},
|
||||||
// Common: vs/workbench/api/common/extHostExtensionService.ts
|
// Common: vs/workbench/api/common/extHostExtensionService.ts
|
||||||
{
|
{
|
||||||
target: '**/{vs,sql}/workbench/api/common/extHostExtensionService.ts',
|
target: '**/{vs,sql}/workbench/api/common/extHostExtensionService.ts',
|
||||||
@@ -197,7 +199,7 @@ const RULES = [
|
|||||||
]
|
]
|
||||||
}
|
}
|
||||||
];
|
];
|
||||||
const TS_CONFIG_PATH = path_1.join(__dirname, '../../', 'src', 'tsconfig.json');
|
const TS_CONFIG_PATH = (0, path_1.join)(__dirname, '../../', 'src', 'tsconfig.json');
|
||||||
let hasErrors = false;
|
let hasErrors = false;
|
||||||
function checkFile(program, sourceFile, rule) {
|
function checkFile(program, sourceFile, rule) {
|
||||||
checkNode(sourceFile);
|
checkNode(sourceFile);
|
||||||
@@ -248,8 +250,8 @@ function checkFile(program, sourceFile, rule) {
|
|||||||
}
|
}
|
||||||
function createProgram(tsconfigPath) {
|
function createProgram(tsconfigPath) {
|
||||||
const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile);
|
const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile);
|
||||||
const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => fs_1.readFileSync(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
|
const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => (0, fs_1.readFileSync)(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
|
||||||
const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, path_1.resolve(path_1.dirname(tsconfigPath)), { noEmit: true });
|
const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, (0, path_1.resolve)((0, path_1.dirname)(tsconfigPath)), { noEmit: true });
|
||||||
const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true);
|
const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true);
|
||||||
return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost);
|
return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost);
|
||||||
}
|
}
|
||||||
@@ -259,7 +261,7 @@ function createProgram(tsconfigPath) {
|
|||||||
const program = createProgram(TS_CONFIG_PATH);
|
const program = createProgram(TS_CONFIG_PATH);
|
||||||
for (const sourceFile of program.getSourceFiles()) {
|
for (const sourceFile of program.getSourceFiles()) {
|
||||||
for (const rule of RULES) {
|
for (const rule of RULES) {
|
||||||
if (minimatch_1.match([sourceFile.fileName], rule.target).length > 0) {
|
if ((0, minimatch_1.match)([sourceFile.fileName], rule.target).length > 0) {
|
||||||
if (!rule.skip) {
|
if (!rule.skip) {
|
||||||
checkFile(program, sourceFile, rule);
|
checkFile(program, sourceFile, rule);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -58,7 +58,9 @@ const CORE_TYPES = [
|
|||||||
const NATIVE_TYPES = [
|
const NATIVE_TYPES = [
|
||||||
'NativeParsedArgs',
|
'NativeParsedArgs',
|
||||||
'INativeEnvironmentService',
|
'INativeEnvironmentService',
|
||||||
'INativeWindowConfiguration'
|
'AbstractNativeEnvironmentService',
|
||||||
|
'INativeWindowConfiguration',
|
||||||
|
'ICommonNativeHostService'
|
||||||
];
|
];
|
||||||
|
|
||||||
const RULES = [
|
const RULES = [
|
||||||
@@ -86,20 +88,9 @@ const RULES = [
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
|
||||||
// Common: vs/platform/environment/common/argv.ts
|
// Common: vs/platform/environment/common/*
|
||||||
{
|
{
|
||||||
target: '**/{vs,sql}/platform/environment/common/argv.ts',
|
target: '**/{vs,sql}/platform/environment/common/*.ts',
|
||||||
disallowedTypes: [/* Ignore native types that are defined from here */],
|
|
||||||
allowedTypes: CORE_TYPES,
|
|
||||||
disallowedDefinitions: [
|
|
||||||
'lib.dom.d.ts', // no DOM
|
|
||||||
'@types/node' // no node.js
|
|
||||||
]
|
|
||||||
},
|
|
||||||
|
|
||||||
// Common: vs/platform/environment/common/environment.ts
|
|
||||||
{
|
|
||||||
target: '**/{vs,sql}/platform/environment/common/environment.ts',
|
|
||||||
disallowedTypes: [/* Ignore native types that are defined from here */],
|
disallowedTypes: [/* Ignore native types that are defined from here */],
|
||||||
allowedTypes: CORE_TYPES,
|
allowedTypes: CORE_TYPES,
|
||||||
disallowedDefinitions: [
|
disallowedDefinitions: [
|
||||||
@@ -119,6 +110,17 @@ const RULES = [
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
|
||||||
|
// Common: vs/platform/native/common/native.ts
|
||||||
|
{
|
||||||
|
target: '**/vs/platform/native/common/native.ts',
|
||||||
|
disallowedTypes: [/* Ignore native types that are defined from here */],
|
||||||
|
allowedTypes: CORE_TYPES,
|
||||||
|
disallowedDefinitions: [
|
||||||
|
'lib.dom.d.ts', // no DOM
|
||||||
|
'@types/node' // no node.js
|
||||||
|
]
|
||||||
|
},
|
||||||
|
|
||||||
// Common: vs/workbench/api/common/extHostExtensionService.ts
|
// Common: vs/workbench/api/common/extHostExtensionService.ts
|
||||||
{
|
{
|
||||||
target: '**/{vs,sql}/workbench/api/common/extHostExtensionService.ts',
|
target: '**/{vs,sql}/workbench/api/common/extHostExtensionService.ts',
|
||||||
|
|||||||
@@ -4,14 +4,20 @@
|
|||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.packageLangpacksStream = void 0;
|
exports.renameVscodeLangpacks = exports.refreshLangpacks = exports.modifyI18nPackFiles = exports.packageSingleExtensionStream = exports.packageLangpacksStream = void 0;
|
||||||
const es = require("event-stream");
|
const es = require("event-stream");
|
||||||
const path = require("path");
|
const path = require("path");
|
||||||
const fs = require("fs");
|
|
||||||
const stats_1 = require("./stats");
|
|
||||||
const File = require("vinyl");
|
|
||||||
const glob = require("glob");
|
const glob = require("glob");
|
||||||
const rename = require("gulp-rename");
|
const rename = require("gulp-rename");
|
||||||
|
const ext = require("./extensions");
|
||||||
|
//imports for langpack refresh.
|
||||||
|
const event_stream_1 = require("event-stream");
|
||||||
|
const i18n = require("./i18n");
|
||||||
|
const fs = require("fs");
|
||||||
|
const File = require("vinyl");
|
||||||
|
const rimraf = require("rimraf");
|
||||||
|
const gulp = require("gulp");
|
||||||
|
const vfs = require("vinyl-fs");
|
||||||
const root = path.dirname(path.dirname(__dirname));
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
// Modified packageLocalExtensionsStream from extensions.ts, but for langpacks.
|
// Modified packageLocalExtensionsStream from extensions.ts, but for langpacks.
|
||||||
function packageLangpacksStream() {
|
function packageLangpacksStream() {
|
||||||
@@ -22,28 +28,358 @@ function packageLangpacksStream() {
|
|||||||
return { name: langpackName, path: langpackPath };
|
return { name: langpackName, path: langpackPath };
|
||||||
});
|
});
|
||||||
const builtLangpacks = langpackDescriptions.map(langpack => {
|
const builtLangpacks = langpackDescriptions.map(langpack => {
|
||||||
return fromLocalNormal(langpack.path)
|
return ext.fromLocalNormal(langpack.path)
|
||||||
.pipe(rename(p => p.dirname = `langpacks/${langpack.name}/${p.dirname}`));
|
.pipe(rename(p => p.dirname = `langpacks/${langpack.name}/${p.dirname}`));
|
||||||
});
|
});
|
||||||
return es.merge(builtLangpacks);
|
return es.merge(builtLangpacks);
|
||||||
}
|
}
|
||||||
exports.packageLangpacksStream = packageLangpacksStream;
|
exports.packageLangpacksStream = packageLangpacksStream;
|
||||||
//copied from extensions.
|
// Modified packageLocalExtensionsStream but for any ADS extensions including excluded/external ones.
|
||||||
function fromLocalNormal(extensionPath) {
|
function packageSingleExtensionStream(name) {
|
||||||
const result = es.through();
|
const extenalExtensionDescriptions = glob.sync(`extensions/${name}/package.json`)
|
||||||
const vsce = require('vsce');
|
.map(manifestPath => {
|
||||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
.then(fileNames => {
|
const extensionName = path.basename(extensionPath);
|
||||||
const files = fileNames
|
return { name: extensionName, path: extensionPath };
|
||||||
.map(fileName => path.join(extensionPath, fileName))
|
});
|
||||||
.map(filePath => new File({
|
const builtExtension = extenalExtensionDescriptions.map(extension => {
|
||||||
path: filePath,
|
return ext.fromLocal(extension.path, false)
|
||||||
stat: fs.statSync(filePath),
|
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||||
base: extensionPath,
|
});
|
||||||
contents: fs.createReadStream(filePath)
|
return es.merge(builtExtension);
|
||||||
}));
|
|
||||||
es.readArray(files).pipe(result);
|
|
||||||
})
|
|
||||||
.catch(err => result.emit('error', err));
|
|
||||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
|
||||||
}
|
}
|
||||||
|
exports.packageSingleExtensionStream = packageSingleExtensionStream;
|
||||||
|
// Langpack creation functions go here.
|
||||||
|
/**
|
||||||
|
* Function combines the contents of the SQL core XLF file into the current main i18n file contianing the vs core strings.
|
||||||
|
* Based on createI18nFile in i18n.ts
|
||||||
|
*/
|
||||||
|
function updateMainI18nFile(existingTranslationFilePath, originalFilePath, messages) {
|
||||||
|
let currFilePath = path.join(existingTranslationFilePath + '.i18n.json');
|
||||||
|
let currentContent = fs.readFileSync(currFilePath);
|
||||||
|
let currentContentObject = JSON.parse(currentContent.toString());
|
||||||
|
let objectContents = currentContentObject.contents;
|
||||||
|
let result = Object.create(null);
|
||||||
|
// Delete any SQL strings that are no longer part of ADS in current langpack.
|
||||||
|
for (let contentKey of Object.keys(objectContents)) {
|
||||||
|
if (contentKey.startsWith('sql') && messages.contents[contentKey] === undefined) {
|
||||||
|
delete objectContents[`${contentKey}`];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
messages.contents = Object.assign(Object.assign({}, objectContents), messages.contents);
|
||||||
|
result[''] = [
|
||||||
|
'--------------------------------------------------------------------------------------------',
|
||||||
|
'Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||||
|
'Licensed under the Source EULA. See License.txt in the project root for license information.',
|
||||||
|
'--------------------------------------------------------------------------------------------',
|
||||||
|
'Do not edit this file. It is machine generated.'
|
||||||
|
];
|
||||||
|
for (let key of Object.keys(messages)) {
|
||||||
|
result[key] = messages[key];
|
||||||
|
}
|
||||||
|
let content = JSON.stringify(result, null, '\t');
|
||||||
|
if (process.platform === 'win32') {
|
||||||
|
content = content.replace(/\n/g, '\r\n');
|
||||||
|
}
|
||||||
|
return new File({
|
||||||
|
path: path.join(originalFilePath + '.i18n.json'),
|
||||||
|
contents: Buffer.from(content, 'utf8'),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Function handles the processing of xlf resources and turning them into i18n.json files.
|
||||||
|
* It adds the i18n files translation paths to be added back into package.main.
|
||||||
|
* Based on prepareI18nPackFiles in i18n.ts
|
||||||
|
*/
|
||||||
|
function modifyI18nPackFiles(existingTranslationFolder, resultingTranslationPaths, pseudo = false) {
|
||||||
|
let parsePromises = [];
|
||||||
|
let mainPack = { version: i18n.i18nPackVersion, contents: {} };
|
||||||
|
let extensionsPacks = {};
|
||||||
|
let errors = [];
|
||||||
|
return (0, event_stream_1.through)(function (xlf) {
|
||||||
|
let rawResource = path.basename(xlf.relative, '.xlf');
|
||||||
|
let resource = rawResource.substring(0, rawResource.lastIndexOf('.'));
|
||||||
|
let contents = xlf.contents.toString();
|
||||||
|
let parsePromise = pseudo ? i18n.XLF.parsePseudo(contents) : i18n.XLF.parse(contents);
|
||||||
|
parsePromises.push(parsePromise);
|
||||||
|
parsePromise.then(resolvedFiles => {
|
||||||
|
resolvedFiles.forEach(file => {
|
||||||
|
const path = file.originalFilePath;
|
||||||
|
const firstSlash = path.indexOf('/');
|
||||||
|
//exclude core sql file from extension processing.
|
||||||
|
if (resource !== 'sql') {
|
||||||
|
let extPack = extensionsPacks[resource];
|
||||||
|
if (!extPack) {
|
||||||
|
extPack = extensionsPacks[resource] = { version: i18n.i18nPackVersion, contents: {} };
|
||||||
|
}
|
||||||
|
//remove extensions/extensionId section as all extensions will be webpacked.
|
||||||
|
const secondSlash = path.indexOf('/', firstSlash + 1);
|
||||||
|
extPack.contents[path.substr(secondSlash + 1)] = file.messages;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
mainPack.contents[path.substr(firstSlash + 1)] = file.messages;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}).catch(reason => {
|
||||||
|
errors.push(reason);
|
||||||
|
});
|
||||||
|
}, function () {
|
||||||
|
Promise.all(parsePromises)
|
||||||
|
.then(() => {
|
||||||
|
if (errors.length > 0) {
|
||||||
|
throw errors;
|
||||||
|
}
|
||||||
|
const translatedMainFile = updateMainI18nFile(existingTranslationFolder + '\\main', './main', mainPack);
|
||||||
|
this.queue(translatedMainFile);
|
||||||
|
for (let extension in extensionsPacks) {
|
||||||
|
const translatedExtFile = i18n.createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
|
||||||
|
this.queue(translatedExtFile);
|
||||||
|
//handle edge case for 'Microsoft.sqlservernotebook' where extension name is the same as extension ID.
|
||||||
|
//(Other extensions need to have publisher appended in front as their ID.)
|
||||||
|
const adsExtensionId = (extension === 'Microsoft.sqlservernotebook') ? extension : 'Microsoft.' + extension;
|
||||||
|
resultingTranslationPaths.push({ id: adsExtensionId, resourceName: `extensions/${extension}.i18n.json` });
|
||||||
|
}
|
||||||
|
this.queue(null);
|
||||||
|
})
|
||||||
|
.catch((reason) => {
|
||||||
|
this.emit('error', reason);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
exports.modifyI18nPackFiles = modifyI18nPackFiles;
|
||||||
|
const textFields = {
|
||||||
|
"nameText": 'ads',
|
||||||
|
"displayNameText": 'Azure Data Studio',
|
||||||
|
"publisherText": 'Microsoft',
|
||||||
|
"licenseText": 'SEE SOURCE EULA LICENSE IN LICENSE.txt',
|
||||||
|
"updateText": 'cd ../vscode && npm run update-localization-extension ',
|
||||||
|
"vscodeVersion": '*',
|
||||||
|
"azdataPlaceholder": '^0.0.0',
|
||||||
|
"gitUrl": 'https://github.com/Microsoft/azuredatastudio'
|
||||||
|
};
|
||||||
|
//list of extensions from vscode that are to be included with ADS.
|
||||||
|
const VSCODEExtensions = [
|
||||||
|
"bat",
|
||||||
|
"configuration-editing",
|
||||||
|
"docker",
|
||||||
|
"extension-editing",
|
||||||
|
"git-ui",
|
||||||
|
"git",
|
||||||
|
"github-authentication",
|
||||||
|
"github",
|
||||||
|
"image-preview",
|
||||||
|
"json-language-features",
|
||||||
|
"json",
|
||||||
|
"markdown-basics",
|
||||||
|
"markdown-language-features",
|
||||||
|
"merge-conflict",
|
||||||
|
"microsoft-authentication",
|
||||||
|
"powershell",
|
||||||
|
"python",
|
||||||
|
"r",
|
||||||
|
"search-result",
|
||||||
|
"sql",
|
||||||
|
"theme-abyss",
|
||||||
|
"theme-defaults",
|
||||||
|
"theme-kimbie-dark",
|
||||||
|
"theme-monokai-dimmed",
|
||||||
|
"theme-monokai",
|
||||||
|
"theme-quietlight",
|
||||||
|
"theme-red",
|
||||||
|
"theme-seti",
|
||||||
|
"theme-solarized-dark",
|
||||||
|
"theme-solarized-light",
|
||||||
|
"theme-tomorrow-night-blue",
|
||||||
|
"typescript-basics",
|
||||||
|
"xml",
|
||||||
|
"yaml"
|
||||||
|
];
|
||||||
|
/**
|
||||||
|
* A heavily modified version of update-localization-extension that runs using local xlf resources, no arguments required to pass in.
|
||||||
|
* It converts a renamed vscode langpack to an ADS one or updates the existing langpack to use current XLF resources.
|
||||||
|
* It runs this process on all langpacks currently in the ADS i18n folder.
|
||||||
|
* (Replace an individual ADS langpack folder with a corresponding vscode langpack folder renamed to "ads" instead of "vscode"
|
||||||
|
* in order to update vscode core strings and extensions for that langpack)
|
||||||
|
*
|
||||||
|
* It removes the resources of vscode that we do not support, and adds in new i18n json files created from the xlf files in the folder.
|
||||||
|
* It also merges in the sql core XLF strings with the langpack's existing core strings into a combined main i18n json file.
|
||||||
|
*
|
||||||
|
* After running this gulp task, for each language pack:
|
||||||
|
*
|
||||||
|
* 1. Remember to change the version of the langpacks to continue from the previous version of the ADS langpack.
|
||||||
|
*
|
||||||
|
* 2. Also change the azdata version to match the current ADS version number.
|
||||||
|
*
|
||||||
|
* 3. Update the changelog with the new version of the language pack.
|
||||||
|
*
|
||||||
|
* IMPORTANT: If you have run this gulp task on langpacks that originated from vscode, for each affected vscode langpack, you must
|
||||||
|
* replace the changelog and readme files with the ones from the previous ADS version of the langpack before doing the above steps.
|
||||||
|
*
|
||||||
|
* This is mainly for consistency with previous langpacks and to provide proper information to the user.
|
||||||
|
*/
|
||||||
|
function refreshLangpacks() {
|
||||||
|
let supportedLocations = [...i18n.defaultLanguages, ...i18n.extraLanguages];
|
||||||
|
for (let i = 0; i < supportedLocations.length; i++) {
|
||||||
|
let langId = supportedLocations[i].id;
|
||||||
|
if (langId === "zh-cn") {
|
||||||
|
langId = "zh-hans";
|
||||||
|
}
|
||||||
|
if (langId === "zh-tw") {
|
||||||
|
langId = "zh-hant";
|
||||||
|
}
|
||||||
|
let location = path.join('.', 'resources', 'xlf');
|
||||||
|
let locExtFolder = path.join('.', 'i18n', `ads-language-pack-${langId}`);
|
||||||
|
try {
|
||||||
|
fs.statSync(locExtFolder);
|
||||||
|
}
|
||||||
|
catch (_a) {
|
||||||
|
console.log('Language is not included in ADS yet: ' + langId);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let packageJSON = JSON.parse(fs.readFileSync(path.join(locExtFolder, 'package.json')).toString());
|
||||||
|
//processing extension fields, version and folder name must be changed manually.
|
||||||
|
packageJSON['name'] = packageJSON['name'].replace('vscode', textFields.nameText).toLowerCase();
|
||||||
|
packageJSON['displayName'] = packageJSON['displayName'].replace('Visual Studio Code', textFields.displayNameText);
|
||||||
|
packageJSON['publisher'] = textFields.publisherText;
|
||||||
|
packageJSON['license'] = textFields.licenseText;
|
||||||
|
packageJSON['scripts']['update'] = textFields.updateText + langId;
|
||||||
|
packageJSON['engines']['vscode'] = textFields.vscodeVersion;
|
||||||
|
packageJSON['repository']['url'] = textFields.gitUrl;
|
||||||
|
packageJSON['engines']['azdata'] = textFields.azdataPlaceholder; // Remember to change this to the appropriate version at the end.
|
||||||
|
let contributes = packageJSON['contributes'];
|
||||||
|
if (!contributes) {
|
||||||
|
throw new Error('The extension must define a "localizations" contribution in the "package.json"');
|
||||||
|
}
|
||||||
|
let localizations = contributes['localizations'];
|
||||||
|
if (!localizations) {
|
||||||
|
throw new Error('The extension must define a "localizations" contribution of type array in the "package.json"');
|
||||||
|
}
|
||||||
|
localizations.forEach(function (localization) {
|
||||||
|
if (!localization.languageId || !localization.languageName || !localization.localizedLanguageName) {
|
||||||
|
throw new Error('Each localization contribution must define "languageId", "languageName" and "localizedLanguageName" properties.');
|
||||||
|
}
|
||||||
|
let languageId = localization.transifexId || localization.languageId;
|
||||||
|
let translationDataFolder = path.join(locExtFolder, 'translations');
|
||||||
|
if (languageId === "zh-cn") {
|
||||||
|
languageId = "zh-hans";
|
||||||
|
}
|
||||||
|
if (languageId === "zh-tw") {
|
||||||
|
languageId = "zh-hant";
|
||||||
|
}
|
||||||
|
console.log(`Importing translations for ${languageId} from '${location}' to '${translationDataFolder}' ...`);
|
||||||
|
let translationPaths = [];
|
||||||
|
gulp.src(path.join(location, languageId, '**', '*.xlf'))
|
||||||
|
.pipe(modifyI18nPackFiles(translationDataFolder, translationPaths, languageId === 'ps'))
|
||||||
|
.on('error', (error) => {
|
||||||
|
console.log(`Error occurred while importing translations:`);
|
||||||
|
translationPaths = undefined;
|
||||||
|
if (Array.isArray(error)) {
|
||||||
|
error.forEach(console.log);
|
||||||
|
}
|
||||||
|
else if (error) {
|
||||||
|
console.log(error);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
console.log('Unknown error');
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.pipe(vfs.dest(translationDataFolder))
|
||||||
|
.on('end', function () {
|
||||||
|
if (translationPaths !== undefined) {
|
||||||
|
let nonExistantExtensions = [];
|
||||||
|
for (let curr of localization.translations) {
|
||||||
|
try {
|
||||||
|
if (curr.id === 'vscode.theme-seti') {
|
||||||
|
//handle edge case where 'theme-seti' has a different id.
|
||||||
|
curr.id = 'vscode.vscode-theme-seti';
|
||||||
|
}
|
||||||
|
fs.statSync(path.join(translationDataFolder, curr.path.replace('./translations', '')));
|
||||||
|
}
|
||||||
|
catch (_a) {
|
||||||
|
nonExistantExtensions.push(curr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (let nonExt of nonExistantExtensions) {
|
||||||
|
let index = localization.translations.indexOf(nonExt);
|
||||||
|
if (index > -1) {
|
||||||
|
localization.translations.splice(index, 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (let tp of translationPaths) {
|
||||||
|
let finalPath = `./translations/${tp.resourceName}`;
|
||||||
|
let isFound = false;
|
||||||
|
for (let i = 0; i < localization.translations.length; i++) {
|
||||||
|
if (localization.translations[i].path === finalPath) {
|
||||||
|
localization.translations[i].id = tp.id;
|
||||||
|
isFound = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!isFound) {
|
||||||
|
localization.translations.push({ id: tp.id, path: finalPath });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fs.writeFileSync(path.join(locExtFolder, 'package.json'), JSON.stringify(packageJSON, null, '\t'));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
console.log("Langpack Refresh Completed.");
|
||||||
|
return Promise.resolve();
|
||||||
|
}
|
||||||
|
exports.refreshLangpacks = refreshLangpacks;
|
||||||
|
/**
|
||||||
|
* Function for adding replacing ads language packs with vscode ones.
|
||||||
|
* For new languages, remember to add to i18n.extraLanguages so that it will be recognized by ADS.
|
||||||
|
*/
|
||||||
|
function renameVscodeLangpacks() {
|
||||||
|
let supportedLocations = [...i18n.defaultLanguages, ...i18n.extraLanguages];
|
||||||
|
for (let i = 0; i < supportedLocations.length; i++) {
|
||||||
|
let langId = supportedLocations[i].id;
|
||||||
|
if (langId === "zh-cn") {
|
||||||
|
langId = "zh-hans";
|
||||||
|
}
|
||||||
|
if (langId === "zh-tw") {
|
||||||
|
langId = "zh-hant";
|
||||||
|
}
|
||||||
|
let locADSFolder = path.join('.', 'i18n', `ads-language-pack-${langId}`);
|
||||||
|
let locVSCODEFolder = path.join('.', 'i18n', `vscode-language-pack-${langId}`);
|
||||||
|
let translationDataFolder = path.join(locVSCODEFolder, 'translations');
|
||||||
|
let xlfFolder = path.join('.', 'resources', 'xlf');
|
||||||
|
try {
|
||||||
|
fs.statSync(locVSCODEFolder);
|
||||||
|
}
|
||||||
|
catch (_a) {
|
||||||
|
console.log('vscode pack is not in ADS yet: ' + langId);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
//Delete any erroneous zip files found in vscode folder.
|
||||||
|
let globZipArray = glob.sync(path.join(locVSCODEFolder, '*.zip'));
|
||||||
|
globZipArray.forEach(element => {
|
||||||
|
fs.unlinkSync(element);
|
||||||
|
});
|
||||||
|
// Delete extension files in vscode language pack that are not in ADS.
|
||||||
|
if (fs.existsSync(translationDataFolder)) {
|
||||||
|
let totalExtensions = fs.readdirSync(path.join(translationDataFolder, 'extensions'));
|
||||||
|
for (let extensionTag in totalExtensions) {
|
||||||
|
let extensionFileName = totalExtensions[extensionTag];
|
||||||
|
let xlfPath = path.join(xlfFolder, `${langId}`, extensionFileName.replace('.i18n.json', '.xlf'));
|
||||||
|
if (!(fs.existsSync(xlfPath) || VSCODEExtensions.indexOf(extensionFileName.replace('.i18n.json', '')) !== -1)) {
|
||||||
|
let filePath = path.join(translationDataFolder, 'extensions', extensionFileName);
|
||||||
|
rimraf.sync(filePath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//Get list of md files in ADS langpack, to copy to vscode langpack prior to renaming.
|
||||||
|
let globMDArray = glob.sync(path.join(locADSFolder, '*.md'));
|
||||||
|
//Copy files to vscode langpack, then remove the ADS langpack, and finally rename the vscode langpack to match the ADS one.
|
||||||
|
globMDArray.forEach(element => {
|
||||||
|
fs.copyFileSync(element, path.join(locVSCODEFolder, path.parse(element).base));
|
||||||
|
});
|
||||||
|
rimraf.sync(locADSFolder);
|
||||||
|
fs.renameSync(locVSCODEFolder, locADSFolder);
|
||||||
|
}
|
||||||
|
console.log("Langpack Rename Completed.");
|
||||||
|
return Promise.resolve();
|
||||||
|
}
|
||||||
|
exports.renameVscodeLangpacks = renameVscodeLangpacks;
|
||||||
|
|||||||
@@ -5,12 +5,17 @@
|
|||||||
|
|
||||||
import * as es from 'event-stream';
|
import * as es from 'event-stream';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import * as fs from 'fs';
|
|
||||||
import { createStatsStream } from './stats';
|
|
||||||
import * as File from 'vinyl';
|
|
||||||
import { Stream } from 'stream';
|
|
||||||
import * as glob from 'glob';
|
import * as glob from 'glob';
|
||||||
import rename = require('gulp-rename');
|
import rename = require('gulp-rename');
|
||||||
|
import ext = require('./extensions');
|
||||||
|
//imports for langpack refresh.
|
||||||
|
import { through, ThroughStream } from 'event-stream';
|
||||||
|
import i18n = require('./i18n')
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as File from 'vinyl';
|
||||||
|
import * as rimraf from 'rimraf';
|
||||||
|
import * as gulp from 'gulp';
|
||||||
|
import * as vfs from 'vinyl-fs';
|
||||||
|
|
||||||
const root = path.dirname(path.dirname(__dirname));
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
|
|
||||||
@@ -24,33 +29,384 @@ export function packageLangpacksStream(): NodeJS.ReadWriteStream {
|
|||||||
})
|
})
|
||||||
|
|
||||||
const builtLangpacks = langpackDescriptions.map(langpack => {
|
const builtLangpacks = langpackDescriptions.map(langpack => {
|
||||||
return fromLocalNormal(langpack.path)
|
return ext.fromLocalNormal(langpack.path)
|
||||||
.pipe(rename(p => p.dirname = `langpacks/${langpack.name}/${p.dirname}`));
|
.pipe(rename(p => p.dirname = `langpacks/${langpack.name}/${p.dirname}`));
|
||||||
});
|
});
|
||||||
|
|
||||||
return es.merge(builtLangpacks);
|
return es.merge(builtLangpacks);
|
||||||
}
|
}
|
||||||
|
|
||||||
//copied from extensions.
|
// Modified packageLocalExtensionsStream but for any ADS extensions including excluded/external ones.
|
||||||
function fromLocalNormal(extensionPath: string): Stream {
|
export function packageSingleExtensionStream(name: string): NodeJS.ReadWriteStream {
|
||||||
const result = es.through();
|
const extenalExtensionDescriptions = (<string[]>glob.sync(`extensions/${name}/package.json`))
|
||||||
|
.map(manifestPath => {
|
||||||
const vsce = require('vsce') as typeof import('vsce');
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
|
const extensionName = path.basename(extensionPath);
|
||||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
return { name: extensionName, path: extensionPath };
|
||||||
.then(fileNames => {
|
|
||||||
const files = fileNames
|
|
||||||
.map(fileName => path.join(extensionPath, fileName))
|
|
||||||
.map(filePath => new File({
|
|
||||||
path: filePath,
|
|
||||||
stat: fs.statSync(filePath),
|
|
||||||
base: extensionPath,
|
|
||||||
contents: fs.createReadStream(filePath) as any
|
|
||||||
}));
|
|
||||||
|
|
||||||
es.readArray(files).pipe(result);
|
|
||||||
})
|
})
|
||||||
.catch(err => result.emit('error', err));
|
|
||||||
|
|
||||||
return result.pipe(createStatsStream(path.basename(extensionPath)));
|
const builtExtension = extenalExtensionDescriptions.map(extension => {
|
||||||
|
return ext.fromLocal(extension.path, false)
|
||||||
|
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||||
|
});
|
||||||
|
|
||||||
|
return es.merge(builtExtension);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Langpack creation functions go here.
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Function combines the contents of the SQL core XLF file into the current main i18n file contianing the vs core strings.
|
||||||
|
* Based on createI18nFile in i18n.ts
|
||||||
|
*/
|
||||||
|
function updateMainI18nFile(existingTranslationFilePath: string, originalFilePath: string, messages: any): File {
|
||||||
|
let currFilePath = path.join(existingTranslationFilePath + '.i18n.json');
|
||||||
|
let currentContent = fs.readFileSync(currFilePath);
|
||||||
|
let currentContentObject = JSON.parse(currentContent.toString());
|
||||||
|
let objectContents = currentContentObject.contents;
|
||||||
|
let result = Object.create(null);
|
||||||
|
|
||||||
|
// Delete any SQL strings that are no longer part of ADS in current langpack.
|
||||||
|
for (let contentKey of Object.keys(objectContents)) {
|
||||||
|
if(contentKey.startsWith('sql') && messages.contents[contentKey] === undefined){
|
||||||
|
delete objectContents[`${contentKey}`]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
messages.contents = { ...objectContents, ...messages.contents };
|
||||||
|
result[''] = [
|
||||||
|
'--------------------------------------------------------------------------------------------',
|
||||||
|
'Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||||
|
'Licensed under the Source EULA. See License.txt in the project root for license information.',
|
||||||
|
'--------------------------------------------------------------------------------------------',
|
||||||
|
'Do not edit this file. It is machine generated.'
|
||||||
|
];
|
||||||
|
for (let key of Object.keys(messages)) {
|
||||||
|
result[key] = messages[key];
|
||||||
|
}
|
||||||
|
let content = JSON.stringify(result, null, '\t');
|
||||||
|
|
||||||
|
if (process.platform === 'win32') {
|
||||||
|
content = content.replace(/\n/g, '\r\n');
|
||||||
|
}
|
||||||
|
return new File({
|
||||||
|
path: path.join(originalFilePath + '.i18n.json'),
|
||||||
|
|
||||||
|
contents: Buffer.from(content, 'utf8'),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Function handles the processing of xlf resources and turning them into i18n.json files.
|
||||||
|
* It adds the i18n files translation paths to be added back into package.main.
|
||||||
|
* Based on prepareI18nPackFiles in i18n.ts
|
||||||
|
*/
|
||||||
|
export function modifyI18nPackFiles(existingTranslationFolder: string, resultingTranslationPaths: i18n.TranslationPath[], pseudo = false): NodeJS.ReadWriteStream {
|
||||||
|
let parsePromises: Promise<i18n.ParsedXLF[]>[] = [];
|
||||||
|
let mainPack: i18n.I18nPack = { version: i18n.i18nPackVersion, contents: {} };
|
||||||
|
let extensionsPacks: i18n.Map<i18n.I18nPack> = {};
|
||||||
|
let errors: any[] = [];
|
||||||
|
return through(function (this: ThroughStream, xlf: File) {
|
||||||
|
let rawResource = path.basename(xlf.relative, '.xlf');
|
||||||
|
let resource = rawResource.substring(0, rawResource.lastIndexOf('.'));
|
||||||
|
let contents = xlf.contents.toString();
|
||||||
|
let parsePromise = pseudo ? i18n.XLF.parsePseudo(contents) : i18n.XLF.parse(contents);
|
||||||
|
parsePromises.push(parsePromise);
|
||||||
|
parsePromise.then(
|
||||||
|
resolvedFiles => {
|
||||||
|
resolvedFiles.forEach(file => {
|
||||||
|
const path = file.originalFilePath;
|
||||||
|
const firstSlash = path.indexOf('/');
|
||||||
|
|
||||||
|
//exclude core sql file from extension processing.
|
||||||
|
if (resource !== 'sql') {
|
||||||
|
let extPack = extensionsPacks[resource];
|
||||||
|
if (!extPack) {
|
||||||
|
extPack = extensionsPacks[resource] = { version: i18n.i18nPackVersion, contents: {} };
|
||||||
|
}
|
||||||
|
//remove extensions/extensionId section as all extensions will be webpacked.
|
||||||
|
const secondSlash = path.indexOf('/', firstSlash + 1);
|
||||||
|
extPack.contents[path.substr(secondSlash + 1)] = file.messages;
|
||||||
|
} else {
|
||||||
|
mainPack.contents[path.substr(firstSlash + 1)] = file.messages;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
).catch(reason => {
|
||||||
|
errors.push(reason);
|
||||||
|
});
|
||||||
|
}, function () {
|
||||||
|
Promise.all(parsePromises)
|
||||||
|
.then(() => {
|
||||||
|
if (errors.length > 0) {
|
||||||
|
throw errors;
|
||||||
|
}
|
||||||
|
const translatedMainFile = updateMainI18nFile(existingTranslationFolder + '\\main', './main', mainPack);
|
||||||
|
|
||||||
|
this.queue(translatedMainFile);
|
||||||
|
for (let extension in extensionsPacks) {
|
||||||
|
const translatedExtFile = i18n.createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
|
||||||
|
this.queue(translatedExtFile);
|
||||||
|
|
||||||
|
//handle edge case for 'Microsoft.sqlservernotebook' where extension name is the same as extension ID.
|
||||||
|
//(Other extensions need to have publisher appended in front as their ID.)
|
||||||
|
const adsExtensionId = (extension === 'Microsoft.sqlservernotebook') ? extension : 'Microsoft.' + extension;
|
||||||
|
resultingTranslationPaths.push({ id: adsExtensionId, resourceName: `extensions/${extension}.i18n.json` });
|
||||||
|
}
|
||||||
|
this.queue(null);
|
||||||
|
})
|
||||||
|
.catch((reason) => {
|
||||||
|
this.emit('error', reason);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Replacement values used when rewriting a vscode langpack's package.json into an ADS one
// (see refreshLangpacks below).
const textFields = {
	// 'vscode' in the extension name is replaced with this prefix.
	"nameText": 'ads',
	// Substituted for 'Visual Studio Code' in the display name.
	"displayNameText": 'Azure Data Studio',
	"publisherText": 'Microsoft',
	"licenseText": 'SEE SOURCE EULA LICENSE IN LICENSE.txt',
	// Base of the 'update' npm script; the language id is appended at the use site.
	"updateText": 'cd ../vscode && npm run update-localization-extension ',
	// Langpacks accept any vscode engine version.
	"vscodeVersion": '*',
	// Placeholder azdata engine version; must be updated manually to the real ADS version.
	"azdataPlaceholder": '^0.0.0',
	"gitUrl": 'https://github.com/Microsoft/azuredatastudio'
}
|
||||||
|
|
||||||
|
//list of extensions from vscode that are to be included with ADS.
// Translation files for these extensions are kept even when ADS ships no matching XLF
// resource (see renameVscodeLangpacks).
const VSCODEExtensions = [
	"bat",
	"configuration-editing",
	"docker",
	"extension-editing",
	"git-ui",
	"git",
	"github-authentication",
	"github",
	"image-preview",
	"json-language-features",
	"json",
	"markdown-basics",
	"markdown-language-features",
	"merge-conflict",
	"microsoft-authentication",
	"powershell",
	"python",
	"r",
	"search-result",
	"sql",
	"theme-abyss",
	"theme-defaults",
	"theme-kimbie-dark",
	"theme-monokai-dimmed",
	"theme-monokai",
	"theme-quietlight",
	"theme-red",
	"theme-seti",
	"theme-solarized-dark",
	"theme-solarized-light",
	"theme-tomorrow-night-blue",
	"typescript-basics",
	"xml",
	"yaml"
];
|
||||||
|
|
||||||
|
/**
 * A heavily modified version of update-localization-extension that runs using local xlf resources, no arguments required to pass in.
 * It converts a renamed vscode langpack to an ADS one or updates the existing langpack to use current XLF resources.
 * It runs this process on all langpacks currently in the ADS i18n folder.
 * (Replace an individual ADS langpack folder with a corresponding vscode langpack folder renamed to "ads" instead of "vscode"
 * in order to update vscode core strings and extensions for that langpack)
 *
 * It removes the resources of vscode that we do not support, and adds in new i18n json files created from the xlf files in the folder.
 * It also merges in the sql core XLF strings with the langpack's existing core strings into a combined main i18n json file.
 *
 * After running this gulp task, for each language pack:
 *
 * 1. Remember to change the version of the langpacks to continue from the previous version of the ADS langpack.
 *
 * 2. Also change the azdata version to match the current ADS version number.
 *
 * 3. Update the changelog with the new version of the language pack.
 *
 * IMPORTANT: If you have run this gulp task on langpacks that originated from vscode, for each affected vscode langpack, you must
 * replace the changelog and readme files with the ones from the previous ADS version of the langpack before doing the above steps.
 *
 * This is mainly for consistency with previous langpacks and to provide proper information to the user.
 *
 * NOTE(review): the returned promise resolves immediately — the gulp streams started inside
 * the loop continue asynchronously after "Langpack Refresh Completed." is printed. Confirm
 * this is intentional before relying on completion ordering.
 */
export function refreshLangpacks(): Promise<void> {
	let supportedLocations = [...i18n.defaultLanguages, ...i18n.extraLanguages];

	for (let i = 0; i < supportedLocations.length; i++) {
		let langId = supportedLocations[i].id;
		// ADS folder names use zh-hans/zh-hant where vscode uses zh-cn/zh-tw.
		if (langId === "zh-cn") {
			langId = "zh-hans";
		}
		if (langId === "zh-tw") {
			langId = "zh-hant";
		}

		let location = path.join('.', 'resources', 'xlf');
		let locExtFolder = path.join('.', 'i18n', `ads-language-pack-${langId}`);
		// statSync is used purely as an existence probe; missing folder means the
		// language is not shipped yet, so skip it.
		try {
			fs.statSync(locExtFolder);
		}
		catch {
			console.log('Language is not included in ADS yet: ' + langId);
			continue;
		}
		let packageJSON = JSON.parse(fs.readFileSync(path.join(locExtFolder, 'package.json')).toString());
		//processing extension fields, version and folder name must be changed manually.
		packageJSON['name'] = packageJSON['name'].replace('vscode', textFields.nameText).toLowerCase();
		packageJSON['displayName'] = packageJSON['displayName'].replace('Visual Studio Code', textFields.displayNameText);
		packageJSON['publisher'] = textFields.publisherText;
		packageJSON['license'] = textFields.licenseText;
		packageJSON['scripts']['update'] = textFields.updateText + langId;
		packageJSON['engines']['vscode'] = textFields.vscodeVersion;
		packageJSON['repository']['url'] = textFields.gitUrl
		packageJSON['engines']['azdata'] = textFields.azdataPlaceholder // Remember to change this to the appropriate version at the end.

		let contributes = packageJSON['contributes'];
		if (!contributes) {
			throw new Error('The extension must define a "localizations" contribution in the "package.json"');
		}
		let localizations = contributes['localizations'];
		if (!localizations) {
			throw new Error('The extension must define a "localizations" contribution of type array in the "package.json"');
		}

		localizations.forEach(function (localization: any) {
			if (!localization.languageId || !localization.languageName || !localization.localizedLanguageName) {
				throw new Error('Each localization contribution must define "languageId", "languageName" and "localizedLanguageName" properties.');
			}
			// Prefer the transifex id when present; it names the XLF resource folder.
			let languageId = localization.transifexId || localization.languageId;
			let translationDataFolder = path.join(locExtFolder, 'translations');
			if (languageId === "zh-cn") {
				languageId = "zh-hans";
			}
			if (languageId === "zh-tw") {
				languageId = "zh-hant";
			}

			console.log(`Importing translations for ${languageId} from '${location}' to '${translationDataFolder}' ...`);
			let translationPaths: any = [];
			// 'ps' is treated as pseudo-localization (third argument of modifyI18nPackFiles).
			gulp.src(path.join(location, languageId, '**', '*.xlf'))
				.pipe(modifyI18nPackFiles(translationDataFolder, translationPaths, languageId === 'ps'))
				.on('error', (error: any) => {
					console.log(`Error occurred while importing translations:`);
					// undefined marks the import as failed so the 'end' handler skips the
					// package.json rewrite below.
					translationPaths = undefined;
					if (Array.isArray(error)) {
						error.forEach(console.log);
					} else if (error) {
						console.log(error);
					} else {
						console.log('Unknown error');
					}
				})
				.pipe(vfs.dest(translationDataFolder))
				.on('end', function () {
					if (translationPaths !== undefined) {
						// Drop contribution entries whose translation files no longer exist.
						let nonExistantExtensions = [];
						for (let curr of localization.translations) {
							try {
								if (curr.id === 'vscode.theme-seti') {
									//handle edge case where 'theme-seti' has a different id.
									curr.id = 'vscode.vscode-theme-seti';
								}
								fs.statSync(path.join(translationDataFolder, curr.path.replace('./translations', '')));
							}
							catch {
								nonExistantExtensions.push(curr);
							}
						}
						for (let nonExt of nonExistantExtensions) {
							let index = localization.translations.indexOf(nonExt);
							if (index > -1) {
								localization.translations.splice(index, 1);
							}
						}
						// Update existing entries in place, or append new ones, for each
						// translation path produced by modifyI18nPackFiles.
						for (let tp of translationPaths) {
							let finalPath = `./translations/${tp.resourceName}`;
							let isFound = false;
							for (let i = 0; i < localization.translations.length; i++) {
								if (localization.translations[i].path === finalPath) {
									localization.translations[i].id = tp.id;
									isFound = true;
									break;
								}
							}
							if (!isFound) {
								localization.translations.push({ id: tp.id, path: finalPath });
							}
						}
						// Persist the rewritten package.json only on a successful import.
						fs.writeFileSync(path.join(locExtFolder, 'package.json'), JSON.stringify(packageJSON, null, '\t'));
					}
				});

		});
	}
	console.log("Langpack Refresh Completed.");
	return Promise.resolve();
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Function for adding replacing ads language packs with vscode ones.
|
||||||
|
* For new languages, remember to add to i18n.extraLanguages so that it will be recognized by ADS.
|
||||||
|
*/
|
||||||
|
export function renameVscodeLangpacks(): Promise<void> {
|
||||||
|
let supportedLocations = [...i18n.defaultLanguages, ...i18n.extraLanguages];
|
||||||
|
|
||||||
|
|
||||||
|
for (let i = 0; i < supportedLocations.length; i++) {
|
||||||
|
let langId = supportedLocations[i].id;
|
||||||
|
if (langId === "zh-cn") {
|
||||||
|
langId = "zh-hans";
|
||||||
|
}
|
||||||
|
if (langId === "zh-tw") {
|
||||||
|
langId = "zh-hant";
|
||||||
|
}
|
||||||
|
let locADSFolder = path.join('.', 'i18n', `ads-language-pack-${langId}`);
|
||||||
|
let locVSCODEFolder = path.join('.', 'i18n', `vscode-language-pack-${langId}`);
|
||||||
|
let translationDataFolder = path.join(locVSCODEFolder, 'translations');
|
||||||
|
let xlfFolder = path.join('.', 'resources', 'xlf');
|
||||||
|
try {
|
||||||
|
fs.statSync(locVSCODEFolder);
|
||||||
|
}
|
||||||
|
catch {
|
||||||
|
console.log('vscode pack is not in ADS yet: ' + langId);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
//Delete any erroneous zip files found in vscode folder.
|
||||||
|
let globZipArray = glob.sync(path.join(locVSCODEFolder, '*.zip'));
|
||||||
|
globZipArray.forEach(element => {
|
||||||
|
fs.unlinkSync(element);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Delete extension files in vscode language pack that are not in ADS.
|
||||||
|
if (fs.existsSync(translationDataFolder)) {
|
||||||
|
let totalExtensions = fs.readdirSync(path.join(translationDataFolder, 'extensions'));
|
||||||
|
for (let extensionTag in totalExtensions) {
|
||||||
|
let extensionFileName = totalExtensions[extensionTag];
|
||||||
|
let xlfPath = path.join(xlfFolder, `${langId}`, extensionFileName.replace('.i18n.json', '.xlf'))
|
||||||
|
if (!(fs.existsSync(xlfPath) || VSCODEExtensions.indexOf(extensionFileName.replace('.i18n.json', '')) !== -1)) {
|
||||||
|
let filePath = path.join(translationDataFolder, 'extensions', extensionFileName);
|
||||||
|
rimraf.sync(filePath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//Get list of md files in ADS langpack, to copy to vscode langpack prior to renaming.
|
||||||
|
let globMDArray = glob.sync(path.join(locADSFolder, '*.md'));
|
||||||
|
|
||||||
|
//Copy files to vscode langpack, then remove the ADS langpack, and finally rename the vscode langpack to match the ADS one.
|
||||||
|
globMDArray.forEach(element => {
|
||||||
|
fs.copyFileSync(element, path.join(locVSCODEFolder,path.parse(element).base));
|
||||||
|
});
|
||||||
|
rimraf.sync(locADSFolder);
|
||||||
|
fs.renameSync(locVSCODEFolder, locADSFolder);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log("Langpack Rename Completed.");
|
||||||
|
return Promise.resolve();
|
||||||
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -53,8 +53,8 @@ define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
|
|||||||
* Returns a stream containing the patched JavaScript and source maps.
|
* Returns a stream containing the patched JavaScript and source maps.
|
||||||
*/
|
*/
|
||||||
function nls() {
|
function nls() {
|
||||||
const input = event_stream_1.through();
|
const input = (0, event_stream_1.through)();
|
||||||
const output = input.pipe(event_stream_1.through(function (f) {
|
const output = input.pipe((0, event_stream_1.through)(function (f) {
|
||||||
if (!f.sourceMap) {
|
if (!f.sourceMap) {
|
||||||
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
|
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
|
||||||
}
|
}
|
||||||
@@ -72,7 +72,7 @@ function nls() {
|
|||||||
}
|
}
|
||||||
_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
|
_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
|
||||||
}));
|
}));
|
||||||
return event_stream_1.duplex(input, output);
|
return (0, event_stream_1.duplex)(input, output);
|
||||||
}
|
}
|
||||||
exports.nls = nls;
|
exports.nls = nls;
|
||||||
function isImportNode(ts, node) {
|
function isImportNode(ts, node) {
|
||||||
|
|||||||
@@ -98,7 +98,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest, fileContentMapper
|
|||||||
return es.readArray(treatedSources)
|
return es.readArray(treatedSources)
|
||||||
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
||||||
.pipe(concat(dest))
|
.pipe(concat(dest))
|
||||||
.pipe(stats_1.createStatsStream(dest));
|
.pipe((0, stats_1.createStatsStream)(dest));
|
||||||
}
|
}
|
||||||
function toBundleStream(src, bundledFileHeader, bundles, fileContentMapper) {
|
function toBundleStream(src, bundledFileHeader, bundles, fileContentMapper) {
|
||||||
return es.merge(bundles.map(function (bundle) {
|
return es.merge(bundles.map(function (bundle) {
|
||||||
@@ -155,7 +155,7 @@ function optimizeTask(opts) {
|
|||||||
addComment: true,
|
addComment: true,
|
||||||
includeContent: true
|
includeContent: true
|
||||||
}))
|
}))
|
||||||
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
|
.pipe(opts.languages && opts.languages.length ? (0, i18n_1.processNlsFiles)({
|
||||||
fileHeader: bundledFileHeader,
|
fileHeader: bundledFileHeader,
|
||||||
languages: opts.languages
|
languages: opts.languages
|
||||||
}) : es.through())
|
}) : es.through())
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user